mirror of
https://github.com/minio/minio-rs.git
synced 2025-12-06 15:26:51 +08:00
fixed clippy issues (#106)
parent c28f576cb8
commit 8facff7bad

Cargo.toml: 20 lines changed
@@ -24,10 +24,10 @@ rustls-tls = ["reqwest/rustls-tls"]
 [dependencies]
 async-recursion = "1.1.1"
 async-trait = "0.1.83"
-base64 = "0.22"
+base64 = "0.22.1"
 byteorder = "1.5.0"
 bytes = "1.8.0"
-chrono = "0.4.38"
+chrono = "0.4.39"
 crc = "3.2.1"
 dashmap = "6.1.0"
 derivative = "2.2.0"

@@ -37,27 +37,27 @@ hex = "0.4.3"
 hmac = "0.12.1"
 home = "0.5.9"
 http = "1.1.0"
-hyper = { version = "1.5", features = ["full"] }
+hyper = { version = "1.5.1", features = ["full"] }
 lazy_static = "1.5.0"
 log = "0.4.22"
 md5 = "0.7.0"
 multimap = "0.10.0"
-os_info = "3.8.2"
+os_info = "3.9.0"
 percent-encoding = "2.3.1"
 rand = { version = "0.8.5", features = ["small_rng"] }
 regex = "1.11.1"
-serde = { version = "1.0.214", features = ["derive"] }
-serde_json = "1.0.132"
+serde = { version = "1.0.216", features = ["derive"] }
+serde_json = "1.0.133"
 sha2 = "0.10.8"
-tokio = { version = "1.41.0", features = ["full"] }
-tokio-stream = "0.1.16"
-tokio-util = { version = "0.7.12", features = ["io"] }
+tokio = { version = "1.42.0", features = ["full"] }
+tokio-stream = "0.1.17"
+tokio-util = { version = "0.7.13", features = ["io"] }
 urlencoding = "2.1.3"
 xmltree = "0.11.0"

 [dev-dependencies]
 async-std = { version = "1.13.0", features = ["attributes", "tokio1"] }
-clap = { version = "4.5.20", features = ["derive"] }
+clap = { version = "4.5.23", features = ["derive"] }
 quickcheck = "1.0.3"

 [[example]]

@@ -28,14 +28,14 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {

     // Check 'bucket_name' bucket exist or not.
     let exists: bool = client
-        .bucket_exists(&BucketExistsArgs::new(&bucket_name).unwrap())
+        .bucket_exists(&BucketExistsArgs::new(bucket_name).unwrap())
        .await
        .unwrap();

     // Make 'bucket_name' bucket if not exist.
     if !exists {
         client
-            .make_bucket(&MakeBucketArgs::new(&bucket_name).unwrap())
+            .make_bucket(&MakeBucketArgs::new(bucket_name).unwrap())
            .await
            .unwrap();
     }

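Note: the two changes above drop a redundant `&` on `bucket_name`, which is already a string slice, so the extra borrow only produces a `&&str` that is immediately auto-dereferenced; this is the pattern clippy's needless_borrow lint flags. A minimal standalone sketch (the function and variable names below are illustrative, not from the crate):

    fn takes_str(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let bucket_name: &str = "example-bucket";
        // clippy::needless_borrow: `&bucket_name` is a `&&str`;
        // passing the slice directly is equivalent and lint-free.
        let _ = takes_str(&bucket_name); // old form, lints
        let _ = takes_str(bucket_name); // fixed form
    }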
@@ -1154,7 +1154,7 @@ impl<'a> ComposeSource<'a> {
     }

     pub fn get_headers(&self) -> Multimap {
-        return self.headers.as_ref().expect("ABORT: ComposeSource::build_headers() must be called prior to this method invocation. This shoud not happen.").clone();
+        self.headers.as_ref().expect("ABORT: ComposeSource::build_headers() must be called prior to this method invocation. This shoud not happen.").clone()
     }

     pub fn build_headers(&mut self, object_size: usize, etag: String) -> Result<(), Error> {

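This hunk, like several below, removes a trailing `return ...;` so the function ends in a plain expression, which is what clippy's needless_return lint asks for. A minimal sketch with a hypothetical function:

    // clippy::needless_return: the final expression of a block is already
    // its value, so an explicit `return` on the last line is redundant.
    fn content_type_old(ext: &str) -> String {
        return format!("application/{}", ext); // old form, lints
    }

    fn content_type_new(ext: &str) -> String {
        format!("application/{}", ext) // fixed form
    }

    fn main() {
        assert_eq!(content_type_old("xml"), content_type_new("xml"));
    }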
@@ -103,6 +103,14 @@ impl From<Vec<u8>> for ObjectContent {
     }
 }

+impl From<&'static [u8]> for ObjectContent {
+    fn from(value: &'static [u8]) -> Self {
+        ObjectContent(ObjectContentInner::Bytes(SegmentedBytes::from(
+            Bytes::from(value),
+        )))
+    }
+}
+
 impl From<&Path> for ObjectContent {
     fn from(value: &Path) -> Self {
         ObjectContent(ObjectContentInner::FilePath(value.to_path_buf()))

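The new `From<&'static [u8]>` impl lets a static byte slice become an `ObjectContent` without first allocating a `Vec<u8>`. A rough standalone sketch of the same conversion pattern, using a stand-in newtype rather than the crate's real `ObjectContent`/`SegmentedBytes` types (requires the `bytes` crate, which the project already depends on):

    use bytes::Bytes;

    struct Payload(Bytes);

    impl From<&'static [u8]> for Payload {
        fn from(value: &'static [u8]) -> Self {
            // Bytes::from(&'static [u8]) wraps the slice without copying.
            Payload(Bytes::from(value))
        }
    }

    fn main() {
        let data: &'static [u8] = b"hello world";
        let payload = Payload::from(data);
        assert_eq!(payload.0.len(), 11);
    }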
@@ -144,6 +152,7 @@ impl ObjectContent {
         }
     }

+    #[allow(clippy::wrong_self_convention)]
     pub(crate) async fn to_content_stream(self) -> IoResult<ContentStream> {
         let (r, size) = self.to_stream().await?;
         Ok(ContentStream::new(r, size))

@@ -333,14 +342,6 @@ impl SegmentedBytes {
         }
     }

-    pub fn into_iter(self) -> SegmentedBytesIntoIterator {
-        SegmentedBytesIntoIterator {
-            sb: self,
-            current_segment: 0,
-            current_segment_index: 0,
-        }
-    }
-
     // Copy all the content into a single `Bytes` object.
     pub fn to_bytes(&self) -> Bytes {
         let mut buf = BytesMut::with_capacity(self.total_size);

@@ -398,7 +399,7 @@ pub struct SegmentedBytesIterator<'a> {
     current_segment_index: usize,
 }

-impl<'a> Iterator for SegmentedBytesIterator<'a> {
+impl Iterator for SegmentedBytesIterator<'_> {
     type Item = Bytes;

     fn next(&mut self) -> Option<Self::Item> {

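Switching from `impl<'a> Iterator for SegmentedBytesIterator<'a>` to `impl Iterator for SegmentedBytesIterator<'_>` removes a lifetime parameter the impl body never names; clippy's needless_lifetimes lint suggests the anonymous `'_` form. A minimal sketch with a stand-in iterator type:

    struct SliceIter<'a> {
        data: &'a [u8],
        pos: usize,
    }

    // Before: `impl<'a> Iterator for SliceIter<'a> { ... }`. The named
    // lifetime is never referenced inside the impl, so `'_` suffices.
    impl Iterator for SliceIter<'_> {
        type Item = u8;

        fn next(&mut self) -> Option<Self::Item> {
            let item = self.data.get(self.pos).copied();
            self.pos += 1;
            item
        }
    }

    fn main() {
        let bytes = [1u8, 2, 3];
        let iter = SliceIter { data: &bytes, pos: 0 };
        assert_eq!(iter.sum::<u8>(), 6);
    }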
@@ -421,7 +421,7 @@ impl Client {
     pub async fn do_execute(
         &self,
         method: &Method,
-        region: &String,
+        region: &str,
         headers: &mut Multimap,
         query_params: &Multimap,
         bucket_name: Option<&str>,

@@ -491,7 +491,7 @@ impl Client {
     pub async fn execute(
         &self,
         method: Method,
-        region: &String,
+        region: &str,
         headers: &mut Multimap,
         query_params: &Multimap,
         bucket_name: Option<&str>,

@@ -514,7 +514,7 @@ impl Client {
     pub async fn execute2(
         &self,
         method: Method,
-        region: &String,
+        region: &str,
         headers: &mut Multimap,
         query_params: &Multimap,
         bucket_name: Option<&str>,

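The three hunks above change `region: &String` to `region: &str` in the `Client` request methods; clippy's ptr_arg lint flags `&String` parameters because `&str` accepts strictly more call sites at no cost. A small sketch with a hypothetical helper:

    // clippy::ptr_arg: a `&String` parameter forces callers to hold an owned
    // String; `&str` also accepts `&String` (via deref coercion) and literals.
    fn host_for_region(region: &str) -> String {
        format!("s3.{}.amazonaws.com", region)
    }

    fn main() {
        let owned = String::from("us-east-1");
        assert_eq!(host_for_region(&owned), "s3.us-east-1.amazonaws.com");
        assert_eq!(host_for_region("eu-west-1"), "s3.eu-west-1.amazonaws.com");
    }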
@@ -564,7 +564,7 @@ impl Client {
         bucket_name: &str,
         region: Option<&str>,
     ) -> Result<String, Error> {
-        if !region.map_or(true, |v| v.is_empty()) {
+        if !region.is_none_or(|v| v.is_empty()) {
             if !self.base_url.region.is_empty() && self.base_url.region != *region.unwrap() {
                 return Err(Error::RegionMismatch(
                     self.base_url.region.clone(),

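`!region.map_or(true, |v| v.is_empty())` becomes `!region.is_none_or(|v| v.is_empty())`; `Option::is_none_or` (stable since Rust 1.82) states the intent directly and is what recent clippy suggests for this `map_or(true, ...)` shape. A small check of the equivalence with a hypothetical helper:

    fn region_is_set(region: Option<&str>) -> bool {
        // Old: !region.map_or(true, |v| v.is_empty())
        // Both forms return true only for Some(non-empty).
        !region.is_none_or(|v| v.is_empty())
    }

    fn main() {
        assert!(region_is_set(Some("us-east-1")));
        assert!(!region_is_set(Some("")));
        assert!(!region_is_set(None));
    }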
@@ -764,10 +764,7 @@ impl Client {
         })
     }

-    async fn calculate_part_count<'a>(
-        &self,
-        sources: &'a mut [ComposeSource<'_>],
-    ) -> Result<u16, Error> {
+    async fn calculate_part_count(&self, sources: &mut [ComposeSource<'_>]) -> Result<u16, Error> {
         let mut object_size = 0_usize;
         let mut i = 0;
         let mut part_count = 0_u16;

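Here the explicit `<'a>` on `calculate_part_count` is dropped because lifetime elision already covers the `sources` parameter, and the shortened signature then fits on one line; this is the same needless_lifetimes theme. A minimal sketch with a hypothetical function:

    // Old: fn total_size<'a>(sources: &'a mut [Vec<u8>]) -> usize
    // The named lifetime ties `sources` to nothing else in the signature,
    // so elision handles it and the parameter can be written plainly.
    fn total_size(sources: &mut [Vec<u8>]) -> usize {
        sources.iter().map(|s| s.len()).sum()
    }

    fn main() {
        let mut sources = vec![vec![0u8; 3], vec![0u8; 5]];
        assert_eq!(total_size(&mut sources), 8);
    }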
@@ -1784,14 +1781,12 @@ impl Client {
             )
             .await
         {
-            Ok(resp) => {
-                return Ok(GetBucketPolicyResponse {
+            Ok(resp) => Ok(GetBucketPolicyResponse {
                 headers: resp.headers().clone(),
                 region: region.clone(),
                 bucket_name: args.bucket.to_string(),
                 config: resp.text().await?,
-                })
-            }
+            }),
             Err(e) => match e {
                 Error::S3Error(ref err) => {
                     if err.code == "NoSuchBucketPolicy" {

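This hunk and the next few rewrite `Ok(resp) => { return Ok(...) }` arms as plain expression arms: when the `match` is the function's tail expression, each arm's value already is the return value, so neither the inner braces nor `return` are needed. A reduced sketch with a hypothetical response type:

    struct PolicyResponse {
        config: String,
    }

    fn to_policy(resp: Result<String, String>) -> Result<PolicyResponse, String> {
        // The match is the last expression, so arms produce values directly.
        match resp {
            Ok(body) => Ok(PolicyResponse { config: body }),
            Err(e) => Err(e),
        }
    }

    fn main() {
        assert!(to_policy(Ok(String::from("{}"))).is_ok());
    }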
@@ -2034,7 +2029,7 @@ impl Client {
                 let body = resp.bytes().await?;
                 let root = Element::parse(body.reader())?;

-                return Ok(GetObjectRetentionResponse {
+                Ok(GetObjectRetentionResponse {
                     headers: header_map.clone(),
                     region: region.clone(),
                     bucket_name: args.bucket.to_string(),

@@ -2048,7 +2043,7 @@ impl Client {
                         Some(v) => Some(from_iso8601utc(&v)?),
                         _ => None,
                     },
-                });
+                })
             }
             Err(e) => match e {
                 Error::S3Error(ref err) => {

@@ -2114,14 +2109,14 @@ impl Client {
             tags.insert(get_text(&v, "Key")?, get_text(&v, "Value")?);
         }

-        return Ok(GetObjectTagsResponse {
+        Ok(GetObjectTagsResponse {
             headers: header_map.clone(),
             region: region.clone(),
             bucket_name: args.bucket.to_string(),
             object_name: args.object.to_string(),
             version_id: args.version_id.as_ref().map(|v| v.to_string()),
             tags,
-        });
+        })
     }

     pub async fn get_presigned_object_url(

@@ -2172,13 +2167,13 @@ impl Client {
             url.query = query_params;
         }

-        return Ok(GetPresignedObjectUrlResponse {
+        Ok(GetPresignedObjectUrlResponse {
             region: region.clone(),
             bucket_name: args.bucket.to_string(),
             object_name: args.object.to_string(),
             version_id: args.version_id.as_ref().map(|v| v.to_string()),
             url: url.to_string(),
-        });
+        })
     }

     pub async fn get_presigned_post_form_data(

@@ -2316,7 +2311,7 @@ impl Client {
         let resp = self
             .execute(
                 Method::PUT,
-                &region.to_string(),
+                region,
                 &mut headers,
                 query_params,
                 Some(args.bucket),

@@ -371,7 +371,7 @@ impl BaseUrl {
         Ok(())
     }

-    fn build_list_buckets_url(&self, url: &mut Url, region: &String) {
+    fn build_list_buckets_url(&self, url: &mut Url, region: &str) {
         if self.aws_domain_suffix.is_empty() {
             return;
         }

@@ -402,13 +402,12 @@ impl BaseUrl {
     pub fn build_url(
         &self,
         method: &Method,
-        region: &String,
+        region: &str,
         query: &Multimap,
         bucket_name: Option<&str>,
         object_name: Option<&str>,
     ) -> Result<Url, Error> {
-        if !object_name.map_or(true, |v| v.is_empty()) && bucket_name.map_or(true, |v| v.is_empty())
-        {
+        if !object_name.is_none_or(|v| v.is_empty()) && bucket_name.is_none_or(|v| v.is_empty()) {
             return Err(Error::UrlBuildError(String::from(
                 "empty bucket name provided for object name",
             )));

@@ -46,6 +46,7 @@ fn url_decode(
     Ok(None)
 }

+#[allow(clippy::type_complexity)]
 fn parse_common_list_objects_response(
     root: &Element,
 ) -> Result<

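The added `#[allow(clippy::type_complexity)]` (here and in a later hunk) silences the lint that fires on deeply nested generic return types, rather than refactoring the parser signature into type aliases. A sketch of the attribute on a hypothetical function whose return type is an invented stand-in for the crate's real one:

    // The attribute scopes the allowance to this single item; the nested
    // return type below is made up for illustration only.
    #[allow(clippy::type_complexity)]
    fn parse_entries(
        raw: &str,
    ) -> Result<(Vec<(String, Option<String>)>, Option<Vec<String>>), String> {
        if raw.is_empty() {
            return Err(String::from("empty input"));
        }
        Ok((Vec::new(), None))
    }

    fn main() {
        assert!(parse_entries("x").is_ok());
    }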
@@ -66,7 +66,7 @@ pub fn get_canonical_request_hash(
         "{}\n{}\n{}\n{}\n\n{}\n{}",
         method, uri, query_string, headers, signed_headers, content_sha256
     );
-    return sha256_hash(canonical_request.as_bytes());
+    sha256_hash(canonical_request.as_bytes())
 }

 /// Returns string-to-sign value of given date, scope and canonical request hash

@@ -92,7 +92,7 @@ pub fn get_signing_key(
     let date_key = hmac_hash(key.as_slice(), to_signer_date(date).as_bytes());
     let date_region_key = hmac_hash(date_key.as_slice(), region.as_bytes());
     let date_region_service_key = hmac_hash(date_region_key.as_slice(), service_name.as_bytes());
-    return hmac_hash(date_region_service_key.as_slice(), b"aws4_request");
+    hmac_hash(date_region_service_key.as_slice(), b"aws4_request")
 }

 /// Returns signature value for given signing key and string-to-sign

@@ -252,5 +252,5 @@ pub fn post_presign_v4(
     region: &str,
 ) -> String {
     let signing_key = get_signing_key(secret_key, date, region, "s3");
-    return get_signature(signing_key.as_slice(), string_to_sign.as_bytes());
+    get_signature(signing_key.as_slice(), string_to_sign.as_bytes())
 }

@@ -354,7 +354,7 @@ pub struct SelectRequest<'a> {

 impl<'a> SelectRequest<'a> {
     pub fn new_csv_input_output(
-        expr: &'a str,
+        expr: &str,
         csv_input: CsvInputSerialization,
         csv_output: CsvOutputSerialization,
     ) -> Result<SelectRequest, Error> {

@@ -381,7 +381,7 @@ impl<'a> SelectRequest<'a> {
         expr: &'a str,
         csv_input: CsvInputSerialization,
         json_output: JsonOutputSerialization,
-    ) -> Result<SelectRequest, Error> {
+    ) -> Result<SelectRequest<'a>, Error> {
         if expr.is_empty() {
             return Err(Error::InvalidSelectExpression(String::from(
                 "select expression cannot be empty",

@@ -405,7 +405,7 @@ impl<'a> SelectRequest<'a> {
         expr: &'a str,
         json_input: JsonInputSerialization,
         json_output: JsonOutputSerialization,
-    ) -> Result<SelectRequest, Error> {
+    ) -> Result<SelectRequest<'a>, Error> {
         if expr.is_empty() {
             return Err(Error::InvalidSelectExpression(String::from(
                 "select expression cannot be empty",

@@ -429,7 +429,7 @@ impl<'a> SelectRequest<'a> {
         expr: &'a str,
         parquet_input: ParquetInputSerialization,
         csv_output: CsvOutputSerialization,
-    ) -> Result<SelectRequest, Error> {
+    ) -> Result<SelectRequest<'a>, Error> {
         if expr.is_empty() {
             return Err(Error::InvalidSelectExpression(String::from(
                 "select expression cannot be empty",

@@ -453,7 +453,7 @@ impl<'a> SelectRequest<'a> {
         expr: &'a str,
         parquet_input: ParquetInputSerialization,
         json_output: JsonOutputSerialization,
-    ) -> Result<SelectRequest, Error> {
+    ) -> Result<SelectRequest<'a>, Error> {
         if expr.is_empty() {
             return Err(Error::InvalidSelectExpression(String::from(
                 "select expression cannot be empty",

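The four hunks above spell the lifetime out in the return type, `Result<SelectRequest<'a>, Error>`, instead of leaving it elided (while the earlier `new_csv_input_output` hunk instead drops an unneeded `'a` on `expr`); either way the aim is to satisfy the lints about hidden or unnecessary lifetimes. A reduced sketch of the explicit-lifetime constructor style, using a stand-in type:

    struct Query<'a> {
        expr: &'a str,
    }

    impl<'a> Query<'a> {
        // Writing `Query<'a>` documents that the result borrows from `expr`.
        fn new(expr: &'a str) -> Result<Query<'a>, String> {
            if expr.is_empty() {
                return Err(String::from("select expression cannot be empty"));
            }
            Ok(Query { expr })
        }
    }

    fn main() {
        let q = Query::new("select * from S3Object").unwrap();
        assert_eq!(q.expr, "select * from S3Object");
    }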
@@ -495,12 +495,12 @@ impl<'a> SelectRequest<'a> {
                 }
                 if let Some(v) = c.comments {
                     data.push_str("<Comments>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</Comments>");
                 }
                 if let Some(v) = c.field_delimiter {
                     data.push_str("<FieldDelimiter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</FieldDelimiter>");
                 }
                 if let Some(v) = &c.file_header_info {

@@ -510,12 +510,12 @@ impl<'a> SelectRequest<'a> {
                 }
                 if let Some(v) = c.quote_character {
                     data.push_str("<QuoteCharacter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</QuoteCharacter>");
                 }
                 if let Some(v) = c.record_delimiter {
                     data.push_str("<RecordDelimiter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</RecordDelimiter>");
                 }
                 data.push_str("</CSV>");

@@ -542,17 +542,17 @@ impl<'a> SelectRequest<'a> {
                 data.push_str("<CSV>");
                 if let Some(v) = c.field_delimiter {
                     data.push_str("<FieldDelimiter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</FieldDelimiter>");
                 }
                 if let Some(v) = c.quote_character {
                     data.push_str("<QuoteCharacter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</QuoteCharacter>");
                 }
                 if let Some(v) = c.quote_escape_character {
                     data.push_str("<QuoteEscapeCharacter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</QuoteEscapeCharacter>");
                 }
                 if let Some(v) = &c.quote_fields {

@@ -562,7 +562,7 @@ impl<'a> SelectRequest<'a> {
                 }
                 if let Some(v) = c.record_delimiter {
                     data.push_str("<RecordDelimiter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</RecordDelimiter>");
                 }
                 data.push_str("</CSV>");

@@ -570,7 +570,7 @@ impl<'a> SelectRequest<'a> {
                 data.push_str("<JSON>");
                 if let Some(v) = j.record_delimiter {
                     data.push_str("<RecordDelimiter>");
-                    data.push_str(&v.to_string());
+                    data.push(v);
                     data.push_str("</RecordDelimiter>");
                 }
                 data.push_str("</JSON>");

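The repeated `data.push_str(&v.to_string())` to `data.push(v)` changes apply where `v` is a single `char`: `String::push` appends it directly and avoids allocating a temporary `String`. A small sketch with a hypothetical helper:

    fn append_tag(data: &mut String, tag: &str, value: Option<char>) {
        if let Some(v) = value {
            data.push_str(&format!("<{}>", tag));
            // Old: data.push_str(&v.to_string());  -- allocates a String
            data.push(v); // appends the char in place
            data.push_str(&format!("</{}>", tag));
        }
    }

    fn main() {
        let mut xml = String::new();
        append_tag(&mut xml, "RecordDelimiter", Some('\n'));
        assert_eq!(xml, "<RecordDelimiter>\n</RecordDelimiter>");
    }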
@@ -1263,6 +1263,7 @@ impl LifecycleConfig {
     }
 }

+#[allow(clippy::type_complexity)]
 fn parse_common_notification_config(
     element: &mut Element,
 ) -> Result<

@@ -763,7 +763,7 @@ impl ClientTest {
             if size == 0 {
                 break;
             }
-            got += &String::from_utf8(buf[..size].to_vec()).unwrap();
+            got += core::str::from_utf8(&buf[..size]).unwrap();
         }
         assert_eq!(got, data);
         self.client

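In the test, `String::from_utf8(buf[..size].to_vec())` copied the bytes into a fresh `Vec` before validating; `core::str::from_utf8(&buf[..size])` validates the slice in place, leaving the `+=` append as the only allocation. A small standalone check of the equivalence:

    fn main() {
        let buf = [b'H', b'i', b'!', 0, 0]; // read buffer with 3 valid bytes
        let size = 3;

        // Old: copy into a Vec, then validate into an owned String.
        let owned = String::from_utf8(buf[..size].to_vec()).unwrap();
        // New: validate the borrowed slice directly.
        let borrowed = core::str::from_utf8(&buf[..size]).unwrap();

        let mut got = String::new();
        got += borrowed;
        assert_eq!(got, owned);
    }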
@@ -1269,10 +1269,7 @@ impl ClientTest {
             .send()
             .await
             .unwrap();
-        assert!(match resp.status {
-            Some(v) => v,
-            _ => false,
-        });
+        assert!(resp.status.unwrap_or_default());

         self.client
             .set_bucket_versioning(&SetBucketVersioningArgs::new(&bucket_name, false).unwrap())

@@ -1285,12 +1282,7 @@ impl ClientTest {
             .send()
             .await
             .unwrap();
-        assert!(
-            !(match resp.status {
-                Some(v) => v,
-                _ => false,
-            })
-        );
+        assert!(!resp.status.unwrap_or_default());

         self.client
             .remove_bucket(&RemoveBucketArgs::new(&bucket_name).unwrap())

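The last two hunks collapse `match resp.status { Some(v) => v, _ => false }` into `resp.status.unwrap_or_default()`; for an `Option<bool>` the default is `false`, so the assertions keep their meaning while satisfying the clippy lint that suggests `unwrap_or_default`. A quick check of the equivalence:

    fn main() {
        let versioned: Option<bool> = Some(true);
        let unknown: Option<bool> = None;

        // Old: match versioned { Some(v) => v, _ => false }
        assert!(versioned.unwrap_or_default());
        // For None, the default bool is false.
        assert!(!unknown.unwrap_or_default());
    }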