Mirror of https://github.com/minio/minio-rs.git (synced 2025-12-06 15:26:51 +08:00)

Simplify code using clippy (#38)

Signed-off-by: Bala.FA <bala@minio.io>

This commit is contained in:
  parent f4cadad6ef
  commit bbc7945eb8
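Almost everything below is a mechanical cleanup suggested by clippy. As a rough, hedged sketch of the recurring patterns, with hypothetical names (Part, encode_tags, new_part) that are not code from this repository, the before/after shapes look like this:

    // Hypothetical names; this only illustrates the lint categories fixed in
    // the diff below.
    struct Part {
        number: u16,
        etag: String,
    }

    // clippy::single_char_add_str and clippy::needless_return:
    // push_str("&") becomes push('&'), and a trailing `return x;` becomes `x`.
    fn encode_tags(tags: &[(String, String)]) -> String {
        let mut out = String::new();
        for (key, value) in tags {
            if !out.is_empty() {
                out.push('&'); // was: out.push_str("&");
            }
            out.push_str(key);
            out.push('='); // was: out.push_str("=");
            out.push_str(value);
        }
        out // was: return out;
    }

    // clippy::manual_range_contains, clippy::len_zero, clippy::redundant_field_names.
    fn new_part(number: u16, etag: String) -> Result<Part, String> {
        if !(1..=10000).contains(&number) {
            // was: if number < 1 || number > 10000 {
            return Err("part number must be between 1 and 10000".to_string());
        }
        if etag.is_empty() {
            // was: if etag.len() == 0 {
            return Err("etag cannot be empty".to_string());
        }
        Ok(Part { number, etag }) // was: Ok(Part { number: number, etag: etag })
    }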
.github/workflows/rust.yml (vendored): 5 lines changed
@@ -16,10 +16,11 @@ jobs:
     steps:
     - uses: actions/checkout@v3
-    - name: Build and Check style
+    - name: Build
      run: |
-        cargo build --verbose
        cargo fmt --all -- --check
+        cargo clippy --all-targets --all-features -- -A clippy::result_large_err -A clippy::type_complexity -A clippy::too_many_arguments
+        cargo build --verbose

    - name: Run tests
      run: |
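The new CI step allows three lints crate-wide (clippy::result_large_err, clippy::type_complexity, clippy::too_many_arguments) instead of fixing them. A hedged sketch of the kind of code those -A flags tolerate; the names below are made up for illustration and are not from this crate:

    // clippy::too_many_arguments fires on functions with more than seven
    // parameters, a common shape for S3-style request builders.
    #[allow(clippy::too_many_arguments)]
    fn put_object_sketch(
        bucket: &str,
        object: &str,
        region: &str,
        version_id: Option<&str>,
        content_type: &str,
        part_size: usize,
        part_count: u16,
        legal_hold: bool,
    ) {
        let _ = (bucket, object, region, version_id, content_type, part_size, part_count, legal_hold);
    }

    // clippy::result_large_err fires when the Err variant of a returned Result
    // is much larger than the Ok variant.
    enum BigError {
        Xml { raw: [u8; 256] },
        Message(String),
    }

    #[allow(clippy::result_large_err)]
    fn parse_sketch(ok: bool) -> Result<(), BigError> {
        if ok {
            Ok(())
        } else {
            Err(BigError::Message("parse failed".to_string()))
        }
    }

    // clippy::type_complexity fires on deeply nested generic types such as
    // this return type; allowing it keeps such signatures as they are.
    #[allow(clippy::type_complexity)]
    fn handlers_sketch() -> Vec<Box<dyn Fn(Result<Vec<(String, String)>, String>) -> bool>> {
        Vec::new()
    }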
Cargo.toml: 36 lines changed
@@ -4,33 +4,33 @@ version = "0.1.0"
 edition = "2021"

 [dependencies]
-hyper = { version = "0.14", features = ["full"] }
+hyper = { version = "0.14.27", features = ["full"] }
-tokio = { version = "1", features = ["full"] }
+tokio = { version = "1.32.0", features = ["full"] }
 derivative = "2.2.0"
-multimap = "0.8.3"
+multimap = "0.9.0"
-urlencoding = "2.1.0"
+urlencoding = "2.1.3"
 lazy_static = "1.4.0"
-regex = "1.5.6"
+regex = "1.9.4"
-chrono = "0.4.19"
+chrono = "0.4.27"
-sha2 = "0.10.2"
+sha2 = "0.10.7"
-base64 = "0.13.0"
+base64 = "0.21.3"
 md5 = "0.7.0"
-crc = "3.0.0"
+crc = "3.0.1"
 byteorder = "1.4.3"
 hmac = "0.12.1"
 hex = "0.4.3"
-futures-core = "0.3.21"
+futures-core = "0.3.28"
-bytes = "1.2.0"
+bytes = "1.4.0"
-futures-util = "0.3.21"
+futures-util = "0.3.28"
 xmltree = "0.10.3"
-http = "0.2.8"
+http = "0.2.9"
-dashmap = "5.3.4"
+dashmap = "5.5.3"
 rand = "0.8.5"
-serde = { version = "1.0.143", features = ["derive"] }
+serde = { version = "1.0.188", features = ["derive"] }
-serde_json = "1.0.83"
+serde_json = "1.0.105"
 async-std = { version = "1.12.0", features = ["attributes", "tokio1"] }
-async-recursion = "1.0.0"
+async-recursion = "1.0.4"

 [dependencies.reqwest]
-version = "0.11.11"
+version = "0.11.20"
 features = ["native-tls", "blocking", "rustls-tls", "stream"]
src/s3/args.rs: 147 lines changed
@@ -67,10 +67,10 @@ fn object_write_args_headers(
         let mut tagging = String::new();
         for (key, value) in v.iter() {
             if !tagging.is_empty() {
-                tagging.push_str("&");
+                tagging.push('&');
             }
             tagging.push_str(&urlencode(key));
-            tagging.push_str("=");
+            tagging.push('=');
             tagging.push_str(&urlencode(value));
         }

@@ -94,7 +94,7 @@ fn object_write_args_headers(
         );
     }

-    return map;
+    map
 }

 fn calc_part_info(
@@ -146,7 +146,7 @@ fn calc_part_info(
         ));
     }

-    return Ok((psize, part_count));
+    Ok((psize, part_count))
 }

 #[derive(Clone, Debug, Default)]
@@ -307,7 +307,7 @@ impl<'a> AbortMultipartUploadArgs<'a> {
             region: None,
             bucket: bucket_name,
             object: object_name,
-            upload_id: upload_id,
+            upload_id,
         })
     }
 }
@@ -344,7 +344,7 @@ impl<'a> CompleteMultipartUploadArgs<'a> {
             )));
         }

-        if parts.len() == 0 {
+        if parts.is_empty() {
             return Err(Error::EmptyParts(String::from("parts cannot be empty")));
         }

@@ -354,8 +354,8 @@ impl<'a> CompleteMultipartUploadArgs<'a> {
             region: None,
             bucket: bucket_name,
             object: object_name,
-            upload_id: upload_id,
+            upload_id,
-            parts: parts,
+            parts,
         })
     }
 }
@@ -437,7 +437,7 @@ impl<'a> PutObjectApiArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            data: data,
+            data,
             query_params: None,
         })
     }
@@ -495,7 +495,7 @@ impl<'a> UploadPartArgs<'a> {
             )));
         }

-        if part_number < 1 || part_number > 10000 {
+        if !(1..=10000).contains(&part_number) {
             return Err(Error::InvalidPartNumber(String::from(
                 "part number must be between 1 and 1000",
             )));
@@ -513,9 +513,9 @@ impl<'a> UploadPartArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            upload_id: upload_id,
+            upload_id,
-            part_number: part_number,
+            part_number,
-            data: data,
+            data,
         })
     }

@@ -581,11 +581,11 @@ impl<'a> PutObjectArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            object_size: object_size,
+            object_size,
             part_size: psize,
-            part_count: part_count,
+            part_count,
             content_type: "application/octet-stream",
-            stream: stream,
+            stream,
         })
     }

@@ -659,13 +659,13 @@ impl<'a> ObjectConditionalReadArgs<'a> {
         if let Some(o) = offset {
             range.push_str("bytes=");
             range.push_str(&o.to_string());
-            range.push_str("-");
+            range.push('-');
             if let Some(l) = length {
                 range.push_str(&(o + l - 1).to_string());
             }
         }

-        return range;
+        range
     }

     pub fn get_headers(&self) -> Multimap {
@@ -673,7 +673,7 @@ impl<'a> ObjectConditionalReadArgs<'a> {

         let range = self.get_range_value();
         if !range.is_empty() {
-            headers.insert(String::from("Range"), range.clone());
+            headers.insert(String::from("Range"), range);
         }

         if let Some(v) = self.match_etag {
@@ -696,7 +696,7 @@ impl<'a> ObjectConditionalReadArgs<'a> {
             merge(&mut headers, &v.headers());
         }

-        return headers;
+        headers
     }

     pub fn get_copy_headers(&self) -> Multimap {
@@ -704,7 +704,7 @@ impl<'a> ObjectConditionalReadArgs<'a> {

         let mut copy_source = String::from("/");
         copy_source.push_str(self.bucket);
-        copy_source.push_str("/");
+        copy_source.push('/');
         copy_source.push_str(self.object);
         if let Some(v) = self.version_id {
             copy_source.push_str("?versionId=");
@@ -714,7 +714,7 @@ impl<'a> ObjectConditionalReadArgs<'a> {

         let range = self.get_range_value();
         if !range.is_empty() {
-            headers.insert(String::from("x-amz-copy-source-range"), range.clone());
+            headers.insert(String::from("x-amz-copy-source-range"), range);
         }

         if let Some(v) = self.match_etag {
@@ -746,7 +746,7 @@ impl<'a> ObjectConditionalReadArgs<'a> {
             merge(&mut headers, &v.copy_headers());
         }

-        return headers;
+        headers
     }
 }

@@ -782,7 +782,7 @@ impl<'a> RemoveObjectsApiArgs<'a> {
             bucket: bucket_name,
             bypass_governance_mode: false,
             quiet: true,
-            objects: objects,
+            objects,
         })
     }
 }
@@ -809,7 +809,7 @@ impl<'a> RemoveObjectsArgs<'a> {
             region: None,
             bucket: bucket_name,
             bypass_governance_mode: false,
-            objects: objects,
+            objects,
         })
     }
 }
@@ -960,7 +960,7 @@ impl<'a> ListObjectsArgs<'a> {
             recursive: false,
             use_api_v1: false,
             include_versions: false,
-            result_fn: result_fn,
+            result_fn,
         })
     }
 }
@@ -998,7 +998,7 @@ impl<'a> SelectObjectContentArgs<'a> {
             object: object_name,
             version_id: None,
             ssec: None,
-            request: request,
+            request,
         })
     }
 }
@@ -1029,7 +1029,7 @@ impl<'a> ListenBucketNotificationArgs<'a> {
             prefix: None,
             suffix: None,
             events: None,
-            event_fn: event_fn,
+            event_fn,
         })
     }
 }
@@ -1068,7 +1068,7 @@ impl<'a> UploadPartCopyArgs<'a> {
             )));
         }

-        if part_number < 1 || part_number > 10000 {
+        if !(1..=10000).contains(&part_number) {
             return Err(Error::InvalidPartNumber(String::from(
                 "part number must be between 1 and 1000",
             )));
@@ -1080,9 +1080,9 @@ impl<'a> UploadPartCopyArgs<'a> {
             region: None,
             bucket: bucket_name,
             object: object_name,
-            upload_id: upload_id,
+            upload_id,
-            part_number: part_number,
+            part_number,
-            headers: headers,
+            headers,
         })
     }
 }
@@ -1131,7 +1131,7 @@ impl<'a> CopyObjectArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            source: source,
+            source,
             metadata_directive: None,
             tagging_directive: None,
         })
@@ -1200,7 +1200,7 @@ impl<'a> ComposeSource<'a> {
     }

     pub fn get_object_size(&self) -> usize {
-        return self.object_size.expect("ABORT: ComposeSource::build_headers() must be called prior to this method invocation. This shoud not happen.");
+        self.object_size.expect("ABORT: ComposeSource::build_headers() must be called prior to this method invocation. This shoud not happen.")
     }

     pub fn get_headers(&self) -> Multimap {
@@ -1248,7 +1248,7 @@ impl<'a> ComposeSource<'a> {

         let mut copy_source = String::from("/");
         copy_source.push_str(self.bucket);
-        copy_source.push_str("/");
+        copy_source.push('/');
         copy_source.push_str(self.object);
         if let Some(v) = self.version_id {
             copy_source.push_str("?versionId=");
@@ -1291,7 +1291,7 @@ impl<'a> ComposeSource<'a> {

         self.headers = Some(headers);

-        return Ok(());
+        Ok(())
     }
 }

@@ -1336,7 +1336,7 @@ impl<'a> ComposeObjectArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            sources: sources,
+            sources,
         })
     }

@@ -1378,7 +1378,7 @@ impl<'a> SetBucketEncryptionArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            config: config,
+            config,
         })
     }
 }
@@ -1425,7 +1425,7 @@ impl<'a> SetBucketNotificationArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            config: config,
+            config,
         })
     }
 }
@@ -1451,7 +1451,7 @@ impl<'a> SetBucketPolicyArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            config: config,
+            config,
         })
     }
 }
@@ -1492,7 +1492,7 @@ impl<'a> SetBucketTagsArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            tags: tags,
+            tags,
         })
     }
 }
@@ -1517,7 +1517,7 @@ impl<'a> SetBucketVersioningArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            status: status,
+            status,
             mfa_delete: None,
         })
     }
@@ -1547,7 +1547,7 @@ impl<'a> SetObjectLockConfigArgs<'a> {
             extra_query_params: None,
             region: None,
             bucket: bucket_name,
-            config: config,
+            config,
         })
     }
 }
@@ -1628,7 +1628,7 @@ impl<'a> SetObjectTagsArgs<'a> {
             bucket: bucket_name,
             object: object_name,
             version_id: None,
-            tags: tags,
+            tags,
         })
     }
 }
@@ -1664,7 +1664,7 @@ impl<'a> GetPresignedObjectUrlArgs<'a> {
             bucket: bucket_name,
             object: object_name,
             version_id: None,
-            method: method,
+            method,
             expiry_seconds: Some(DEFAULT_EXPIRY_SECONDS),
             request_time: None,
         })
@@ -1693,7 +1693,7 @@ impl<'a> PostPolicy<'a> {
         Ok(PostPolicy {
             region: None,
             bucket: bucket_name,
-            expiration: expiration,
+            expiration,
             eq_conditions: HashMap::new(),
             starts_with_conditions: HashMap::new(),
             lower_limit: None,
@@ -1703,35 +1703,35 @@ impl<'a> PostPolicy<'a> {

     fn trim_dollar(value: &str) -> String {
         let mut s = value.to_string();
-        if s.starts_with("$") {
+        if s.starts_with('$') {
             s.remove(0);
         }
-        return s;
+        s
     }

     fn is_reserved_element(element: &str) -> bool {
-        return element == "bucket"
+        element == "bucket"
             || element == "x-amz-algorithm"
             || element == "x-amz-credential"
             || element == "x-amz-date"
             || element == "policy"
-            || element == "x-amz-signature";
+            || element == "x-amz-signature"
     }

     fn get_credential_string(access_key: &String, date: &UtcTime, region: &String) -> String {
-        return format!(
+        format!(
             "{}/{}/{}/s3/aws4_request",
             access_key,
             to_signer_date(*date),
             region
-        );
+        )
     }

     pub fn add_equals_condition(&mut self, element: &str, value: &str) -> Result<(), Error> {
         if element.is_empty() {
-            return Err(Error::PostPolicyError(format!(
-                "condition element cannot be empty"
-            )));
+            return Err(Error::PostPolicyError(
+                "condition element cannot be empty".to_string(),
+            ));
         }

         let v = PostPolicy::trim_dollar(element);
@@ -1742,7 +1742,7 @@ impl<'a> PostPolicy<'a> {
             )));
         }

-        if PostPolicy::is_reserved_element(&v.as_str()) {
+        if PostPolicy::is_reserved_element(v.as_str()) {
             return Err(Error::PostPolicyError(format!("{} cannot set", element)));
         }

@@ -1756,9 +1756,9 @@ impl<'a> PostPolicy<'a> {

     pub fn add_starts_with_condition(&mut self, element: &str, value: &str) -> Result<(), Error> {
         if element.is_empty() {
-            return Err(Error::PostPolicyError(format!(
-                "condition element cannot be empty"
-            )));
+            return Err(Error::PostPolicyError(
+                "condition element cannot be empty".to_string(),
+            ));
         }

         let v = PostPolicy::trim_dollar(element);
@@ -1772,12 +1772,11 @@ impl<'a> PostPolicy<'a> {
             )));
         }

-        if PostPolicy::is_reserved_element(&v.as_str()) {
+        if PostPolicy::is_reserved_element(v.as_str()) {
             return Err(Error::PostPolicyError(format!("{} cannot set", element)));
         }

-        self.starts_with_conditions
-            .insert(v.clone(), value.to_string());
+        self.starts_with_conditions.insert(v, value.to_string());
         Ok(())
     }

@@ -1791,9 +1790,9 @@ impl<'a> PostPolicy<'a> {
         upper_limit: usize,
     ) -> Result<(), Error> {
         if lower_limit > upper_limit {
-            return Err(Error::PostPolicyError(format!(
-                "lower limit cannot be greater than upper limit"
-            )));
+            return Err(Error::PostPolicyError(
+                "lower limit cannot be greater than upper limit".to_string(),
+            ));
         }

         self.lower_limit = Some(lower_limit);
@@ -1814,24 +1813,26 @@ impl<'a> PostPolicy<'a> {
         region: String,
     ) -> Result<HashMap<String, String>, Error> {
         if region.is_empty() {
-            return Err(Error::PostPolicyError(format!("region cannot be empty")));
+            return Err(Error::PostPolicyError("region cannot be empty".to_string()));
         }

         if !self.eq_conditions.contains_key("key")
             && !self.starts_with_conditions.contains_key("key")
         {
-            return Err(Error::PostPolicyError(format!("key condition must be set")));
+            return Err(Error::PostPolicyError(
+                "key condition must be set".to_string(),
+            ));
         }

         let mut conditions: Vec<Value> = Vec::new();
         conditions.push(json!([PostPolicy::EQ, "$bucket", self.bucket]));
         for (key, value) in &self.eq_conditions {
-            conditions.push(json!([PostPolicy::EQ, String::from("$") + &key, value]));
+            conditions.push(json!([PostPolicy::EQ, String::from("$") + key, value]));
         }
         for (key, value) in &self.starts_with_conditions {
             conditions.push(json!([
                 PostPolicy::STARTS_WITH,
-                String::from("$") + &key,
+                String::from("$") + key,
                 value
             ]));
         }
@@ -1916,7 +1917,7 @@ impl<'a> DownloadObjectArgs<'a> {
             object: object_name,
             version_id: None,
             ssec: None,
-            filename: filename,
+            filename,
             overwrite: false,
         })
     }
@@ -1978,11 +1979,11 @@ impl<'a> UploadObjectArgs<'a> {
             tags: None,
             retention: None,
             legal_hold: false,
-            object_size: object_size,
+            object_size,
             part_size: psize,
-            part_count: part_count,
+            part_count,
             content_type: "application/octet-stream",
-            filename: filename,
+            filename,
         })
     }
 }
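Several of the PostPolicy error paths above replace format! calls that interpolate nothing with a plain .to_string(), which is what clippy's useless_format lint asks for. A minimal standalone sketch, with a hypothetical SketchError type that is not from this crate:

    #[derive(Debug)]
    enum SketchError {
        PostPolicy(String),
    }

    fn check_region(region: &str) -> Result<(), SketchError> {
        if region.is_empty() {
            // Before: Err(SketchError::PostPolicy(format!("region cannot be empty")))
            // clippy::useless_format: format! without arguments is just an allocation.
            return Err(SketchError::PostPolicy("region cannot be empty".to_string()));
        }
        Ok(())
    }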
src/s3/client.rs: 606 lines changed (file diff suppressed because it is too large)
@@ -36,7 +36,7 @@ impl Url {
         if self.port > 0 {
             return format!("{}:{}", self.host, self.port);
         }
-        return self.host.clone();
+        self.host.clone()
     }
 }

@@ -58,7 +58,7 @@ impl fmt::Display for Url {
             f.write_str(&self.host)?;
         }

-        if !self.path.starts_with("/") {
+        if !self.path.starts_with('/') {
             f.write_str("/")?;
         }
         f.write_str(&self.path)?;
@@ -85,7 +85,7 @@ fn extract_region(host: &str) -> String {
         },
         _ => "",
     };
-    return region.to_string();
+    region.to_string()
 }

 #[derive(Derivative)]
@@ -111,22 +111,23 @@ impl BaseUrl {
         bucket_name: Option<&str>,
         object_name: Option<&str>,
     ) -> Result<Url, Error> {
-        if !object_name.map_or(true, |v| v.is_empty()) {
-            if bucket_name.map_or(true, |v| v.is_empty()) {
+        if !object_name.map_or(true, |v| v.is_empty()) && bucket_name.map_or(true, |v| v.is_empty())
+        {
             return Err(Error::UrlBuildError(String::from(
                 "empty bucket name provided for object name",
             )));
-            }
         }

-        let mut url = Url::default();
-        url.https = self.https;
-        url.host = self.host.clone();
-        url.port = self.port;
-        url.query = query.clone();
+        let mut url = Url {
+            https: self.https,
+            host: self.host.clone(),
+            port: self.port,
+            query: query.clone(),
+            ..Default::default()
+        };

         if bucket_name.is_none() {
-            url.path.push_str("/");
+            url.path.push('/');
             if self.aws_host {
                 url.host = format!("s3.{}.{}", region, self.host);
             }
@@ -135,6 +136,7 @@ impl BaseUrl {

         let bucket = bucket_name.unwrap();

+        #[allow(clippy::nonminimal_bool)]
         let enforce_path_style = true &&
             // CreateBucket API requires path style in Amazon AWS S3.
             (method == Method::PUT && object_name.is_none() && query.is_empty()) ||
@@ -163,27 +165,27 @@ impl BaseUrl {
             }
             if enforce_path_style || !self.accelerate_host {
                 s3_domain.push_str(region);
-                s3_domain.push_str(".");
+                s3_domain.push('.');
             }
             url.host = s3_domain + &url.host;
         }

         if enforce_path_style || !self.virtual_style {
-            url.path.push_str("/");
+            url.path.push('/');
             url.path.push_str(bucket);
         } else {
             url.host = format!("{}.{}", bucket, url.host);
         }

-        if object_name.is_some() {
-            if object_name.unwrap().chars().nth(0) != Some('/') {
-                url.path.push_str("/");
+        if let Some(v) = object_name {
+            if !v.starts_with('/') {
+                url.path.push('/');
             }
             // FIXME: urlencode path
-            url.path.push_str(object_name.unwrap());
+            url.path.push_str(v);
         }

-        return Ok(url);
+        Ok(url)
     }

     pub fn from_string(s: String) -> Result<BaseUrl, Error> {
@@ -231,7 +233,7 @@ impl BaseUrl {
             )));
         }

-        if !url.query().is_none() {
+        if url.query().is_some() {
             return Err(Error::InvalidBaseUrl(String::from(
                 "query must be none for base URL",
             )));
@@ -265,15 +267,15 @@ impl BaseUrl {
             accelerate_host = false;
         }

-        return Ok(BaseUrl {
+        Ok(BaseUrl {
-            https: https,
+            https,
             host: host.to_string(),
-            port: port,
+            port,
-            region: region,
+            region,
-            aws_host: aws_host,
+            aws_host,
-            accelerate_host: accelerate_host,
+            accelerate_host,
-            dualstack_host: dualstack_host,
+            dualstack_host,
-            virtual_style: virtual_style,
+            virtual_style,
-        });
+        })
     }
 }
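The Url construction above moves from default-then-assign to a single struct literal with ..Default::default(), which is the shape clippy's field_reassign_with_default lint suggests. A standalone sketch with a hypothetical Endpoint type:

    #[derive(Default)]
    struct Endpoint {
        https: bool,
        host: String,
        port: u16,
        path: String,
    }

    fn build_endpoint(host: &str, port: u16) -> Endpoint {
        // Before (clippy::field_reassign_with_default):
        //   let mut e = Endpoint::default();
        //   e.https = true;
        //   e.host = host.to_string();
        //   e.port = port;
        // After: initialize the known fields in one literal and fill the rest
        // from Default.
        Endpoint {
            https: true,
            host: host.to_string(),
            port,
            ..Default::default()
        }
    }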
@@ -170,15 +170,15 @@ impl StatObjectResponse {
             region: region.to_string(),
             bucket_name: bucket_name.to_string(),
             object_name: object_name.to_string(),
-            size: size,
+            size,
             etag: etag.to_string(),
-            version_id: version_id,
+            version_id,
-            last_modified: last_modified,
+            last_modified,
-            retention_mode: retention_mode,
+            retention_mode,
-            retention_retain_until_date: retention_retain_until_date,
+            retention_retain_until_date,
-            legal_hold: legal_hold,
+            legal_hold,
-            delete_marker: delete_marker,
+            delete_marker,
-            user_metadata: user_metadata,
+            user_metadata,
         })
     }
 }
@@ -324,7 +324,7 @@ impl SelectObjectContentResponse {
         let headers = resp.headers().clone();

         SelectObjectContentResponse {
-            headers: headers,
+            headers,
             region: region.to_string(),
             bucket_name: bucket_name.to_string(),
             object_name: object_name.to_string(),
@@ -333,7 +333,7 @@ impl SelectObjectContentResponse {
                 bytes_progressed: 0,
                 bytes_returned: 0,
             },
-            resp: resp,
+            resp,
             done: false,
             buf: VecDeque::<u8>::new(),
             prelude: [0_u8; 8],
@@ -371,7 +371,7 @@ impl SelectObjectContentResponse {
             self.prelude[i] = self.buf.pop_front().ok_or(Error::InsufficientData(8, i))?;
         }

-        return Ok(true);
+        Ok(true)
     }

     fn read_prelude_crc(&mut self) -> Result<bool, Error> {
@@ -384,7 +384,7 @@ impl SelectObjectContentResponse {
             self.prelude_crc[i] = self.buf.pop_front().ok_or(Error::InsufficientData(4, i))?;
         }

-        return Ok(true);
+        Ok(true)
     }

     fn read_data(&mut self) -> Result<bool, Error> {
@@ -404,7 +404,7 @@ impl SelectObjectContentResponse {
             );
         }

-        return Ok(true);
+        Ok(true)
     }

     fn read_message_crc(&mut self) -> Result<bool, Error> {
@@ -417,7 +417,7 @@ impl SelectObjectContentResponse {
             self.message_crc[i] = self.buf.pop_front().ok_or(Error::InsufficientData(4, i))?;
         }

-        return Ok(true);
+        Ok(true)
     }

     fn decode_header(&mut self, header_length: usize) -> Result<HashMap<String, String>, Error> {
@@ -450,7 +450,7 @@ impl SelectObjectContentResponse {
             headers.insert(name, value);
         }

-        return Ok(headers);
+        Ok(headers)
     }

     async fn do_read(&mut self) -> Result<(), Error> {
@@ -596,7 +596,7 @@ impl SelectObjectContentResponse {
                     ));
                 }
                 Ok(_) => {
-                    if self.payload.len() == 0 {
+                    if self.payload.is_empty() {
                         self.done = true;
                         return Ok(0);
                     }
@@ -620,7 +620,7 @@ impl ListenBucketNotificationResponse {
         bucket_name: &str,
     ) -> ListenBucketNotificationResponse {
         ListenBucketNotificationResponse {
-            headers: headers,
+            headers,
             region: region.to_string(),
             bucket_name: bucket_name.to_string(),
         }
@@ -25,20 +25,20 @@ use sha2::Sha256;
 pub fn hmac_hash(key: &[u8], data: &[u8]) -> Vec<u8> {
     let mut hasher = Hmac::<Sha256>::new_from_slice(key).expect("HMAC can take key of any size");
     hasher.update(data);
-    return hasher.finalize().into_bytes().to_vec();
+    hasher.finalize().into_bytes().to_vec()
 }

 pub fn hmac_hash_hex(key: &[u8], data: &[u8]) -> String {
-    return hexencode(hmac_hash(key, data));
+    hexencode(hmac_hash(key, data))
 }

 pub fn get_scope(date: UtcTime, region: &str, service_name: &str) -> String {
-    return format!(
+    format!(
         "{}/{}/{}/aws4_request",
         to_signer_date(date),
         region,
         service_name
-    );
+    )
 }

 pub fn get_canonical_request_hash(
@@ -64,12 +64,12 @@ pub fn get_canonical_request_hash(
 }

 pub fn get_string_to_sign(date: UtcTime, scope: &str, canonical_request_hash: &str) -> String {
-    return format!(
+    format!(
         "AWS4-HMAC-SHA256\n{}\n{}\n{}",
         to_amz_date(date),
         scope,
         canonical_request_hash
-    );
+    )
 }

 pub fn get_signing_key(
@@ -97,10 +97,10 @@ pub fn get_authorization(
     signed_headers: &str,
     signature: &str,
 ) -> String {
-    return format!(
+    format!(
         "AWS4-HMAC-SHA256 Credential={}/{}, SignedHeaders={}, Signature={}",
         access_key, scope, signed_headers, signature
-    );
+    )
 }

 pub fn sign_v4(
@@ -220,7 +220,7 @@ pub fn presign_v4(
         uri,
         &canonical_query_string,
         &canonical_headers,
-        &signed_headers,
+        signed_headers,
         "UNSIGNED-PAYLOAD",
     );
     let string_to_sign = get_string_to_sign(date, &scope, &canonical_request_hash);
@@ -55,16 +55,16 @@ impl SseCustomerKey {
         );
         copy_headers.insert(
             String::from("X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key"),
-            b64key.clone(),
+            b64key,
         );
         copy_headers.insert(
             String::from("X-Amz-Copy-Source-Server-Side-Encryption-Customer-Key-MD5"),
-            md5key.clone(),
+            md5key,
         );

         SseCustomerKey {
-            headers: headers,
+            headers,
-            copy_headers: copy_headers,
+            copy_headers,
         }
     }
 }
@@ -110,7 +110,7 @@ impl SseKms {
             );
         }

-        SseKms { headers: headers }
+        SseKms { headers }
     }
 }

@@ -145,7 +145,13 @@ impl SseS3 {
             String::from("AES256"),
         );

-        SseS3 { headers: headers }
+        SseS3 { headers }
+    }
+}
+
+impl Default for SseS3 {
+    fn default() -> Self {
+        Self::new()
     }
 }

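Adding impl Default for SseS3 here (and for AccessControlTranslation later in types.rs) is the usual answer to clippy's new_without_default lint: a public no-argument new() should also be reachable through the standard Default trait. A sketch with a hypothetical type:

    struct Sketch {
        value: u32,
    }

    impl Sketch {
        // clippy::new_without_default: a no-argument `new` should come with a
        // matching Default impl so generic code can construct the type too.
        pub fn new() -> Sketch {
            Sketch { value: 0 }
        }
    }

    impl Default for Sketch {
        fn default() -> Self {
            Self::new()
        }
    }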
src/s3/types.rs: 236 lines changed
@@ -221,7 +221,7 @@ impl<'a> SelectRequest<'a> {
         }

         Ok(SelectRequest {
-            expr: expr,
+            expr,
             csv_input: Some(csv_input),
             json_input: None,
             parquet_input: None,
@@ -245,7 +245,7 @@ impl<'a> SelectRequest<'a> {
         }

         Ok(SelectRequest {
-            expr: expr,
+            expr,
             csv_input: Some(csv_input),
             json_input: None,
             parquet_input: None,
@@ -269,7 +269,7 @@ impl<'a> SelectRequest<'a> {
         }

         Ok(SelectRequest {
-            expr: expr,
+            expr,
             csv_input: None,
             json_input: Some(json_input),
             parquet_input: None,
@@ -293,7 +293,7 @@ impl<'a> SelectRequest<'a> {
         }

         Ok(SelectRequest {
-            expr: expr,
+            expr,
             csv_input: None,
             json_input: None,
             parquet_input: Some(parquet_input),
@@ -317,7 +317,7 @@ impl<'a> SelectRequest<'a> {
         }

         Ok(SelectRequest {
-            expr: expr,
+            expr,
             csv_input: None,
             json_input: None,
             parquet_input: Some(parquet_input),
@@ -388,7 +388,7 @@ impl<'a> SelectRequest<'a> {
                 data.push_str("</Type>");
             }
             data.push_str("</JSON>");
-        } else if let Some(_) = &self.parquet_input {
+        } else if self.parquet_input.is_some() {
             data.push_str("<Parquet></Parquet>");
         }
         data.push_str("</InputSerialization>");
@@ -451,7 +451,7 @@ impl<'a> SelectRequest<'a> {
         }

         data.push_str("</SelectObjectContentRequest>");
-        return data;
+        data
     }
 }

@@ -612,7 +612,7 @@ impl SseConfig {
     pub fn kms(kms_master_key_id: Option<String>) -> SseConfig {
         SseConfig {
             sse_algorithm: String::from("aws:kms"),
-            kms_master_key_id: kms_master_key_id,
+            kms_master_key_id,
         }
     }

@@ -631,7 +631,7 @@ impl SseConfig {
         data.push_str(
             "</ApplyServerSideEncryptionByDefault></Rule></ServerSideEncryptionConfiguration>",
         );
-        return data;
+        data
     }
 }

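The else-if rewrite above, from `if let Some(_) = &self.parquet_input` to `self.parquet_input.is_some()`, is what clippy's redundant_pattern_matching lint suggests: matching purely to test presence reads better as is_some(). A tiny standalone sketch:

    fn describe(parquet_input: &Option<String>) -> &'static str {
        // Before (clippy::redundant_pattern_matching):
        //   if let Some(_) = parquet_input { "parquet" } else { "other" }
        if parquet_input.is_some() {
            "parquet"
        } else {
            "other"
        }
    }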
@@ -661,7 +661,9 @@ impl Filter {
             prefix: match v.get_child("Prefix") {
                 Some(p) => Some(
                     p.get_text()
-                        .ok_or(Error::XmlError(format!("text of <Prefix> tag not found")))?
+                        .ok_or(Error::XmlError(
+                            "text of <Prefix> tag not found".to_string(),
+                        ))?
                         .to_string(),
                 ),
                 None => None,
@@ -672,7 +674,7 @@ impl Filter {
                 for xml_node in &tags.children {
                     let tag = xml_node
                         .as_element()
-                        .ok_or(Error::XmlError(format!("<Tag> element not found")))?;
+                        .ok_or(Error::XmlError("<Tag> element not found".to_string()))?;
                     map.insert(get_text(tag, "Key")?, get_text(tag, "Value")?);
                 }
                 Some(map)
@@ -686,7 +688,9 @@ impl Filter {
         let prefix = match element.get_child("Prefix") {
             Some(v) => Some(
                 v.get_text()
-                    .ok_or(Error::XmlError(format!("text of <Prefix> tag not found")))?
+                    .ok_or(Error::XmlError(
+                        "text of <Prefix> tag not found".to_string(),
+                    ))?
                     .to_string(),
             ),
             None => None,
@@ -701,9 +705,9 @@ impl Filter {
         };

         Ok(Filter {
-            and_operator: and_operator,
+            and_operator,
-            prefix: prefix,
+            prefix,
-            tag: tag,
+            tag,
         })
     }

@@ -711,7 +715,7 @@ impl Filter {
         if self.and_operator.is_some() ^ self.prefix.is_some() ^ self.tag.is_some() {
             return Ok(());
         }
-        return Err(Error::InvalidFilter);
+        Err(Error::InvalidFilter)
     }

     pub fn to_xml(&self) -> String {
@@ -720,17 +724,17 @@ impl Filter {
             data.push_str("<And>");
             if self.and_operator.as_ref().unwrap().prefix.is_some() {
                 data.push_str("<Prefix>");
-                data.push_str(&self.and_operator.as_ref().unwrap().prefix.as_ref().unwrap());
+                data.push_str(self.and_operator.as_ref().unwrap().prefix.as_ref().unwrap());
                 data.push_str("</Prefix>");
             }
             if self.and_operator.as_ref().unwrap().tags.is_some() {
                 for (key, value) in self.and_operator.as_ref().unwrap().tags.as_ref().unwrap() {
                     data.push_str("<Tag>");
                     data.push_str("<Key>");
-                    data.push_str(&key);
+                    data.push_str(key);
                     data.push_str("</Key>");
                     data.push_str("<Value>");
-                    data.push_str(&value);
+                    data.push_str(value);
                     data.push_str("</Value>");
                     data.push_str("</Tag>");
                 }
@@ -739,7 +743,7 @@ impl Filter {
         }
         if self.prefix.is_some() {
             data.push_str("<Prefix>");
-            data.push_str(&self.prefix.as_ref().unwrap());
+            data.push_str(self.prefix.as_ref().unwrap());
             data.push_str("</Prefix>");
         }
         if self.tag.is_some() {
@@ -754,7 +758,7 @@ impl Filter {
         }
         data.push_str("</Filter>");

-        return data;
+        data
     }
 }

@@ -812,7 +816,7 @@ impl LifecycleRule {
             filter: Filter::from_xml(
                 element
                     .get_child("Filter")
-                    .ok_or(Error::XmlError(format!("<Filter> tag not found")))?,
+                    .ok_or(Error::XmlError("<Filter> tag not found".to_string()))?,
             )?,
             id: get_default_text(element, "ID"),
             noncurrent_version_expiration_noncurrent_days: match element
@@ -906,14 +910,15 @@ impl LifecycleConfig {

         if let Some(v) = root.get_child("Rule") {
             for rule in &v.children {
-                config.rules.push(LifecycleRule::from_xml(
-                    rule.as_element()
-                        .ok_or(Error::XmlError(format!("<Rule> tag not found")))?,
-                )?);
+                config
+                    .rules
+                    .push(LifecycleRule::from_xml(rule.as_element().ok_or(
+                        Error::XmlError("<Rule> tag not found".to_string()),
+                    )?)?);
             }
         }

-        return Ok(config);
+        Ok(config)
     }

     pub fn validate(&self) -> Result<(), Error> {
@@ -1008,8 +1013,7 @@ impl LifecycleConfig {
             if rule.noncurrent_version_transition_storage_class.is_some() {
                 data.push_str("<StorageClass>");
                 data.push_str(
-                    &rule
-                        .noncurrent_version_transition_storage_class
+                    rule.noncurrent_version_transition_storage_class
                         .as_ref()
                         .unwrap(),
                 );
@@ -1043,7 +1047,7 @@ impl LifecycleConfig {
             }
             if rule.transition_storage_class.is_some() {
                 data.push_str("<StorageClass>");
-                data.push_str(&rule.transition_storage_class.as_ref().unwrap());
+                data.push_str(rule.transition_storage_class.as_ref().unwrap());
                 data.push_str("</StorageClass>");
             }
             data.push_str("</Transition>");
@@ -1054,7 +1058,7 @@ impl LifecycleConfig {

         data.push_str("</LifecycleConfiguration>");

-        return data;
+        data
     }
 }

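The dropped `&` in calls like data.push_str(&key) above corresponds to clippy's needless_borrow lint: the loop variable is already a reference, so the extra borrow only adds noise before deref coercion removes it again. A sketch with a hypothetical render_tags helper:

    fn render_tags(tags: &std::collections::HashMap<String, String>) -> String {
        let mut data = String::new();
        for (key, value) in tags {
            data.push_str("<Tag><Key>");
            data.push_str(key); // was: data.push_str(&key); (clippy::needless_borrow)
            data.push_str("</Key><Value>");
            data.push_str(value); // was: data.push_str(&value);
            data.push_str("</Value></Tag>");
        }
        data
    }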
@@ -1070,15 +1074,12 @@ fn parse_common_notification_config(
     Error,
 > {
     let mut events = Vec::new();
-    loop {
-        match element.take_child("Event") {
-            Some(v) => events.push(
-                v.get_text()
-                    .ok_or(Error::XmlError(format!("text of <Event> tag not found")))?
-                    .to_string(),
-            ),
-            _ => break,
-        }
+    while let Some(v) = element.take_child("Event") {
+        events.push(
+            v.get_text()
+                .ok_or(Error::XmlError("text of <Event> tag not found".to_string()))?
+                .to_string(),
+        );
     }

     let id = get_option_text(element, "Id");
@@ -1089,17 +1090,17 @@ fn parse_common_notification_config(
         let mut suffix = None;
         let rules = filter
             .get_child("S3Key")
-            .ok_or(Error::XmlError(format!("<S3Key> tag not found")))?;
+            .ok_or(Error::XmlError("<S3Key> tag not found".to_string()))?;
         for rule in &rules.children {
             let v = rule
                 .as_element()
-                .ok_or(Error::XmlError(format!("<FilterRule> tag not found")))?;
+                .ok_or(Error::XmlError("<FilterRule> tag not found".to_string()))?;
             let name = get_text(v, "Name")?;
             let value = get_text(v, "Value")?;
             if PrefixFilterRule::NAME == name {
-                prefix = Some(PrefixFilterRule { value: value });
+                prefix = Some(PrefixFilterRule { value });
             } else {
-                suffix = Some(SuffixFilterRule { value: value });
+                suffix = Some(SuffixFilterRule { value });
             }
         }
         (prefix, suffix)
@@ -1120,13 +1121,13 @@ fn to_xml_common_notification_config(

     for event in events {
         data.push_str("<Event>");
-        data.push_str(&event);
+        data.push_str(event);
         data.push_str("</Event>");
     }

     if let Some(v) = id {
         data.push_str("<Id>");
-        data.push_str(&v);
+        data.push_str(v);
         data.push_str("</Id>");
     }

@@ -1150,7 +1151,7 @@ fn to_xml_common_notification_config(
         data.push_str("</S3Key></Filter>");
     }

-    return data;
+    data
 }

 #[derive(Clone, Debug)]
@@ -1185,20 +1186,20 @@ impl CloudFuncConfig {
         let (events, id, prefix_filter_rule, suffix_filter_rule) =
             parse_common_notification_config(element)?;
         Ok(CloudFuncConfig {
-            events: events,
+            events,
-            id: id,
+            id,
-            prefix_filter_rule: prefix_filter_rule,
+            prefix_filter_rule,
-            suffix_filter_rule: suffix_filter_rule,
+            suffix_filter_rule,
             cloud_func: get_text(element, "CloudFunction")?,
         })
     }

     pub fn validate(&self) -> Result<(), Error> {
-        if self.events.len() != 0 && self.cloud_func != "" {
+        if !self.events.is_empty() && !self.cloud_func.is_empty() {
             return Ok(());
         }

-        return Err(Error::InvalidFilter);
+        Err(Error::InvalidFilter)
     }

     pub fn to_xml(&self) -> String {
@@ -1217,7 +1218,7 @@ impl CloudFuncConfig {

         data.push_str("</CloudFunctionConfiguration>");

-        return data;
+        data
     }
 }

@@ -1235,20 +1236,20 @@ impl QueueConfig {
         let (events, id, prefix_filter_rule, suffix_filter_rule) =
             parse_common_notification_config(element)?;
         Ok(QueueConfig {
-            events: events,
+            events,
-            id: id,
+            id,
-            prefix_filter_rule: prefix_filter_rule,
+            prefix_filter_rule,
-            suffix_filter_rule: suffix_filter_rule,
+            suffix_filter_rule,
             queue: get_text(element, "Queue")?,
         })
     }

     pub fn validate(&self) -> Result<(), Error> {
-        if self.events.len() != 0 && self.queue != "" {
+        if !self.events.is_empty() && !self.queue.is_empty() {
             return Ok(());
         }

-        return Err(Error::InvalidFilter);
+        Err(Error::InvalidFilter)
     }

     pub fn to_xml(&self) -> String {
@@ -1267,7 +1268,7 @@ impl QueueConfig {

         data.push_str("</QueueConfiguration>");

-        return data;
+        data
     }
 }

@@ -1285,20 +1286,20 @@ impl TopicConfig {
         let (events, id, prefix_filter_rule, suffix_filter_rule) =
             parse_common_notification_config(element)?;
         Ok(TopicConfig {
-            events: events,
+            events,
-            id: id,
+            id,
-            prefix_filter_rule: prefix_filter_rule,
+            prefix_filter_rule,
-            suffix_filter_rule: suffix_filter_rule,
+            suffix_filter_rule,
             topic: get_text(element, "Topic")?,
         })
     }

     pub fn validate(&self) -> Result<(), Error> {
-        if self.events.len() != 0 && self.topic != "" {
+        if !self.events.is_empty() && !self.topic.is_empty() {
             return Ok(());
         }

-        return Err(Error::InvalidFilter);
+        Err(Error::InvalidFilter)
     }

     pub fn to_xml(&self) -> String {
@@ -1317,7 +1318,7 @@ impl TopicConfig {

         data.push_str("</TopicConfiguration>");

-        return data;
+        data
     }
 }

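The parse_common_notification_config change above, and the NotificationConfig::from_xml hunks that follow, replace a loop { match ... { Some(..) => .., _ => break } } with while let, as clippy's while_let_loop lint recommends. A standalone sketch over a plain Vec:

    fn drain_events(mut pending: Vec<String>) -> Vec<String> {
        let mut events = Vec::new();
        // Before (clippy::while_let_loop):
        //   loop {
        //       match pending.pop() {
        //           Some(v) => events.push(v),
        //           None => break,
        //       }
        //   }
        while let Some(v) = pending.pop() {
            events.push(v);
        }
        events
    }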
@@ -1337,39 +1338,30 @@ impl NotificationConfig {
         };

         let mut cloud_func_config_list = Vec::new();
-        loop {
-            match root.take_child("CloudFunctionConfiguration") {
-                Some(mut v) => cloud_func_config_list.push(CloudFuncConfig::from_xml(&mut v)?),
-                _ => break,
-            }
+        while let Some(mut v) = root.take_child("CloudFunctionConfiguration") {
+            cloud_func_config_list.push(CloudFuncConfig::from_xml(&mut v)?);
         }
-        if cloud_func_config_list.len() != 0 {
+        if !cloud_func_config_list.is_empty() {
             config.cloud_func_config_list = Some(cloud_func_config_list);
         }

         let mut queue_config_list = Vec::new();
-        loop {
-            match root.take_child("QueueConfiguration") {
-                Some(mut v) => queue_config_list.push(QueueConfig::from_xml(&mut v)?),
-                _ => break,
-            }
+        while let Some(mut v) = root.take_child("QueueConfiguration") {
+            queue_config_list.push(QueueConfig::from_xml(&mut v)?);
         }
-        if queue_config_list.len() != 0 {
+        if !queue_config_list.is_empty() {
             config.queue_config_list = Some(queue_config_list);
         }

         let mut topic_config_list = Vec::new();
-        loop {
-            match root.take_child("TopicConfiguration") {
-                Some(mut v) => topic_config_list.push(TopicConfig::from_xml(&mut v)?),
-                _ => break,
-            }
+        while let Some(mut v) = root.take_child("TopicConfiguration") {
+            topic_config_list.push(TopicConfig::from_xml(&mut v)?);
         }
-        if topic_config_list.len() != 0 {
+        if !topic_config_list.is_empty() {
             config.topic_config_list = Some(topic_config_list);
         }

-        return Ok(config);
+        Ok(config)
     }

     pub fn validate(&self) -> Result<(), Error> {
@@ -1416,7 +1408,7 @@ impl NotificationConfig {
         }

         data.push_str("</NotificationConfiguration>");
-        return data;
+        data
     }
 }

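The three `loop { match root.take_child(...) { ... _ => break } }` blocks above collapse into `while let` loops, which is the shape clippy's `while_let_loop` lint suggests. A minimal, self-contained sketch of the same drain pattern on xmltree's `take_child`; the XML snippet and names here are illustrative, not taken from the crate:

```rust
use xmltree::Element;

fn main() {
    // Hypothetical document with repeated <QueueConfiguration> children.
    let xml = r#"<NotificationConfiguration>
        <QueueConfiguration><Id>1</Id></QueueConfiguration>
        <QueueConfiguration><Id>2</Id></QueueConfiguration>
    </NotificationConfiguration>"#;
    let mut root = Element::parse(xml.as_bytes()).unwrap();

    // Instead of `loop { match root.take_child(..) { Some(v) => .., _ => break } }`,
    // drain the repeated children with `while let`.
    let mut queue_configs = Vec::new();
    while let Some(child) = root.take_child("QueueConfiguration") {
        queue_configs.push(child);
    }
    assert_eq!(queue_configs.len(), 2);
}
```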
@@ -1433,6 +1425,12 @@ impl AccessControlTranslation {
     }
 }

+impl Default for AccessControlTranslation {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 #[derive(Clone, Debug)]
 pub struct EncryptionConfig {
     pub replica_kms_key_id: Option<String>,
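The added `Default` impl is the usual answer to clippy's `new_without_default` lint: a public, argument-free `new()` should come with a `Default` that simply forwards to it. A minimal sketch of that shape, using a made-up struct name:

```rust
#[derive(Clone, Debug)]
pub struct ExampleConfig {
    pub enabled: bool,
}

impl ExampleConfig {
    pub fn new() -> ExampleConfig {
        ExampleConfig { enabled: true }
    }
}

// Forwarding Default to new() keeps the two constructors in sync and
// silences clippy::new_without_default.
impl Default for ExampleConfig {
    fn default() -> Self {
        Self::new()
    }
}

fn main() {
    let cfg = ExampleConfig::default();
    assert!(cfg.enabled);
}
```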
@@ -1448,7 +1446,7 @@ impl Metrics {
     pub fn new(status: bool) -> Metrics {
         Metrics {
             event_threshold_minutes: Some(15),
-            status: status,
+            status,
         }
     }
 }
@@ -1463,7 +1461,7 @@ impl ReplicationTime {
     pub fn new(status: bool) -> ReplicationTime {
         ReplicationTime {
             time_minutes: Some(15),
-            status: status,
+            status,
         }
     }
 }
@@ -1490,17 +1488,16 @@ impl Destination {
                 _ => None,
             },
             account: get_option_text(element, "Account"),
-            encryption_config: match element.get_child("EncryptionConfiguration") {
-                Some(v) => Some(EncryptionConfig {
-                    replica_kms_key_id: get_option_text(v, "ReplicaKmsKeyID"),
-                }),
-                _ => None,
-            },
+            encryption_config: element.get_child("EncryptionConfiguration").map(|v| {
+                EncryptionConfig {
+                    replica_kms_key_id: get_option_text(v, "ReplicaKmsKeyID"),
+                }
+            }),
             metrics: match element.get_child("Metrics") {
                 Some(v) => Some(Metrics {
                     event_threshold_minutes: match get_option_text(
                         v.get_child("EventThreshold")
-                            .ok_or(Error::XmlError(format!("<Metrics> tag not found")))?,
+                            .ok_or(Error::XmlError("<Metrics> tag not found".to_string()))?,
                         "Minutes",
                     ) {
                         Some(v) => Some(v.parse::<i32>()?),
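Rewriting `match opt { Some(v) => Some(f(v)), _ => None }` as `opt.map(|v| f(v))` is what clippy's `manual_map` lint asks for in the hunk above. A standalone sketch of the equivalence, with made-up types:

```rust
struct EncryptionInfo {
    key_id: Option<String>,
}

fn main() {
    let raw: Option<&str> = Some("my-kms-key");

    // Manual form: a match that only wraps the Some arm.
    let manual = match raw {
        Some(v) => Some(EncryptionInfo {
            key_id: Some(v.to_string()),
        }),
        _ => None,
    };

    // Equivalent Option::map form preferred by clippy::manual_map.
    let mapped = raw.map(|v| EncryptionInfo {
        key_id: Some(v.to_string()),
    });

    assert_eq!(manual.is_some(), mapped.is_some());
}
```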
@@ -1539,7 +1536,7 @@ impl Destination {

         if let Some(v) = &self.account {
             data.push_str("<Account>");
-            data.push_str(&v);
+            data.push_str(v);
             data.push_str("</Account>");
         }

@@ -1547,7 +1544,7 @@ impl Destination {
             data.push_str("<EncryptionConfiguration>");
             if let Some(v) = &c.replica_kms_key_id {
                 data.push_str("<ReplicaKmsKeyID>");
-                data.push_str(&v);
+                data.push_str(v);
                 data.push_str("</ReplicaKmsKeyID>");
             }
             data.push_str("</EncryptionConfiguration>");
@@ -1593,13 +1590,13 @@ impl Destination {

         if let Some(v) = &self.storage_class {
             data.push_str("<StorageClass>");
-            data.push_str(&v);
+            data.push_str(v);
             data.push_str("</StorageClass>");
         }

         data.push_str("</Destination>");

-        return data;
+        data
     }
 }

@@ -1628,7 +1625,7 @@ impl ReplicationRule {
             destination: Destination::from_xml(
                 element
                     .get_child("Destination")
-                    .ok_or(Error::XmlError(format!("<Destination> tag not found")))?,
+                    .ok_or(Error::XmlError("<Destination> tag not found".to_string()))?,
             )?,
             delete_marker_replication_status: match element.get_child("DeleteMarkerReplication") {
                 Some(v) => Some(get_text(v, "Status")? == "Enabled"),
@@ -1699,13 +1696,13 @@ impl ReplicationRule {

         if let Some(v) = &self.id {
             data.push_str("<ID>");
-            data.push_str(&v);
+            data.push_str(v);
             data.push_str("</ID>");
         }

         if let Some(v) = &self.prefix {
             data.push_str("<Prefix>");
-            data.push_str(&v);
+            data.push_str(v);
             data.push_str("</Prefix>");
         }

@@ -1748,7 +1745,7 @@ impl ReplicationRule {
         });
         data.push_str("</Status>");

-        return data;
+        data
     }
 }

@@ -1767,14 +1764,15 @@ impl ReplicationConfig {

         if let Some(v) = root.get_child("Rule") {
             for rule in &v.children {
-                config.rules.push(ReplicationRule::from_xml(
-                    rule.as_element()
-                        .ok_or(Error::XmlError(format!("<Rule> tag not found")))?,
-                )?);
+                config
+                    .rules
+                    .push(ReplicationRule::from_xml(rule.as_element().ok_or(
+                        Error::XmlError("<Rule> tag not found".to_string()),
+                    )?)?);
             }
         }

-        return Ok(config);
+        Ok(config)
     }

     pub fn to_xml(&self) -> String {
@@ -1782,7 +1780,7 @@ impl ReplicationConfig {

         if let Some(v) = &self.role {
             data.push_str("<Status>");
-            data.push_str(&v);
+            data.push_str(v);
             data.push_str("</Status>");
         }

@@ -1791,7 +1789,7 @@ impl ReplicationConfig {
         }

         data.push_str("</ReplicationConfiguration>");
-        return data;
+        data
     }
 }

@@ -1816,9 +1814,9 @@ impl ObjectLockConfig {
             });
         }

-        Err(Error::InvalidObjectLockConfig(format!(
-            "only one days or years must be set"
-        )))
+        Err(Error::InvalidObjectLockConfig(
+            "only one days or years must be set".to_string(),
+        ))
     }

     pub fn from_xml(root: &Element) -> Result<ObjectLockConfig, Error> {
@@ -1829,9 +1827,9 @@ impl ObjectLockConfig {
         };

         if let Some(r) = root.get_child("Rule") {
-            let default_retention = r
-                .get_child("DefaultRetention")
-                .ok_or(Error::XmlError(format!("<DefaultRetention> tag not found")))?;
+            let default_retention = r.get_child("DefaultRetention").ok_or(Error::XmlError(
+                "<DefaultRetention> tag not found".to_string(),
+            ))?;
             config.retention_mode =
                 Some(RetentionMode::parse(&get_text(default_retention, "Mode")?)?);

@@ -1844,7 +1842,7 @@ impl ObjectLockConfig {
             }
         }

-        return Ok(config);
+        Ok(config)
     }

     pub fn to_xml(&self) -> String {
@@ -1869,6 +1867,6 @@ impl ObjectLockConfig {
         }
         data.push_str("</ObjectLockConfiguration>");

-        return data;
+        data
     }
 }

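Several of the error paths above drop `format!` calls that had no interpolation at all; clippy's `useless_format` lint flags them because `format!("literal")` goes through the formatting machinery when `"literal".to_string()` is enough. A tiny sketch of the substitution, with a made-up error type:

```rust
#[derive(Debug)]
enum XmlError {
    Message(String),
}

fn main() {
    // Before: format! with a plain literal and no arguments.
    let before = XmlError::Message(format!("<Destination> tag not found"));

    // After: the direct conversion clippy::useless_format suggests.
    let after = XmlError::Message("<Destination> tag not found".to_string());

    println!("{:?} / {:?}", before, after);
}
```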
@@ -14,7 +14,8 @@
 // limitations under the License.

 use crate::s3::error::Error;
-pub use base64::encode as b64encode;
+use base64::engine::general_purpose::STANDARD as BASE64;
+use base64::engine::Engine as _;
 use byteorder::{BigEndian, ReadBytesExt};
 use chrono::{DateTime, Datelike, NaiveDateTime, ParseError, Utc};
 use crc::{Crc, CRC_32_ISO_HDLC};
@@ -32,6 +33,10 @@ pub type UtcTime = DateTime<Utc>;

 pub type Multimap = MultiMap<String, String>;

+pub fn b64encode<T: AsRef<[u8]>>(input: T) -> String {
+    BASE64.encode(input)
+}
+
 pub fn merge(m1: &mut Multimap, m2: &Multimap) {
     for (key, values) in m2.iter_all() {
         for value in values {
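The old `pub use base64::encode as b64encode` re-export no longer compiles against base64 0.21, where encoding goes through an `Engine`; the new `b64encode` wrapper above keeps existing call sites unchanged. A standalone sketch of the same wrapper plus a call, assuming base64 0.21.x:

```rust
use base64::engine::general_purpose::STANDARD as BASE64;
use base64::engine::Engine as _;

// Thin wrapper so callers can keep writing b64encode(data) as they did
// with the free function in base64 0.13.
pub fn b64encode<T: AsRef<[u8]>>(input: T) -> String {
    BASE64.encode(input)
}

fn main() {
    assert_eq!(b64encode(b"hello"), "aGVsbG8=");
}
```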
@@ -51,7 +56,7 @@ pub fn uint32(mut data: &[u8]) -> Result<u32, std::io::Error> {
 pub fn sha256_hash(data: &[u8]) -> String {
     let mut hasher = Sha256::new();
     hasher.update(data);
-    return format!("{:x}", hasher.finalize());
+    format!("{:x}", hasher.finalize())
 }

 pub fn md5sum_hash(data: &[u8]) -> String {
@@ -90,7 +95,7 @@ pub fn to_http_header_value(time: UtcTime) -> String {
             12 => "Dec",
             _ => "",
         },
-        time.format("%Y %H:%M:%S").to_string()
+        time.format("%Y %H:%M:%S")
     )
 }

@@ -99,7 +104,7 @@ pub fn to_iso8601utc(time: UtcTime) -> String {
 }

 pub fn from_iso8601utc(s: &str) -> Result<UtcTime, ParseError> {
-    Ok(DateTime::<Utc>::from_utc(
+    Ok(DateTime::<Utc>::from_naive_utc_and_offset(
         match NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%3fZ") {
             Ok(d) => d,
             _ => NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%SZ")?,
@@ -109,7 +114,7 @@ pub fn from_iso8601utc(s: &str) -> Result<UtcTime, ParseError> {
 }

 pub fn from_http_header_value(s: &str) -> Result<UtcTime, ParseError> {
-    Ok(DateTime::<Utc>::from_utc(
+    Ok(DateTime::<Utc>::from_naive_utc_and_offset(
         NaiveDateTime::parse_from_str(s, "%a, %d %b %Y %H:%M:%S GMT")?,
         Utc,
     ))
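Newer chrono releases deprecate `DateTime::from_utc` in favour of `from_naive_utc_and_offset`, which is the swap applied in both parsers above. A minimal sketch of the new constructor, assuming chrono 0.4.27 or later:

```rust
use chrono::{DateTime, NaiveDateTime, Utc};

fn main() {
    // Parse a naive timestamp, then attach the UTC offset with the
    // non-deprecated constructor.
    let naive =
        NaiveDateTime::parse_from_str("2023-08-30T12:31:43Z", "%Y-%m-%dT%H:%M:%SZ").unwrap();
    let utc: DateTime<Utc> = DateTime::from_naive_utc_and_offset(naive, Utc);
    println!("{}", utc.to_rfc3339());
}
```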
@@ -126,7 +131,7 @@ pub fn to_http_headers(map: &Multimap) -> Vec<String> {
             headers.push(s);
         }
     }
-    return headers;
+    headers
 }

 pub fn to_query_string(map: &Multimap) -> String {
@@ -134,14 +139,14 @@ pub fn to_query_string(map: &Multimap) -> String {
     for (key, values) in map.iter_all() {
         for value in values {
             if !query.is_empty() {
-                query.push_str("&");
+                query.push('&');
             }
             query.push_str(&urlencode(key));
-            query.push_str("=");
+            query.push('=');
             query.push_str(&urlencode(value));
         }
     }
-    return query;
+    query
 }

 pub fn get_canonical_query_string(map: &Multimap) -> String {
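`push_str("&")` with a one-character literal goes through the string-slice path for no benefit; clippy's `single_char_add_str` prefers `push('&')`. A short sketch of the same kind of query-string loop over the `multimap` crate's `MultiMap` (the `Multimap` alias above is `MultiMap<String, String>`); the keys, values, and the use of `urlencoding::encode` here are illustrative, not the crate's own helper:

```rust
use multimap::MultiMap;
use urlencoding::encode as urlencode;

fn to_query_string(map: &MultiMap<String, String>) -> String {
    let mut query = String::new();
    for (key, values) in map.iter_all() {
        for value in values {
            if !query.is_empty() {
                query.push('&'); // single char: push, not push_str
            }
            query.push_str(&urlencode(key));
            query.push('=');
            query.push_str(&urlencode(value));
        }
    }
    query
}

fn main() {
    let mut map = MultiMap::new();
    map.insert("prefix".to_string(), "logs/".to_string());
    map.insert("max-keys".to_string(), "100".to_string());
    println!("{}", to_query_string(&map));
}
```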
@@ -157,10 +162,10 @@ pub fn get_canonical_query_string(map: &Multimap) -> String {
             Some(values) => {
                 for value in values {
                     if !query.is_empty() {
-                        query.push_str("&");
+                        query.push('&');
                     }
                     query.push_str(&urlencode(key.as_str()));
-                    query.push_str("=");
+                    query.push('=');
                     query.push_str(&urlencode(value));
                 }
             }
@@ -168,7 +173,7 @@ pub fn get_canonical_query_string(map: &Multimap) -> String {
         };
     }

-    return query;
+    query
 }

 pub fn get_canonical_headers(map: &Multimap) -> (String, String) {
@@ -189,7 +194,7 @@ pub fn get_canonical_headers(map: &Multimap) -> (String, String) {
         let mut value = String::new();
         for v in vs {
             if !value.is_empty() {
-                value.push_str(",");
+                value.push(',');
             }
             let s: String = MULTI_SPACE_REGEX.replace_all(&v, " ").to_string();
             value.push_str(&s);
@@ -202,20 +207,20 @@ pub fn get_canonical_headers(map: &Multimap) -> (String, String) {
     let mut add_delim = false;
     for (key, value) in &btmap {
         if add_delim {
-            signed_headers.push_str(";");
-            canonical_headers.push_str("\n");
+            signed_headers.push(';');
+            canonical_headers.push('\n');
         }

         signed_headers.push_str(key);

         canonical_headers.push_str(key);
-        canonical_headers.push_str(":");
+        canonical_headers.push(':');
         canonical_headers.push_str(value);

         add_delim = true;
     }

-    return (signed_headers, canonical_headers);
+    (signed_headers, canonical_headers)
 }

 pub fn check_bucket_name(bucket_name: &str, strict: bool) -> Result<(), Error> {
@@ -269,7 +274,7 @@ pub fn check_bucket_name(bucket_name: &str, strict: bool) -> Result<(), Error> {
         )));
     }

-    return Ok(());
+    Ok(())
 }

 pub fn get_text(element: &Element, tag: &str) -> Result<String, Error> {

@@ -1,5 +1,5 @@
 -----BEGIN PRIVATE KEY-----
-MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgs92/22T2vIGJCIHR
-6KL78f37XJXTJCpIlyVozmEo9iahRANCAATWlbZ1mHD8YeMKa2kM7E7hptGcl+6h
-mmq4ugD3bbJCh22wLTxHobqadlCnq976H91Z2yM2cXmZLByz8Epgg/9w
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgkZrDqi6YL+RS5xEy
+YLMVrCDC9r/F39UYKDPAIS41ulGhRANCAAQgpenErxeS4XVDweO41kcBzeS911/B
+cLEDTGmuZF0ZeOHF6JJYtBEMXgxbglUIUPmE+3L21u/6fqR5aztM8GHy
 -----END PRIVATE KEY-----

@@ -1,13 +1,12 @@
 -----BEGIN CERTIFICATE-----
-MIIB5zCCAY2gAwIBAgIQGDcDELutI2jM7kTgfeESPzAKBggqhkjOPQQDAjA7MRww
-GgYDVQQKExNDZXJ0Z2VuIERldmVsb3BtZW50MRswGQYDVQQLDBJoYXJzaGFAbmFu
-byAobmFubykwHhcNMjIwODIzMDgxMDQwWhcNMjMwODIzMDgxMDQwWjA7MRwwGgYD
-VQQKExNDZXJ0Z2VuIERldmVsb3BtZW50MRswGQYDVQQLDBJoYXJzaGFAbmFubyAo
-bmFubykwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAATWlbZ1mHD8YeMKa2kM7E7h
-ptGcl+6hmmq4ugD3bbJCh22wLTxHobqadlCnq976H91Z2yM2cXmZLByz8Epgg/9w
-o3MwcTAOBgNVHQ8BAf8EBAMCAqQwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDwYDVR0T
-AQH/BAUwAwEB/zAdBgNVHQ4EFgQUE9fw8e+S5H5lOzreN8FFcBP3r2gwGgYDVR0R
-BBMwEYIJbG9jYWxob3N0hwR/AAABMAoGCCqGSM49BAMCA0gAMEUCIBHcFxnYVTIu
-KI9AcywtvtFIINknZ0dSVrR0nzUMSOskAiEAqbj+BqVogZzO8GC+1l71K/R+j4yP
-NOwfeX1Aq+3fDQ0=
+MIIB1TCCAXygAwIBAgIRAP0ihBYxR23zNUlka9xA4TYwCgYIKoZIzj0EAwIwMTEc
+MBoGA1UEChMTQ2VydGdlbiBEZXZlbG9wbWVudDERMA8GA1UECwwIYmFsYUBmMzgw
+IBcNMjMwODMwMTIzMTQzWhgPMjEyMzA4MDYxMjMxNDNaMDExHDAaBgNVBAoTE0Nl
+cnRnZW4gRGV2ZWxvcG1lbnQxETAPBgNVBAsMCGJhbGFAZjM4MFkwEwYHKoZIzj0C
+AQYIKoZIzj0DAQcDQgAEIKXpxK8XkuF1Q8HjuNZHAc3kvddfwXCxA0xprmRdGXjh
+xeiSWLQRDF4MW4JVCFD5hPty9tbv+n6keWs7TPBh8qNzMHEwDgYDVR0PAQH/BAQD
+AgKkMBMGA1UdJQQMMAoGCCsGAQUFBwMBMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFOI5oRDgCm3lEefkHkKutkW6dzMyMBoGA1UdEQQTMBGCCWxvY2FsaG9zdIcE
+fwAAATAKBggqhkjOPQQDAgNHADBEAiAN77OMdOC0OW1eubI6RygpWxv1SOtKqtRJ
+CFbdqTLYKgIgVe7mPBqBECShFFjcGvSvRl2ev0f19EN37BEj9E+ZZXI=
 -----END CERTIFICATE-----

161 tests/tests.rs
@@ -41,7 +41,7 @@ struct RandReader {

 impl RandReader {
     fn new(size: usize) -> RandReader {
-        RandReader { size: size }
+        RandReader { size }
     }
 }

@@ -99,12 +99,12 @@ impl<'a> ClientTest<'_> {
         client.ssl_cert_file = ssl_cert_file.to_string();

         ClientTest {
-            base_url: base_url,
-            access_key: access_key,
-            secret_key: secret_key,
-            ignore_cert_check: ignore_cert_check,
-            ssl_cert_file: ssl_cert_file,
-            client: client,
+            base_url,
+            access_key,
+            secret_key,
+            ignore_cert_check,
+            ssl_cert_file,
+            client,
             test_bucket: rand_bucket_name(),
         }
     }
@@ -134,7 +134,7 @@ impl<'a> ClientTest<'_> {
             .bucket_exists(&BucketExistsArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(exists, true);
+        assert!(exists);
         self.client
             .remove_bucket(&RemoveBucketArgs::new(&bucket_name).unwrap())
             .await
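Most of the test changes that follow are the same mechanical rewrite: `assert_eq!(cond, true)` becomes `assert!(cond)` and `assert_eq!(cond, false)` becomes `assert!(!cond)`, per clippy's `bool_assert_comparison` lint. A tiny self-contained illustration with invented values:

```rust
fn main() {
    let exists = true;
    let tags: Vec<String> = Vec::new();

    // Comparing a bool against a bool literal...
    assert_eq!(exists, true);
    assert_eq!(tags.is_empty(), true);

    // ...says the same thing less directly than asserting the bool itself.
    assert!(exists);
    assert!(tags.is_empty());
    assert!(!tags.contains(&"missing".to_string()));
}
```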
@@ -149,7 +149,7 @@ impl<'a> ClientTest<'_> {

         for b in names.iter() {
             self.client
-                .make_bucket(&MakeBucketArgs::new(&b).unwrap())
+                .make_bucket(&MakeBucketArgs::new(b).unwrap())
                 .await
                 .unwrap();
         }
@@ -169,7 +169,7 @@ impl<'a> ClientTest<'_> {

         for b in names.iter() {
             self.client
-                .remove_bucket(&RemoveBucketArgs::new(&b).unwrap())
+                .remove_bucket(&RemoveBucketArgs::new(b).unwrap())
                 .await
                 .unwrap();
         }
@@ -268,7 +268,7 @@ impl<'a> ClientTest<'_> {
         let mut hasher = Sha256::new();
         let mut file = fs::File::open(filename).unwrap();
         io::copy(&mut file, &mut hasher).unwrap();
-        return format!("{:x}", hasher.finalize());
+        format!("{:x}", hasher.finalize())
     }

     async fn upload_download_object(&self) {
@@ -279,7 +279,7 @@ impl<'a> ClientTest<'_> {
         file.sync_all().unwrap();
         self.client
             .upload_object(
-                &mut UploadObjectArgs::new(&self.test_bucket, &object_name, &object_name).unwrap(),
+                &UploadObjectArgs::new(&self.test_bucket, &object_name, &object_name).unwrap(),
             )
             .await
             .unwrap();
@@ -291,10 +291,7 @@ impl<'a> ClientTest<'_> {
             )
             .await
             .unwrap();
-        assert_eq!(
-            ClientTest::get_hash(&object_name) == ClientTest::get_hash(&filename),
-            true
-        );
+        assert!(ClientTest::get_hash(&object_name) == ClientTest::get_hash(&filename));

         fs::remove_file(&object_name).unwrap();
         fs::remove_file(&filename).unwrap();
@@ -316,7 +313,7 @@ impl<'a> ClientTest<'_> {
         file.sync_all().unwrap();
         self.client
             .upload_object(
-                &mut UploadObjectArgs::new(&self.test_bucket, &object_name, &object_name).unwrap(),
+                &UploadObjectArgs::new(&self.test_bucket, &object_name, &object_name).unwrap(),
             )
             .await
             .unwrap();
@@ -328,10 +325,7 @@ impl<'a> ClientTest<'_> {
             )
             .await
             .unwrap();
-        assert_eq!(
-            ClientTest::get_hash(&object_name) == ClientTest::get_hash(&filename),
-            true
-        );
+        assert!(ClientTest::get_hash(&object_name) == ClientTest::get_hash(&filename));

         fs::remove_file(&object_name).unwrap();
         fs::remove_file(&filename).unwrap();
@@ -376,7 +370,7 @@ impl<'a> ClientTest<'_> {
         let mut objects: Vec<DeleteObject> = Vec::new();
         for name in names.iter() {
             objects.push(DeleteObject {
-                name: &name,
+                name,
                 version_id: None,
             });
         }
@@ -423,9 +417,9 @@ impl<'a> ClientTest<'_> {

         self.client
             .list_objects(
-                &mut ListObjectsArgs::new(&self.test_bucket, &|items| {
+                &ListObjectsArgs::new(&self.test_bucket, &|items| {
                     for item in items.iter() {
-                        assert_eq!(names.contains(&item.name), true);
+                        assert!(names.contains(&item.name));
                     }
                     true
                 })
@@ -437,7 +431,7 @@ impl<'a> ClientTest<'_> {
         let mut objects: Vec<DeleteObject> = Vec::new();
         for name in names.iter() {
             objects.push(DeleteObject {
-                name: &name,
+                name,
                 version_id: None,
             });
         }
@@ -557,11 +551,11 @@ impl<'a> ClientTest<'_> {
                     }
                 }
                 sender.send(false).unwrap();
-                return false;
+                false
             };

             let args = &ListenBucketNotificationArgs::new(&test_bucket, &event_fn).unwrap();
-            client.listen_bucket_notification(&args).await.unwrap();
+            client.listen_bucket_notification(args).await.unwrap();
         };

         let spawned_task = task::spawn(listen_task());
@@ -588,7 +582,7 @@ impl<'a> ClientTest<'_> {
             .unwrap();

         spawned_task.await;
-        assert_eq!(receiver.recv().await.unwrap(), true);
+        assert!(receiver.recv().await.unwrap());
     }

     async fn copy_object(&self) {
@@ -732,18 +726,12 @@ impl<'a> ClientTest<'_> {
             .await
             .unwrap();
         assert_eq!(resp.config.queue_config_list.as_ref().unwrap().len(), 1);
-        assert_eq!(
-            resp.config.queue_config_list.as_ref().unwrap()[0]
-                .events
-                .contains(&String::from("s3:ObjectCreated:Put")),
-            true
-        );
-        assert_eq!(
-            resp.config.queue_config_list.as_ref().unwrap()[0]
-                .events
-                .contains(&String::from("s3:ObjectCreated:Copy")),
-            true
-        );
+        assert!(resp.config.queue_config_list.as_ref().unwrap()[0]
+            .events
+            .contains(&String::from("s3:ObjectCreated:Put")));
+        assert!(resp.config.queue_config_list.as_ref().unwrap()[0]
+            .events
+            .contains(&String::from("s3:ObjectCreated:Copy")));
         assert_eq!(
             resp.config.queue_config_list.as_ref().unwrap()[0]
                 .prefix_filter_rule
@@ -775,7 +763,7 @@ impl<'a> ClientTest<'_> {
             .get_bucket_notification(&GetBucketNotificationArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.config.queue_config_list.is_none(), true);
+        assert!(resp.config.queue_config_list.is_none());

         self.client
             .remove_bucket(&RemoveBucketArgs::new(&bucket_name).unwrap())
@@ -824,7 +812,7 @@ impl<'a> ClientTest<'_> {
             .get_bucket_policy(&GetBucketPolicyArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.config.is_empty(), false);
+        assert!(!resp.config.is_empty());

         self.client
             .delete_bucket_policy(&DeleteBucketPolicyArgs::new(&bucket_name).unwrap())
@@ -866,10 +854,7 @@ impl<'a> ClientTest<'_> {
             .get_bucket_tags(&GetBucketTagsArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            resp.tags.len() == tags.len() && resp.tags.keys().all(|k| tags.contains_key(k)),
-            true
-        );
+        assert!(resp.tags.len() == tags.len() && resp.tags.keys().all(|k| tags.contains_key(k)));

         self.client
             .delete_bucket_tags(&DeleteBucketTagsArgs::new(&bucket_name).unwrap())
@@ -881,7 +866,7 @@ impl<'a> ClientTest<'_> {
             .get_bucket_tags(&GetBucketTagsArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.tags.is_empty(), true);
+        assert!(resp.tags.is_empty());

         self.client
             .remove_bucket(&RemoveBucketArgs::new(&bucket_name).unwrap())
@@ -912,19 +897,13 @@ impl<'a> ClientTest<'_> {
             .get_object_lock_config(&GetObjectLockConfigArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            match resp.config.retention_mode {
-                Some(r) => match r {
-                    RetentionMode::GOVERNANCE => true,
-                    _ => false,
-                },
-                _ => false,
-            },
-            true
-        );
+        assert!(match resp.config.retention_mode {
+            Some(r) => matches!(r, RetentionMode::GOVERNANCE),
+            _ => false,
+        });

-        assert_eq!(resp.config.retention_duration_days == Some(7), true);
-        assert_eq!(resp.config.retention_duration_years.is_none(), true);
+        assert!(resp.config.retention_duration_days == Some(7));
+        assert!(resp.config.retention_duration_years.is_none());

         self.client
             .delete_object_lock_config(&DeleteObjectLockConfigArgs::new(&bucket_name).unwrap())
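The nested `match` that only answered true or false collapses into the `matches!` macro, which is what clippy's `match_like_matches_macro` lint points at in the hunk above and in the retention tests below. A small sketch with a stand-in enum (not the crate's own type):

```rust
#[allow(dead_code)]
#[derive(Debug)]
enum RetentionMode {
    Governance,
    Compliance,
}

fn main() {
    let mode = Some(RetentionMode::Governance);

    // A nested match that only returns booleans...
    let verbose = match &mode {
        Some(r) => match r {
            RetentionMode::Governance => true,
            _ => false,
        },
        _ => false,
    };

    // ...is exactly what matches! expresses in one line.
    let concise = matches!(mode, Some(RetentionMode::Governance));

    assert_eq!(verbose, concise);
    assert!(concise);
}
```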
@@ -936,7 +915,7 @@ impl<'a> ClientTest<'_> {
             .get_object_lock_config(&GetObjectLockConfigArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.config.retention_mode.is_none(), true);
+        assert!(resp.config.retention_mode.is_none());

         self.client
             .remove_bucket(&RemoveBucketArgs::new(&bucket_name).unwrap())
@@ -979,10 +958,7 @@ impl<'a> ClientTest<'_> {
             .get_object_tags(&GetObjectTagsArgs::new(&self.test_bucket, &object_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            resp.tags.len() == tags.len() && resp.tags.keys().all(|k| tags.contains_key(k)),
-            true
-        );
+        assert!(resp.tags.len() == tags.len() && resp.tags.keys().all(|k| tags.contains_key(k)));

         self.client
             .delete_object_tags(
@@ -996,7 +972,7 @@ impl<'a> ClientTest<'_> {
             .get_object_tags(&GetObjectTagsArgs::new(&self.test_bucket, &object_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.tags.is_empty(), true);
+        assert!(resp.tags.is_empty());

         self.client
             .remove_object(&RemoveObjectArgs::new(&self.test_bucket, &object_name).unwrap())
@@ -1022,13 +998,10 @@ impl<'a> ClientTest<'_> {
             .get_bucket_versioning(&GetBucketVersioningArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            match resp.status {
-                Some(v) => v,
-                _ => false,
-            },
-            true
-        );
+        assert!(match resp.status {
+            Some(v) => v,
+            _ => false,
+        });

         self.client
             .set_bucket_versioning(&SetBucketVersioningArgs::new(&bucket_name, false).unwrap())
@@ -1040,12 +1013,11 @@ impl<'a> ClientTest<'_> {
             .get_bucket_versioning(&GetBucketVersioningArgs::new(&bucket_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            match resp.status {
-                Some(v) => v,
-                _ => false,
-            },
-            false
-        );
+        assert!(
+            !(match resp.status {
+                Some(v) => v,
+                _ => false,
+            })
+        );

         self.client
@@ -1091,23 +1063,14 @@ impl<'a> ClientTest<'_> {
             .get_object_retention(&GetObjectRetentionArgs::new(&bucket_name, &object_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(
-            match resp.retention_mode {
-                Some(v) => match v {
-                    RetentionMode::GOVERNANCE => true,
-                    _ => false,
-                },
-                _ => false,
-            },
-            true
-        );
-        assert_eq!(
-            match resp.retain_until_date {
-                Some(v) => to_iso8601utc(v) == to_iso8601utc(retain_until_date),
-                _ => false,
-            },
-            true,
-        );
+        assert!(match resp.retention_mode {
+            Some(v) => matches!(v, RetentionMode::GOVERNANCE),
+            _ => false,
+        });
+        assert!(match resp.retain_until_date {
+            Some(v) => to_iso8601utc(v) == to_iso8601utc(retain_until_date),
+            _ => false,
+        },);

         let mut args = SetObjectRetentionArgs::new(&bucket_name, &object_name).unwrap();
         args.bypass_governance_mode = true;
@@ -1118,8 +1081,8 @@ impl<'a> ClientTest<'_> {
             .get_object_retention(&GetObjectRetentionArgs::new(&bucket_name, &object_name).unwrap())
             .await
             .unwrap();
-        assert_eq!(resp.retention_mode.is_none(), true);
-        assert_eq!(resp.retain_until_date.is_none(), true);
+        assert!(resp.retention_mode.is_none());
+        assert!(resp.retain_until_date.is_none());

         let mut args = RemoveObjectArgs::new(&bucket_name, &object_name).unwrap();
         let version_id = obj_resp.version_id.unwrap().clone();
@@ -1142,7 +1105,7 @@ impl<'a> ClientTest<'_> {
             )
             .await
             .unwrap();
-        assert_eq!(resp.url.contains("X-Amz-Signature="), true);
+        assert!(resp.url.contains("X-Amz-Signature="));
     }

     async fn get_presigned_post_form_data(&self) {
@@ -1152,7 +1115,7 @@ impl<'a> ClientTest<'_> {
         let mut policy = PostPolicy::new(&self.test_bucket, &expiration).unwrap();
         policy.add_equals_condition("key", &object_name).unwrap();
         policy
-            .add_content_length_range_condition(1 * 1024 * 1024, 4 * 1024 * 1024)
+            .add_content_length_range_condition(1024 * 1024, 4 * 1024 * 1024)
             .unwrap();

         let form_data = self
@@ -1160,8 +1123,8 @@ impl<'a> ClientTest<'_> {
             .get_presigned_post_form_data(&policy)
             .await
             .unwrap();
-        assert_eq!(form_data.contains_key("x-amz-signature"), true);
-        assert_eq!(form_data.contains_key("policy"), true);
+        assert!(form_data.contains_key("x-amz-signature"));
+        assert!(form_data.contains_key("policy"));
     }
 }