about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author Stefan Boberg <[email protected]> 2024-04-24 12:58:56 +0200
committer Dan Engelbrecht <[email protected]> 2024-10-10 09:56:40 +0200
commit e1ab3c01ae5a34f7dbd95c03bb8ed4c51c07aab0 (patch)
tree 7b91b3d3c741bb91e2e05535d41e1bde55a1e70c /src
parent silence C4996 (diff)
download zen-e1ab3c01ae5a34f7dbd95c03bb8ed4c51c07aab0.tar.xz
zen-e1ab3c01ae5a34f7dbd95c03bb8ed4c51c07aab0.zip
removed test code
Diffstat (limited to 'src')
-rw-r--r-- src/zencloud/zencloudmain.cpp | 25
1 file changed, 5 insertions(+), 20 deletions(-)
diff --git a/src/zencloud/zencloudmain.cpp b/src/zencloud/zencloudmain.cpp
index e87850047..4251758fa 100644
--- a/src/zencloud/zencloudmain.cpp
+++ b/src/zencloud/zencloudmain.cpp
@@ -141,16 +141,6 @@ PutObject(const Aws::S3Crt::S3CrtClient& s3CrtClient,
std::cout << "Putting object: \"" << objectKey << "\" to bucket: \"" << bucketName << "\" ..." << std::endl;
Aws::S3Crt::Model::PutObjectRequest request;
- request.SetBucket(bucketName);
- request.SetKey(objectKey);
-
- // zen::SHA1Stream Sha1;
- // zen::ScanFile(fileName, 16 * 1024 * 1024, [&](const void* Data, size_t Size) { Sha1.Append(Data, Size); });
- // zen::SHA1 Sha1Hash = Sha1.GetHash();
- // zen::StringBuilder<64> Sha1String;
- // Sha1Hash.ToHexString(Sha1String);
-
- // request.SetChecksumSHA1(Sha1String.c_str());
std::shared_ptr<Aws::IOStream> bodyStream =
Aws::MakeShared<Aws::FStream>(ALLOCATION_TAG, fileName.c_str(), std::ios_base::in | std::ios_base::binary);
@@ -160,10 +150,8 @@ PutObject(const Aws::S3Crt::S3CrtClient& s3CrtClient,
return false;
}
- // auto Sha1Hash = Aws::Utils::HashingUtils::HexEncode(Aws::Utils::HashingUtils::CalculateSHA1(*bodyStream));
- // request.SetChecksumSHA1(Sha1Hash);
- // bodyStream->seekg(0);
-
+ request.SetBucket(bucketName);
+ request.SetKey(objectKey);
request.SetBody(bodyStream);
// A PUT operation turns into a multipart upload using the s3-crt client.
@@ -193,7 +181,6 @@ GetObject(const Aws::S3Crt::S3CrtClient& s3CrtClient, const Aws::String& bucketN
Aws::S3Crt::Model::GetObjectRequest request;
request.SetBucket(bucketName);
request.SetKey(objectKey);
-
Aws::S3Crt::Model::GetObjectOutcome outcome = s3CrtClient.GetObject(request);
if (outcome.IsSuccess())
@@ -265,7 +252,8 @@ DoWork(ZenCloudOptions& GlobalOptions)
// AWS SDK setup
Aws::SDKOptions options;
- options.loggingOptions.logLevel = Aws::Utils::Logging::LogLevel::Debug;
+ options.loggingOptions.logLevel = Aws::Utils::Logging::LogLevel::Info;
+
Aws::InitAPI(options);
{
// TODO: Set to your account AWS Region.
@@ -286,6 +274,7 @@ DoWork(ZenCloudOptions& GlobalOptions)
config.region = region;
config.throughputTargetGbps = throughput_target_gbps;
config.partSize = part_size;
+ // config.httpLibOverride = Aws::Http::TransferLibType::CURL_CLIENT;
std::vector<std::string_view> Tokens;
@@ -324,10 +313,6 @@ DoWork(ZenCloudOptions& GlobalOptions)
ListBuckets(s3_crt_client);
CreateBucket(s3_crt_client, bucket_name, locConstraint);
- // TODO: Add a large file to your executable folder, and update file_name to the name of that file.
- // File "ny.json" (1940 census data; https://www.archives.gov/developer/1940-census#accessportiondataset)
- // is an example data file large enough to demonstrate multipart upload.
- // Download "ny.json" from https://nara-1940-census.s3.us-east-2.amazonaws.com/metadata/json/ny.json
Aws::String file_name = GlobalOptions.TestDataDirectory;
PutObject(s3_crt_client, bucket_name, object_key, file_name);