This page is a machine-translated version. If this translation conflicts with the English original, the English version prevails.
Upload an archive to an Amazon S3 Glacier vault using an Amazon SDK
The following code examples show how to upload an archive to an Amazon S3 Glacier vault.
- .NET
-
- Amazon SDK for .NET
-
Note
There's more on GitHub. Find the complete example and learn how to set up and run in the Amazon code examples repository.

using System;
using System.Threading.Tasks;
using Amazon;
using Amazon.Glacier;
using Amazon.Glacier.Transfer;

public class UploadArchiveHighLevel
{
    private static readonly string VaultName = "example-vault";
    private static readonly string ArchiveToUpload = "*** Provide file name (with full path) to upload ***";

    public static async Task Main()
    {
        try
        {
            var manager = new ArchiveTransferManager(RegionEndpoint.USWest2);

            // Upload an archive.
            var response = await manager.UploadAsync(VaultName, "upload archive test", ArchiveToUpload);

            Console.WriteLine("Copy and save the ID for use in other examples.");
            Console.WriteLine($"Archive ID: {response.ArchiveId}");
            Console.WriteLine("To continue, press Enter");
            Console.ReadKey();
        }
        catch (AmazonGlacierException ex)
        {
            Console.WriteLine(ex.Message);
        }
    }
}
-
For API details, see UploadArchive in the Amazon SDK for .NET API Reference.
-
- Java
-
- SDK for Java 2.x
-
Note
There's more on GitHub. Find the complete example and learn how to set up and run in the Amazon code examples repository.

public static String uploadContent(GlacierClient glacier, Path path, String vaultName, File myFile) {
    // Get an SHA-256 tree hash value.
    String checkVal = computeSHA256(myFile);
    try {
        UploadArchiveRequest uploadRequest = UploadArchiveRequest.builder()
            .vaultName(vaultName)
            .checksum(checkVal)
            .build();

        UploadArchiveResponse res = glacier.uploadArchive(uploadRequest, path);
        return res.archiveId();

    } catch (GlacierException e) {
        System.err.println(e.awsErrorDetails().errorMessage());
        System.exit(1);
    }
    return "";
}

private static String computeSHA256(File inputFile) {
    try {
        byte[] treeHash = computeSHA256TreeHash(inputFile);
        System.out.printf("SHA-256 tree hash = %s\n", toHex(treeHash));
        return toHex(treeHash);

    } catch (IOException ioe) {
        System.err.format("Exception when reading from file %s: %s", inputFile, ioe.getMessage());
        System.exit(-1);

    } catch (NoSuchAlgorithmException nsae) {
        System.err.format("Cannot locate MessageDigest algorithm for SHA-256: %s", nsae.getMessage());
        System.exit(-1);
    }
    return "";
}

public static byte[] computeSHA256TreeHash(File inputFile) throws IOException, NoSuchAlgorithmException {
    byte[][] chunkSHA256Hashes = getChunkSHA256Hashes(inputFile);
    return computeSHA256TreeHash(chunkSHA256Hashes);
}

/**
 * Computes an SHA256 checksum for each 1 MB chunk of the input file. This
 * includes the checksum for the last chunk, even if it's smaller than 1 MB.
 */
public static byte[][] getChunkSHA256Hashes(File file) throws IOException, NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    long numChunks = file.length() / ONE_MB;
    if (file.length() % ONE_MB > 0) {
        numChunks++;
    }

    if (numChunks == 0) {
        return new byte[][] { md.digest() };
    }

    byte[][] chunkSHA256Hashes = new byte[(int) numChunks][];
    FileInputStream fileStream = null;

    try {
        fileStream = new FileInputStream(file);
        byte[] buff = new byte[ONE_MB];

        int bytesRead;
        int idx = 0;

        while ((bytesRead = fileStream.read(buff, 0, ONE_MB)) > 0) {
            md.reset();
            md.update(buff, 0, bytesRead);
            chunkSHA256Hashes[idx++] = md.digest();
        }
        return chunkSHA256Hashes;

    } finally {
        if (fileStream != null) {
            try {
                fileStream.close();
            } catch (IOException ioe) {
                System.err.printf("Exception while closing %s.\n %s", file.getName(), ioe.getMessage());
            }
        }
    }
}

/**
 * Computes the SHA-256 tree hash for the passed array of 1 MB chunk
 * checksums.
 */
public static byte[] computeSHA256TreeHash(byte[][] chunkSHA256Hashes) throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    byte[][] prevLvlHashes = chunkSHA256Hashes;
    while (prevLvlHashes.length > 1) {
        int len = prevLvlHashes.length / 2;
        if (prevLvlHashes.length % 2 != 0) {
            len++;
        }

        byte[][] currLvlHashes = new byte[len][];
        int j = 0;
        for (int i = 0; i < prevLvlHashes.length; i = i + 2, j++) {
            // If there are at least two elements remaining.
            if (prevLvlHashes.length - i > 1) {
                // Calculate a digest of the concatenated nodes.
                md.reset();
                md.update(prevLvlHashes[i]);
                md.update(prevLvlHashes[i + 1]);
                currLvlHashes[j] = md.digest();
            } else {
                // Take care of the remaining odd chunk.
                currLvlHashes[j] = prevLvlHashes[i];
            }
        }
        prevLvlHashes = currLvlHashes;
    }
    return prevLvlHashes[0];
}

/**
 * Returns the hexadecimal representation of the input byte array.
 */
public static String toHex(byte[] data) {
    StringBuilder sb = new StringBuilder(data.length * 2);
    for (byte datum : data) {
        String hex = Integer.toHexString(datum & 0xFF);
        if (hex.length() == 1) {
            // Append leading zero.
            sb.append("0");
        }
        sb.append(hex);
    }
    return sb.toString().toLowerCase();
}
-
For API details, see UploadArchive in the Amazon SDK for Java 2.x API Reference.
-
- JavaScript
-
- SDK for JavaScript (v3)
-
Note
There's more on GitHub. Find the complete example and learn how to set up and run in the Amazon code examples repository.

Create the client.

const { GlacierClient } = require("@aws-sdk/client-glacier");
// Set the AWS Region.
const REGION = "REGION";
// Create the Amazon S3 Glacier service client object.
const glacierClient = new GlacierClient({ region: REGION });
export { glacierClient };
Upload the archive.
// Load the SDK for JavaScript
import { UploadArchiveCommand } from "@aws-sdk/client-glacier";
import { glacierClient } from "./libs/glacierClient.js";

// Set the parameters
const vaultname = "VAULT_NAME"; // VAULT_NAME

// Create a new service object and buffer
const buffer = Buffer.alloc(2.5 * 1024 * 1024); // 2.5MB buffer
const params = { vaultName: vaultname, body: buffer };

const run = async () => {
  try {
    const data = await glacierClient.send(new UploadArchiveCommand(params));
    console.log("Archive ID", data.archiveId);
    return data; // For unit tests.
  } catch (err) {
    console.log("Error uploading archive!", err);
  }
};
run();
-
For more information, see the Amazon SDK for JavaScript Developer Guide.
-
For API details, see UploadArchive in the Amazon SDK for JavaScript API Reference.
-
- SDK for JavaScript (v2)
-
Note
There's more on GitHub. Find the complete example and learn how to set up and run in the Amazon code examples repository.

// Load the SDK for JavaScript
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'REGION'});

// Create a new service object and buffer
var glacier = new AWS.Glacier({apiVersion: '2012-06-01'});
var buffer = Buffer.alloc(2.5 * 1024 * 1024); // 2.5MB buffer

var params = {vaultName: 'YOUR_VAULT_NAME', body: buffer};

// Call Glacier to upload the archive.
glacier.uploadArchive(params, function(err, data) {
  if (err) {
    console.log("Error uploading archive!", err);
  } else {
    console.log("Archive ID", data.archiveId);
  }
});
-
For more information, see the Amazon SDK for JavaScript Developer Guide.
-
For API details, see UploadArchive in the Amazon SDK for JavaScript API Reference.
-
- Python
-
- SDK for Python (Boto3)
-
Note
There's more on GitHub. Find the complete example and learn how to set up and run in the Amazon code examples repository.

class GlacierWrapper:
    """Encapsulates Amazon S3 Glacier API operations."""

    def __init__(self, glacier_resource):
        """
        :param glacier_resource: A Boto3 Amazon S3 Glacier resource.
        """
        self.glacier_resource = glacier_resource

    @staticmethod
    def upload_archive(vault, archive_description, archive_file):
        """
        Uploads an archive to a vault.

        :param vault: The vault where the archive is put.
        :param archive_description: A description of the archive.
        :param archive_file: The archive file to put in the vault.
        :return: The uploaded archive.
        """
        try:
            archive = vault.upload_archive(
                archiveDescription=archive_description, body=archive_file)
            logger.info(
                "Uploaded %s with ID %s to vault %s.", archive_description,
                archive.id, vault.name)
        except ClientError:
            logger.exception(
                "Couldn't upload %s to %s.", archive_description, vault.name)
            raise
        else:
            return archive
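The wrapper above expects a Boto3 S3 Glacier Vault resource and an open file object. The following is a minimal usage sketch, not part of the original example; the vault name "example-vault", the file name "archive.zip", and the archive description are placeholder assumptions, and the imports also supply the logger and ClientError names that the wrapper references.

# Hypothetical usage of GlacierWrapper.upload_archive; names are placeholders.
import logging

import boto3
from botocore.exceptions import ClientError

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

glacier_resource = boto3.resource("glacier")
# "-" targets the vault in the account that owns the current credentials.
vault = glacier_resource.Vault("-", "example-vault")

with open("archive.zip", "rb") as archive_file:
    archive = GlacierWrapper.upload_archive(vault, "my archive", archive_file)
    print(f"Archive ID: {archive.id}")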
-
For API details, see UploadArchive in the Amazon SDK for Python (Boto3) API Reference.
-
For a complete list of Amazon SDK developer guides and code examples, see Using S3 Glacier with an Amazon SDK. That topic also includes information about getting started and details about previous SDK versions.