Every line of the 's3 upload' code snippets below is scanned for vulnerabilities by our machine learning engine, which is trained on millions of open-source libraries, helping you keep your JavaScript code secure.
50 function uploadFile(s3, fileName, contents, callback) { 51 s3.putObject({ 52 ACL: 'public-read', 53 Body: contents, 54 Bucket: 'ember-graph-builds', 55 ContentType: (fileName.endsWith('.zip') ? 'application/zip' : 'application/javascript'), 56 Key: fileName 57 }, function(err, data) { 58 callback(!err, fileName); 59 }); 60 }
// Uploads the minified basscss stylesheet to S3 under a versioned key,
// logging progress and completion to the console. Credentials and the
// bucket name are read from ../aws.json; `s3`, `path`, and `version`
// come from the surrounding module.
function upload() {
  var options = require('../aws.json')

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    }
  })

  var uploader = client.uploadFile({
    localFile: path.join(__dirname, '../css/basscss.min.css'),
    s3Params: {
      Bucket: options.bucket,
      Key: 'basscss/' + version + '/basscss.min.css',
      ACL: 'public-read',
    }
  })

  uploader.on('error', function(err) {
    console.error("unable to upload:", err.stack)
  })
  uploader.on('progress', function() {
    console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal)
  })
  uploader.on('end', function() {
    console.log("done uploading")
  })
}
/**
 * POSTs the file selected in the page's file input to a pre-signed S3 URL.
 *
 * @param {Object} s3Data - pre-signed POST data; s3Data.fields holds the
 *   policy fields S3 requires in the multipart form.
 * @param {string} url - the pre-signed S3 endpoint to POST to.
 *
 * Fixes vs. original: the for-in loop variable was an implicit global
 * (a ReferenceError in strict mode), and $.ajax had no `error` handler,
 * so transport failures were silently swallowed.
 */
function uploadFile(s3Data, url) {
  var formData = new FormData();

  // Append every pre-signed policy field to the form, as required by S3.
  for (var key in s3Data.fields) {
    formData.append(key, s3Data.fields[key]);
  }

  // Attach the file chosen in the page's file input.
  formData.append('file', $('input[type="file"]')[0].files[0]);

  $.ajax({
    url: url,
    type: 'POST',
    data: formData,
    cache: false,
    contentType: false, // let the browser set the multipart boundary
    processData: false, // send FormData as-is
    success: function(data, textStatus, request) {
      if (request.status === 200 || request.status === 204) {
        // NOTE(review): fields.key is interpolated into HTML — ensure it is
        // trusted or escape it to avoid markup injection.
        $("#urlTextId").html("Status: Uploaded Successfully.<br /> Object Key: " + s3Data.fields.key);
        $("#SignedUrlId").html("");
        console.log("Status:" + request.status);
        $("#div-obj-holderId").show();
      } else {
        $("#urlTextId").html("Br!! Unable to upload Object. Try again!! <br />Status:" + request.status);
      }
    },
    // Surface network / non-2xx failures instead of failing silently.
    error: function(request) {
      $("#urlTextId").html("Br!! Unable to upload Object. Try again!! <br />Status:" + request.status);
    }
  });
}
// Creates a client for the 's3' npm package from the supplied credentials
// and starts uploading a local file. Returns the uploader (an event
// emitter) so the caller can attach 'error' / 'progress' / 'end' handlers.
function uploadToS3(options) {
  var s3 = require('s3');

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    },
  });

  return client.uploadFile({
    localFile: options.path,
    s3Params: {
      Bucket: options.bucket,
      // Fall back to the local path when no explicit object name is given.
      Key: options.name || options.path,
    },
  });
}
103 async uploadToS3(uploadOptions: S3UploadOptions): Promise { 104 const {bucket, filePath, s3Path} = uploadOptions; 105 106 const lstat = await fs.lstat(filePath); 107 108 if (!lstat.isFile()) { 109 throw new Error(`File "${filePath}" not found`); 110 } 111 112 const file = fs.createReadStream(filePath); 113 114 const uploadConfig = { 115 ACL: 'public-read', 116 Body: file, 117 Bucket: bucket, 118 Key: s3Path, 119 }; 120 121 if (this.options.dryRun) { 122 logDry('uploadToS3', {ACL: uploadConfig.ACL, Bucket: uploadConfig.Bucket, Key: uploadConfig.Key}); 123 return; 124 } 125 126 await this.S3Instance.upload(uploadConfig).promise(); 127 }
39 uploadString(key, data) { 40 let s3 = new AWS.S3({ 41 region: this.region, 42 params: { 43 Bucket: this.bucket, 44 Key: key 45 } 46 }); 47 48 return new Promise((resolve, reject) => { 49 s3.upload({Body: data}, (err, data) => { 50 if (err) { 51 console.log("Error uploaded to " + this.bucket + "/" + key + "\n" + err); 52 reject(err); 53 } 54 else { 55 console.log("Successfully uploaded to " + this.bucket + "/" + key); 56 resolve(data); 57 } 58 }); 59 }); 60 }
/**
 * Multipart upload of a Blob: initiates the upload, uploads the parts with
 * bounded parallelism, then completes the upload from the collected ETags.
 *
 * @param {Object} client - storage client exposing initiateMultipartUpload /
 *   completeMultipartUpload (SDK — presumably BOS/S3-style; confirm).
 * @param {string} bucketName - destination bucket.
 * @param {string} key - destination object key; its extension drives the
 *   guessed Content-Type.
 * @param {Blob} blob - content, split into part tasks by getTasks().
 * @param {Object} options - upload headers; Content-Type is filled in here
 *   when absent.
 * @returns {Promise} resolves with the completeMultipartUpload response.
 */
function uploadSuperFile(client, bucketName, key, blob, options) {
  if (!options['Content-Type']) {
    var ext = key.split(/\./g).pop();
    // Firefox always attaches a charset to the Content-Type when POSTing,
    // so unconditionally append one here for consistency.
    var mimeType = sdk.MimeType.guess(ext) + '; charset=UTF-8';
    u.extend(options, {
      'Content-Type': mimeType
    });
  }

  // Captured by the second .then so completion can reference the upload id.
  var uploadId = null;
  return client.initiateMultipartUpload(bucketName, key, options)
    .then(function (response) {
      uploadId = response.body.uploadId;

      var deferred = sdk.Q.defer();
      var tasks = getTasks(blob, uploadId, bucketName, key);
      // Progress-event-shaped state shared with each part uploader.
      var state = {
        lengthComputable: true,
        loaded: 0,
        total: tasks.length
      };
      // Upload parts with at most config.kParallel in flight; the first
      // part error rejects the whole upload.
      async.mapLimit(tasks, config.kParallel, uploadPartFile(state, client), function (err, results) {
        if (err) {
          deferred.reject(err);
        }
        else {
          deferred.resolve(results);
        }
      });
      return deferred.promise;
    })
    .then(function (allResponse) {
      // Build the ordered part list (part numbers are 1-based) from each
      // part response's ETag header, then finalize the multipart upload.
      var partList = [];
      allResponse.forEach(function (response, index) {
        partList.push({
          partNumber: index + 1,
          eTag: response.http_headers.etag
        });
      });

      return client.completeMultipartUpload(bucketName, key, uploadId, partList);
    });
}
/**
 * Uploads the packaged lambda zip to S3, or — when `skip` is set — resolves
 * the previously uploaded object's version without re-uploading.
 *
 * @param {Object} args - carries bucketName, uuid, tempZipFile,
 *   tempZipFileSize, skip, and zipVersionsList; passed through to the result.
 * @returns {Promise<Object>} args extended with zipS3Location
 *   ({Bucket, Key, VersionId}).
 * @throws When `skip` is set but no prior zip version exists for this uuid.
 *
 * Fixes vs. original: the skip-failure message read "which was has never
 * been deployed", and the putObject continuation wrapped its result in a
 * redundant Promise.resolve.
 */
function uploadS3 (args) {
  const {
    bucketName,
    uuid,
    tempZipFile,
    tempZipFileSize,
    skip,
    zipVersionsList
  } = args;
  if (!skip) { debug(` zip size: ${tempZipFileSize}`); }
  const s3Key = `${S3_ZIP_PREFIX}/${uuid}.zip`;
  const zipS3Location = {
    Bucket: bucketName,
    Key: s3Key
    // VersionId is added below, once it is known
  };
  if (skip) {
    // Reuse the latest already-deployed object's version instead of uploading.
    try {
      const versionId = findZipVersionId({ uuid, zipVersionsList });
      return Promise.resolve({
        ...args,
        zipS3Location: {
          ...zipS3Location,
          VersionId: versionId
        }
      });
    } catch (err) {
      debug('Cannot find lambda zipfile, error:', err.message);
      throw new Error('You cannot skip a lambda function which has never been deployed');
    }
  }
  const s3Params = {
    ...zipS3Location,
    Body: fs.createReadStream(tempZipFile)
  };
  return putObject(s3Params)
    .then(data => ({
      ...args,
      zipS3Location: {
        ...zipS3Location,
        VersionId: data.VersionId
      }
    }));
}
// Uploads a resized asset to S3 with a public-read ACL; on completion,
// records the filename in manifest.json and moves the local copy from
// ./resized to ./archive. Config comes from ./config.json; `s3`, `path`,
// `fs`, and `manifest` come from the surrounding module.
function upload(filename) {
  var options = require('./config.json').s3

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    }
  })

  var uploader = client.uploadFile({
    localFile: path.join(__dirname, './resized', filename),
    s3Params: {
      Bucket: options.bucket,
      Key: 'basscss/assets/' + filename,
      ACL: 'public-read',
    }
  })

  uploader.on('error', function(err) {
    console.error("unable to upload:", err.stack)
  })
  uploader.on('progress', function() {
    console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal)
  })
  uploader.on('end', function() {
    console.log("done uploading")
    // Persist the upload record, then archive the source file.
    // NOTE(review): fs.move is not core 'fs' — presumably fs-extra; confirm.
    manifest.uploads = manifest.uploads || []
    manifest.uploads.push(filename)
    fs.writeFileSync('manifest.json', JSON.stringify(manifest))
    fs.move('resized/' + filename, 'archive/' + filename)
  })
}
84 uploadFile (fileKey, fileStream, uploadOptions) { 85 const uploadFileKey = fileKey.replace(this.options.fullAssetPath, '').replace(/\\/g, '/') 86 const fullFileKey = `${this.options.deployPath}${uploadFileKey}` 87 88 const uploadParams = { 89 Bucket: this.name, 90 Key: fullFileKey, 91 Body: fileStream, 92 ContentType: this.contentTypeFor(fileKey) 93 } 94 95 if (uploadOptions.acl !== 'none') { 96 uploadParams.ACL = this.options.acl 97 } 98 99 if (uploadOptions.pwa) { 100 uploadParams.CacheControl = 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0' 101 } else { 102 uploadParams.CacheControl = this.options.cacheControl 103 } 104 105 if (uploadOptions.gzip) { 106 uploadParams.ContentEncoding = 'gzip' 107 } 108 109 return this.connection.upload( 110 uploadParams, 111 { partSize: (5 * 1024 * 1024), queueSize: 4 } 112 ).promise() 113 }