Every line of the 's3 upload javascript' code snippets below is scanned for vulnerabilities by our machine-learning engine, which combs millions of open-source libraries to help ensure your JavaScript code is secure.
// Uploads one build artifact to the public 'ember-graph-builds' bucket.
// The content type is inferred from the file extension: .zip archives are
// sent as application/zip, everything else as application/javascript.
// Invokes callback(success, fileName), where success is true iff S3
// reported no error.
function uploadFile(s3, fileName, contents, callback) {
  var contentType = fileName.endsWith('.zip')
    ? 'application/zip'
    : 'application/javascript';

  var request = {
    ACL: 'public-read',
    Body: contents,
    Bucket: 'ember-graph-builds',
    ContentType: contentType,
    Key: fileName
  };

  s3.putObject(request, function(err, data) {
    callback(!err, fileName);
  });
}
// Uploads the minified basscss stylesheet to its versioned path on S3.
// Credentials and the bucket name are read from ../aws.json; progress,
// errors, and completion are reported to the console.
function upload() {
  var options = require('../aws.json')

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    }
  })

  var params = {
    localFile: path.join(__dirname, '../css/basscss.min.css'),
    s3Params: {
      Bucket: options.bucket,
      Key: 'basscss/' + version + '/basscss.min.css',
      ACL: 'public-read',
    }
  }

  var uploader = client.uploadFile(params)

  uploader.on('error', function(err) {
    console.error("unable to upload:", err.stack)
  })
  uploader.on('progress', function() {
    console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal)
  })
  uploader.on('end', function() {
    console.log("done uploading")
  })
}
// Streams a local file to S3 via the `s3` client package and returns the
// uploader event emitter so the caller can attach progress/error/end
// handlers. `options` supplies: key, secret, bucket, path, and an
// optional name; when name is absent the local path doubles as the Key.
function uploadToS3(options) {
  var s3 = require('s3');

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    },
  });

  return client.uploadFile({
    localFile: options.path,
    s3Params: {
      Bucket: options.bucket,
      Key: options.name || options.path,
    },
  });
}
// Uploads a resized image to the basscss assets path on S3. On success,
// the filename is appended to manifest.json and the local copy is moved
// from ./resized to ./archive.
function upload(filename) {
  var options = require('./config.json').s3

  var client = s3.createClient({
    s3Options: {
      accessKeyId: options.key,
      secretAccessKey: options.secret,
    }
  })

  var uploader = client.uploadFile({
    localFile: path.join(__dirname, './resized', filename),
    s3Params: {
      Bucket: options.bucket,
      Key: 'basscss/assets/' + filename,
      ACL: 'public-read',
    }
  })

  uploader.on('error', function(err) {
    console.error("unable to upload:", err.stack)
  })
  uploader.on('progress', function() {
    console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal)
  })
  uploader.on('end', function() {
    console.log("done uploading")
    // Record the completed upload, then archive the local copy.
    manifest.uploads = manifest.uploads || []
    manifest.uploads.push(filename)
    fs.writeFileSync('manifest.json', JSON.stringify(manifest))
    fs.move('resized/' + filename, 'archive/' + filename)
  })
}
/**
 * Posts the selected file to S3 using a pre-signed POST policy.
 *
 * @param {Object} s3Data - Pre-signed POST data; `s3Data.fields` holds the
 *   form fields Amazon requires (policy, signature, key, ...).
 * @param {string} url - The pre-signed POST endpoint URL.
 */
function uploadFile(s3Data, url) {
  var formData = new FormData();

  // Copy every pre-signed field into the form, as required by Amazon S3.
  // `var key` fixes the original implicit-global leak (`for (key in ...)`).
  for (var key in s3Data.fields) {
    formData.append(key, s3Data.fields[key]);
  }

  // The file entry must come after the policy fields.
  formData.append('file', $('input[type="file"]')[0].files[0]);

  $.ajax({
    url: url,
    type: 'POST',
    data: formData,
    cache: false,
    contentType: false,   // let the browser set the multipart boundary
    processData: false,   // send the FormData as-is
    success: function(data, textStatus, request) {
      if (request.status === 200 || request.status === 204) {
        $("#urlTextId").html("Status: Uploaded Successfully.<br /> Object Key: " + s3Data.fields.key);
        $("#SignedUrlId").html("");
        console.log("Status:" + request.status);
        $("#div-obj-holderId").show();
      } else {
        $("#urlTextId").html("Br!! Unable to upload Object. Try again!! <br />Status:" + request.status);
      }
    },
    // The original registered no failure handler, so network/4xx/5xx
    // errors died silently; surface them the same way as a bad status.
    error: function(request) {
      $("#urlTextId").html("Br!! Unable to upload Object. Try again!! <br />Status:" + request.status);
    }
  });
}
39 uploadString(key, data) { 40 let s3 = new AWS.S3({ 41 region: this.region, 42 params: { 43 Bucket: this.bucket, 44 Key: key 45 } 46 }); 47 48 return new Promise((resolve, reject) => { 49 s3.upload({Body: data}, (err, data) => { 50 if (err) { 51 console.log("Error uploaded to " + this.bucket + "/" + key + "\n" + err); 52 reject(err); 53 } 54 else { 55 console.log("Successfully uploaded to " + this.bucket + "/" + key); 56 resolve(data); 57 } 58 }); 59 }); 60 }
/**
 * Syncs a demo directory (the parent directory of the given tsconfig.json
 * path) to the "ionic-demos" S3 bucket under the demo's name.
 *
 * @param {string} path - Path to the demo's tsconfig.json.
 * @returns {Promise} Resolves when the upload finishes, or immediately
 *   (no-op) when AWS credentials are not configured; rejects with the
 *   upload error on failure.
 */
function uploadToS3(path) {
  // fail silently if envars not present (e.g. forks / local builds)
  if (!process.env.AWS_KEY || !process.env.AWS_SECRET) {
    return Promise.resolve(); // was `new Promise((resolve) => {resolve();})`
  }

  let client = s3.createClient({
    s3Options: {
      accessKeyId: process.env.AWS_KEY,
      secretAccessKey: process.env.AWS_SECRET
    },
  });

  // Demo name is the directory containing tsconfig.json:
  // .../<demo>/tsconfig.json -> <demo>. Split once instead of twice.
  let parts = path.split('/');
  let demo = parts[parts.length - 2];

  let params = {
    localDir: path.replace('tsconfig.json',''),
    deleteRemoved: true, // remove remote files no longer present locally
    s3Params: {
      Bucket: "ionic-demos",
      Prefix: demo,
    },
  };

  var uploader = client.uploadDir(params);

  return new Promise((resolve, reject) => {
    uploader.on('error', function(err) {
      console.error("s3 Upload Error:", err.stack);
      reject(err); // was reject() with no argument — preserve the cause
    });
    uploader.on('end', function() {
      console.log(demo, " demo uploaded to s3");
      resolve();
    });
  });
}
/**
 * Uploads a local file to the public S3 bucket BUCKET_NAME.
 *
 * @param {string} remoteFilename - Destination object key in the bucket.
 * @param {string} fileName - Local file path to read and upload.
 */
function uploadFile(remoteFilename, fileName) {
  var fileBuffer = fs.readFileSync(fileName);
  var metaData = getContentTypeByFile(fileName);

  s3.putObject({
    ACL: 'public-read',
    Bucket: BUCKET_NAME,
    Key: remoteFilename,
    Body: fileBuffer,
    ContentType: metaData
  }, function(error, response) {
    // The original ignored `error` and logged success unconditionally
    // (plus a debug `console.log(arguments)`); report failures instead.
    if (error) {
      console.error('failed to upload file[' + fileName + '] to [' + remoteFilename + ']:', error);
      return;
    }
    console.log('uploaded file[' + fileName + '] to [' + remoteFilename + '] as [' + metaData + ']');
  });
}
17 static upload(buffer, path) { 18 19 // Configura as chaves de acesso 20 AWS.config.update({ accessKeyId: accessKey, secretAccessKey: secretKey }); 21 22 // Cria o objeto do S3 23 let s3 = new AWS.S3(); 24 25 // Adiciona o arquivo no bucket livro-aws 26 s3.putObject({ 27 Bucket: bucket, 28 Key: path, 29 Body: buffer, 30 ACL: 'public-read', 31 ContentType: 'image/jpeg' 32 },function (resp) { 33 console.log('Arquivo enviado com sucesso. ' + resp); 34 }); 35 }