I have a Django app and I want to upload files from the front end to bypass the Heroku request timeout. I can't get my code to work.
<script type="text/javascript">
// Configure the AWS SDK once: region plus unauthenticated Cognito credentials.
AWS.config.update({
region: 'us-east-1',
credentials: new AWS.CognitoIdentityCredentials({
IdentityPoolId: 'MY-IDENTITY-POOL-ID',
})
});

// Upload the selected file directly from the browser to S3, bypassing the
// Django/Heroku dyno (and its 30-second request timeout).
// Reads the file from the #fileUpload <input type="file"> element.
function s3upload() {
var files = document.getElementById('fileUpload').files;
// Guard against "no file chosen" — files[0] would be undefined otherwise.
if (files && files.length > 0)
{
var file = files[0];
var fileName = file.name;
// BUG FIX: the original juxtaposed string literals with no '+' operator,
// which is a JavaScript syntax error. Concatenate explicitly.
var fileUrl = 'https://' + 'MYBUCKETNAME.s3.' + 'us-east-1' + '.amazonaws.com/' + fileName;
alert(fileUrl);
var s3 = new AWS.S3({
apiVersion: '2006-03-01',
params: {Bucket: 'MYBUCKETNAME'}
});
s3.upload({
Key: fileName,
Body: file,
ACL: 'public-read'
}, function(err, data) {
// BUG FIX: 'reject' was undefined (there is no surrounding Promise), and
// the success alert fired even when err was set. Branch explicitly.
if(err) {
alert('error');
} else {
alert('Successfully Uploaded!');
}
});
}
};
</script>
I understand that there is something wrong with how I'm passing the variables to the AWS API, since this is my first time using this method. Can anyone explain the right way to construct the API parameters above? All the documentation is very confusing.
CodePudding user response:
I got my code to work with the following:
<script type="text/javascript">
// Browser-to-S3 upload via the AWS JS SDK v2 using Cognito identity-pool
// credentials; configured lazily on each call so the SDK can be loaded first.
function s3upload() {
AWS.config.region = 'us-east-1'; // Region
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
IdentityPoolId: 'My-CREDITIONALS',
});
var files = document.getElementById('fileUpload').files;
// Guard against "no file chosen".
if (files && files.length > 0)
{
var file = files[0];
var fileName = file.name;
// BUG FIX: the '+' between the literal and fileName was missing (syntax error).
var fileUrl = 'https://MY-BUCKET-NAME.s3.amazonaws.com/' + fileName;
// BUG FIX: s3 was declared twice; a single client suffices.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var params = {
Bucket: 'MY-BUCKET',
Key: fileName,
Body: file,
};
// 5 MB parts, one at a time: keeps browser memory usage low for multipart uploads.
var options = {partSize: 5 * 1024 * 1024, queueSize: 1};
s3.upload(params, options, function(err, data) {
if(err) {
alert('error');
} else {
// BUG FIX: message was misspelled ('suceessfully').
alert('uploaded successfully');
}
});
console.log(s3);
}
};
</script>
Although it doesn't work with files over 40 MB. I am currently trying to get it to work with large files.
Edit: The reason it wasn't uploading large files is because my CORS policy didn't expose the ETag header needed for multipart uploads. It's now uploading 470 MB files at about 1.5 MB/s. It could handle even more, but I didn't go that far yet. Also, Amazon mangles the URL when the Key contains spaces, commas, and other symbols like the dollar sign. The code below handles this and adds a random integer to the end of the key to avoid name clashes.
<div>
<input type="file" id="fileUpload">
</div>
<div>
<button onclick="s3upload()">Submit</button>
</div>
<script type="text/javascript">
// Final version: sanitizes the object key (S3 mangles URLs containing spaces,
// commas, '$', etc.) and appends a random integer to reduce name clashes.
function s3upload() {
AWS.config.region = 'us-east-1'; // Region
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
IdentityPoolId: 'MY-CREDENTIALS',
});
var files = document.getElementById('fileUpload').files;
// Guard against "no file chosen".
if (files && files.length > 0)
{
var file = files[0];
var fileNameAllChars = file.name;
// Strip every non-word character so the generated object URL is stable.
var fileName = fileNameAllChars.replace(/\W/g, '');
// Random suffix in [0, 100] reduces (but does not eliminate) key collisions.
var random = Math.floor(Math.random() * 101);
// BUG FIX: the '+' operators were missing in the concatenation (syntax error).
var fileUrl = 'https://MY-BUCKET.s3.amazonaws.com/' + fileName + random;
alert(fileUrl);
// BUG FIX: s3 was constructed twice, and the first constructor passed an
// invalid top-level 'Bucket' option; one client, bucket belongs in params.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var params = {
Bucket: 'MY-BUCKET',
Key: fileName,
Body: file,
};
// 15 MB parts, one at a time; multipart upload requires the bucket's CORS
// policy to expose the ETag header.
var options = {partSize: 15 * 1024 * 1024, queueSize: 1};
s3.upload(params, options, function(err, data) {
if(err) {
alert(err);
} else {
alert('success');
}
});
console.log(s3);
}
};
</script>
Also, don't forget to load the JavaScript AWS SDK in the HTML header with:
<!-- BUG FIX: the original tag loaded jQuery, not the AWS SDK. This is the
     AWS SDK for JavaScript v2 browser bundle the code above requires. -->
<script type="text/javascript" src="https://sdk.amazonaws.com/js/aws-sdk-2.1563.0.min.js"></script>
Note: this code doesn't fully work in production. The JavaScript AWS SDK upload can also exceed Heroku's 30-second timeout when the page round-trips through the dyno. I had to come up with a custom solution that builds off the code above; since I'm on my phone, I'll have to upload that later.