You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Hi guys,
I am trying to upload a file to S3, but it shows me an error: "Unable to store object contents to disk".
And the file already exists on S3.
The file's size is 50MB.
What is wrong?
Thanks.
'use strict';varAWS=require('aws-sdk');varAWSCred=require('aws-cred');varenv=process.env.NODE_ENV||"development";constEnv=use('Env');consttrim=use('trim');constdateFormat=use('dateformat');varbaseBucket=Env.get('AWS_BASE_BUCKET');varawscred=newAWSCred({"env": env});varwatermarkFolder=Env.get('AWS_WATERMARK_FOLDER');vars3Client=newAWS.S3({accessKeyId: awscred.access_key,secretAccessKey: awscred.secret_access_key,region: Env.get('AWS_REGION'),signatureVersion: Env.get('AWS_SIGNATURE_VERSION')});vars3RegionalClients={"artistarea.gallereplay.com": newAWS.S3({accessKeyId: awscred.access_key,secretAccessKey: awscred.secret_access_key,region: "eu-central-1",signatureVersion: Env.get('AWS_SIGNATURE_VERSION')})};s3RegionalClients[Env.get('AWS_BASE_BUCKET')]=s3Client;exports.uploadFormFileStreamToAWS=function*(folderName,fileName,fileStream,options){try{returnyielduploadFormFileStreamToAWSYield(folderName,fileName,fileStream,options);}catch(e){console.log('Error while uploading file to AWS:',e);throwe;}};exports.createBucketIfNotExists=function*(){try{console.log("Accessing Bucket with:");console.log(awscred);returnyieldcreateBucketIfNotExistsYield();}catch(e){console.log('Error while creating or checking bucket on AWS:',e);throwe;}};exports.getWatermarkUrl=function*(author,pattern,format){returnyieldthis.getUrl(watermarkFolder+"/"+format+"/"+author+(pattern ? "_pattern" : "")+".png");};exports.getUrl=function(key){returnfunction(fn){letparams={Bucket: baseBucket,Key: key};s3Client.getSignedUrl('getObject',params,function(err,url){err ? fn(err,null) : fn(null,url);});};};exports.convertToSignedUrl=function(url,options,output){returnfunction(fn){varsplit=url.split("/");varbucket=split[3];varpre=split[0]+'/'+split[1]+'/'+split[2]+'/'+split[3]+'/';varpath=url.replace(pre,"");letparams={Bucket: bucket,Key: path};if(output){output.bucket=bucket;output.path=path;}varclient=s3RegionalClients[bucket];client.getSignedUrl('getObject',params,function(err,url){err ? 
fn(err,null) : fn(null,url);});};};exports.getSignedUrl=function(userId,url){returnfunction(fn){letkey=url.substring(url.lastIndexOf('/')+1,url.length);//key = 'user_' + userId + '/' + key.replace(/%20/g, '+');key=buildKeyPath('user_'+userId,key);letparams={Bucket: baseBucket,Key: key};s3Client.getSignedUrl('getObject',params,function(err,url){err ? fn(err,null) : fn(null,url);});};};exports.addPreviewUrlToEntityCollection=function*(entityCollection){if(entityCollection!=null&&entityCollection.length&&entityCollection[0]&&entityCollection[0].user_id){for(vari=0;i<entityCollection.length;i++){entityCollection[i].previewUrl=yieldthis.getSignedUrl(entityCollection[i].user_id,entityCollection[i].upload_path);}returnentityCollection;}else{return[];}};exports.parseAwsInputKey=function(url){varresult=url.replace(/http[s]*://.+?//g,"");if(result.startsWith(baseBucket)){result=result.replace(baseBucket+'/',"");}returnresult;};exports.buildUniqueAWSFileName=function(originalTitle,extension){// taking the extension out to add it at the end, after inserting a unique timestamp to the filenamereturntrim(originalTitle.substring(0,originalTitle.lastIndexOf('.'))+'_'+dateFormat(newDate(),"ddmmyyyyhMMss")+'.'+extension).replace(//g,'');};functionbuildKeyPath(folderName,key){returnenv+'/'+folderName+'/'+key;}functioncreateBucketIfNotExistsYield(){returnfunction(fn){s3Client.headBucket({Bucket: baseBucket},function(err,data){if(err){returnfn(err,null);}//bucket existsif(data){returnfn(null,null);}else{s3Client.createBucket({Bucket: baseBucket},function(err,data){err ? fn(err,null) : fn(null,data);});}});};}functionuploadFormFileStreamToAWSYield(folderName,fileName,fileStream,options){options=options||{};varbaseOptions={Bucket: baseBucket,Key: options.isRootPath ? 
(folderName+'/'+fileName) : buildKeyPath(folderName,fileName),Body: fileStream};if(options.isRootPath!=null){deleteoptions.isRootPath;}varallOptions=Object.assign(baseOptions,options);returnfunction(fn){s3Client.upload(allOptions).send(function(err,data){err ? fn(err,null) : fn(null,data);});}}
The text was updated successfully, but these errors were encountered:
Hi @NuonDara
Can you provide the stack trace? The error message doesn't look like it's coming from our SDK for S3. I'm looking at aws/aws-sdk-java#1288. It looks like the connection is closed by S3 — where does the file stream come from? S3 will close inactive connections after the timeout.
This thread has been automatically locked since there has not been any recent activity after it was closed. Please open a new issue for related bugs and link to relevant comments in this thread.
lockbot
locked as resolved and limited conversation to collaborators
Sep 28, 2019
Sign up for free to subscribe to this conversation on GitHub.
Already have an account?
Sign in.
Labels
guidance — Question that needs advice or information.
Uh oh!
There was an error while loading. Please reload this page.
Hi guys,
I am trying to upload a file to S3, but it shows me an error: "Unable to store object contents to disk".
And the file already exists on S3.
The file's size is 50MB.
What is wrong?
Thanks.
The text was updated successfully, but these errors were encountered: