I have a pod deployed in a Kubernetes environment, and I have created a service account with full access to my S3 bucket. I want to upload my logs to the S3 bucket.
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');
// `logger` is the application's logger instance

module.exports.uploadFile = () => {
  const s3 = new AWS.S3();
  const fileContent = fs.readFileSync(path.resolve(__dirname, '../logger/MyLogFile.log'));
  const params = {
    Bucket: 'MYBUCKETNAME',
    Key: 'MyLogFile.log',
    Body: fileContent
  };
  s3.putObject(params, function (err, data) {
    if (err) {
      console.log(err);
      logger.error('file upload error');
      throw err;
    }
    // putObject's response carries an ETag, not a Location (Location comes from s3.upload)
    logger.info(`File uploaded successfully. ETag: ${data.ETag}`);
  });
};
This is the error I am getting...
Error [CredentialsError]: Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1
    at Timeout.connectTimeout [as _onTimeout] (/usr/src/app/node_modules/aws-sdk/lib/http/node.js:69:15)
    at listOnTimeout (internal/timers.js:557:17)
    at processTimers (internal/timers.js:500:7) {
  code: 'CredentialsError',
  time: 2021-12-09T10:43:29.712Z,
  retryable: true,
  originalError: {
    message: 'Could not load credentials from any providers',
    code: 'CredentialsError',
    time: 2021-12-09T10:43:29.705Z,
    retryable: true,
    originalError: {
      message: 'EC2 Metadata roleName request returned error',
      code: 'TimeoutError',
      time: 2021-12-09T10:43:29.705Z,
      retryable: true,
      originalError: {
        message: 'Socket timed out without establishing a connection',
        code: 'TimeoutError',
        time: 2021-12-09T10:43:29.705Z,
        retryable: true
      }
    }
  }
}
CodePudding user response:
Creating a service account on the EKS side and attaching it to my application resolved the problem. The stack trace shows why: with no credentials configured, the SDK fell through its provider chain to the EC2 instance metadata service, and that request timed out inside the pod.
After creating the service account, my file changes were:
1.) aws.yml
serviceAccount:
  enabled: true
  name: MY_SERVICE_ACC_NAME
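For reference, this maps to a Kubernetes ServiceAccount annotated with the IAM role to assume. A minimal sketch, assuming IRSA (IAM Roles for Service Accounts) is enabled on the cluster; the account ID and role name are placeholders:
apiVersion: v1
kind: ServiceAccount
metadata:
  name: MY_SERVICE_ACC_NAME
  annotations:
    # placeholder ARN: the IAM role that grants the S3 access
    eks.amazonaws.com/role-arn: arn:aws:iam::111122223333:role/MY_S3_ROLE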
2.) My AWS.S3 object creation did not need any changes:
const s3 = new AWS.S3();
The SDK now resolves credentials automatically from the web identity token file that EKS mounts into the pod (exposed through the AWS_ROLE_ARN and AWS_WEB_IDENTITY_TOKEN_FILE environment variables).
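One caveat: the JavaScript SDK v2 only added web identity token support to its default provider chain around aws-sdk v2.525.0, so older versions keep failing with the same CredentialsError. If in doubt, you can ask the SDK which provider it resolved; a small debugging sketch, assuming the same setup as above:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
// Force credential resolution and report which provider supplied the credentials
s3.config.getCredentials((err) => {
  if (err) {
    console.error('Credential resolution failed:', err);
  } else {
    // Expect TokenFileWebIdentityCredentials when the service account is wired up
    console.log('Credentials loaded via', s3.config.credentials.constructor.name);
  }
});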