aws-sdk-js
Intermittent RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Describe the bug
We intermittently get a RequestTimeout in a pipeline job when uploading a directory of assets to S3 with the aws-sdk. Once the error occurs in a workspace, it keeps recurring there. We already checked connectivity and found no issue, and increasing the timeout did not help either (a sketch of that kind of client configuration follows the error output below).
20:22:13     at Request.callListeners (/mnt1/jenkins/workspace/spr-template-preview-build-v2@2/node_modules/aws-sdk/lib/sequential_executor.js:116:18) {
20:22:13   code: 'RequestTimeout',
20:22:13   region: null,
20:22:13   time: 2024-01-05T14:52:09.752Z,
20:22:13   requestId: 'KXS1VEZVPB199NDR',
20:22:13   extendedRequestId: 'e8aDplpOSLzfFhfHaGt8NACPl3cAxEnImTuDxOP6XKx6nkJ0cTNlK5Rf4w7gwAGQqG8EsekJPA4=',
20:22:13   cfId: undefined,
20:22:13   statusCode: 400,
20:22:13   retryable: true
20:22:13 }
20:22:13 RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
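For reference, a minimal sketch of how the socket timeout and retry count are typically raised on an aws-sdk v2 S3 client; the numbers are placeholders, not the exact values tried in the pipeline.

const AWS = require('aws-sdk');

// Illustrative only: raise the idle-socket timeout and the retry budget.
const s3 = new AWS.S3({
  maxRetries: 10,          // RequestTimeout is marked retryable: true, so retries apply
  httpOptions: {
    connectTimeout: 5000,  // ms allowed to establish the connection
    timeout: 120000,       // socket timeout (ms) for the request
  },
});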
Expected Behavior
The directory should upload successfully every time, without the timeout.
Current Behavior
The upload fails intermittently with a RequestTimeout error.
Reproduction Steps
There are no specific steps to reproduce; we use the piece of code below to upload the assets.
function uploadDir(localDirPath, { compress }) {
  return new Promise((resolve, reject) => {
    const client = getS3Client();
    const uploader = client.uploadDir({
      localDir: localDirPath,
      s3Params: {
        Bucket: envConfig.storage.aws.bucket,
        Prefix: envConfig.storage.aws.prefix || '',
        ContentEncoding: compress ? 'gzip' : '',
        ACL: 'public-read',
      },
    });
    uploader.on('end', resolve);
    uploader.on('error', reject);
  });
}
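For context, a hypothetical call site for the function above; the path and the compress flag are examples only, not taken from the actual pipeline.

uploadDir('./build/assets', { compress: true })
  .then(() => console.log('asset upload finished'))
  .catch((err) => console.error('asset upload failed', err));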
Possible Solution
No response
Additional Information/Context
No response
SDK version used
2.1336.0
Environment details (OS name and version, etc.)
Jenkins Pipeline Job
Hi @hetpatel33 - thanks for reaching out.
Can we start by verifying which upload operation you're using (s3.upload or s3.putObject)? Could you also share the part of the code where you call that operation?
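For reference, the two operations being asked about look roughly like this in aws-sdk v2, where s3 is an AWS.S3 client instance; the bucket and key names below are placeholders.

const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// s3.upload: managed upload, accepts streams and switches to multipart for large bodies.
s3.upload(
  { Bucket: 'example-bucket', Key: 'assets/app.js', Body: fs.createReadStream('./app.js') },
  (err, data) => { /* handle result */ }
);

// s3.putObject: a single PutObject request; the whole body is sent in one call.
s3.putObject(
  { Bucket: 'example-bucket', Key: 'assets/app.js', Body: fs.readFileSync('./app.js') },
  (err, data) => { /* handle result */ }
);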
Hi @aBurmeseDev, below is the code snippet we are using for uploading:
const s3 = require('s3');       // third-party "s3" package that provides uploadDir() on top of aws-sdk
const AWS = require('aws-sdk');

function getS3Client() {
  // Build the AWS.S3 options from the environment config, only setting
  // fields that are actually configured.
  const s3ClientParams = {};
  if (envConfig.storage.aws.region) {
    s3ClientParams.region = envConfig.storage.aws.region;
  }
  if (envConfig.storage.aws.key) {
    s3ClientParams.accessKeyId = envConfig.storage.aws.key;
  }
  if (envConfig.storage.aws.secret) {
    s3ClientParams.secretAccessKey = envConfig.storage.aws.secret;
  }
  const s3Client = new AWS.S3(s3ClientParams);
  // Wrap the aws-sdk client with the "s3" package's high-level client.
  return s3.createClient({ s3Client });
}
/**
 * Syncs the directory at localDirPath with the directory at dirPrefix in the
 * S3 bucket specified in the environment config.
 * @param {string} localDirPath - local directory to upload
 * @param {{ compress: boolean }} options - whether the assets are gzip-compressed
 * @returns {Promise} resolves when the directory upload finishes
 */
export function uploadDir(localDirPath, { compress }) {
  return new Promise((resolve, reject) => {
    const client = getS3Client();
    const uploader = client.uploadDir({
      localDir: localDirPath,
      s3Params: {
        Bucket: envConfig.storage.aws.bucket,
        Prefix: envConfig.storage.aws.prefix || '',
        ContentEncoding: compress ? 'gzip' : '',
        ACL: 'public-read',
      },
    });
    uploader.on('end', resolve);
    uploader.on('error', reject);
  });
}
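The envConfig object referenced above is not shown in the snippet; a plausible shape, with placeholder values, would be something like:

// Assumed shape of envConfig (illustrative only; not the reporter's actual config).
const envConfig = {
  storage: {
    aws: {
      region: 'us-east-1',                       // optional
      key: process.env.AWS_ACCESS_KEY_ID,        // optional; SDK falls back to its default credential chain if unset
      secret: process.env.AWS_SECRET_ACCESS_KEY, // optional
      bucket: 'example-asset-bucket',
      prefix: 'preview/',
    },
  },
};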