FS Fallback seems to be active all the time
It seems like fallback is active constantly.
When calling readdir I get the error Uncaught Error Error: ENOENT: no such file or directory, scandir 'c:\myBucketObject\'
The code looks like this: var files = await fs.readdir('myBucketObject/')

There's something very wrong with your code. Why is it setting Delimiter as \\? My understanding is that delimiter in S3 should be unix like: /.
Also, why is the prefix set as a local filesystem path?
The delimiter in s3 is arbitrary - it can be any character or a string of characters.
To make S3 transparent for the user, we chose to use the path delimiter of the OS the library is running on, in order to handle use cases where relative paths are used, e.g.: fs.readFileSync('../../file.txt')
When we use this library locally, it seems it cannot find the key that exists on the Cyclic app; even though we have the credentials, it still uses the local directory when normalizing the path.
I wrote my own version because I couldn't get the cyclic one to work.
const AWS = require('aws-sdk')
const S3 = new AWS.S3()
module.exports = class AIDV_S3FS {
constructor(opt){
this.opt = opt
}
formatPath(path, head){
path = path.split('\\').join('/')
if (head) return path
var targetPath = path
if (targetPath[0] === '/') targetPath = targetPath.substr(1, targetPath.length - 1)
var lastChar = targetPath[targetPath.length - 1]
if (lastChar !== '/') targetPath += '/'
return targetPath
}
listContent(path){
return new Promise((resolve, reject)=>{
var targetPath = this.formatPath(path)
S3.listObjectsV2(
{
Bucket: this.opt.bucket,
Delimiter: '/',
Prefix: targetPath,
},
function (err, data) {
if (err) return reject(err)
var list = {}
for (var i in data.CommonPrefixes){
var _cP = data.CommonPrefixes[i].Prefix
list[_cP.split('/').join('_.,!"#¤')] = ''
}
for (var i in data.Contents){
var object = data.Contents[i].Key
list[object.split('/').join('_.,!"#¤')] = ''
}
var items = []
for (var _i in list){
var item = _i.split('_.,!"#¤').join('/')
if (item[item.length - 1] === '/') item = item.substr(0, item.length - 1)
var split = item.split('/')
items.push(split[split.length - 1])
}
resolve(items)
}
)
})
}
headObject(path){
return new Promise((resolve, reject)=>{
S3.headObject({Bucket: this.opt.bucket, Key: path},
function (err, data) {
if (err) return reject(err)
resolve(data)
}
)
})
}
exists(path){
return new Promise(async resolve => {
try {
var head = await this.headObject(path)
} catch(err) {
}
if (head) resolve(path)
var targetPath = this.formatPath(path)
var split = targetPath.split('/')
var lastObject = split.splice(split.length - 2, 1)[0]
var parentPath = split.join('/')
var res = await this.listContent(this.formatPath(parentPath))
if (!res.includes(lastObject)) return resolve(false)
resolve(targetPath)
})
}
readdir(path){
return new Promise(async (resolve, reject)=>{
var exists = await this.exists(path)
if (!exists) return reject()
try {
var res = await this.listContent(exists)
resolve(res)
} catch(err) {
reject(err)
}
})
}
readFile(path){
return new Promise(async (resolve, reject)=>{
var exists = await this.exists(path)
if (!exists) return reject()
S3.getObject({Bucket: this.opt.bucket, Key: exists}, function (err, data) {
if (err) return reject(err)
resolve(data.Body)
})
})
}
async readJSON(path){
return new Promise(async (resolve, reject)=>{
try {
var res = await this.readFile(path)
resolve(JSON.parse(res.toString()))
} catch(err) {
reject(err)
}
})
}
async stat(path){
return new Promise(async (resolve, reject)=>{
var exists = await this.exists(path)
if (!exists) return reject()
var res = await this.headObject(exists)
var msTime = res.LastModified.getTime()
resolve({
dev: 0,
ino: 0,
mode: 0,
nlink: 0,
uid: 0,
gid: 0,
rdev: 0,
size: res.ContentLength,
blksize: 0,
blocks: 0,
atimeMs: msTime,
mtimeMs: msTime,
ctimeMs: msTime,
birthtimeMs: msTime,
atime: res.LastModified,
mtime: res.LastModified,
ctime: res.LastModified,
birthtime: res.LastModified
})
})
}
}