Is there a way to load an existing file into an s3 collection
I have an external service that uploads a file to s3 that then returns the file key back to my meteor server.
Is there any way to sync the metadata about that file into my cfs collection?
I've tried inserting different forms of the data directly into the collection, but I get an error: `Error: DataMan constructor received data that it doesn't support`. So I don't think it's possible this way.
Is there an API I'm missing? Or is this even possible?
Hi, I feel your pain — I was in a similar situation and developed a good-enough solution. I ended up using peerlibrary/meteor-aws-sdk to load the data into a cfs:s3 store. Here is the method I have in my server methods.js file: ` updateEmployeeThumbs: function(){
var configObj = {
"accessKeyId": Meteor.settings.AWS.accessKeyId,
"secretAccessKey": Meteor.settings.AWS.secretAccessKey,
"bucket": "salk-employee-pics"
};
var updateCollectionFs = function(listDataContents){
_.each(listDataContents,(chunk)=>{
_.each(chunk,(f)=>{
var e = EmployeeThumbs.findOne({'original.name':f.Key});
//for now only insert what's missing, don't update
if(typeof e == 'undefined' ){
console.log('here is supposed to be contents for insert');
console.log(f);
s3 = new AWS.S3();
AWS.config.update({
"accessKeyId": configObj.accessKeyId,
"secretAccessKey": configObj.secretAccessKey,
"bucket": configObj.bucket
});
var readStream = s3.getObjectSync({
"Key":f.Key,
"Bucket":"salk-employee-pics"}
);
// You must have a ReadStream with some data; it can be any stream.
// We're using the standard output from a command as an example.
//var readStream = spawn('ls', []).stdout;
// Create the FS.File instance
var newFile = new FS.File();
// Attach the ReadStream data to it.
newFile.attachData(readStream.Body, {type: 'image/jpeg'});
// Optionally provide a file name
newFile.name(f.Key);
// Insert the file, which will save the metadata and
// store the stream data into all defined stores.
// `Files` is an `FS.Collection` instance defined elsewhere.
var thumb = EmployeeThumbs.findOne({'original.name':f.Key});
if(typeof thumb !== 'undefined'){
console.log('reloading thumb for '+f.Key);
EmployeeThumbs.update({'original.name':f.Key},newFile);
} else{
console.log('inserting new thumb for '+f.Key);
EmployeeThumbs.insert(newFile);
}
}
});
});
};
var allKeys = [];
var listAllObjectsFromS3Bucket = function(configObj,marker){
AWS.config.update({
"accessKeyId": configObj.accessKeyId,
"secretAccessKey": configObj.secretAccessKey,
"bucket": configObj.bucket
});
s3 = new AWS.S3();
var list = s3.listObjectsSync({Bucket: configObj.bucket, Marker: marker});
allKeys.push(list.Contents);
console.log('pushed list of '+ list.Contents.length);
//console.log(list);
if(list.IsTruncated){
console.log('list was truncated, doing it again before upsert');
var contents = list.Contents;
var marker = contents[contents.length-1].Key;
console.log('next marker is:'+marker);
listAllObjectsFromS3Bucket(configObj,marker);
} else {
return allKeys;
}
};
listAllObjectsFromS3Bucket(configObj,'');
//console.log('here is the allKeys listing:');
//console.log(allKeys);
console.log('now going to upsert');
updateCollectionFs(allKeys);
},`
By the way, EmployeeThumbs looks like this:
// CFS collection for employee thumbnail images. Files are written to the
// "thumbs" folder of the "salk-saw-assets" bucket, with the "createThumb"
// transform applied on write. (Credentials redacted in this snippet.)
this.EmployeeThumbs = new FS.Collection("employee_thumbs", {
  stores: [
    new FS.Store.S3("employee_thumbs", {
      bucket: "salk-saw-assets",
      folder: "thumbs",
      transformWrite: "createThumb",
      accessKeyId: "xxx",
      secretAccessKey: "xxx"
    })
  ]
});
Yeah, that's what I figured I had to do. Does this end up downloading the file onto the server?
That is kind of what I was trying to avoid.