archiver does not work in aws lambda

Open otwm opened this issue 5 years ago • 8 comments

'use strict';

const axios = require('axios')
const archiver = require('archiver')
const AWS = require('aws-sdk')

const { log } = console
const logging = value => log(value)
const errorHandler = logging

// returns a listener that logs the event payload under a label when enabled
const loggingW = (label, enabled) => payload => {
  if (enabled) console.log(label, payload)
}

const s3 = new AWS.S3({
  accessKeyId: '===', secretAccessKey: '===' // your key
})

// open a PassThrough stream and start uploading it to S3 right away
const stream2 = (Bucket, Key) => {
  const stream = require('stream');
  const Body = new stream.PassThrough();
  s3.upload({
    Bucket, Key, Body,
  }, errorHandler)
  return Body
}

const hello = async (event, context) => {

  const res = await axios({
    url: 'some image', // some image url
    responseType: 'stream'
  })

  const archive = archiver('zip')

  archive
    .on('entry', loggingW('a-e', true))
    .on('progress', loggingW('a-p', true))
    .on('warning', loggingW('a-w', true))
    .on('finish', loggingW('a-finish', true))
    .on('end', loggingW('a-end', true))
    .on('close', loggingW('a-close', true))
    .on('error', function(err) {
      console.log('archie', err)
      throw err;
    });

  const dest = stream2('bucket', 'ttt.zip')
  dest
    .on('drain', loggingW('a-d', false))
    .on('pipe', loggingW('s-p', false))
    .on('unpipe', loggingW('s-up', false))
    .on('close', loggingW('close', false))
    .on('finish', loggingW('s-f', false))
    .on('end', loggingW('end'))
    .on('error', loggingW('error', true))

  archive.pipe(dest)
  archive.append(res.data, { name: '/ok/tt.jpg' }).finalize()

  return {
    statusCode: 200,
    body: JSON.stringify({
      message: '1',
      input: event,
    }),
  };

  // Use this code if you don't use the http event with the LAMBDA-PROXY integration
  // return { message: 'Go Serverless v1.0! Your function executed successfully!', event };
};

module.exports.hello = hello

// hello()

This code works well locally. In the Lambda environment, however, it behaves strangely: no error occurs, but nothing actually happens, and the archiver's entry and progress events are never raised. What is the problem?

// local console.log
a-e { name: 'ok/tt.jpg',      
  type: 'file',
  date: 2019-01-30T11:31:19.255Z,
  mode: 420,
  prefix: null,
  sourcePath: null,
  stats: false,
  sourceType: 'stream',
  linkname: null,
  store: false,
  comment: '' }
a-p { entries: { total: 1, processed: 1 },
  fs: { totalBytes: 0, processedBytes: 0 } }
a-end undefined
null

// lambda
{
    "statusCode": 200,
    "body": "{\"message\":\"1\",\"input\":\"\"}"
}

otwm avatar Jan 30 '19 11:01 otwm

I may be wrong, but it seems that AWS Lambda doesn't support Node.js streams, and archiver is built around streams.

https://docs.aws.amazon.com/en_us/lambda/latest/dg/programming-model.html

0xtmphey avatar Mar 08 '19 14:03 0xtmphey
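Streams do run inside the Lambda process, though; a likely culprit in the original snippet is that the async handler returns before the archive and the S3 upload have finished, and Lambda can freeze the process as soon as the handler returns. A minimal sketch of a rework, reusing the s3 client, axios, and archiver from the snippet above and assuming aws-sdk v2, whose upload(...).promise() resolves when the upload completes (bucket and key names are the same placeholders):

// Sketch only: keep the handler alive until the upload has finished.
const { PassThrough } = require('stream')

const hello = async (event) => {
  const res = await axios({ url: 'some image', responseType: 'stream' })

  const body = new PassThrough()
  // start the upload first and keep its promise so we can await it
  const done = s3.upload({ Bucket: 'bucket', Key: 'ttt.zip', Body: body }).promise()

  const archive = archiver('zip')
  archive.on('error', err => body.destroy(err))
  archive.pipe(body)
  archive.append(res.data, { name: 'ok/tt.jpg' })
  archive.finalize()

  // without this await, the handler returns early and Lambda may freeze
  // the process before the zip ever reaches S3
  await done

  return { statusCode: 200, body: JSON.stringify({ message: '1', input: event }) }
}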

Here is the working example https://github.com/rokumatsumoto/aws-node-s3-zipper

rokumatsumoto avatar Oct 31 '19 19:10 rokumatsumoto

@timopl did you get this working? I keep getting "dest.on is not a function". It works locally too, but not remotely. I do think Lambda now supports piping streams to the response.

ricky11 avatar May 11 '20 10:05 ricky11

@ricky11 You can use a buffer to collect the stream, and then send the payload encoded in base64.

Simple example:

const Archiver = require('archiver')

const formatResponse = function(body){
  var response = {
    "statusCode": 200,
    "headers": {
      'Content-Type': 'application/zip',
      'Content-disposition': 'attachment; filename=myFile.zip',
    },
    "isBase64Encoded": true,
    "body": body.toString('base64')
  }
  return response
}

exports.handler = async event => {
  return new Promise((resolve, reject) => {
    let zip = Archiver('zip')
    zip
      .append('Some text to go in file 1.', { name: '1.txt' })
      .append('Some text to go in file 2. I go in a folder!', {
        name: 'somefolder/2.txt',
      })
      .file('staticFiles/3.txt', { name: '3.txt' })
      .finalize()

    // collect the zip output in memory, then resolve with the whole payload
    let buffer = []

    zip.on('data', data => buffer.push(data))
    zip.on('error', reject)

    zip.on('end', () => {
      let data = Buffer.concat(buffer)
      resolve(formatResponse(data))
    })
  })
}

aakatev avatar Oct 12 '20 22:10 aakatev
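One caveat worth noting about this approach: the whole archive is held in memory and returned base64-encoded, and a Lambda response payload is capped at roughly 6 MB, so it fits small archives best.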

I think the issue should be closed!

aakatev avatar Oct 13 '20 04:10 aakatev

The buffer solution above served the purpose. Works like a charm. This should be the pinned answer.

PiyushPriyadarshiSigma avatar Mar 03 '21 06:03 PiyushPriyadarshiSigma

The only problem with this solution is that it is neither scalable nor optimized for memory usage. I tried it with 10,000 images.

stephenasuncionDEV avatar Jun 30 '23 23:06 stephenasuncionDEV
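For larger jobs, the zip can be streamed straight back into S3 instead of buffered, so memory use stays roughly constant. A minimal sketch, assuming aws-sdk v2 and placeholder bucket and key names:

// Sketch: stream source objects through the archive and directly into a
// destination object. Bucket and key names are hypothetical placeholders.
const AWS = require('aws-sdk')
const archiver = require('archiver')
const { PassThrough } = require('stream')

const s3 = new AWS.S3()

exports.handler = async () => {
  const keys = ['images/1.jpg', 'images/2.jpg'] // source keys (placeholders)

  const body = new PassThrough()
  const uploading = s3
    .upload({ Bucket: 'my-bucket', Key: 'out.zip', Body: body })
    .promise()

  const archive = archiver('zip')
  archive.on('error', err => body.destroy(err))
  archive.pipe(body)

  for (const Key of keys) {
    // each source object is streamed into the archive, never fully buffered
    archive.append(
      s3.getObject({ Bucket: 'my-bucket', Key }).createReadStream(),
      { name: Key }
    )
  }
  archive.finalize()

  await uploading // resolves once the whole zip has landed in S3
  return { statusCode: 200, body: JSON.stringify({ key: 'out.zip' }) }
}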

For anyone still struggling: I found this gist, which is well documented and has solutions for aws-sdk v2 and v3.

https://gist.github.com/amiantos/16bacc9ed742c91151fcf1a41012445e

I am, however, facing an issue where every child file is also zipped, rather than producing one single compressed file.

Neuroforge avatar Oct 01 '23 03:10 Neuroforge