
Docker Image

Open vikaskookna opened this issue 1 year ago • 24 comments

I created an AWS Lambda Docker image, and it fails on this line: from crawl4ai import AsyncWebCrawler

  "errorMessage": "[Errno 30] Read-only file system: '/home/sbx_user1051'",
  "errorType": "OSError",
  "requestId": "",
  "stackTrace": [
    "  File \"/var/lang/lib/python3.12/importlib/__init__.py\", line 90, in import_module\n    return _bootstrap._gcd_import(name[level:], package, level)\n",
    "  File \"<frozen importlib._bootstrap>\", line 1387, in _gcd_import\n",
    "  File \"<frozen importlib._bootstrap>\", line 1360, in _find_and_load\n",
    "  File \"<frozen importlib._bootstrap>\", line 1331, in _find_and_load_unlocked\n",
    "  File \"<frozen importlib._bootstrap>\", line 935, in _load_unlocked\n",
    "  File \"<frozen importlib._bootstrap_external>\", line 995, in exec_module\n",
    "  File \"<frozen importlib._bootstrap>\", line 488, in _call_with_frames_removed\n",
    "  File \"/var/task/lambda_function.py\", line 3, in <module>\n    from crawl4ai import AsyncWebCrawler\n",
    "  File \"/var/lang/lib/python3.12/site-packages/crawl4ai/__init__.py\", line 3, in <module>\n    from .async_webcrawler import AsyncWebCrawler\n",
    "  File \"/var/lang/lib/python3.12/site-packages/crawl4ai/async_webcrawler.py\", line 8, in <module>\n    from .async_database import async_db_manager\n",
    "  File \"/var/lang/lib/python3.12/site-packages/crawl4ai/async_database.py\", line 8, in <module>\n    os.makedirs(DB_PATH, exist_ok=True)\n",
    "  File \"<frozen os>\", line 215, in makedirs\n",
    "  File \"<frozen os>\", line 225, in makedirs\n"
  ]
}
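For reference, a minimal workaround sketch rather than a confirmed fix: the traceback shows crawl4ai's async_database.py calling os.makedirs(DB_PATH) at import time, and the failing path sits under the Lambda home directory, which is read-only (only /tmp is writable). Assuming DB_PATH is derived from the home directory, pointing HOME at /tmp before the import may let the directory creation succeed. The handler below, including the event "url" field, is hypothetical.

import asyncio
import os

# Lambda's filesystem is read-only except /tmp. Assumption: crawl4ai derives
# its database path from the home directory, so redirect HOME to /tmp
# before importing the package (the import triggers os.makedirs).
os.environ["HOME"] = "/tmp"

from crawl4ai import AsyncWebCrawler  # noqa: E402  (imported after env tweak)


def lambda_handler(event, context):
    # Hypothetical handler: crawl the URL passed in the event payload.
    async def run():
        async with AsyncWebCrawler() as crawler:
            result = await crawler.arun(url=event.get("url", "https://example.com"))
            return result.markdown

    return {"statusCode": 200, "body": asyncio.run(run())}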

vikaskookna · Oct 11 '24