Experiencing extreme slowness when running hatch run inside Docker
I having created several Docker images using Hatch as an entrypoint, and it seems that run hatch run inside Docker causes the image to time out on "Syncing dependencies...."
Please give me something reproducible
Yes sir! Thanks for the quick response.
dependencies = [
"bokeh>=2.4.3", # For dask diagnostic dashboard
"botocore",
"dask-kubernetes==2023.1.1",
"prefect>=2.10.6,<3",
"pydantic>=2.6.3",
"redis>=4.3.1",
"requests>=2.27.1",
"saga3>=0.6.7",
"sendgrid>=6.10.0",
"setuptools>=65.5.1",
"toolz>=0.12.0",
"paho-mqtt==1.6.*",
"prometheus-client>=0.16.0",
"pyowm>=3.3.0",
"heimdall>=0.3.9",
"pyarrow==15.0.0",
"fastapi>=0.77.1",
"uvicorn>=0.17.6",
]
[project.optional-dependencies]
dev = [
"anyio",
"ipython",
"jupyterlab",
"myst-parser",
"nbsphinx",
"pytest",
"python-dotenv",
"sphinx",
"sphinx-autodoc-typehints",
]
[tool.hatch.envs.dev]
dev-mode = true
python = "3.9"
features = [
"dev"
]
[tool.hatch.envs.worker]
dev-mode = false
python = "3.9"
path = "~/ivaldi/.venv"
[tool.hatch.envs.worker.scripts]
api = "uvicorn ivaldi.server.main:app --host=0.0.0.0 --port=8080"
Running
hatch -e worker run api
to start the API shows no logs and is seemingly stuck in "Syncing dependencies..."
Can you please give me a full pyproject.toml and Dockerfile? Normally I would do the extra work but I'm pretty sick this week, sorry!
Yes I can. Give me a second.
[build-system]
requires = ['hatchling']
build-backend = 'hatchling.build'
[tool.hatch]
root = 'src'
[tool.hatch.version]
path = 'src/ivaldi/__init__.py'
[project]
name = 'ivaldi'
description = 'Data ingress manager for HVAC.'
authors = [
{name = 'Will Vega-Brown', email = '[email protected]'},
{name = 'Robert Lauer', email = '[email protected]'},
{name = 'Irvin Tang', email = '[email protected]'},
{name = 'Alex Yee', email = '[email protected]'},
]
readme = 'README.md'
requires-python = '>=3.9'
dynamic = ['version']
dependencies = [
"bokeh>=2.4.3", # For dask diagnostic dashboard
"botocore",
"dask-kubernetes==2023.1.1",
"prefect>=2.10.6,<3",
"pydantic>=2.6.3",
"redis>=4.3.1",
"requests>=2.27.1",
"saga3>=0.6.7",
"sendgrid>=6.10.0",
"setuptools>=65.5.1",
"toolz>=0.12.0",
"paho-mqtt==1.6.*",
"prometheus-client>=0.16.0",
"pyowm>=3.3.0",
"heimdall>=0.3.9",
"pyarrow==15.0.0",
"fastapi>=0.77.1",
"uvicorn>=0.17.6",
]
[project.optional-dependencies]
dev = [
"anyio",
"ipython",
"jupyterlab",
"myst-parser",
"nbsphinx",
"pytest",
"python-dotenv",
"sphinx",
"sphinx-autodoc-typehints",
]
[tool.hatch.envs.default.env-vars]
PIP_INDEX_URL = "https://pypi.tagup.io/simple"
PIP_EXTRA_INDEX_URL = "https://pypi.tagup.io/simple"
[tool.hatch.envs.dev]
dev-mode = true
python = "3.9"
features = [
"dev"
]
[tool.hatch.envs.worker]
dev-mode = false
python = "3.9"
path = "~/ivaldi/.venv"
[tool.hatch.envs.worker.scripts]
api = "uvicorn ivaldi.server.main:app --host=0.0.0.0 --port=8080 --log-config=/ivaldi/logging.conf"
queue-writer = "python src/ivaldi/mqtt/mqtt_queue_writer.py"
saga-writer = "python src/ivaldi/mqtt/mqtt_saga_writer.py"
mock-recommendation-writer = "python src/ivaldi/mqtt/mock_recommendation_writer.py"
recommendation-listener = "python src/ivaldi/mqtt/recommendation_listener.py"
[project.scripts]
mqtt = "ivaldi.server.main:app"
[tool.pytest.ini_options]
testpaths = 'test'
[tool.hatch.build.targets.wheel]
packages = ['src/ivaldi']
[tool.ruff]
lint.ignore = [
"UP007",
"S101",
"ARG001",
"A003",
"T201",
"PLR2004",
"ARG001",
"FBT002",
"FBT001",
"BLE001",
"G004",
"EM101",
"PT006",
"PT007",
"RUF009",
"TRY003",
"TRY301",
"RUF006",
"DTZ006",
"FA100",
"EM102",
"S608",
"RUF002"
]
line-length = 100
lint.extend-select = ['Q', 'RUF100', 'C90', 'UP', 'I']
lint.flake8-quotes = {inline-quotes = 'double', multiline-quotes = 'double'}
lint.mccabe = { max-complexity = 14 }
lint.isort = { known-first-party = ['src/saga3', 'src/tests'] }
target-version = 'py39'
[tool.black]
color = true
line-length = 100
target-version = ['py39']
skip-string-normalization = true
[tagup]
release_version = ""
[tagup.project]
name = "ivaldi"
[tagup.images]
[tagup.images.dev]
[tagup.images.server]
[tagup.images.worker]
[tagup.registries]
[tagup.registries.dev]
account = "499303421264"
region = "us-east-1"
repository = "ivaldi-web"
role = "arn:aws:iam::499303421264:role/poweruser"
[tagup.registries.com]
account = 936436477723
region = "us-east-1"
repository = "ivaldi"
role = "arn:aws:iam::936436477723:role/poweruser"
[tagup.registries.gov]
account = 196960199065
region = "us-gov-east-1"
repository = "ivaldi"
url = "196960199065.dkr.ecr.us-gov-east-1.amazonaws.com"
[tagup.clusters]
[tagup.clusters.dev]
cluster_name = "tagup-dev-01"
role = "arn:aws:iam::499303421264:role/ci"
account = "dev"
[tagup.clusters.stage]
cluster_name = "stage"
role = "arn:aws:iam::936436477723:role/ci"
account = "com"
[tagup.clusters.prod]
cluster_name = "prod"
role = "arn:aws:iam::936436477723:role/ci"
account = "com"
[tagup.clusters.gov]
cluster_name = "gov-cluster"
role = "arn:aws-us-gov:iam::196960199065:role/ci"
account = "gov"
[tagup.deployments]
[tagup.deployments.com-dev]
overlay = "develop"
cluster = "dev"
registry = "dev"
namespace = "ivaldi"
images = {ivaldi = "server"}
[tagup.deployments.stage]
overlay = "stage"
cluster = "stage"
registry = "com"
namespace = "application"
images = {ivaldi = "server"}
[tagup.deployments.prod]
overlay = "prod"
cluster = "prod"
registry = "com"
namespace = "application"
images = {ivaldi = "server"}
[tagup.prefect]
mount_path = "/ivaldi"
[tagup.prefect.servers.stage]
ui_host = "https://prefect.stage.internal.tagup.io"
registry = "com"
storage_bucket = "tagup-stage-prefect-2"
role_alias = "tagup-com-poweruser"
[tagup.prefect.servers.prod]
ui_host = "https://prefect.prod.internal.tagup.io"
registry = "com"
storage_bucket = "tagup-prod-prefect-orion"
role_alias = "tagup-com-poweruser"
[tagup.prefect.flow_template]
image = "worker"
service_account = "ivaldi"
job_template_path = "deploy/templates/prefect_job.yaml"
[tagup.prefect.flows.always_fail]
file = "src/ivaldi/flows/testing_flows.py"
name = "always_fail"
project = "testing"
[tagup.prefect.flows.cleanup_old_runs]
file = "src/ivaldi/flows/prefect_management_flows.py"
name = "cleanup_old_runs"
project = "management"
[tagup.prefect.flows.transfer_metrics]
file = "src/ivaldi/flows/transfer_metrics_flow.py"
name = "transfer_metrics"
project = "management"
# syntax=docker/dockerfile:1
# base
# ------
FROM python:3.9-slim as base
RUN --mount=type=cache,target=/root/.cache/pip pip install hatch setuptools wheel awscli
ENV PIP_INDEX_URL=https://pypi.tagup.io
ENV PIP_EXTRA_INDEX_URL=https://pypi.org/simple
COPY bin/install_system_packages.sh /opt/install_system_packages.sh
RUN /opt/install_system_packages.sh git build-essential ssh libpq-dev curl groff less unzip gpg
ENV PYTHONFAULTHANDLER=1 \
PYTHONHASHSEED=random \
PYTHONUNBUFFERED=1
ARG KUBECTL_VERSION=v1.29
RUN apt update ; \
apt-get install -y apt-transport-https ca-certificates curl ; \
curl -fsSL https://pkgs.k8s.io/core:/stable:/$KUBECTL_VERSION/deb/Release.key | gpg --dearmor -o /etc/apt/keyrings/kubernetes-apt-keyring.gpg ; \
echo deb [signed-by=/etc/apt/keyrings/kubernetes-apt-keyring.gpg] https://pkgs.k8s.io/core:/stable:/$KUBECTL_VERSION/deb/ / | tee /etc/apt/sources.list.d/kubernetes.list ; \
apt-get update ; \
apt-get install -y kubectl
WORKDIR /ivaldi
COPY --chown=tagup:tagup src /ivaldi/src/
COPY --chown=tagup:tagup pyproject.toml README.md /ivaldi/
# Create non-root user
RUN mkdir -p /home/tagup \
&& useradd -s /bin/bash -d /home/tagup tagup \
&& chown -R tagup:tagup /ivaldi \
&& chown tagup:tagup /home/tagup
# dev
# ------
FROM base as dev
RUN --mount=type=cache,target=/root/.cache/pip hatch env create dev
USER tagup
ENTRYPOINT ["hatch", "-e", "dev", "run"]
CMD ["python"]
# worker
# ------
FROM base as base
RUN --mount=type=cache,target=/root/.cache/pip hatch env create worker
USER tagup
ENTRYPOINT ["hatch", "-e", "worker", "run"]
CMD ["python"]
Docker causes the image to time out on "Syncing dependencies...."
It's possible you have a DNS issue. I've experienced that before with the default DNS service (router) not handling certain lookups (reverse DNS) properly for example.
Try running the container with --dns 1.1.1.1 (cloudflare DNS). If you need it for image builds you'll need to adjust the /etc/docker/daemon.json or equivalent config.