# Docker Compose file format version
version: "3"

services:
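  # Keycloak identity provider: imports realm config from ./conf/keycloak_conf,
  # listens on port 8088, and stores its data in the postgres service below.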
  keycloak:
    image: quay.io/keycloak/keycloak:${KEYCLOAK_VERSION-22.0.0}
    command: ["start-dev", "--import-realm", "--http-port", "8088"]
    environment:
      KC_DB: postgres
      KC_DB_URL_HOST: postgres
      KC_DB_URL_DATABASE: jan-keycloak
      KC_DB_PASSWORD: postgrespassword
      KC_DB_USERNAME: postgres
      KC_DB_SCHEMA: public
      KC_HEALTH_ENABLED: "true"
      KEYCLOAK_ADMIN: ${KEYCLOAK_ADMIN-admin}
      KEYCLOAK_ADMIN_PASSWORD: ${KEYCLOAK_ADMIN_PASSWORD-admin}
    volumes:
      - ./conf/keycloak_conf:/opt/keycloak/data/import
    ports:
      - "8088:8088"
    depends_on:
      postgres:
        condition: service_healthy
    networks:
      jan_community:
        ipv4_address: 172.20.0.9

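  # PostgreSQL 15 backing database; Keycloak points at it via KC_DB_*, and it is
  # initialized from ./conf/db/docker_psql_init.sql.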
  postgres:
    image: postgres:15
    command: postgres -c jit=off
    restart: always
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgrespassword
    ports:
      - "5432:5432"
    healthcheck:
      test: "exit 0"
    volumes:
      - ./conf/db/docker_psql_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql
    networks:
      jan_community:
        ipv4_address: 172.20.0.11

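  # Hasura GraphQL engine (cli-migrations image): applies migrations and metadata from
  # ./app-backend/hasura and is routed by Traefik under /graphql-engine.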
  graphql-engine:
    image: hasura/graphql-engine:v2.31.0.cli-migrations-v3
    ports:
      - 8080:8080
    restart: always
    env_file:
      - conf/sample.env_app-backend
    volumes:
      - ./app-backend/hasura/migrations:/migrations
      - ./app-backend/hasura/metadata:/metadata
    depends_on:
      data-connector-agent:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/healthz"]
      interval: 30s
      timeout: 10s
      retries: 10
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.graphql-engine.entrypoints=jan"
      - "traefik.http.routers.graphql-engine.rule=(PathPrefix(`/graphql-engine`))"
      - "traefik.http.middlewares.graphql-engine.stripprefix.prefixes=/graphql-engine"
      - "traefik.http.middlewares.graphql-engine.stripprefix.forceslash=false"
      - "traefik.http.routers.graphql-engine.middlewares=graphql-engine"
    networks:
      jan_community:
        ipv4_address: 172.20.0.12

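  # One-shot job that runs `hasura-cli seed apply` against graphql-engine once it
  # reports healthy.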
  hasura-seed-apply:
    image: hasura/graphql-engine:v2.31.0.cli-migrations-v3
    entrypoint: [""]
    command: ["/bin/sh", "-c", "hasura-cli seed apply --all-databases"]
    env_file:
      - conf/sample.env_app-backend
    volumes:
      - ./app-backend/hasura/config.yaml:/config.yaml
      - ./app-backend/hasura/seeds:/seeds
    depends_on:
      graphql-engine:
        condition: service_healthy
    extra_hosts:
      - "localhost:172.20.0.12"
    networks:
      jan_community:

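  # Backend worker built from ./app-backend/worker, run in development mode on port 8787.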
  worker:
    build:
      context: ./app-backend/worker
      dockerfile: ./Dockerfile
    restart: always
    environment:
      - "NODE_ENV=development"
    volumes:
      - ./app-backend/worker:/worker
    ports:
      - 8787:8787
    networks:
      jan_community:
        ipv4_address: 172.20.0.13

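  # Hasura data connector agent; graphql-engine waits for its health endpoint before starting.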
  data-connector-agent:
    image: hasura/graphql-data-connector:v2.31.0
    restart: always
    ports:
      - 8081:8081
    environment:
      QUARKUS_LOG_LEVEL: ERROR # FATAL, ERROR, WARN, INFO, DEBUG, TRACE
      ## https://quarkus.io/guides/opentelemetry#configuration-reference
      QUARKUS_OPENTELEMETRY_ENABLED: "false"
      ## QUARKUS_OPENTELEMETRY_TRACER_EXPORTER_OTLP_ENDPOINT: http://jaeger:4317
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8081/api/v1/athena/health"]
      interval: 5s
      timeout: 10s
      retries: 5
      start_period: 5s
    networks:
      jan_community:
        ipv4_address: 172.20.0.14

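  # Web client dev server built from ./web-client/dev.Dockerfile (the /app/.next volume
  # suggests Next.js), exposed on port 3000 and routed by Traefik under /web.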
  web:
    build:
      context: ./web-client
      dockerfile: ./dev.Dockerfile
    restart: always
    volumes:
      - ./web-client/:/app
      - /app/node_modules
      - /app/.next
    env_file:
      - conf/sample.env_web-client
    ports:
      - 3000:3000
    environment:
      NODE_ENV: development
    extra_hosts:
      - "localhost:172.20.0.9"
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.web.entrypoints=jan"
      - "traefik.http.routers.web.rule=(Host(`localhost`) && PathPrefix(`/web`))"
      - "traefik.http.routers.web.middlewares=rewritepath"
      - "traefik.http.middlewares.rewritepath.addprefix.prefix=/web"
    networks:
      jan_community:
        ipv4_address: 172.20.0.15

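  # LLM inference server (llama-cpp-python, pinned by digest) serving the model selected
  # by LLM_MODEL_FILE from ./jan-inference/llm/models, routed by Traefik under /llm.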
  llm:
    image: ghcr.io/abetlen/llama-cpp-python@sha256:b6d21ff8c4d9baad65e1fa741a0f8c898d68735fff3f3cd777e3f0c6a1839dd4
    volumes:
      - ./jan-inference/llm/models:/models
    ports:
      - 8000:8000
    environment:
      MODEL: /models/${LLM_MODEL_FILE}
      PYTHONUNBUFFERED: 1
    restart: on-failure
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.llm.entrypoints=jan"
      - "traefik.http.routers.llm.rule=(PathPrefix(`/llm`))"
      - "traefik.http.middlewares.llm.stripprefix.prefixes=/llm"
      - "traefik.http.middlewares.llm.stripprefix.forceslash=false"
      - "traefik.http.routers.llm.middlewares=llm"
    networks:
      jan_community:
        ipv4_address: 172.20.0.18

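  # One-shot model converter: quantizes the Stable Diffusion model in
  # ./jan-inference/sd/models to q4_0 if no .bin file exists there yet.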
  sd-downloader:
    build:
      context: ./jan-inference/sd/
      dockerfile: compile.Dockerfile
    command: /bin/sh -c "if [ ! -f /models/*.bin ]; then python /sd.cpp/sd_cpp/models/convert.py --out_type q4_0 --out_file /models/${SD_MODEL_FILE}.q4_0.bin /models/${SD_MODEL_FILE}; fi"
    volumes:
      - ./jan-inference/sd/models:/models
    networks:
      jan_community:
        ipv4_address: 172.20.0.19

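  # Stable Diffusion inference API (uvicorn on container port 8000, published as 8001);
  # starts after sd-downloader completes and receives S3/MinIO settings via environment variables.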
  sd:
    build:
      context: ./jan-inference/sd/
      dockerfile: inference.Dockerfile
    volumes:
      - ./jan-inference/sd/models:/models/
    command: /bin/bash -c "python -m uvicorn main:app --proxy-headers --host 0.0.0.0 --port 8000"
    environment:
      S3_ENDPOINT_URL: ${S3_ENDPOINT_URL}
      S3_PUBLIC_ENDPOINT_URL: ${S3_PUBLIC_ENDPOINT_URL}
      S3_ACCESS_KEY_ID: ${S3_ACCESS_KEY_ID}
      S3_SECRET_ACCESS_KEY: ${S3_SECRET_ACCESS_KEY}
      S3_BUCKET_NAME: ${S3_BUCKET_NAME}
      MODEL_NAME: ${SD_MODEL_FILE}.q4_0.bin
      MODEL_DIR: /models
      OUTPUT_DIR: /tmp
      SD_PATH: /sd
      PYTHONUNBUFFERED: 1
    ports:
      - 8001:8000
    restart: on-failure
    depends_on:
      sd-downloader:
        condition: service_completed_successfully
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.sd.entrypoints=jan"
      - "traefik.http.routers.sd.rule=(PathPrefix(`/sd`))"
      - "traefik.http.middlewares.sd.stripprefix.prefixes=/sd"
      - "traefik.http.middlewares.sd.stripprefix.forceslash=false"
      - "traefik.http.routers.sd.middlewares=sd"
    networks:
      jan_community:
        ipv4_address: 172.20.0.21

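  # MinIO S3-compatible object store: API on port 9000, console on 9001,
  # routed by Traefik under /minio.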
  minio:
    image: minio/minio
    ports:
      - 9000:9000
      - 9001
    volumes:
      - ./minio/data:/export
      - ./minio/config:/root/.minio
    environment:
      MINIO_ROOT_USER: ${S3_ACCESS_KEY_ID}
      MINIO_ROOT_PASSWORD: ${S3_SECRET_ACCESS_KEY}
    command: server /export --console-address ":9001"
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.minio.entrypoints=jan"
      - "traefik.http.routers.minio.rule=(PathPrefix(`/minio`))"
      - "traefik.http.middlewares.minio.stripprefix.prefixes=/minio"
      - "traefik.http.middlewares.minio.stripprefix.forceslash=false"
      - "traefik.http.routers.minio.middlewares=minio"
    networks:
      jan_community:
        ipv4_address: 172.20.0.23

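  # Helper that recreates the public ${S3_BUCKET_NAME} bucket on MinIO with the mc client, then exits.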
  createbuckets:
    image: minio/mc
    depends_on:
      - minio
    environment:
      S3_ACCESS_KEY_ID: ${S3_ACCESS_KEY_ID}
      S3_SECRET_ACCESS_KEY: ${S3_SECRET_ACCESS_KEY}
      S3_BUCKET_NAME: ${S3_BUCKET_NAME}
    entrypoint: >
      /bin/sh -c "
      /usr/bin/mc config host add myminio http://minio:9000 ${S3_ACCESS_KEY_ID} ${S3_SECRET_ACCESS_KEY};
      /usr/bin/mc rm -r --force myminio/${S3_BUCKET_NAME};
      /usr/bin/mc mb myminio/${S3_BUCKET_NAME};
      /usr/bin/mc anonymous set public myminio/${S3_BUCKET_NAME};
      exit 0;
      "
    networks:
      jan_community:

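  # Traefik reverse proxy: the "jan" entrypoint on port 1337 fronts the labeled services,
  # with the dashboard published on port 9090.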
  traefik:
    image: traefik:v2.10
    command:
      - "--api.insecure=true"
      - "--providers.docker=true"
      - "--providers.docker.exposedbydefault=false"
      - "--log.level=debug"
      - "--entrypoints.jan.address=:1337"
    ports:
      - "1337:1337"
      - "9090:8080"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    networks:
      jan_community:
        ipv4_address: 172.20.0.22

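# Shared bridge network with a fixed subnet so services can be pinned to static IPs.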
networks:
  jan_community:
    driver: bridge
    ipam:
      driver: default
      config:
        - subnet: 172.20.0.0/16