docker-compose/scrapy/compose.yaml

services:
  # Scrapyd daemon for deploying and scheduling spiders; reachable on port 6800,
  # published only through Traefik (no host port mapping).
  scrapyd:
    image: ${SCRAPY_SCRAPYD_IMAGE:-vimagick/scrapyd}
    container_name: ${SCRAPY_COMPOSE_PROJECT_NAME}_scrapyd
    restart: unless-stopped
    volumes:
      - scrapyd_data:/var/lib/scrapyd
      # Anonymous volume so Python packages installed inside the container persist.
      - /usr/local/lib/python3.9/dist-packages
    environment:
      TZ: ${TIMEZONE:-Europe/Berlin}
    networks:
      - compose_network
    labels:
      - 'traefik.enable=${SCRAPY_TRAEFIK_ENABLED}'
      - 'traefik.http.middlewares.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-redirect-web-secure.redirectscheme.scheme=https'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web.middlewares=${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-redirect-web-secure'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web.rule=Host(`${SCRAPY_TRAEFIK_HOST}`)'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web.entrypoints=web'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure.rule=Host(`${SCRAPY_TRAEFIK_HOST}`)'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure.tls.certresolver=resolver'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure.entrypoints=web-secure'
      - 'traefik.http.middlewares.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure-compress.compress=true'
      - 'traefik.http.routers.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure.middlewares=${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure-compress'
      - 'traefik.http.services.${SCRAPY_COMPOSE_PROJECT_NAME}-scrapyd-web-secure.loadbalancer.server.port=6800'
      - 'traefik.docker.network=${NETWORK_NAME}'
      - 'com.centurylinklabs.watchtower.enable=${WATCHTOWER_LABEL_ENABLE}'

  # Interactive Scrapy container for developing spiders in the shared /code volume.
  scrapy:
    image: ${SCRAPY_IMAGE:-vimagick/scrapyd}
    container_name: ${SCRAPY_COMPOSE_PROJECT_NAME}_scrapy
    command: bash
    volumes:
      - scrapy_code:/code
    working_dir: /code
    restart: unless-stopped
    environment:
      TZ: ${TIMEZONE:-Europe/Berlin}
    networks:
      - compose_network

  # Scrapyrt: HTTP API that runs spiders from the shared project on demand (port 9080).
  scrapyrt:
    image: ${SCRAPY_SCRAPYRT_IMAGE:-vimagick/scrapyd}
    container_name: ${SCRAPY_COMPOSE_PROJECT_NAME}_scrapyrt
    command: scrapyrt -i 0.0.0.0 -p 9080
    restart: unless-stopped
    ports:
      - "${SCRAPY_SCRAPYRT_PORT:-9080}:9080"
    volumes:
      - scrapy_code:/code
    working_dir: /code
    environment:
      TZ: ${TIMEZONE:-Europe/Berlin}
    networks:
      - compose_network

volumes:
  scrapyd_data:
    name: ${SCRAPY_COMPOSE_PROJECT_NAME}_scrapyd_data
  scrapy_code:
    name: ${SCRAPY_COMPOSE_PROJECT_NAME}_scrapy_code

networks:
  compose_network:
    name: ${NETWORK_NAME}
    external: true
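
Once the stack is up, Scrapyd serves its JSON API on port 6800 (published only through Traefik at ${SCRAPY_TRAEFIK_HOST}) and Scrapyrt listens on host port ${SCRAPY_SCRAPYRT_PORT:-9080}. Below is a minimal sketch of driving both HTTP APIs from Python; the project name "myproject", spider name "myspider", target URL, and the localhost base URLs are placeholder assumptions (Scrapyd has no host port mapping here, so reach it via the Traefik host name or from inside the compose network).

# Sketch: calling the Scrapyd and Scrapyrt HTTP APIs from Python.
# Assumes the compose stack above is running; names and base URLs are placeholders.
import requests

SCRAPYD = "http://localhost:6800"   # adjust: only exposed via Traefik in this compose file
SCRAPYRT = "http://localhost:9080"  # host port ${SCRAPY_SCRAPYRT_PORT:-9080}

# Check that the Scrapyd daemon is reachable.
print(requests.get(f"{SCRAPYD}/daemonstatus.json").json())

# Schedule a spider run on Scrapyd (the project must already be deployed,
# e.g. with scrapyd-client's scrapyd-deploy).
job = requests.post(
    f"{SCRAPYD}/schedule.json",
    data={"project": "myproject", "spider": "myspider"},
).json()
print("job id:", job.get("jobid"))

# Run a spider synchronously through Scrapyrt and read the scraped items back as JSON.
result = requests.get(
    f"{SCRAPYRT}/crawl.json",
    params={"spider_name": "myspider", "url": "https://example.com"},
).json()
print("items scraped:", len(result.get("items", [])))

Both services read the same Scrapy project from the shared scrapy_code volume mounted at /code, which is why a spider developed in the interactive scrapy container is immediately available to Scrapyrt.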