Deploying scrapyd and SpiderKeeper with Docker
Dockerfile
```dockerfile
FROM python:3.5
RUN cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && pip install --upgrade pip \
    && pip install scrapy scrapyd sqlalchemy scrapy-redis mysql_connector scrapyd-client spiderkeeper
COPY scrapyd.conf /etc/scrapyd/
COPY config.txt /
VOLUME /data
VOLUME /images
EXPOSE 6800
```
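The compose file at the end of the post refers to this image simply as `scrapy`, so it has to be built under that tag first. A minimal build step, assuming the Dockerfile, scrapyd.conf, and config.txt all sit in the current directory:

```bash
# Build the image under the tag that docker-compose.yml expects
docker build -t scrapy .
```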
config.txt
```ini
[mysql]
db_host=root:123456@localhost:3306/scrapy
[redis]
db_host=localhost
db_port=7501
[img]
path = /images/
```
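The Dockerfile copies config.txt to the container root. The post does not show the spider code that consumes it, but a plain INI file like this can be read with Python's standard-library configparser; the sketch below is only illustrative, and the variable names are not from the original project:

```python
import configparser

# Load the settings file from the path the Dockerfile copies it to
config = configparser.ConfigParser()
config.read('/config.txt')

mysql_dsn  = config['mysql']['db_host']         # root:123456@localhost:3306/scrapy
redis_host = config['redis']['db_host']         # localhost
redis_port = config.getint('redis', 'db_port')  # 7501
image_path = config['img']['path']              # /images/
```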
scrapyd.conf
```ini
[scrapyd]
bind_address = 0.0.0.0
eggs_dir = /data/eggs
logs_dir = /data/logs
dbs_dir = /data/dbs
http_port = 6800
```
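`bind_address = 0.0.0.0` makes scrapyd listen on all interfaces so SpiderKeeper can reach it across the compose network, and the three `*_dir` settings keep eggs, logs, and the job database on the /data volume so they survive container restarts. A quick health check against a running worker (run from another container on the same network, or against a published port):

```bash
# scrapyd's built-in status endpoint; a healthy node replies with
# {"status": "ok", "running": 0, ...}
curl http://worker_1:6800/daemonstatus.json
```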
docker-compose.yml
```yaml
version: '2'
services:
  worker_1:
    image: scrapy
    hostname: worker_1
    volumes:
      - /Project/docker/worker_1:/data
      - /Project/docker/images:/images
    command: scrapyd
  worker_2:
    image: scrapy
    hostname: worker_2
    volumes:
      - /Project/docker/worker_2:/data
      - /Project/docker/images:/images
    command: scrapyd
  spiderkeeper:
    image: scrapy
    hostname: spiderkeeper
    ports:
      - '5000:5000'
    volumes:
      - /Project/docker/spiderkeeper:/data
    # services on the same compose network already resolve each other
    # by name; depends_on only controls start order
    depends_on:
      - worker_1
      - worker_2
    command: spiderkeeper --database-url=sqlite:////data/SpiderKeeper.db --server=http://worker_1:6800 --server=http://worker_2:6800 --username=admin --password=admin
```
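With the image built, the whole stack comes up with one command, and the SpiderKeeper UI is then reachable on port 5000 with the admin/admin credentials configured above. A typical session, assuming a scrapy project with a scrapy.cfg in the current directory:

```bash
# Start both scrapyd workers and the SpiderKeeper UI in the background
docker-compose up -d

# Package the project as an egg (scrapyd-client ships scrapyd-deploy);
# the egg is then uploaded to a project created in the SpiderKeeper UI
# at http://localhost:5000
scrapyd-deploy --build-egg output.egg
```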