# scraper-proxy / playbook.yml
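
# A single play that stops the old uvicorn process on the target hosts, ships the new
# release archive, extracts it, and restarts the service. Typical invocation (the
# inventory file name and the host entry below are assumptions, not part of this file):
#
#   ansible-playbook -i inventory.ini playbook.yml
#
# where inventory.ini defines the target group, e.g.:
#
#   [hostgroup]
#   192.0.2.10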
- name: Deploy scraper-proxy
  hosts: hostgroup
  remote_user: root
  tasks:
    - name: Stop the old service
      shell: |
        # Find the PID of the running uvicorn instance, terminate it gracefully,
        # and force-kill it if it is still alive after 3 seconds.
        pid=$(ps -ef | grep "uvicorn main:app" | grep -v grep | awk '{print $2}')
        if [ -n "$pid" ]; then
          echo "Stopping process with PID: $pid"
          kill -15 $pid
          sleep 3
          if ps -p $pid > /dev/null 2>&1; then
            echo "Force killing process with PID: $pid"
            kill -9 $pid
          fi
        else
          echo "No running process found"
        fi
      args:
        executable: /bin/bash
      ignore_errors: yes
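
    # A terser alternative to the ps/grep pipeline above would be pkill, which matches
    # on the full command line (a sketch with the same escalation idea, not part of the
    # original):
    #   pkill -TERM -f 'uvicorn main:app'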

    - name: Confirm the service has stopped
      wait_for:
        port: 7860
        state: stopped
        timeout: 30

    - name: Copy the release archive to the remote server
      copy:
        src: archive/release.tar.gz
        dest: /root/deploy/scraper-proxy.tar.gz
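
    # copy's src is resolved on the control node relative to the playbook directory,
    # so archive/release.tar.gz is expected to sit next to this playbook when the play runs.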

    - name: Extract the archive
      shell: |
        rm -rf /root/workspace/scraper-proxy
        mkdir -p /root/workspace/scraper-proxy
        tar -zxvf /root/deploy/scraper-proxy.tar.gz -C /root/workspace/scraper-proxy
      args:
        executable: /bin/bash
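
    # The copy + shell pair could also be collapsed into Ansible's unarchive module,
    # which transfers and extracts in one task (a sketch, not part of the original;
    # the target directory must exist before unarchive runs):
    #   - name: Ensure the target directory exists
    #     file:
    #       path: /root/workspace/scraper-proxy
    #       state: directory
    #   - name: Copy and extract in one step
    #     unarchive:
    #       src: archive/release.tar.gz
    #       dest: /root/workspace/scraper-proxy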

    - name: Build and run
      shell: |
        cd /root/workspace/scraper-proxy
        pip install --no-cache-dir -r requirements.txt
        nohup uvicorn main:app --host 0.0.0.0 --port 7860 > ./nohup.log 2>&1 &
      args:
        executable: /bin/bash
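
    # Backgrounding with nohup from a shell task usually works here because stdout and
    # stderr are redirected, but if the process gets reaped when the SSH session closes,
    # Ansible's fire-and-forget async mode is a common alternative (a sketch; the async
    # limit and layout are assumptions, not part of the original):
    #   - name: Build and run (detached)
    #     shell: |
    #       cd /root/workspace/scraper-proxy
    #       pip install --no-cache-dir -r requirements.txt
    #       uvicorn main:app --host 0.0.0.0 --port 7860 > ./nohup.log 2>&1
    #     args:
    #       executable: /bin/bash
    #     async: 2592000
    #     poll: 0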

    - name: Wait for the service to start
      wait_for:
        port: 7860
        timeout: 30
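
    # Optional follow-up, not in the original playbook: an HTTP health check on top of
    # the port check, assuming the app answers on / (path and expected status are
    # assumptions):
    #   - name: Check that the service responds
    #     uri:
    #       url: http://localhost:7860/
    #       status_code: 200
    #     register: health
    #     retries: 5
    #     delay: 3
    #     until: health.status == 200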