aiflow / run.sh
#!/bin/bash
current_time=$(date +"%Y-%m-%d %H:%M:%S")
LOG_FILE="${WORKDIR}/buildlog.txt"
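# NOTE: WORKDIR, WEBDAV_URL, WEBDAV_USER and WEBDAV_PASSWORD are assumed to be
# provided by the deployment environment (e.g. Dockerfile ENV or platform secrets);
# this script does not set them itself.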
# Start the PostgreSQL service
/usr/local/bin/docker-entrypoint.sh postgres &
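# The PostgreSQL image's entrypoint is backgrounded with "&" so the script can
# continue to the readiness check below while the database starts up.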
# Wait until the PostgreSQL service is ready to accept connections
echo "Waiting for PostgreSQL to start..."
until pg_isready -h localhost; do
    sleep 3
done
echo "PostgreSQL is up!"
# Run the database import script
echo "Running the database import script..."
${WORKDIR}/import-db.sh
# Set n8n environment variables
export N8N_ENCRYPTION_KEY="n8n8n8n"
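# NOTE: the encryption key above is hard-coded; n8n uses it to encrypt stored
# credentials, so changing it later makes existing credentials unreadable.
# Supplying it via a secret instead is a possible hardening step.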
# Allow all built-in Node.js modules in Function nodes
export NODE_FUNCTION_ALLOW_BUILTIN="*"
# Allow external npm modules in Function nodes
export NODE_FUNCTION_ALLOW_EXTERNAL="*"
# Activate automatic data pruning
export EXECUTIONS_DATA_PRUNE=true
# Number of hours after execution that n8n deletes data
export EXECUTIONS_DATA_MAX_AGE=36
# Number of executions to store
export EXECUTIONS_DATA_PRUNE_MAX_COUNT=1000
# Save executions ending in errors
export EXECUTIONS_DATA_SAVE_ON_ERROR=all
# Save successful executions
export EXECUTIONS_DATA_SAVE_ON_SUCCESS=all
# Don't save node progress for each execution
export EXECUTIONS_DATA_SAVE_ON_PROGRESS=false
# Don't save manually launched executions
export EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS=false
echo "检查 WebDAV 日志文件是否存在,下载或创建新文件..."
if curl -f -u "${WEBDAV_USER}:${WEBDAV_PASSWORD}" -o "${LOG_FILE}" "${WEBDAV_URL}/buildlog.txt"; then
echo "日志文件下载成功并写入部署时间: ${LOG_FILE}"
echo "hf部署时间: ${current_time}" >> "${LOG_FILE}"
else
echo "WebDAV 上日志文件不存在,创建新的日志文件: ${LOG_FILE}"
touch "${LOG_FILE}"
echo "hf部署时间: ${current_time}" >> "${LOG_FILE}"
fi
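# Optional (not in the original script): push the updated log back to WebDAV,
# assuming the server accepts HTTP PUT, e.g.:
#   curl -f -u "${WEBDAV_USER}:${WEBDAV_PASSWORD}" -T "${LOG_FILE}" "${WEBDAV_URL}/buildlog.txt"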
# Start n8n (resolved from PATH), replacing this shell process
exec n8n