feat: record logs

李如威 2025-09-15 16:48:39 +08:00
parent df7cb47cfd
commit f42a2583c1
10 changed files with 275 additions and 134 deletions

View File

@@ -1,5 +1,4 @@
import asyncio
from hashlib import md5
import json
import uvicorn
import uuid
@@ -7,6 +6,8 @@ import time
import models.db as DB
import nodes as F
from tortoise.transactions import in_transaction
from tortoise.expressions import Subquery
from tortoise.functions import Max
from fastapi import FastAPI, Depends, HTTPException
from models.schemas.system import InitStatusResponse, InitConfigRequest, BaseResponse, TableResponse
from models.schemas.user import LoginRequest, LoginResponse
@@ -155,6 +156,20 @@ async def _create_demo_records(app: FastAPI):
"cmd": ("cd /data/PAI/dbpai-beta/dbpai-jenkins;" " git stash;" " git pull origin v3.4.2;" " git stash pop;" " docker-compose restart;"),
},
},
{
"type": "status",
"title": "健康监测",
"description": "检测服务是否成功运行",
"uuid": uuid.uuid4().hex,
"detail": {
"host": "58.214.239.10",
"user": "dgx07",
"password": "f=bHt/.7kRo:KIXq",
"port": 6378,
"sudo": True,
"status_url": "http://localhost:1336/api/system/license_info",
},
},
],
)
await work_content.save()
@@ -250,7 +265,7 @@ def create_app():
return LoginResponse(data=user.json(del_columns=["password"]))
@app.post("/api/run", tags=["Flow"], description="执行任务", response_model=BaseResponse)
async def flow_run(work_uuid: str):
async def flow_run(work_uuid: str, user: DB.User = Depends(get_current_user)):
work = await DB.Work.filter(uuid=work_uuid).first()
if not work:
return BaseResponse(success=False, message="任务不存在")
@@ -265,7 +280,15 @@ def create_app():
if task_id in task_nodes and node_id in task_nodes[task_id]:
task_nodes[task_id][node_id] = status
async def _task(task_id: str, work: DB.Work):
async def _task(task_id: str, work: DB.Work, user: DB.User):
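# Create a run-log row up front so the run is visible as RUNNING; its status and result are updated when the task finishes, fails, or is cancelled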
work_log = DB.WorkLogs(
user_id=user.id,
work_id=work.id,
status=DB.WorkLogsStatus.RUNNING,
work_title=work.work_title,
work_desc=work.work_desc,
)
await work_log.save()
try:
work_content = await DB.WorkContent.filter(work_id=work.id).first()
if not work_content:
@@ -280,16 +303,20 @@ def create_app():
node_info = x["detail"]
params[node_id] = node_info
status[node_id] = "wait"
if node_type == "git":
node = F.GitNode()
node.set_info(node_id, lambda k, v: _update_node_status(task_id, k, v))
nodes.append(node)
elif node_type == "cmd":
node = F.CmdNode()
node.set_info(node_id, lambda k, v: _update_node_status(task_id, k, v))
nodes.append(node)
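# Map node types to their classes; an unknown type raises KeyError, which the outer handler catches and records as FAILED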
node_class_map = {
"git": F.GitNode,
"cmd": F.CmdNode,
"status": F.StatusNode
}
node_cls = node_class_map[node_type]
node = node_cls()
node.set_info(node_id, lambda k, v: _update_node_status(task_id, k, v))
nodes.append(node)
nodes.pop()
# For testing: filter out the "部署" (deploy) node
build_index = next((i for i, x in enumerate(work_content.content) if x["title"] == "部署"), -1)
if build_index >= 0:
del nodes[build_index]
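# Chain the nodes: "-" names the "true" action and ">>" links it to the next node (pocket_flow transition syntax)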
for i in range(len(nodes) - 1):
nodes[i] - "true" >> nodes[i + 1]
@@ -301,11 +328,25 @@ def create_app():
app.state.tasks.pop(task_id, None)
app.state.task_nodes.pop(task_id, None)
# Update the run log with the node outputs and final status
work_log.result = shared
if any(x.get("err") for x in shared.values()):
work_log.status = DB.WorkLogsStatus.FAILED
else:
work_log.status = DB.WorkLogsStatus.SUCCESS
await work_log.save()
except asyncio.CancelledError:
work_log.status = DB.WorkLogsStatus.CANCELLED
await work_log.save()
logger.debug(f"任务被取消: {task_id}")
app.state.tasks.pop(task_id, None)
except Exception as e:
logger.error(e)
work_log.status = DB.WorkLogsStatus.FAILED
await work_log.save()
app.state.tasks.pop(task_id, None)
app.state.tasks[task_id] = asyncio.create_task(_task(task_id, work))
app.state.tasks[task_id] = asyncio.create_task(_task(task_id, work, user))
return BaseResponse(data=task_id)
@app.post("/api/stop", tags=["Flow"], description="停止任务", response_model=BaseResponse)
@@ -321,13 +362,23 @@ def create_app():
global_tasks = app.state.tasks
global_status = app.state.task_nodes
work_list = await DB.Work.filter(is_deleted=0).order_by("-id")
work_content_list = await DB.WorkContent.filter(work_id__in=[x.id for x in work_list]).all()
work_ids = [x.id for x in work_list]
work_content_list = await DB.WorkContent.filter(work_id__in=work_ids).all()
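# Latest run log per work: take the max log id for each work_id, then fetch those rows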
sub_logs = await DB.WorkLogs.filter(work_id__in=work_ids).annotate(max_id=Max("id")).group_by("work_id").values_list("max_id", flat=True)
work_logs_list = await DB.WorkLogs.filter(id__in=sub_logs).all()
user_list = await DB.User.filter(id__in=list({x.user_id for x in work_logs_list})).all()
res = []
for work in work_list:
obj = work.json()
content = next((x for x in work_content_list if x.work_id == work.id), None)
if content:
obj["content"] = [{k: v for k, v in _c.items() if k != "detail"} for _c in (content.content or [])]
log: DB.WorkLogs = next((x for x in work_logs_list if x.work_id == work.id), DB.WorkLogs(user_id=-1))
log_user: DB.User = next((u for u in user_list if u.id == log.user_id), DB.User())
obj["last_user"] = log_user.username or ""
obj["last_run_time"] = log.create_time.timestamp() if log.create_time else 0
obj["last_status"] = log.status or ""
obj["is_running"] = work.uuid in global_tasks
obj["status"] = global_status[work.uuid] if work.uuid in global_status else {}
res.append(obj)

View File

@@ -8,12 +8,8 @@ class BaseModel(Model):
def json(self, del_columns=None):
user_json = dict(self)
user_json["create_time"] = (
self.create_time.strftime("%Y-%m-%d %H:%M:%S") if self.create_time else ""
)
user_json["update_time"] = (
self.update_time.strftime("%Y-%m-%d %H:%M:%S") if self.update_time else ""
)
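# create_time / update_time are now returned as Unix timestamps; the frontend converts them for display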
user_json["create_time"] = self.create_time.timestamp() if self.create_time else 0
user_json["update_time"] = self.update_time.timestamp() if self.update_time else 0
if del_columns is not None:
for column in del_columns:
try:

View File

@@ -24,6 +24,7 @@ class WorkLogsStatus(str, Enum):
RUNNING = "RUNNING"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
CANCELLED = "CANCELLED"
class WorkLogs(BaseModel):

View File

@@ -1,3 +1,4 @@
from core.pocket_flow import AsyncFlow
from .cmd_node import *
from .git_node import *
from .git_node import *
from .status_node import *

backend/app/nodes/base.py (new file, 101 lines)
View File

@@ -0,0 +1,101 @@
import asyncio
import asyncssh
async def async_run_cmd(
cmd: str,
host: str = None,
user: str = None,
password: str = None,
port: int = None,
sudo: bool = False,
):
"""
Run a command asynchronously.
- If host is None, the command runs locally.
- If host is set, the command runs on the remote host over SSH.
"""
async def read_stream(stream, stream_type):
async for line in stream:
text = line
if not isinstance(text, str):
text = line.decode()
yield stream_type, text.rstrip()
if not host:
# Local execution
proc = await asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
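# NOTE: the two streams are drained sequentially (stdout fully, then stderr); fine for short commands, but a very chatty stderr could fill its pipe buffer while stdout is still being read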
tasks = [
read_stream(proc.stdout, "stdout"),
read_stream(proc.stderr, "stderr"),
]
for t in tasks:
async for stream_type, line in t:
yield stream_type, line
await proc.wait()
else:
try:
conn = await asyncssh.connect(
host,
username=user,
password=password,
port=port,
known_hosts=None,
)
except (asyncssh.PermissionDenied, asyncssh.DisconnectError) as e:
# Authentication failed / connection dropped
yield "stderr", f"SSH connection failed: {str(e)}"
return
except Exception as e:
# Any other SSH error
yield "stderr", f"SSH error: {str(e)}"
return
try:
# Build the remote command; with sudo, pipe the password to sudo -S via stdin
remote_cmd = cmd
if sudo:
remote_cmd = f"echo {password!r} | sudo -S -p '' bash -c '{cmd}'"
# Run the command in a pseudo-terminal
proc = await conn.create_process(remote_cmd, term_type="xterm")
# Create async generators for stdout and stderr
stdout_gen = read_stream(proc.stdout, "stdout")
stderr_gen = read_stream(proc.stderr, "stderr")
# Drive both streams with tasks so lines can be yielded as they arrive
stdout_task = asyncio.create_task(stdout_gen.__anext__())
stderr_task = asyncio.create_task(stderr_gen.__anext__())
while True:
if stdout_task is None and stderr_task is None:
break
# Wait for whichever stream produces a line first
done, _ = await asyncio.wait(
[t for t in (stdout_task, stderr_task) if t is not None],
return_when=asyncio.FIRST_COMPLETED,
)
for fut in done:
try:
stream_type, line = fut.result()
yield stream_type, line
# Queue up the next line from the same stream
if stream_type == "stdout":
stdout_task = asyncio.create_task(stdout_gen.__anext__())
else:
stderr_task = asyncio.create_task(stderr_gen.__anext__())
except StopAsyncIteration:
# That stream is exhausted
if fut == stdout_task:
stdout_task = None
elif fut == stderr_task:
stderr_task = None
await proc.wait()
finally:
conn.close()
await conn.wait_closed()

View File

@@ -1,111 +1,7 @@
from fastapi import FastAPI
from core.pocket_flow import AsyncNode
from core.logger import logger
import asyncio
import asyncssh
async def async_run_cmd(
cmd: str,
host: str = None,
user: str = None,
password: str = None,
port: int = None,
sudo: bool = False,
):
"""
Run a command asynchronously.
- If host is None, the command runs locally.
- If host is set, the command runs on the remote host over SSH.
"""
async def read_stream(stream, stream_type):
async for line in stream:
text = line
if not isinstance(text, str):
text = line.decode()
yield stream_type, text.rstrip()
if not host:
# Local execution
proc = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
tasks = [
read_stream(proc.stdout, "stdout"),
read_stream(proc.stderr, "stderr"),
]
for t in tasks:
async for stream_type, line in t:
yield stream_type, line
await proc.wait()
else:
try:
conn = await asyncssh.connect(
host,
username=user,
password=password,
port=port,
known_hosts=None,
)
except (asyncssh.PermissionDenied, asyncssh.DisconnectError) as e:
# Authentication failed / connection dropped
yield "stderr", f"SSH connection failed: {str(e)}"
return
except Exception as e:
# Any other SSH error
yield "stderr", f"SSH error: {str(e)}"
return
try:
# Build the remote command; with sudo, pipe the password to sudo -S via stdin
remote_cmd = cmd
if sudo:
remote_cmd = f"echo {password!r} | sudo -S -p '' bash -c '{cmd}'"
# Run the command in a pseudo-terminal
proc = await conn.create_process(remote_cmd, term_type="xterm")
# Create async generators for stdout and stderr
stdout_gen = read_stream(proc.stdout, "stdout")
stderr_gen = read_stream(proc.stderr, "stderr")
# Drive both streams with tasks so lines can be yielded as they arrive
stdout_task = asyncio.create_task(stdout_gen.__anext__())
stderr_task = asyncio.create_task(stderr_gen.__anext__())
while True:
if stdout_task is None and stderr_task is None:
break
# Wait for whichever stream produces a line first
done, _ = await asyncio.wait(
[t for t in (stdout_task, stderr_task) if t is not None],
return_when=asyncio.FIRST_COMPLETED,
)
for fut in done:
try:
stream_type, line = fut.result()
yield stream_type, line
# Queue up the next line from the same stream
if stream_type == "stdout":
stdout_task = asyncio.create_task(stdout_gen.__anext__())
else:
stderr_task = asyncio.create_task(stderr_gen.__anext__())
except StopAsyncIteration:
# That stream is exhausted
if fut == stdout_task:
stdout_task = None
elif fut == stderr_task:
stderr_task = None
await proc.wait()
finally:
conn.close()
await conn.wait_closed()
from .base import async_run_cmd
class CmdNode(AsyncNode):
def set_info(self, id, callback):

View File

@@ -1,7 +1,7 @@
from fastapi import FastAPI
from core.pocket_flow import AsyncNode
from core.logger import logger
from .cmd_node import async_run_cmd
from .base import async_run_cmd
from pathlib import Path
class GitNode(AsyncNode):

View File

backend/app/nodes/status_node.py (new file, 72 lines)

@@ -0,0 +1,72 @@
from fastapi import FastAPI
from core.pocket_flow import AsyncNode
from core.logger import logger
from .base import async_run_cmd
class StatusNode(AsyncNode):
def set_info(self, id, callback):
self.id = id
self.callback = callback
async def prep_async(self, shared):
if self.callback:
await self.callback(self.id, "exec")
status_url = self.params[self.id].get("status_url", "")
return {
"host": self.params[self.id].get("host"),
"user": self.params[self.id].get("user"),
"password": self.params[self.id].get("password"),
"port": self.params[self.id].get("port"),
"sudo": self.params[self.id].get("sudo", False),
"cmd": "\n".join(
[
f'status_code=$(curl -s -o /dev/null -w "%{{http_code}}" {status_url})',
'if [ "$status_code" -eq 200 ]; then',
' echo "true"',
" exit 0",
"else",
' echo "Error: status code $status_code" >&2',
" exit 1",
"fi",
]
),
}
async def exec_async(self, prep_res):
if self.callback:
await self.callback(self.id, "exec")
err = []
out = []
async for stream_type, line in async_run_cmd(**prep_res):
if stream_type == "stderr":
err.append(str(line))
logger.debug(f"err: {line}")
else:
out.append(str(line))
logger.debug(f"out: {line}")
err_str = "\n".join(err)
out_str = "\n".join(out)
# A rejected sudo password shows up on stdout (the command runs in a PTY), so surface it as an error
if "Sorry, try again." in out_str and not err_str:
err_str = "sudo authentication failed: the password was rejected"
return {"err": err_str, "out": out_str}
async def post_async(self, shared, prep_res, exec_res):
shared[self.id] = exec_res
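# The returned action ("true"/"false") selects which outgoing transition the flow follows next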
result = "true"
if exec_res.get("err"):
result = "false"
if self.callback:
await self.callback(self.id, "post")
return result

View File

@@ -7,9 +7,12 @@ import { tableList, flowRun, flowStop } from "../../api/flow";
export default () => {
const { user, logout } = useAuth();
const eventSourceRef = useRef(null);
const pageInit = useRef(false);
const [status, setStatus] = useState({});
const eventSourceRef = useRef(null);
const needReload = useRef(false);
const pageInit = useRef(false);
const actionRef = useRef(null);
const isRunning = (record) => {
return status?.[record.uuid]
}
@@ -65,16 +68,26 @@ export default () => {
}
},
{
dataIndex: 'user',
dataIndex: 'last_user',
title: '最后执行用户',
},
{
dataIndex: 'time',
dataIndex: 'last_run_time',
title: '最后执行时间',
render: (_, record) => record?.last_run_time ? new Date(record?.last_run_time * 1000).toLocaleString() : ''
},
{
dataIndex: 'result',
dataIndex: 'last_status',
title: '最后执行结果',
valueType: 'select',
fieldProps: {
options: [
{ value: 'RUNNING', "label": "执行中" },
{ value: 'SUCCESS', "label": "成功" },
{ value: 'FAILED', "label": "失败" },
{ value: 'CANCELLED', "label": "取消" }
]
}
},
{
title: '操作',
@@ -125,13 +138,20 @@ export default () => {
if (data?.code === 403) {
logout();
}
setStatus({ ...data, time: new Date() })
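// Track whether any task is still running; when the last one finishes, reload the table so the "last run" columns refresh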
const hasValues = Object.values(data)?.length > 0;
if (hasValues && !needReload.current) {
needReload.current = true;
}
if (!hasValues && needReload.current) {
needReload.current = false;
actionRef?.current?.reload();
}
setStatus({ ...data })
} catch (err) {
console.error("Failed to parse SSE data:", err);
}
};
es.onerror = () => {
console.log(user);
es.close();
setTimeout(() => {
eventSourceRef.current = new EventSource(url);
@@ -148,6 +168,7 @@ export default () => {
return (
<Card title="工作台">
<ProTable
actionRef={actionRef}
rowKey='id'
search={false}
scroll={{ x: 'max-content' }}

View File

@ -1,6 +1,8 @@
.page-layout {
max-width: 100vw;
min-width: 100vw;
min-height: 100vh;
box-sizing: border-box;
.logo {
height: 40px;