feat: introduce pocketflow
This commit is contained in:
parent 46607a68df
commit b8055a03e2
core/pocket_flow.py
@@ -0,0 +1,213 @@
import asyncio, warnings, copy, time


class BaseNode:
    # Smallest unit of work: prep -> exec -> post, with named successors for graph wiring.
    def __init__(self):
        self.params, self.successors = {}, {}

    def set_params(self, params):
        self.params = params

    def next(self, node, action="default"):
        if action in self.successors:
            warnings.warn(f"Overwriting successor for action '{action}'")
        self.successors[action] = node
        return node

    def prep(self, shared):
        pass

    def exec(self, prep_res):
        pass

    def post(self, shared, prep_res, exec_res):
        pass

    def _exec(self, prep_res):
        return self.exec(prep_res)

    def _run(self, shared):
        p = self.prep(shared)
        e = self._exec(p)
        return self.post(shared, p, e)

    def run(self, shared):
        if self.successors:
            warnings.warn("Node won't run successors. Use Flow.")
        return self._run(shared)

    def __rshift__(self, other):
        # node_a >> node_b wires the "default" transition
        return self.next(other)

    def __sub__(self, action):
        # node_a - "action" starts a conditional transition; follow with >> target
        if isinstance(action, str):
            return _ConditionalTransition(self, action)
        raise TypeError("Action must be a string")


class _ConditionalTransition:
    # Helper returned by BaseNode.__sub__ so that (node - "action") >> target works.
    def __init__(self, src, action):
        self.src, self.action = src, action

    def __rshift__(self, tgt):
        return self.src.next(tgt, self.action)


class Node(BaseNode):
    # BaseNode with retries: exec() is attempted up to max_retries times, waiting `wait` seconds between attempts.
    def __init__(self, max_retries=1, wait=0):
        super().__init__()
        self.max_retries, self.wait = max_retries, wait

    def exec_fallback(self, prep_res, exc):
        raise exc

    def _exec(self, prep_res):
        for self.cur_retry in range(self.max_retries):
            try:
                return self.exec(prep_res)
            except Exception as e:
                if self.cur_retry == self.max_retries - 1:
                    return self.exec_fallback(prep_res, e)
                if self.wait > 0:
                    time.sleep(self.wait)


class BatchNode(Node):
    # Runs exec() once per item in the iterable returned by prep().
    def _exec(self, items):
        return [super(BatchNode, self)._exec(i) for i in (items or [])]


class Flow(BaseNode):
    # Orchestrates a graph of nodes, following the action string returned by each node's post().
    def __init__(self, start=None):
        super().__init__()
        self.start_node = start

    def start(self, start):
        self.start_node = start
        return start

    def get_next_node(self, curr, action):
        nxt = curr.successors.get(action or "default")
        if not nxt and curr.successors:
            warnings.warn(f"Flow ends: '{action}' not found in {list(curr.successors)}")
        return nxt

    def _orch(self, shared, params=None):
        curr, p, last_action = (
            copy.copy(self.start_node),
            (params or {**self.params}),
            None,
        )
        while curr:
            curr.set_params(p)
            last_action = curr._run(shared)
            curr = copy.copy(self.get_next_node(curr, last_action))
        return last_action

    def _run(self, shared):
        p = self.prep(shared)
        o = self._orch(shared)
        return self.post(shared, p, o)

    def post(self, shared, prep_res, exec_res):
        return exec_res


class BatchFlow(Flow):
    # Runs the whole flow once per parameter dict returned by prep().
    def _run(self, shared):
        pr = self.prep(shared) or []
        for bp in pr:
            self._orch(shared, {**self.params, **bp})
        return self.post(shared, pr, None)


class AsyncNode(Node):
    # Async counterpart of Node; implement the *_async hooks and drive it with run_async().
    async def prep_async(self, shared):
        pass

    async def exec_async(self, prep_res):
        pass

    async def exec_fallback_async(self, prep_res, exc):
        raise exc

    async def post_async(self, shared, prep_res, exec_res):
        pass

    async def _exec(self, prep_res):
        for self.cur_retry in range(self.max_retries):
            try:
                return await self.exec_async(prep_res)
            except Exception as e:
                if self.cur_retry == self.max_retries - 1:
                    return await self.exec_fallback_async(prep_res, e)
                if self.wait > 0:
                    await asyncio.sleep(self.wait)

    async def run_async(self, shared):
        if self.successors:
            warnings.warn("Node won't run successors. Use AsyncFlow.")
        return await self._run_async(shared)

    async def _run_async(self, shared):
        p = await self.prep_async(shared)
        e = await self._exec(p)
        return await self.post_async(shared, p, e)

    def _run(self, shared):
        raise RuntimeError("Use run_async.")


class AsyncBatchNode(AsyncNode, BatchNode):
    # Processes items sequentially, awaiting each one.
    async def _exec(self, items):
        return [await super(AsyncBatchNode, self)._exec(i) for i in items]


class AsyncParallelBatchNode(AsyncNode, BatchNode):
    # Processes all items concurrently with asyncio.gather.
    async def _exec(self, items):
        return await asyncio.gather(
            *(super(AsyncParallelBatchNode, self)._exec(i) for i in items)
        )


class AsyncFlow(Flow, AsyncNode):
    # Async orchestrator; sync nodes in the graph still run via their plain _run().
    async def _orch_async(self, shared, params=None):
        curr, p, last_action = (
            copy.copy(self.start_node),
            (params or {**self.params}),
            None,
        )
        while curr:
            curr.set_params(p)
            last_action = (
                await curr._run_async(shared)
                if isinstance(curr, AsyncNode)
                else curr._run(shared)
            )
            curr = copy.copy(self.get_next_node(curr, last_action))
        return last_action

    async def _run_async(self, shared):
        p = await self.prep_async(shared)
        o = await self._orch_async(shared)
        return await self.post_async(shared, p, o)

    async def post_async(self, shared, prep_res, exec_res):
        return exec_res


class AsyncBatchFlow(AsyncFlow, BatchFlow):
    # Runs the async flow once per parameter dict, sequentially.
    async def _run_async(self, shared):
        pr = await self.prep_async(shared) or []
        for bp in pr:
            await self._orch_async(shared, {**self.params, **bp})
        return await self.post_async(shared, pr, None)


class AsyncParallelBatchFlow(AsyncFlow, BatchFlow):
    # Runs the async flow once per parameter dict, all runs in parallel.
    async def _run_async(self, shared):
        pr = await self.prep_async(shared) or []
        await asyncio.gather(
            *(self._orch_async(shared, {**self.params, **bp}) for bp in pr)
        )
        return await self.post_async(shared, pr, None)
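For orientation, here is a minimal usage sketch (not part of this commit): nodes are wired with >> for the default transition and with - "action" >> for conditional branches, then driven by a Flow over a shared dict. All class names below are made up for illustration.

from core.pocket_flow import Node, Flow

class LoadNumbers(Node):
    def exec(self, prep_res):
        return [1, 2, 3]

    def post(self, shared, prep_res, exec_res):
        shared["numbers"] = exec_res
        return "sum" if exec_res else "empty"  # the returned action picks the next node

class SumNumbers(Node):
    def prep(self, shared):
        return shared["numbers"]

    def exec(self, numbers):
        return sum(numbers)

    def post(self, shared, prep_res, exec_res):
        shared["total"] = exec_res

class ReportEmpty(Node):
    def exec(self, prep_res):
        print("nothing to sum")

load, add, empty = LoadNumbers(), SumNumbers(max_retries=3, wait=1), ReportEmpty()
load - "sum" >> add      # follow "sum" to the summing node (exec retried up to 3 times, 1s apart)
load - "empty" >> empty  # follow "empty" to the fallback branch
shared = {}
Flow(start=load).run(shared)
print(shared)  # {'numbers': [1, 2, 3], 'total': 6}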
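A similar sketch for the async side (also not from the commit), using AsyncParallelBatchNode to fan items out with asyncio.gather; the URLs and the sleep stand in for real I/O.

import asyncio
from core.pocket_flow import AsyncNode, AsyncParallelBatchNode, AsyncFlow

class MakeUrls(AsyncNode):
    async def exec_async(self, prep_res):
        return ["https://example.com/a", "https://example.com/b"]

    async def post_async(self, shared, prep_res, exec_res):
        shared["urls"] = exec_res
        return "default"

class FetchAll(AsyncParallelBatchNode):
    async def prep_async(self, shared):
        return shared["urls"]

    async def exec_async(self, url):
        await asyncio.sleep(0.1)  # stand-in for an HTTP request
        return f"body of {url}"

    async def post_async(self, shared, prep_res, exec_res):
        shared["pages"] = exec_res

make, fetch = MakeUrls(), FetchAll()
make >> fetch
shared = {}
asyncio.run(AsyncFlow(start=make).run_async(shared))
print(shared["pages"])  # ['body of https://example.com/a', 'body of https://example.com/b']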
@@ -1,7 +1,8 @@
import uvicorn
import uuid
import time
import models.db as DB
import nodes as F
from fastapi import FastAPI, Depends, HTTPException
from models.schemas.system import InitStatusResponse, InitConfigRequest, BaseResponse
from models.schemas.user import LoginRequest, LoginResponse
@@ -12,7 +13,6 @@ from contextlib import asynccontextmanager
from fastapi.security import OAuth2PasswordBearer
from apscheduler.schedulers.asyncio import AsyncIOScheduler


# Simple in-memory login session store
sessions: dict[str, tuple[float, DB.User]] = {}
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
@@ -21,10 +21,9 @@ scheduler = AsyncIOScheduler()
# Periodically clean up expired sessions
def clear_session():
    TTL = 60 * 60 * 24  # sessions are valid for one day
    now = time.time()
    expired_keys = [token for token, (expires_at, _) in sessions.items() if expires_at + TTL < now]
    logger.info(f"Checking sessions, {now} {sessions}")
    for token in expired_keys:
        sessions.pop(token, None)
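The diff does not show how clear_session is attached to the scheduler; a minimal sketch of the wiring, assuming an hourly interval job (both the trigger and the job id are assumptions), could look like this.

# Assumed wiring, not shown in this diff: run clear_session every hour.
scheduler.add_job(clear_session, "interval", hours=1, id="clear_session")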
@@ -134,6 +133,17 @@ def create_app():
        logger.info(sessions)
        return BaseResponse()

    @app.post("/run", tags=["Flow"], description="Run a command", response_model=BaseResponse)
    async def flow_run(id: int):
        cmd_node = F.CMDNode()
        cmd_node.set_params({"cmd": "ls"})

        flow = F.AsyncFlow(cmd_node)
        flow.set_params({"cmd": """ls && pwd"""})  # flow params override cmd_node's own params during orchestration
        await flow.run_async({})

        return BaseResponse()

    return app
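Assuming the app is served locally on port 8000 (the diff does not say), the new endpoint can be exercised with a plain HTTP client; the id value is arbitrary since the handler does not use it yet.

import requests  # hypothetical client-side call; host and port are assumptions

resp = requests.post("http://127.0.0.1:8000/run", params={"id": 1})
print(resp.json())  # expects the BaseResponse payload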
nodes/__init__.py
@@ -0,0 +1,2 @@
from core.pocket_flow import AsyncFlow
from .cmd_node import *
nodes/cmd_node.py
@@ -0,0 +1,49 @@
from core.pocket_flow import AsyncNode
from core.logger import logger
import asyncio
import asyncssh


async def async_run_cmd(
    cmd: str, host: str = None, user: str = None, password: str = None
):
    """
    Run a command asynchronously.
    - If host is None, run it locally.
    - Otherwise, run it on the remote host over SSH.
    """
    if host is None:
        # Local execution
        proc = await asyncio.create_subprocess_shell(
            cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await proc.communicate()
        return proc.returncode, stdout.decode(), stderr.decode()
    else:
        # Remote execution over SSH
        conn = await asyncssh.connect(
            host,
            username=user,
            password=password,
            known_hosts=None,  # skip known_hosts verification
        )
        result = await conn.run(cmd, check=False)
        conn.close()  # close() is synchronous in asyncssh
        await conn.wait_closed()
        return result.exit_status, result.stdout, result.stderr


class CMDNode(AsyncNode):
    # AsyncNode that runs the command given in params["cmd"] and logs its output.
    async def prep_async(self, shared):
        pass

    async def exec_async(self, prep_res):
        cmd = self.params.get("cmd")
        _, out, err = await async_run_cmd(cmd)
        if out:
            logger.info(out)
        if err:
            logger.error(err)
        return

    async def post_async(self, shared, prep_res, exec_res):
        pass
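For reference, a small sketch (not part of the commit) of driving CMDNode outside FastAPI, mirroring the /run endpoint above. The flow's params are pushed down to each node during orchestration, so they override cmd_node's own params; "echo hello" is just a placeholder command.

import asyncio

import nodes as F


async def main():
    cmd_node = F.CMDNode()
    flow = F.AsyncFlow(cmd_node)
    flow.set_params({"cmd": "echo hello"})  # delivered to cmd_node by the orchestrator
    await flow.run_async({})                # CMDNode logs stdout/stderr via logger


asyncio.run(main())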
requirements.txt
@@ -2,4 +2,5 @@ fastapi==0.116.1
uvicorn==0.35.0
tortoise-orm==0.25.1
aiomysql==0.2.0
apscheduler==3.11.0
asyncssh==2.21.0