sync: mnogo
This commit is contained in:
@@ -15,6 +15,23 @@ from agentui.pipeline.storage import load_pipeline, save_pipeline, list_presets,
|
||||
from agentui.common.vendors import detect_vendor
|
||||
from agentui.common.cancel import request_cancel, clear_cancel, is_cancelled
|
||||
from agentui.pipeline.templating import render_template_simple
|
||||
# Manual resend support: use http client builder and executor helpers to sanitize/lookup originals
|
||||
from agentui.providers.http_client import build_client
|
||||
from agentui.pipeline.executor import (
|
||||
_sanitize_b64_for_log as _san_b64,
|
||||
_sanitize_json_string_for_log as _san_json_str,
|
||||
get_http_request as _get_http_req,
|
||||
)
|
||||
from agentui.common.manual_http import (
|
||||
parse_editable_http,
|
||||
dedupe_headers,
|
||||
content_type_is_json,
|
||||
normalize_jsonish_text,
|
||||
extract_json_trailing,
|
||||
try_parse_json,
|
||||
salvage_json_for_send,
|
||||
register_manual_request,
|
||||
)
|
||||
|
||||
|
||||
class UnifiedParams(BaseModel):
|
||||
@@ -744,6 +761,9 @@ def create_app() -> FastAPI:
|
||||
# --- Manual cancel/clear for pipeline execution ---
|
||||
@app.post("/admin/cancel")
|
||||
async def admin_cancel() -> JSONResponse:
|
||||
"""
|
||||
Graceful cancel: do not interrupt in-flight operations; stop before next step.
|
||||
"""
|
||||
try:
|
||||
p = load_pipeline()
|
||||
pid = p.get("id", "pipeline_editor")
|
||||
@@ -751,10 +771,27 @@ def create_app() -> FastAPI:
|
||||
p = default_pipeline()
|
||||
pid = p.get("id", "pipeline_editor")
|
||||
try:
|
||||
request_cancel(pid)
|
||||
request_cancel(pid, mode="graceful")
|
||||
except Exception:
|
||||
pass
|
||||
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True})
|
||||
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True, "mode": "graceful"})
|
||||
|
||||
@app.post("/admin/cancel/abort")
async def admin_cancel_abort() -> JSONResponse:
    """
    Hard abort: attempt to interrupt in-flight operations immediately.
    """
    # Resolve the pipeline id; any failure while loading falls back to the
    # default pipeline so the endpoint never errors out on a broken store.
    try:
        pid = load_pipeline().get("id", "pipeline_editor")
    except Exception:
        pid = default_pipeline().get("id", "pipeline_editor")
    # Best-effort signal: a failing cancel request must not break the response.
    try:
        request_cancel(pid, mode="abort")
    except Exception:
        pass
    return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True, "mode": "abort"})
||||
@app.post("/admin/cancel/clear")
|
||||
async def admin_cancel_clear() -> JSONResponse:
|
||||
@@ -770,7 +807,380 @@ def create_app() -> FastAPI:
|
||||
pass
|
||||
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": False})
|
||||
|
||||
# --- SSE endpoint for live pipeline trace ---
|
||||
# --- Manual HTTP resend endpoint (Burp-like Repeater for Logs) -----------------
|
||||
@app.post("/admin/http/manual-send")
async def manual_send(request: Request) -> JSONResponse:
    """
    Re-send an HTTP request from Logs with optional edits from UI.

    Accepts JSON:
      {
        "req_id": "original-req-id",  // required to fetch original (untrimmed) body if available
        "request_text": "METHOD URL HTTP/1.1\\nH: V\\n\\n{...}",  // optional raw edited HTTP text from UI
        "prefer_registry_original": true,  // use untrimmed original JSON body where possible
        // Optional explicit overrides (take precedence over parsed request_text):
        "method": "POST",
        "url": "https://example/api",
        "headers": { "Authorization": "Bearer [[VAR:incoming.headers.authorization]]" },
        "body_text": "{...}"  // explicit body text override (string)
      }

    Behavior:
      - Parses request_text into method/url/headers/body if provided.
      - Looks up original untrimmed body_json by req_id from executor registry.
      - If prefer_registry_original and edited body parses as JSON — deep-merge it onto original JSON (dicts merged, lists replaced).
      - If prefer_registry_original and edited body contains human preview fragments (e.g. trimmed) or fails JSON parse — try to extract the last JSON object from text; else fallback to original body_json.
      - Resolves [[...]] and {{ ... }} macros (URL/headers/body) against last STORE snapshot (vars + snapshot.OUT/etc) of the pipeline.
      - Emits http_req/http_resp SSE with a fresh req_id ('manual-<ts>') so the original log is never overwritten.
    """
    # Request payload is best-effort: a non-JSON body degrades to "no edits".
    try:
        payload = await request.json()
    except Exception:
        payload = {}

    # Parse edited HTTP text (Request area).
    # NOTE(review): this local helper appears to be unused — the code below
    # calls `parse_editable_http(edited_text)` instead. Candidate for removal;
    # confirm nothing else in this endpoint relies on it.
    def _parse_http_text(s: str) -> tuple[str, str, Dict[str, str], str]:
        # Defaults returned when the text is empty or unparseable.
        method, url = "POST", ""
        headers: Dict[str, str] = {}
        body = ""
        try:
            if not isinstance(s, str) or not s.strip():
                return method, url, headers, body
            txt = s.replace("\r\n", "\n")
            lines = txt.split("\n")
            if not lines:
                return method, url, headers, body
            first = (lines[0] or "").strip()
            import re as _re
            # Start line: "METHOD URL [HTTP/x.y]"; the version suffix is optional.
            m = _re.match(r"^([A-Z]+)\s+(\S+)(?:\s+HTTP/\d+(?:\.\d+)?)?$", first)
            i = 1
            if m:
                method = (m.group(1) or "POST").strip().upper()
                url = (m.group(2) or "").strip()
            else:
                i = 0  # no start line → treat as headers/body only

            def _is_header_line(ln: str) -> bool:
                if ":" not in ln:
                    return False
                name = ln.split(":", 1)[0].strip()
                # HTTP token: allow only letters/digits/hyphen. This prevents JSON
                # lines like "contents": ... being treated as headers.
                # NOTE(review): in a raw string `\\-` matches a literal backslash
                # or hyphen; plain `-` was probably intended — confirm (may be a
                # diff-escaping artifact).
                return bool(_re.fullmatch(r"[A-Za-z0-9\\-]+", name))

            # Read headers until a blank line OR until a non-header-looking line
            # (start of body).
            while i < len(lines):
                ln = lines[i]
                if ln.strip() == "":
                    i += 1
                    break
                if not _is_header_line(ln):
                    # Assume this and the rest is body (e.g., starts with {, [, or a quoted key)
                    break
                k, v = ln.split(":", 1)
                headers[str(k).strip()] = str(v).strip()
                i += 1

            # Remainder is the body (can be JSON or any text).
            # NOTE(review): joins with the two-character sequence "\\n", not a
            # newline — inconsistent with the `"\n"` split above. Looks like a
            # bug or a diff-escaping artifact; verify against the real source.
            body = "\\n".join(lines[i:]) if i < len(lines) else ""
        except Exception:
            pass
        return method, url, headers, body

    # Lookup original (untrimmed) request by req_id in the executor registry.
    orig: Optional[Dict[str, Any]] = None
    try:
        orig = _get_http_req(str(payload.get("req_id") or ""))
    except Exception:
        orig = None

    # Pipeline meta (timeout) and pipeline id; fall back to defaults on error.
    try:
        p = load_pipeline()
        default_pid = p.get("id", "pipeline_editor")
        timeout_sec = float(p.get("http_timeout_sec", 60) or 60)
    except Exception:
        default_pid = "pipeline_editor"
        timeout_sec = 60.0

    # Prefer the pipeline the original request belonged to.
    pid = str((orig or {}).get("pipeline_id") or default_pid)

    # Build macro context from STORE (last snapshot).
    try:
        store = load_var_store(pid) or {}
    except Exception:
        store = {}
    snapshot = store.get("snapshot") or {}
    ctx: Dict[str, Any] = {}
    try:
        ctx.update({
            "incoming": snapshot.get("incoming"),
            "params": snapshot.get("params"),
            "model": snapshot.get("model"),
            "vendor_format": snapshot.get("vendor_format"),
            "system": snapshot.get("system") or "",
        })
    except Exception:
        pass
    try:
        ctx["OUT"] = snapshot.get("OUT") or {}
    except Exception:
        ctx["OUT"] = {}
    try:
        # Top-level store entries (minus the snapshot itself) act as "vars".
        vmap = dict(store)
        vmap.pop("snapshot", None)
        ctx["vars"] = vmap
        ctx["store"] = store
    except Exception:
        ctx["vars"] = {}
        ctx["store"] = store or {}

    # Extract overrides / edited request data from the payload.
    edited_text = payload.get("request_text") or ""
    ov_method = payload.get("method")
    ov_url = payload.get("url")
    ov_headers = payload.get("headers") if isinstance(payload.get("headers"), dict) else None
    ov_body_text = payload.get("body_text")
    prefer_orig = bool(payload.get("prefer_registry_original", True))

    # Parse HTTP text (safe helper from common.manual_http).
    m_parsed, u_parsed, h_parsed, b_parsed = parse_editable_http(edited_text)

    # Compose method/url: explicit override > parsed text > original > default.
    method = str(ov_method or m_parsed or (orig or {}).get("method") or "POST").upper()
    url = str(ov_url or u_parsed or (orig or {}).get("url") or "")
    # headers: start from original -> parsed -> explicit override
    headers: Dict[str, Any] = {}
    try:
        if isinstance((orig or {}).get("headers"), dict):
            headers.update(orig.get("headers") or {})
    except Exception:
        pass
    try:
        headers.update(h_parsed or {})
    except Exception:
        pass
    try:
        if isinstance(ov_headers, dict):
            headers.update(ov_headers)
    except Exception:
        pass

    # Render [[...]] / {{ ... }} macros in URL and headers against the snapshot ctx.
    try:
        if url:
            url = render_template_simple(str(url), ctx, ctx.get("OUT") or {})
    except Exception:
        pass
    try:
        rendered_headers: Dict[str, Any] = {}
        for k, v in headers.items():
            try:
                rendered_headers[k] = render_template_simple(str(v), ctx, ctx.get("OUT") or {})
            except Exception:
                # Keep the raw value when a single header fails to render.
                rendered_headers[k] = v
        headers = rendered_headers
    except Exception:
        pass

    # Normalize/dedupe headers (case-insensitive) and drop auto-calculated ones.
    headers = dedupe_headers(headers)

    # Determine body (JSON vs text), preserving original untrimmed JSON.
    # Build orig_json (prefer registry; fallback parse from original body_text).
    orig_json = (orig or {}).get("body_json") if isinstance(orig, dict) else None
    if orig_json is None:
        try:
            ob = (orig or {}).get("body_text")
        except Exception:
            ob = None
        if isinstance(ob, str):
            try:
                ob_norm = normalize_jsonish_text(ob)
            except Exception:
                ob_norm = ob
            # Either a full parse or a trailing-JSON extraction may recover it.
            _oj = try_parse_json(ob_norm) or extract_json_trailing(ob_norm)
            if _oj is not None:
                orig_json = _oj

    # Resolve body edits through macros (explicit body_text wins over parsed body).
    raw_edited_body_text = ov_body_text if ov_body_text is not None else b_parsed
    try:
        edited_body_text_resolved = render_template_simple(str(raw_edited_body_text or ""), ctx, ctx.get("OUT") or {})
    except Exception:
        edited_body_text_resolved = str(raw_edited_body_text or "")

    # Compute final_json / final_text using helper (handles normalization,
    # salvage, prefer_registry_original, content-type).
    final_json, final_text = salvage_json_for_send(
        edited_body_text_resolved,
        headers,
        orig_json,
        prefer_orig
    )

    # Diagnostic: summarize merge decision without leaking payload contents.
    try:
        def _summ(v):
            # Shape-only summary of a value (type + size), never the data itself.
            try:
                if v is None:
                    return {"t": "none"}
                if isinstance(v, dict):
                    return {"t": "dict", "keys": len(v)}
                if isinstance(v, list):
                    return {"t": "list", "len": len(v)}
                if isinstance(v, str):
                    return {"t": "str", "len": len(v)}
                return {"t": type(v).__name__}
            except Exception:
                return {"t": "err"}

        norm_dbg = normalize_jsonish_text(edited_body_text_resolved)
        edited_json_dbg = try_parse_json(norm_dbg) or extract_json_trailing(norm_dbg)

        logger.info(
            "%s",
            json.dumps(
                {
                    "event": "manual_send_merge_debug",
                    "req_id_original": str(payload.get("req_id") or ""),
                    "prefer_registry_original": prefer_orig,
                    "headers_content_type": ("json" if content_type_is_json(headers) else "other"),
                    "orig_json": _summ(orig_json),
                    "edited_json": _summ(edited_json_dbg),
                    "final": {
                        "json": _summ(final_json),
                        "text_len": (len(final_text) if isinstance(final_text, str) else None)
                    },
                },
                ensure_ascii=False,
            ),
        )
    except Exception:
        pass

    # Fresh req_id to avoid any overwrite of the original log entry.
    import time as _time
    rid = f"manual-{int(_time.time()*1000)}"

    async def _publish(evt: Dict[str, Any]) -> None:
        # Best-effort SSE publish; trace failures never fail the send.
        try:
            await _trace_hub.publish(evt)
        except Exception:
            pass

    # Prepare request body for logs (sanitized/trimmed for base64).
    if final_json is not None:
        try:
            body_text_for_log = json.dumps(_san_b64(final_json, max_len=180), ensure_ascii=False, indent=2)
        except Exception:
            body_text_for_log = json.dumps(final_json, ensure_ascii=False)
    else:
        try:
            body_text_for_log = _san_json_str(str(final_text or ""), max_len=180)
        except Exception:
            body_text_for_log = str(final_text or "")

    # Register manual request in registry so a subsequent "send" on this log
    # entry has an original JSON source of its own.
    try:
        register_manual_request(rid, {
            "pipeline_id": pid,
            "node_id": "manual",
            "node_type": "Manual",
            "method": method,
            "url": url,
            "headers": dict(headers),
            "body_json": (final_json if final_json is not None else None),
            "body_text": (None if final_json is not None else str(final_text or "")),
        })
    except Exception:
        pass

    # Emit http_req SSE (Manual).
    await _publish({
        "event": "http_req",
        "node_id": "manual",
        "node_type": "Manual",
        "provider": "manual",
        "req_id": rid,
        "method": method,
        "url": url,
        "headers": headers,
        "body_text": body_text_for_log,
        "ts": int(_time.time()*1000),
    })

    # Perform HTTP.
    async with build_client(timeout=timeout_sec) as client:
        # Ensure JSON Content-Type when sending JSON.
        try:
            if final_json is not None:
                has_ct = any((str(k or "").lower() == "content-type") for k in headers.keys())
                if not has_ct:
                    headers["Content-Type"] = "application/json"
        except Exception:
            pass

        # GET/HEAD are sent without a body; otherwise serialize JSON or raw text.
        content = None
        try:
            if method in {"GET", "HEAD"}:
                content = None
            else:
                if final_json is not None:
                    content = json.dumps(final_json, ensure_ascii=False).encode("utf-8")
                else:
                    content = (final_text or "").encode("utf-8")
        except Exception:
            content = None

        # Send
        try:
            resp = await client.request(method, url, headers=headers, content=content)
        except Exception as e:
            # Network/client error — emit http_resp with error text (status 0).
            await _publish({
                "event": "http_resp",
                "node_id": "manual",
                "node_type": "Manual",
                "provider": "manual",
                "req_id": rid,
                "status": 0,
                "headers": {},
                "body_text": str(e),
                "ts": int(_time.time()*1000),
            })
            return JSONResponse({"ok": False, "error": str(e), "req_id": rid})

        # Build response body for log (prefer JSON with trimmed base64; fall
        # back through raw bytes, then .text, then a fixed marker string).
        try:
            try:
                obj = resp.json()
                body_text_resp = json.dumps(_san_b64(obj, max_len=180), ensure_ascii=False, indent=2)
            except Exception:
                try:
                    t = await resp.aread()
                    body_text_resp = t.decode(getattr(resp, "encoding", "utf-8") or "utf-8", errors="replace")
                except Exception:
                    try:
                        body_text_resp = resp.text
                    except Exception:
                        body_text_resp = "<resp.decode error>"
        except Exception:
            body_text_resp = "<resp.decode error>"

        await _publish({
            "event": "http_resp",
            "node_id": "manual",
            "node_type": "Manual",
            "provider": "manual",
            "req_id": rid,
            "status": int(getattr(resp, "status_code", 0)),
            "headers": dict(getattr(resp, "headers", {})),
            "body_text": body_text_resp,
            "ts": int(_time.time()*1000),
        })

        return JSONResponse({"ok": True, "req_id": rid})
|
||||
|
||||
# --- SSE endpoint for live pipeline trace ---
|
||||
@app.get("/admin/trace/stream")
|
||||
async def sse_trace() -> StreamingResponse:
|
||||
loop = _asyncio.get_event_loop()
|
||||
|
||||
Reference in New Issue
Block a user