sync: mnogo
This commit is contained in:
40
.agentui/vars/p_cancel_abort.json
Normal file
40
.agentui/vars/p_cancel_abort.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"WAS_ERROR__n2": true,
|
||||||
|
"CYCLEINDEX__n2": 0,
|
||||||
|
"snapshot": {
|
||||||
|
"incoming": {
|
||||||
|
"method": "POST",
|
||||||
|
"url": "http://localhost/test",
|
||||||
|
"path": "/test",
|
||||||
|
"query": "",
|
||||||
|
"headers": {
|
||||||
|
"x": "X-HEADER"
|
||||||
|
},
|
||||||
|
"json": {}
|
||||||
|
},
|
||||||
|
"params": {
|
||||||
|
"temperature": 0.25
|
||||||
|
},
|
||||||
|
"model": "gpt-x",
|
||||||
|
"vendor_format": "openai",
|
||||||
|
"system": "",
|
||||||
|
"OUT": {
|
||||||
|
"n2": {
|
||||||
|
"result": {
|
||||||
|
"error": "Cancelled by user (abort)"
|
||||||
|
},
|
||||||
|
"response_text": "",
|
||||||
|
"vars": {
|
||||||
|
"WAS_ERROR__n2": true,
|
||||||
|
"CYCLEINDEX__n2": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"OUT_TEXT": {
|
||||||
|
"n2": "Cancelled by user (abort)"
|
||||||
|
},
|
||||||
|
"LAST_NODE": "n2",
|
||||||
|
"OUT2": "Cancelled by user (abort)",
|
||||||
|
"EXEC_TRACE": "n2(ProviderCall)"
|
||||||
|
}
|
||||||
|
}
|
||||||
40
.agentui/vars/p_cancel_soft.json
Normal file
40
.agentui/vars/p_cancel_soft.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"WAS_ERROR__n2": false,
|
||||||
|
"CYCLEINDEX__n2": 0,
|
||||||
|
"snapshot": {
|
||||||
|
"incoming": {
|
||||||
|
"method": "POST",
|
||||||
|
"url": "http://localhost/test",
|
||||||
|
"path": "/test",
|
||||||
|
"query": "",
|
||||||
|
"headers": {
|
||||||
|
"x": "X-HEADER"
|
||||||
|
},
|
||||||
|
"json": {}
|
||||||
|
},
|
||||||
|
"params": {
|
||||||
|
"temperature": 0.25
|
||||||
|
},
|
||||||
|
"model": "gpt-x",
|
||||||
|
"vendor_format": "openai",
|
||||||
|
"system": "",
|
||||||
|
"OUT": {
|
||||||
|
"n2": {
|
||||||
|
"result": {
|
||||||
|
"echo": {}
|
||||||
|
},
|
||||||
|
"response_text": "",
|
||||||
|
"vars": {
|
||||||
|
"WAS_ERROR__n2": false,
|
||||||
|
"CYCLEINDEX__n2": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"OUT_TEXT": {
|
||||||
|
"n2": ""
|
||||||
|
},
|
||||||
|
"LAST_NODE": "n2",
|
||||||
|
"OUT2": "",
|
||||||
|
"EXEC_TRACE": "n2(ProviderCall)"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -19,21 +19,36 @@
|
|||||||
"OUT": {
|
"OUT": {
|
||||||
"n1": {
|
"n1": {
|
||||||
"result": {
|
"result": {
|
||||||
"error": {
|
"echo": {
|
||||||
"message": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys.",
|
"url": "https://api.openai.com/v1/chat/completions",
|
||||||
"type": "invalid_request_error",
|
"headers": {
|
||||||
"param": null,
|
"Content-Type": "application/json",
|
||||||
"code": "invalid_api_key"
|
"Authorization": "Bearer TEST"
|
||||||
|
},
|
||||||
|
"payload": {
|
||||||
|
"model": "gpt-x",
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"role": "system",
|
||||||
|
"content": "You are test"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "Say Привет"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"temperature": 0.25
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"response_text": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys."
|
"response_text": "https://api.openai.com/v1/chat/completions"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"OUT_TEXT": {
|
"OUT_TEXT": {
|
||||||
"n1": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys."
|
"n1": "https://api.openai.com/v1/chat/completions"
|
||||||
},
|
},
|
||||||
"LAST_NODE": "n1",
|
"LAST_NODE": "n1",
|
||||||
"OUT1": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys.",
|
"OUT1": "https://api.openai.com/v1/chat/completions",
|
||||||
"EXEC_TRACE": "n1(ProviderCall)"
|
"EXEC_TRACE": "n1(ProviderCall)"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -16,29 +16,6 @@
|
|||||||
"B": "bar"
|
"B": "bar"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"n2": {
|
|
||||||
"result": {
|
|
||||||
"id": "ret_mock_123",
|
|
||||||
"object": "chat.completion",
|
|
||||||
"model": "gpt-x",
|
|
||||||
"choices": [
|
|
||||||
{
|
|
||||||
"index": 0,
|
|
||||||
"message": {
|
|
||||||
"role": "assistant",
|
|
||||||
"content": "foo"
|
|
||||||
},
|
|
||||||
"finish_reason": "stop"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"usage": {
|
|
||||||
"prompt_tokens": 0,
|
|
||||||
"completion_tokens": 1,
|
|
||||||
"total_tokens": 0
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"response_text": "foo"
|
|
||||||
},
|
|
||||||
"n3": {
|
"n3": {
|
||||||
"result": {
|
"result": {
|
||||||
"id": "ret_mock_123",
|
"id": "ret_mock_123",
|
||||||
@@ -61,17 +38,40 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"response_text": "bar"
|
"response_text": "bar"
|
||||||
|
},
|
||||||
|
"n2": {
|
||||||
|
"result": {
|
||||||
|
"id": "ret_mock_123",
|
||||||
|
"object": "chat.completion",
|
||||||
|
"model": "gpt-x",
|
||||||
|
"choices": [
|
||||||
|
{
|
||||||
|
"index": 0,
|
||||||
|
"message": {
|
||||||
|
"role": "assistant",
|
||||||
|
"content": "foo"
|
||||||
|
},
|
||||||
|
"finish_reason": "stop"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"usage": {
|
||||||
|
"prompt_tokens": 0,
|
||||||
|
"completion_tokens": 1,
|
||||||
|
"total_tokens": 0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"response_text": "foo"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"OUT_TEXT": {
|
"OUT_TEXT": {
|
||||||
"n1": "foo",
|
"n1": "foo",
|
||||||
"n2": "foo",
|
"n3": "bar",
|
||||||
"n3": "bar"
|
"n2": "foo"
|
||||||
},
|
},
|
||||||
"LAST_NODE": "n2",
|
"LAST_NODE": "n3",
|
||||||
"OUT1": "foo",
|
"OUT1": "foo",
|
||||||
"OUT2": "foo",
|
|
||||||
"OUT3": "bar",
|
"OUT3": "bar",
|
||||||
"EXEC_TRACE": "n1(SetVars) -> n2(Return) -> n3(Return)"
|
"OUT2": "foo",
|
||||||
|
"EXEC_TRACE": "n1(SetVars) -> n3(Return) -> n2(Return)"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -10,21 +10,27 @@
|
|||||||
"OUT": {
|
"OUT": {
|
||||||
"n1": {
|
"n1": {
|
||||||
"result": {
|
"result": {
|
||||||
"error": {
|
"echo": {
|
||||||
"message": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys.",
|
"url": "https://api.openai.com/v1/chat/completions",
|
||||||
"type": "invalid_request_error",
|
"headers": {
|
||||||
"param": null,
|
"Content-Type": "application/json",
|
||||||
"code": "invalid_api_key"
|
"Authorization": "Bearer TEST"
|
||||||
|
},
|
||||||
|
"payload": {
|
||||||
|
"model": "gpt-x",
|
||||||
|
"messages": [],
|
||||||
|
"temperature": 0.1
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"response_text": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys."
|
"response_text": "https://api.openai.com/v1/chat/completions"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"OUT_TEXT": {
|
"OUT_TEXT": {
|
||||||
"n1": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys."
|
"n1": "https://api.openai.com/v1/chat/completions"
|
||||||
},
|
},
|
||||||
"LAST_NODE": "n1",
|
"LAST_NODE": "n1",
|
||||||
"OUT1": "Incorrect API key provided: TEST. You can find your API key at https://platform.openai.com/account/api-keys.",
|
"OUT1": "https://api.openai.com/v1/chat/completions",
|
||||||
"EXEC_TRACE": "n1(ProviderCall)"
|
"EXEC_TRACE": "n1(ProviderCall)"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
File diff suppressed because one or more lines are too long
@@ -15,6 +15,23 @@ from agentui.pipeline.storage import load_pipeline, save_pipeline, list_presets,
|
|||||||
from agentui.common.vendors import detect_vendor
|
from agentui.common.vendors import detect_vendor
|
||||||
from agentui.common.cancel import request_cancel, clear_cancel, is_cancelled
|
from agentui.common.cancel import request_cancel, clear_cancel, is_cancelled
|
||||||
from agentui.pipeline.templating import render_template_simple
|
from agentui.pipeline.templating import render_template_simple
|
||||||
|
# Manual resend support: use http client builder and executor helpers to sanitize/lookup originals
|
||||||
|
from agentui.providers.http_client import build_client
|
||||||
|
from agentui.pipeline.executor import (
|
||||||
|
_sanitize_b64_for_log as _san_b64,
|
||||||
|
_sanitize_json_string_for_log as _san_json_str,
|
||||||
|
get_http_request as _get_http_req,
|
||||||
|
)
|
||||||
|
from agentui.common.manual_http import (
|
||||||
|
parse_editable_http,
|
||||||
|
dedupe_headers,
|
||||||
|
content_type_is_json,
|
||||||
|
normalize_jsonish_text,
|
||||||
|
extract_json_trailing,
|
||||||
|
try_parse_json,
|
||||||
|
salvage_json_for_send,
|
||||||
|
register_manual_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class UnifiedParams(BaseModel):
|
class UnifiedParams(BaseModel):
|
||||||
@@ -744,6 +761,9 @@ def create_app() -> FastAPI:
|
|||||||
# --- Manual cancel/clear for pipeline execution ---
|
# --- Manual cancel/clear for pipeline execution ---
|
||||||
@app.post("/admin/cancel")
|
@app.post("/admin/cancel")
|
||||||
async def admin_cancel() -> JSONResponse:
|
async def admin_cancel() -> JSONResponse:
|
||||||
|
"""
|
||||||
|
Graceful cancel: do not interrupt in-flight operations; stop before next step.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
p = load_pipeline()
|
p = load_pipeline()
|
||||||
pid = p.get("id", "pipeline_editor")
|
pid = p.get("id", "pipeline_editor")
|
||||||
@@ -751,10 +771,27 @@ def create_app() -> FastAPI:
|
|||||||
p = default_pipeline()
|
p = default_pipeline()
|
||||||
pid = p.get("id", "pipeline_editor")
|
pid = p.get("id", "pipeline_editor")
|
||||||
try:
|
try:
|
||||||
request_cancel(pid)
|
request_cancel(pid, mode="graceful")
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True})
|
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True, "mode": "graceful"})
|
||||||
|
|
||||||
|
@app.post("/admin/cancel/abort")
|
||||||
|
async def admin_cancel_abort() -> JSONResponse:
|
||||||
|
"""
|
||||||
|
Hard abort: attempt to interrupt in-flight operations immediately.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
p = load_pipeline()
|
||||||
|
pid = p.get("id", "pipeline_editor")
|
||||||
|
except Exception:
|
||||||
|
p = default_pipeline()
|
||||||
|
pid = p.get("id", "pipeline_editor")
|
||||||
|
try:
|
||||||
|
request_cancel(pid, mode="abort")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": True, "mode": "abort"})
|
||||||
|
|
||||||
@app.post("/admin/cancel/clear")
|
@app.post("/admin/cancel/clear")
|
||||||
async def admin_cancel_clear() -> JSONResponse:
|
async def admin_cancel_clear() -> JSONResponse:
|
||||||
@@ -770,7 +807,380 @@ def create_app() -> FastAPI:
|
|||||||
pass
|
pass
|
||||||
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": False})
|
return JSONResponse({"ok": True, "pipeline_id": pid, "cancelled": False})
|
||||||
|
|
||||||
# --- SSE endpoint for live pipeline trace ---
|
# --- Manual HTTP resend endpoint (Burp-like Repeater for Logs) -----------------
|
||||||
|
@app.post("/admin/http/manual-send")
|
||||||
|
async def manual_send(request: Request) -> JSONResponse:
|
||||||
|
"""
|
||||||
|
Re-send an HTTP request from Logs with optional edits from UI.
|
||||||
|
|
||||||
|
Accepts JSON:
|
||||||
|
{
|
||||||
|
"req_id": "original-req-id", // required to fetch original (untrimmed) body if available
|
||||||
|
"request_text": "METHOD URL HTTP/1.1\\nH: V\\n\\n{...}", // optional raw edited HTTP text from UI
|
||||||
|
"prefer_registry_original": true, // use untrimmed original JSON body where possible
|
||||||
|
// Optional explicit overrides (take precedence over parsed request_text):
|
||||||
|
"method": "POST",
|
||||||
|
"url": "https://example/api",
|
||||||
|
"headers": { "Authorization": "Bearer [[VAR:incoming.headers.authorization]]" },
|
||||||
|
"body_text": "{...}" // explicit body text override (string)
|
||||||
|
}
|
||||||
|
|
||||||
|
Behavior:
|
||||||
|
- Parses request_text into method/url/headers/body if provided.
|
||||||
|
- Looks up original untrimmed body_json by req_id from executor registry.
|
||||||
|
- If prefer_registry_original and edited body parses as JSON — deep-merge it onto original JSON (dicts merged, lists replaced).
|
||||||
|
- If prefer_registry_original and edited body contains human preview fragments (e.g. trimmed) or fails JSON parse — try to extract the last JSON object from text; else fallback to original body_json.
|
||||||
|
- Resolves [[...]] and {{ ... }} macros (URL/headers/body) against last STORE snapshot (vars + snapshot.OUT/etc) of the pipeline.
|
||||||
|
- Emits http_req/http_resp SSE with a fresh req_id ('manual-<ts>') so the original log is never overwritten.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
payload = await request.json()
|
||||||
|
except Exception:
|
||||||
|
payload = {}
|
||||||
|
|
||||||
|
# Parse edited HTTP text (Request area)
|
||||||
|
def _parse_http_text(s: str) -> tuple[str, str, Dict[str, str], str]:
|
||||||
|
method, url = "POST", ""
|
||||||
|
headers: Dict[str, str] = {}
|
||||||
|
body = ""
|
||||||
|
try:
|
||||||
|
if not isinstance(s, str) or not s.strip():
|
||||||
|
return method, url, headers, body
|
||||||
|
txt = s.replace("\r\n", "\n")
|
||||||
|
lines = txt.split("\n")
|
||||||
|
if not lines:
|
||||||
|
return method, url, headers, body
|
||||||
|
first = (lines[0] or "").strip()
|
||||||
|
import re as _re
|
||||||
|
m = _re.match(r"^([A-Z]+)\s+(\S+)(?:\s+HTTP/\d+(?:\.\d+)?)?$", first)
|
||||||
|
i = 1
|
||||||
|
if m:
|
||||||
|
method = (m.group(1) or "POST").strip().upper()
|
||||||
|
url = (m.group(2) or "").strip()
|
||||||
|
else:
|
||||||
|
i = 0 # no start line → treat as headers/body only
|
||||||
|
|
||||||
|
def _is_header_line(ln: str) -> bool:
|
||||||
|
if ":" not in ln:
|
||||||
|
return False
|
||||||
|
name = ln.split(":", 1)[0].strip()
|
||||||
|
# HTTP token: allow only letters/digits/hyphen. This prevents JSON lines like "contents": ... being treated as headers.
|
||||||
|
return bool(_re.fullmatch(r"[A-Za-z0-9\\-]+", name))
|
||||||
|
|
||||||
|
# Read headers until a blank line OR until a non-header-looking line (start of body)
|
||||||
|
while i < len(lines):
|
||||||
|
ln = lines[i]
|
||||||
|
if ln.strip() == "":
|
||||||
|
i += 1
|
||||||
|
break
|
||||||
|
if not _is_header_line(ln):
|
||||||
|
# Assume this and the rest is body (e.g., starts with {, [, or a quoted key)
|
||||||
|
break
|
||||||
|
k, v = ln.split(":", 1)
|
||||||
|
headers[str(k).strip()] = str(v).strip()
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
# Remainder is the body (can be JSON or any text)
|
||||||
|
body = "\\n".join(lines[i:]) if i < len(lines) else ""
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return method, url, headers, body
|
||||||
|
|
||||||
|
# Lookup original (untrimmed) body by req_id
|
||||||
|
orig: Optional[Dict[str, Any]] = None
|
||||||
|
try:
|
||||||
|
orig = _get_http_req(str(payload.get("req_id") or ""))
|
||||||
|
except Exception:
|
||||||
|
orig = None
|
||||||
|
|
||||||
|
# Pipeline meta (timeout) and pipeline id
|
||||||
|
try:
|
||||||
|
p = load_pipeline()
|
||||||
|
default_pid = p.get("id", "pipeline_editor")
|
||||||
|
timeout_sec = float(p.get("http_timeout_sec", 60) or 60)
|
||||||
|
except Exception:
|
||||||
|
default_pid = "pipeline_editor"
|
||||||
|
timeout_sec = 60.0
|
||||||
|
|
||||||
|
pid = str((orig or {}).get("pipeline_id") or default_pid)
|
||||||
|
|
||||||
|
# Build macro context from STORE (last snapshot)
|
||||||
|
try:
|
||||||
|
store = load_var_store(pid) or {}
|
||||||
|
except Exception:
|
||||||
|
store = {}
|
||||||
|
snapshot = store.get("snapshot") or {}
|
||||||
|
ctx: Dict[str, Any] = {}
|
||||||
|
try:
|
||||||
|
ctx.update({
|
||||||
|
"incoming": snapshot.get("incoming"),
|
||||||
|
"params": snapshot.get("params"),
|
||||||
|
"model": snapshot.get("model"),
|
||||||
|
"vendor_format": snapshot.get("vendor_format"),
|
||||||
|
"system": snapshot.get("system") or "",
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
ctx["OUT"] = snapshot.get("OUT") or {}
|
||||||
|
except Exception:
|
||||||
|
ctx["OUT"] = {}
|
||||||
|
try:
|
||||||
|
vmap = dict(store)
|
||||||
|
vmap.pop("snapshot", None)
|
||||||
|
ctx["vars"] = vmap
|
||||||
|
ctx["store"] = store
|
||||||
|
except Exception:
|
||||||
|
ctx["vars"] = {}
|
||||||
|
ctx["store"] = store or {}
|
||||||
|
|
||||||
|
# Extract overrides / edited request data
|
||||||
|
edited_text = payload.get("request_text") or ""
|
||||||
|
ov_method = payload.get("method")
|
||||||
|
ov_url = payload.get("url")
|
||||||
|
ov_headers = payload.get("headers") if isinstance(payload.get("headers"), dict) else None
|
||||||
|
ov_body_text = payload.get("body_text")
|
||||||
|
prefer_orig = bool(payload.get("prefer_registry_original", True))
|
||||||
|
|
||||||
|
# Parse HTTP text (safe)
|
||||||
|
m_parsed, u_parsed, h_parsed, b_parsed = parse_editable_http(edited_text)
|
||||||
|
|
||||||
|
# Compose method/url/headers
|
||||||
|
method = str(ov_method or m_parsed or (orig or {}).get("method") or "POST").upper()
|
||||||
|
url = str(ov_url or u_parsed or (orig or {}).get("url") or "")
|
||||||
|
# headers: start from original -> parsed -> explicit override
|
||||||
|
headers: Dict[str, Any] = {}
|
||||||
|
try:
|
||||||
|
if isinstance((orig or {}).get("headers"), dict):
|
||||||
|
headers.update(orig.get("headers") or {})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
headers.update(h_parsed or {})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
if isinstance(ov_headers, dict):
|
||||||
|
headers.update(ov_headers)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Render macros in URL and headers
|
||||||
|
try:
|
||||||
|
if url:
|
||||||
|
url = render_template_simple(str(url), ctx, ctx.get("OUT") or {})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
rendered_headers: Dict[str, Any] = {}
|
||||||
|
for k, v in headers.items():
|
||||||
|
try:
|
||||||
|
rendered_headers[k] = render_template_simple(str(v), ctx, ctx.get("OUT") or {})
|
||||||
|
except Exception:
|
||||||
|
rendered_headers[k] = v
|
||||||
|
headers = rendered_headers
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Normalize/dedupe headers (case-insensitive) and drop auto-calculated ones
|
||||||
|
headers = dedupe_headers(headers)
|
||||||
|
|
||||||
|
# Determine body (JSON vs text), preserving original untrimmed JSON
|
||||||
|
# Build orig_json (prefer registry; fallback parse from original body_text)
|
||||||
|
orig_json = (orig or {}).get("body_json") if isinstance(orig, dict) else None
|
||||||
|
if orig_json is None:
|
||||||
|
try:
|
||||||
|
ob = (orig or {}).get("body_text")
|
||||||
|
except Exception:
|
||||||
|
ob = None
|
||||||
|
if isinstance(ob, str):
|
||||||
|
try:
|
||||||
|
ob_norm = normalize_jsonish_text(ob)
|
||||||
|
except Exception:
|
||||||
|
ob_norm = ob
|
||||||
|
_oj = try_parse_json(ob_norm) or extract_json_trailing(ob_norm)
|
||||||
|
if _oj is not None:
|
||||||
|
orig_json = _oj
|
||||||
|
|
||||||
|
# Resolve body edits through macros
|
||||||
|
raw_edited_body_text = ov_body_text if ov_body_text is not None else b_parsed
|
||||||
|
try:
|
||||||
|
edited_body_text_resolved = render_template_simple(str(raw_edited_body_text or ""), ctx, ctx.get("OUT") or {})
|
||||||
|
except Exception:
|
||||||
|
edited_body_text_resolved = str(raw_edited_body_text or "")
|
||||||
|
|
||||||
|
# Compute final_json / final_text using helper (handles normalization, salvage, prefer_registry_original, content-type)
|
||||||
|
final_json, final_text = salvage_json_for_send(
|
||||||
|
edited_body_text_resolved,
|
||||||
|
headers,
|
||||||
|
orig_json,
|
||||||
|
prefer_orig
|
||||||
|
)
|
||||||
|
|
||||||
|
# Diagnostic: summarize merge decision without leaking payload
|
||||||
|
try:
|
||||||
|
def _summ(v):
|
||||||
|
try:
|
||||||
|
if v is None:
|
||||||
|
return {"t": "none"}
|
||||||
|
if isinstance(v, dict):
|
||||||
|
return {"t": "dict", "keys": len(v)}
|
||||||
|
if isinstance(v, list):
|
||||||
|
return {"t": "list", "len": len(v)}
|
||||||
|
if isinstance(v, str):
|
||||||
|
return {"t": "str", "len": len(v)}
|
||||||
|
return {"t": type(v).__name__}
|
||||||
|
except Exception:
|
||||||
|
return {"t": "err"}
|
||||||
|
|
||||||
|
norm_dbg = normalize_jsonish_text(edited_body_text_resolved)
|
||||||
|
edited_json_dbg = try_parse_json(norm_dbg) or extract_json_trailing(norm_dbg)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"%s",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"event": "manual_send_merge_debug",
|
||||||
|
"req_id_original": str(payload.get("req_id") or ""),
|
||||||
|
"prefer_registry_original": prefer_orig,
|
||||||
|
"headers_content_type": ("json" if content_type_is_json(headers) else "other"),
|
||||||
|
"orig_json": _summ(orig_json),
|
||||||
|
"edited_json": _summ(edited_json_dbg),
|
||||||
|
"final": {
|
||||||
|
"json": _summ(final_json),
|
||||||
|
"text_len": (len(final_text) if isinstance(final_text, str) else None)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ensure_ascii=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Fresh req_id to avoid any overwrite of original log
|
||||||
|
import time as _time
|
||||||
|
rid = f"manual-{int(_time.time()*1000)}"
|
||||||
|
|
||||||
|
async def _publish(evt: Dict[str, Any]) -> None:
|
||||||
|
try:
|
||||||
|
await _trace_hub.publish(evt)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Prepare request body for logs (sanitized/trimmed for base64)
|
||||||
|
if final_json is not None:
|
||||||
|
try:
|
||||||
|
body_text_for_log = json.dumps(_san_b64(final_json, max_len=180), ensure_ascii=False, indent=2)
|
||||||
|
except Exception:
|
||||||
|
body_text_for_log = json.dumps(final_json, ensure_ascii=False)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
body_text_for_log = _san_json_str(str(final_text or ""), max_len=180)
|
||||||
|
except Exception:
|
||||||
|
body_text_for_log = str(final_text or "")
|
||||||
|
|
||||||
|
# Register manual request in registry so subsequent "send" on this log has an original JSON source
|
||||||
|
try:
|
||||||
|
register_manual_request(rid, {
|
||||||
|
"pipeline_id": pid,
|
||||||
|
"node_id": "manual",
|
||||||
|
"node_type": "Manual",
|
||||||
|
"method": method,
|
||||||
|
"url": url,
|
||||||
|
"headers": dict(headers),
|
||||||
|
"body_json": (final_json if final_json is not None else None),
|
||||||
|
"body_text": (None if final_json is not None else str(final_text or "")),
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Emit http_req SSE (Manual)
|
||||||
|
await _publish({
|
||||||
|
"event": "http_req",
|
||||||
|
"node_id": "manual",
|
||||||
|
"node_type": "Manual",
|
||||||
|
"provider": "manual",
|
||||||
|
"req_id": rid,
|
||||||
|
"method": method,
|
||||||
|
"url": url,
|
||||||
|
"headers": headers,
|
||||||
|
"body_text": body_text_for_log,
|
||||||
|
"ts": int(_time.time()*1000),
|
||||||
|
})
|
||||||
|
|
||||||
|
# Perform HTTP
|
||||||
|
async with build_client(timeout=timeout_sec) as client:
|
||||||
|
# Ensure JSON Content-Type when sending JSON
|
||||||
|
try:
|
||||||
|
if final_json is not None:
|
||||||
|
has_ct = any((str(k or "").lower() == "content-type") for k in headers.keys())
|
||||||
|
if not has_ct:
|
||||||
|
headers["Content-Type"] = "application/json"
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
content = None
|
||||||
|
try:
|
||||||
|
if method in {"GET", "HEAD"}:
|
||||||
|
content = None
|
||||||
|
else:
|
||||||
|
if final_json is not None:
|
||||||
|
content = json.dumps(final_json, ensure_ascii=False).encode("utf-8")
|
||||||
|
else:
|
||||||
|
content = (final_text or "").encode("utf-8")
|
||||||
|
except Exception:
|
||||||
|
content = None
|
||||||
|
|
||||||
|
# Send
|
||||||
|
try:
|
||||||
|
resp = await client.request(method, url, headers=headers, content=content)
|
||||||
|
except Exception as e:
|
||||||
|
# Network/client error — emit http_resp with error text
|
||||||
|
await _publish({
|
||||||
|
"event": "http_resp",
|
||||||
|
"node_id": "manual",
|
||||||
|
"node_type": "Manual",
|
||||||
|
"provider": "manual",
|
||||||
|
"req_id": rid,
|
||||||
|
"status": 0,
|
||||||
|
"headers": {},
|
||||||
|
"body_text": str(e),
|
||||||
|
"ts": int(_time.time()*1000),
|
||||||
|
})
|
||||||
|
return JSONResponse({"ok": False, "error": str(e), "req_id": rid})
|
||||||
|
|
||||||
|
# Build response body for log (prefer JSON with trimmed base64)
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
obj = resp.json()
|
||||||
|
body_text_resp = json.dumps(_san_b64(obj, max_len=180), ensure_ascii=False, indent=2)
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
t = await resp.aread()
|
||||||
|
body_text_resp = t.decode(getattr(resp, "encoding", "utf-8") or "utf-8", errors="replace")
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
body_text_resp = resp.text
|
||||||
|
except Exception:
|
||||||
|
body_text_resp = "<resp.decode error>"
|
||||||
|
except Exception:
|
||||||
|
body_text_resp = "<resp.decode error>"
|
||||||
|
|
||||||
|
await _publish({
|
||||||
|
"event": "http_resp",
|
||||||
|
"node_id": "manual",
|
||||||
|
"node_type": "Manual",
|
||||||
|
"provider": "manual",
|
||||||
|
"req_id": rid,
|
||||||
|
"status": int(getattr(resp, "status_code", 0)),
|
||||||
|
"headers": dict(getattr(resp, "headers", {})),
|
||||||
|
"body_text": body_text_resp,
|
||||||
|
"ts": int(_time.time()*1000),
|
||||||
|
})
|
||||||
|
|
||||||
|
return JSONResponse({"ok": True, "req_id": rid})
|
||||||
|
|
||||||
|
# --- SSE endpoint for live pipeline trace --- # --- SSE endpoint for live pipeline trace ---
|
||||||
@app.get("/admin/trace/stream")
|
@app.get("/admin/trace/stream")
|
||||||
async def sse_trace() -> StreamingResponse:
|
async def sse_trace() -> StreamingResponse:
|
||||||
loop = _asyncio.get_event_loop()
|
loop = _asyncio.get_event_loop()
|
||||||
|
|||||||
@@ -6,14 +6,25 @@ import threading
|
|||||||
# Simple in-process cancel flags storage (per pipeline_id)
|
# Simple in-process cancel flags storage (per pipeline_id)
|
||||||
# Thread-safe for FastAPI workers in same process
|
# Thread-safe for FastAPI workers in same process
|
||||||
_cancel_flags: Dict[str, bool] = {}
|
_cancel_flags: Dict[str, bool] = {}
|
||||||
|
# Mode of cancellation per pipeline: "graceful" (default) or "abort"
|
||||||
|
_cancel_modes: Dict[str, str] = {}
|
||||||
_lock = threading.Lock()
|
_lock = threading.Lock()
|
||||||
|
|
||||||
|
|
||||||
def request_cancel(pipeline_id: str) -> None:
|
def request_cancel(pipeline_id: str, mode: str = "graceful") -> None:
|
||||||
"""Set cancel flag for given pipeline id."""
|
"""Set cancel flag for given pipeline id with an optional mode.
|
||||||
|
|
||||||
|
mode:
|
||||||
|
- "graceful": do not interrupt in-flight operations, stop before next step
|
||||||
|
- "abort": attempt to cancel in-flight operations immediately
|
||||||
|
"""
|
||||||
pid = str(pipeline_id or "pipeline_editor")
|
pid = str(pipeline_id or "pipeline_editor")
|
||||||
|
m = str(mode or "graceful").lower().strip()
|
||||||
|
if m not in {"graceful", "abort"}:
|
||||||
|
m = "graceful"
|
||||||
with _lock:
|
with _lock:
|
||||||
_cancel_flags[pid] = True
|
_cancel_flags[pid] = True
|
||||||
|
_cancel_modes[pid] = m
|
||||||
|
|
||||||
|
|
||||||
def clear_cancel(pipeline_id: str) -> None:
|
def clear_cancel(pipeline_id: str) -> None:
|
||||||
@@ -21,10 +32,19 @@ def clear_cancel(pipeline_id: str) -> None:
|
|||||||
pid = str(pipeline_id or "pipeline_editor")
|
pid = str(pipeline_id or "pipeline_editor")
|
||||||
with _lock:
|
with _lock:
|
||||||
_cancel_flags.pop(pid, None)
|
_cancel_flags.pop(pid, None)
|
||||||
|
_cancel_modes.pop(pid, None)
|
||||||
|
|
||||||
|
|
||||||
def is_cancelled(pipeline_id: str) -> bool:
|
def is_cancelled(pipeline_id: str) -> bool:
|
||||||
"""Check cancel flag for given pipeline id."""
|
"""Check cancel flag for given pipeline id."""
|
||||||
pid = str(pipeline_id or "pipeline_editor")
|
pid = str(pipeline_id or "pipeline_editor")
|
||||||
with _lock:
|
with _lock:
|
||||||
return bool(_cancel_flags.get(pid, False))
|
return bool(_cancel_flags.get(pid, False))
|
||||||
|
|
||||||
|
|
||||||
|
def get_cancel_mode(pipeline_id: str) -> str:
|
||||||
|
"""Return current cancel mode for given pipeline id: 'graceful' or 'abort' (default graceful)."""
|
||||||
|
pid = str(pipeline_id or "pipeline_editor")
|
||||||
|
with _lock:
|
||||||
|
m = _cancel_modes.get(pid)
|
||||||
|
return m if m in {"graceful", "abort"} else "graceful"
|
||||||
415
agentui/common/manual_http.py
Normal file
415
agentui/common/manual_http.py
Normal file
@@ -0,0 +1,415 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from typing import Any, Dict, Optional, Tuple
|
||||||
|
|
||||||
|
# Reuse executor's registry for original (untrimmed) requests
|
||||||
|
try:
|
||||||
|
from agentui.pipeline.executor import register_http_request as _reg_http_req # type: ignore
|
||||||
|
except Exception: # pragma: no cover
|
||||||
|
_reg_http_req = None # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
# -------- HTTP editable text parser (safe) --------
|
||||||
|
def parse_editable_http(s: str) -> Tuple[str, str, Dict[str, str], str]:
|
||||||
|
"""
|
||||||
|
Parse text pasted from Request area into (method, url, headers, body_text).
|
||||||
|
Stops header parsing when a line is not a valid HTTP header key (prevents treating JSON like '"contents": ...' as header).
|
||||||
|
"""
|
||||||
|
method, url = "POST", ""
|
||||||
|
headers: Dict[str, str] = {}
|
||||||
|
body = ""
|
||||||
|
try:
|
||||||
|
if not isinstance(s, str) or not s.strip():
|
||||||
|
return method, url, headers, body
|
||||||
|
txt = s.replace("\r\n", "\n")
|
||||||
|
lines = txt.split("\n")
|
||||||
|
if not lines:
|
||||||
|
return method, url, headers, body
|
||||||
|
first = (lines[0] or "").strip()
|
||||||
|
m = re.match(r"^([A-Z]+)\s+(\S+)(?:\s+HTTP/\d+(?:\.\d+)?)?$", first)
|
||||||
|
i = 1
|
||||||
|
if m:
|
||||||
|
method = (m.group(1) or "POST").strip().upper()
|
||||||
|
url = (m.group(2) or "").strip()
|
||||||
|
else:
|
||||||
|
i = 0 # no start-line -> treat as headers/body only
|
||||||
|
|
||||||
|
def _is_header_line(ln: str) -> bool:
|
||||||
|
if ":" not in ln:
|
||||||
|
return False
|
||||||
|
name = ln.split(":", 1)[0].strip()
|
||||||
|
# HTTP token: only letters/digits/hyphen. Prevents JSON keys like "contents": from being treated as headers.
|
||||||
|
return bool(re.fullmatch(r"[A-Za-z0-9\-]+", name))
|
||||||
|
|
||||||
|
# Read headers until blank line OR until line not looking like header (start of body)
|
||||||
|
while i < len(lines):
|
||||||
|
ln = lines[i]
|
||||||
|
if ln.strip() == "":
|
||||||
|
i += 1
|
||||||
|
break
|
||||||
|
if not _is_header_line(ln):
|
||||||
|
break
|
||||||
|
k, v = ln.split(":", 1)
|
||||||
|
headers[str(k).strip()] = str(v).strip()
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
# Remainder is body (JSON or text)
|
||||||
|
body = "\n".join(lines[i:]) if i < len(lines) else ""
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return method, url, headers, body
|
||||||
|
|
||||||
|
|
||||||
|
# -------- Headers helpers --------
|
||||||
|
def dedupe_headers(h: Dict[str, Any]) -> Dict[str, Any]:
    """
    Deduplicate headers case-insensitively (last value wins) and drop
    Host/Content-Length, which httpx computes itself.
    """
    try:
        keep: Dict[str, Tuple[str, Any]] = {}
        for name, value in (h or {}).items():
            folded = str(name).strip().lower()
            if folded in {"host", "content-length"}:
                continue
            # Remember the original spelling of the winning (last) entry.
            keep[folded] = (name, value)
        return {name: value for name, value in keep.values()}
    except Exception:
        return dict(h or {})
|
||||||
|
|
||||||
|
|
||||||
|
def content_type_is_json(h: Dict[str, Any]) -> bool:
    """Return True if any Content-Type header (case-insensitive) mentions 'json'."""
    try:
        for name, value in (h or {}).items():
            if str(name).lower() == "content-type" and "json" in str(value).lower():
                return True
        return False
    except Exception:
        return False
|
||||||
|
|
||||||
|
|
||||||
|
# -------- JSON parsing & normalization helpers --------
|
||||||
|
def try_parse_json(s: Any) -> Optional[Any]:
    """
    Best-effort JSON parse.

    dict/list values pass through unchanged; a non-empty string is parsed
    with json.loads. Anything else, or a parse failure, yields None.
    """
    if isinstance(s, (dict, list)):
        return s
    if not isinstance(s, str) or not s.strip():
        return None
    try:
        return json.loads(s)
    except Exception:
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_jsonish_text(s: Any) -> str:
    """
    Normalize JSON-looking text safely:
    - If whole text is a quoted JSON string, decode via json.loads to inner string.
    - Replace visible \\n/\\r/\\t outside JSON string literals with real control chars.
    - Escape raw CR/LF/TAB inside JSON string literals as \\n/\\r/\\t to keep JSON valid.
    """
    try:
        txt = str(s if s is not None else "")
    except Exception:
        return ""

    # If whole text looks like a quoted JSON string: decode to inner string
    try:
        if len(txt) >= 2 and txt[0] == '"' and txt[-1] == '"':
            v = json.loads(txt)
            if isinstance(v, str):
                txt = v
    except Exception:
        pass

    # Single pass over the characters, tracking whether we are inside a
    # JSON string literal (in_str) and whether the previous char inside
    # the literal was an unconsumed backslash (esc).
    out_chars = []
    i = 0
    n = len(txt)
    in_str = False
    esc = False
    while i < n:
        ch = txt[i]
        if in_str:
            # escape raw control chars within JSON string literal
            if ch == "\r":
                # CRLF -> \n
                if (i + 1) < n and txt[i + 1] == "\n":
                    out_chars.append("\\n")
                    i += 2
                    esc = False
                    continue
                out_chars.append("\\r")
                i += 1
                esc = False
                continue
            if ch == "\n":
                out_chars.append("\\n")
                i += 1
                esc = False
                continue
            if ch == "\t":
                out_chars.append("\\t")
                i += 1
                esc = False
                continue
            out_chars.append(ch)
            if esc:
                # This char was escaped (e.g. \" or \\): consume the escape.
                esc = False
            else:
                if ch == "\\":
                    esc = True
                elif ch == '"':
                    # Unescaped quote closes the string literal.
                    in_str = False
            i += 1
            continue

        # not in string literal
        if ch == '"':
            in_str = True
            out_chars.append(ch)
            i += 1
            continue

        # Outside literals, turn visible two-char escapes into real control chars.
        if ch == "\\" and (i + 1) < n:
            nx = txt[i + 1]
            if nx == "n":
                out_chars.append("\n")
                i += 2
                continue
            if nx == "r":
                out_chars.append("\r")
                i += 2
                continue
            if nx == "t":
                out_chars.append("\t")
                i += 2
                continue

        out_chars.append(ch)
        i += 1

    return "".join(out_chars)
|
||||||
|
|
||||||
|
|
||||||
|
def extract_json_trailing(s: str) -> Optional[Any]:
    """
    Pull a trailing JSON object/array out of mixed text.

    The whole (stripped) text is tried first; failing that, candidate
    start positions are scanned backwards from the last '{' and then the
    last '[' until some suffix parses.
    """
    try:
        if not isinstance(s, str):
            return None
        text = s.strip()
        try:
            return json.loads(text)
        except Exception:
            pass

        for opener in ("{", "["):
            pos = text.rfind(opener)
            while pos >= 0:
                try:
                    return json.loads(text[pos:])
                except Exception:
                    pos = text.rfind(opener, 0, pos)

        return None
    except Exception:
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def global_unescape_jsonish(s: str) -> str:
    """
    Last-resort unescape: turn visible escape sequences (\\n, \\", \\\\,
    \\uXXXX, ...) into their real characters.

    The text is round-tripped through the unicode_escape codec. Encoding
    with latin-1/backslashreplace first lets characters outside latin-1
    (CJK, em-dashes, emoji, ...) survive: they become \\uXXXX escapes that
    unicode_escape decodes right back. A plain
    codecs.decode(s, "unicode_escape") raised on such input, silently
    degrading to the lossy fallback below, which cannot decode \\uXXXX.

    Falls back to a literal replace chain, and finally to the input
    unchanged — this function never raises.
    """
    try:
        return s.encode("latin-1", "backslashreplace").decode("unicode_escape")
    except Exception:
        try:
            return (
                s.replace("\\n", "\n")
                .replace("\\r", "\r")
                .replace("\\t", "\t")
                .replace('\\"', '"')
                .replace("\\\\", "\\")
            )
        except Exception:
            return s
|
||||||
|
|
||||||
|
|
||||||
|
def looks_jsonish(txt: Any) -> bool:
    """
    Heuristic: does the text look like JSON? True when it contains a brace
    or bracket, or a whitespace-preceded 'key:' pattern.
    """
    try:
        text = str(txt or "")
        if "{" in text or "[" in text:
            return True
        # also patterns like key:
        return re.search(r'\s["\']?[A-Za-z0-9_\-]+["\']?\s*:', text) is not None
    except Exception:
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def deep_merge_dicts(a: Any, b: Any) -> Any:
    """
    Recursively merge two dicts, with values from b winning.
    Non-dict values (including lists) are simply replaced by b.
    """
    if not (isinstance(a, dict) and isinstance(b, dict)):
        return b
    merged = dict(a)
    for key, new_val in b.items():
        old_val = a.get(key)
        if key in a and isinstance(old_val, dict) and isinstance(new_val, dict):
            merged[key] = deep_merge_dicts(old_val, new_val)
        else:
            merged[key] = new_val
    return merged
|
||||||
|
|
||||||
|
# ---- Trim-aware merge that preserves original binary/base64 fields ----
|
||||||
|
def is_trimmed_b64_string(s: Any) -> bool:
    """True when s is a string carrying the '(trimmed ' placeholder marker."""
    try:
        return isinstance(s, str) and "(trimmed " in s
    except Exception:
        return False
|
||||||
|
|
||||||
|
def looks_base64ish(s: Any) -> bool:
    """Heuristic: a string of >= 64 chars drawn only from the base64 alphabet (plus CR/LF)."""
    try:
        if not isinstance(s, str) or len(s) < 64:
            return False
        return re.fullmatch(r"[A-Za-z0-9+/=\r\n]+", s) is not None
    except Exception:
        return False
|
||||||
|
|
||||||
|
def merge_lists_preserving_b64(orig_list: Any, edited_list: Any) -> Any:
    """
    Merge two lists index-by-index, keeping original strings where the
    edited value is a trimmed-base64 placeholder. No padding from the
    original: the result always has the edited list's length.

    Per index:
    - trimmed placeholder string over an original string -> keep original
    - dict over dict -> recurse via deep_merge_preserving_b64
    - list over list -> recurse via merge_lists_preserving_b64
    - anything else -> edited value as-is
    """
    if not isinstance(edited_list, list):
        return edited_list
    source = orig_list if isinstance(orig_list, list) else []
    merged = []
    for idx, new_item in enumerate(edited_list):
        old_item = source[idx] if idx < len(source) else None
        if isinstance(new_item, str) and is_trimmed_b64_string(new_item) and isinstance(old_item, str):
            merged.append(old_item)
        elif isinstance(new_item, dict) and isinstance(old_item, dict):
            merged.append(deep_merge_preserving_b64(old_item, new_item))
        elif isinstance(new_item, list) and isinstance(old_item, list):
            merged.append(merge_lists_preserving_b64(old_item, new_item))
        else:
            merged.append(new_item)
    return merged
|
||||||
|
|
||||||
|
def deep_merge_preserving_b64(orig: Any, edited: Any) -> Any:
    """
    Merge 'edited' over 'orig' while restoring original base64/data-url
    strings wherever the edited copy holds a trimmed placeholder.

    Shape strictly follows the EDITED value:
    - dict: the result has only edited's keys (keys missing from edited
      count as deleted)
    - list: delegated to merge_lists_preserving_b64 (edited length wins)
    - trimmed placeholder string over an original string: original kept
    - anything else: edited value replaces
    """
    if isinstance(edited, str) and is_trimmed_b64_string(edited) and isinstance(orig, str):
        return orig
    if isinstance(orig, dict) and isinstance(edited, dict):
        merged: Dict[str, Any] = {}
        for key, new_val in edited.items():
            old_val = orig.get(key)
            if isinstance(new_val, str) and is_trimmed_b64_string(new_val) and isinstance(old_val, str):
                merged[key] = old_val
            elif isinstance(new_val, dict) and isinstance(old_val, dict):
                merged[key] = deep_merge_preserving_b64(old_val, new_val)
            elif isinstance(new_val, list) and isinstance(old_val, list):
                merged[key] = merge_lists_preserving_b64(old_val, new_val)
            else:
                merged[key] = new_val
        return merged
    if isinstance(orig, list) and isinstance(edited, list):
        return merge_lists_preserving_b64(orig, edited)
    return edited
|
||||||
|
|
||||||
|
|
||||||
|
def salvage_json_for_send(
    edited_body_text: Any,
    headers: Dict[str, Any],
    orig_json: Optional[Any],
    prefer_registry_original: bool = True,
) -> Tuple[Optional[Any], Optional[str]]:
    """
    Build (final_json, final_text) for the outgoing request body.

    Exactly one element of the pair is meaningful: a parsed JSON payload,
    or raw body text (with final_json None).

    Steps:
    - Normalize the edited text and try to parse it: whole text first, then
      a trailing JSON fragment, then once more after a global unescape.
    - With prefer_registry_original and a registry original present:
        * parsed edits are deep-merged over the original (trimmed base64
          placeholders restored; only edited keys survive, lists truncate
          to the edited length);
        * unparseable edits go out as raw text verbatim — the original is
          never resurrected; all-whitespace edits send an empty body.
    - Otherwise: parsed edits win outright; failing that, a JSON
      content-type plus an original triggers a last salvage attempt
      (falling back to the original); anything else is sent as raw text.
    """
    normalized = normalize_jsonish_text(edited_body_text)

    # Parse attempts, cheapest first.
    parsed = try_parse_json(normalized)
    if parsed is None:
        parsed = extract_json_trailing(normalized)
    if parsed is None:
        unescaped = global_unescape_jsonish(str(edited_body_text or ""))
        if isinstance(unescaped, str) and unescaped != edited_body_text:
            renorm = normalize_jsonish_text(unescaped)
            parsed = try_parse_json(renorm) or extract_json_trailing(renorm)

    declared_json = content_type_is_json(headers)

    if prefer_registry_original and orig_json is not None:
        if parsed is not None:
            # Merge edits over the original with trimmed-b64 preservation;
            # only keys present in the edit survive, lists take the edited length.
            return deep_merge_preserving_b64(orig_json, parsed), None
        # Respect full manual control: do NOT resurrect the original JSON.
        if isinstance(normalized, str) and not normalized.strip():
            return None, ""
        return None, str(edited_body_text or "")

    # No preference for (or no) registry original.
    if parsed is not None:
        return parsed, None

    if declared_json and orig_json is not None:
        # Hard salvage for payloads that declare themselves JSON.
        rescued = try_parse_json(normalized) or extract_json_trailing(normalized)
        return (rescued if rescued is not None else orig_json), None

    # Plain-text fallback.
    return None, str(edited_body_text or "")
|
||||||
|
|
||||||
|
|
||||||
|
# -------- Registry wrapper --------
|
||||||
|
def register_manual_request(req_id: str, info: Dict[str, Any]) -> None:
    # Thin best-effort wrapper around the module-level registry hook
    # `_reg_http_req` (bound elsewhere in this file — not visible in this
    # chunk; presumably it stores the original request for manual resend).
    # Silently does nothing when the hook is absent/falsy or raises.
    try:
        if _reg_http_req:
            _reg_http_req(req_id, info)
    except Exception:
        pass
|
||||||
@@ -10,6 +10,7 @@ import hashlib
|
|||||||
from collections import deque
|
from collections import deque
|
||||||
from agentui.providers.http_client import build_client
|
from agentui.providers.http_client import build_client
|
||||||
from agentui.common.vendors import detect_vendor
|
from agentui.common.vendors import detect_vendor
|
||||||
|
from agentui.providers.adapters.registry import get_adapter, default_base_url_for as _adapter_default_base_url_for
|
||||||
from agentui.pipeline.templating import (
|
from agentui.pipeline.templating import (
|
||||||
_OUT_MACRO_RE,
|
_OUT_MACRO_RE,
|
||||||
_VAR_MACRO_RE,
|
_VAR_MACRO_RE,
|
||||||
@@ -25,7 +26,45 @@ from agentui.pipeline.templating import (
|
|||||||
eval_condition_expr,
|
eval_condition_expr,
|
||||||
)
|
)
|
||||||
from agentui.pipeline.storage import load_var_store, save_var_store, clear_var_store
|
from agentui.pipeline.storage import load_var_store, save_var_store, clear_var_store
|
||||||
from agentui.common.cancel import is_cancelled, clear_cancel
|
from agentui.common.cancel import is_cancelled, clear_cancel, get_cancel_mode
|
||||||
|
|
||||||
|
# HTTP request registry for manual resend feature (store original untrimmed bodies)
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
_HTTP_REQ_REGISTRY_MAX = 200
|
||||||
|
_HTTP_REQ_REGISTRY = OrderedDict()
|
||||||
|
|
||||||
|
def register_http_request(req_id: str, info: Dict[str, Any]) -> None:
|
||||||
|
try:
|
||||||
|
rid = str(req_id or "")
|
||||||
|
if not rid:
|
||||||
|
return
|
||||||
|
# overwrite if exists to refresh order
|
||||||
|
if rid in _HTTP_REQ_REGISTRY:
|
||||||
|
try:
|
||||||
|
_HTTP_REQ_REGISTRY.pop(rid, None)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
_HTTP_REQ_REGISTRY[rid] = info or {}
|
||||||
|
# trim oldest beyond cap
|
||||||
|
while len(_HTTP_REQ_REGISTRY) > _HTTP_REQ_REGISTRY_MAX:
|
||||||
|
try:
|
||||||
|
_HTTP_REQ_REGISTRY.popitem(last=False)
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_http_request(req_id: str) -> Optional[Dict[str, Any]]:
|
||||||
|
try:
|
||||||
|
rid = str(req_id or "")
|
||||||
|
if not rid:
|
||||||
|
return None
|
||||||
|
v = _HTTP_REQ_REGISTRY.get(rid)
|
||||||
|
# return shallow copy to avoid mutation
|
||||||
|
return dict(v) if isinstance(v, dict) else None
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
# --- Global helpers: robust auto-decompression for logging/JSON parsing ----------
|
# --- Global helpers: robust auto-decompression for logging/JSON parsing ----------
|
||||||
import gzip
|
import gzip
|
||||||
@@ -115,6 +154,54 @@ def _safe_response_json(resp) -> Any:
|
|||||||
return {"error": "Failed to decode JSON from upstream", "text": t}
|
return {"error": "Failed to decode JSON from upstream", "text": t}
|
||||||
|
|
||||||
|
|
||||||
|
# --- Cooperative cancel/abort helper for in-flight awaits (HTTP, etc.) ---------
|
||||||
|
async def _await_coro_with_cancel(coro: Awaitable[Any], pipeline_id: str, poll_interval: float = 0.1) -> Any:
|
||||||
|
"""
|
||||||
|
Await 'coro' while polling manual cancel flag for the pipeline.
|
||||||
|
|
||||||
|
Behavior:
|
||||||
|
- If cancel mode is 'graceful' -> do NOT interrupt 'coro'; we just continue waiting
|
||||||
|
and upper layers will stop before scheduling new work.
|
||||||
|
- If cancel mode is 'abort' -> cancel in-flight task immediately and raise ExecutionError.
|
||||||
|
|
||||||
|
Returns the result of 'coro' or raises ExecutionError on abort.
|
||||||
|
"""
|
||||||
|
task = asyncio.create_task(coro)
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
# Shield from timeout cancellation; we only use timeout to poll flags.
|
||||||
|
return await asyncio.wait_for(asyncio.shield(task), timeout=poll_interval)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
# Not done yet — fall through to polling
|
||||||
|
pass
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
# Map in-flight cancellation to ExecutionError on abort; otherwise re-raise
|
||||||
|
try:
|
||||||
|
mode = get_cancel_mode(pipeline_id)
|
||||||
|
except Exception:
|
||||||
|
mode = "abort"
|
||||||
|
if mode == "abort":
|
||||||
|
raise ExecutionError("Cancelled by user (abort)")
|
||||||
|
raise
|
||||||
|
# Poll cancel flag
|
||||||
|
try:
|
||||||
|
if is_cancelled(pipeline_id):
|
||||||
|
mode = get_cancel_mode(pipeline_id)
|
||||||
|
if mode == "abort":
|
||||||
|
try:
|
||||||
|
if not task.done():
|
||||||
|
task.cancel()
|
||||||
|
await task
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
raise ExecutionError("Cancelled by user (abort)")
|
||||||
|
except Exception as exc: # noqa: BLE001
|
||||||
|
raise ExecutionError(f"Cancelled by user (abort): {exc}")
|
||||||
|
# graceful: do not interrupt; keep waiting
|
||||||
|
except Exception:
|
||||||
|
# Be defensive: ignore polling failures and keep waiting
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
# --- Helpers: sanitize base64/data URLs in JSON for logging (Burp-like) ----------
|
# --- Helpers: sanitize base64/data URLs in JSON for logging (Burp-like) ----------
|
||||||
def _is_b64ish_string(s: str) -> bool:
|
def _is_b64ish_string(s: str) -> bool:
|
||||||
try:
|
try:
|
||||||
@@ -442,7 +529,14 @@ class PipelineExecutor:
|
|||||||
# Исполнение
|
# Исполнение
|
||||||
try:
|
try:
|
||||||
out = await node.run(inputs, ctx)
|
out = await node.run(inputs, ctx)
|
||||||
except Exception as exc:
|
except BaseException as exc:
|
||||||
|
err = exc
|
||||||
|
try:
|
||||||
|
import asyncio as _asyncio
|
||||||
|
if isinstance(exc, _asyncio.CancelledError):
|
||||||
|
err = ExecutionError("Cancelled by user (abort)")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
if trace is not None:
|
if trace is not None:
|
||||||
try:
|
try:
|
||||||
await trace({
|
await trace({
|
||||||
@@ -451,11 +545,11 @@ class PipelineExecutor:
|
|||||||
"node_type": node.type_name,
|
"node_type": node.type_name,
|
||||||
"wave": wave_num,
|
"wave": wave_num,
|
||||||
"ts": int(time.time() * 1000),
|
"ts": int(time.time() * 1000),
|
||||||
"error": str(exc),
|
"error": str(err),
|
||||||
})
|
})
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
raise
|
raise err
|
||||||
else:
|
else:
|
||||||
dur_ms = int((time.perf_counter() - started) * 1000)
|
dur_ms = int((time.perf_counter() - started) * 1000)
|
||||||
if trace is not None:
|
if trace is not None:
|
||||||
@@ -2053,15 +2147,26 @@ class ProviderCallNode(Node):
|
|||||||
|
|
||||||
# Default endpoints if not set
|
# Default endpoints if not set
|
||||||
if not endpoint_tmpl:
|
if not endpoint_tmpl:
|
||||||
if provider == "openai":
|
_ad = None
|
||||||
endpoint_tmpl = "/v1/chat/completions"
|
try:
|
||||||
elif provider == "gemini":
|
_ad = get_adapter(provider)
|
||||||
endpoint_tmpl = "/v1beta/models/{{ model }}:generateContent"
|
except Exception:
|
||||||
elif provider == "gemini_image":
|
_ad = None
|
||||||
# Специальный провайдер для генерации/превью изображений Gemini (generateContent)
|
if _ad is not None:
|
||||||
endpoint_tmpl = "/v1beta/models/{{ model }}:generateContent"
|
try:
|
||||||
elif provider == "claude":
|
endpoint_tmpl = _ad.default_endpoint(str(context.get("model") or ""))
|
||||||
endpoint_tmpl = "/v1/messages"
|
except Exception:
|
||||||
|
endpoint_tmpl = ""
|
||||||
|
if not endpoint_tmpl:
|
||||||
|
if provider == "openai":
|
||||||
|
endpoint_tmpl = "/v1/chat/completions"
|
||||||
|
elif provider == "gemini":
|
||||||
|
endpoint_tmpl = "/v1beta/models/{{ model }}:generateContent"
|
||||||
|
elif provider == "gemini_image":
|
||||||
|
# Специальный провайдер для генерации/превью изображений Gemini (generateContent)
|
||||||
|
endpoint_tmpl = "/v1beta/models/{{ model }}:generateContent"
|
||||||
|
elif provider == "claude":
|
||||||
|
endpoint_tmpl = "/v1/messages"
|
||||||
|
|
||||||
# Default template for gemini_image if none provided (inject [[PROMPT]])
|
# Default template for gemini_image if none provided (inject [[PROMPT]])
|
||||||
try:
|
try:
|
||||||
@@ -2074,11 +2179,259 @@ class ProviderCallNode(Node):
|
|||||||
|
|
||||||
# Подготовим Prompt Blocks + pm-структуру для шаблона
|
# Подготовим Prompt Blocks + pm-структуру для шаблона
|
||||||
unified_msgs = self._render_blocks_to_unified(context)
|
unified_msgs = self._render_blocks_to_unified(context)
|
||||||
pm_struct = self._blocks_struct_for_template(provider, unified_msgs, context)
|
adapter = None
|
||||||
|
try:
|
||||||
|
adapter = get_adapter(provider)
|
||||||
|
except Exception:
|
||||||
|
adapter = None
|
||||||
|
if adapter:
|
||||||
|
blocks_struct = adapter.blocks_struct_for_template(unified_msgs, context, self.config or {})
|
||||||
|
else:
|
||||||
|
blocks_struct = self._blocks_struct_for_template(provider, unified_msgs, context)
|
||||||
|
pm_struct = dict(blocks_struct)
|
||||||
|
|
||||||
# Расширяем контекст для рендера шаблонов
|
# Расширяем контекст для рендера шаблонов
|
||||||
render_ctx = dict(context)
|
render_ctx = dict(context)
|
||||||
render_ctx["pm"] = pm_struct
|
render_ctx["pm"] = pm_struct
|
||||||
|
# Прокинем конфиг ноды в контекст для адаптеров (например, claude_no_system)
|
||||||
|
try:
|
||||||
|
render_ctx["_node_config"] = dict(self.config or {})
|
||||||
|
except Exception:
|
||||||
|
render_ctx["_node_config"] = {}
|
||||||
|
# Node-local: track VAR paths overridden by prompt_preprocess (no extra syntax)
|
||||||
|
pre_var_paths = set()
|
||||||
|
|
||||||
|
# prompt_preprocess (pre-merge DSL): парсим строки до prompt_combine и готовим «пред‑сегменты»
|
||||||
|
# Синтаксис строки:
|
||||||
|
# SEGMENT [delKeyContains "needle"] [delpos=prepend|append|N|-1] [pruneEmpty] [case=ci|cs]
|
||||||
|
# По умолчанию: case=ci, pruneEmpty=false, без delpos → append
|
||||||
|
# SEGMENT поддерживает [[...]] и {{ ... }}
|
||||||
|
pre_segments_raw: List[Dict[str, Any]] = []
|
||||||
|
try:
|
||||||
|
pre_raw = str((self.config or {}).get("prompt_preprocess") or "").strip()
|
||||||
|
except Exception:
|
||||||
|
pre_raw = ""
|
||||||
|
if pre_raw:
|
||||||
|
lines = [ln.strip() for ln in pre_raw.splitlines() if str(ln or "").strip()]
|
||||||
|
import re as _repp
|
||||||
|
# Collect local overrides for plain [[VAR:path]] segments after filtering
|
||||||
|
pre_var_overrides: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
def _pp_try_json(s: str) -> Any:
|
||||||
|
try:
|
||||||
|
obj = json.loads(s)
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
obj = json.loads(s, strict=False) # type: ignore[call-arg]
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
for _ in range(2):
|
||||||
|
if isinstance(obj, str):
|
||||||
|
st = obj.strip()
|
||||||
|
if (st.startswith("{") and st.endswith("}")) or (st.startswith("[") and st.endswith("]")):
|
||||||
|
try:
|
||||||
|
obj = json.loads(st)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
break
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def _norm(s: Any, ci: bool) -> str:
|
||||||
|
try:
|
||||||
|
ss = s if isinstance(s, str) else str(s)
|
||||||
|
except Exception:
|
||||||
|
ss = ""
|
||||||
|
return ss.lower() if ci else ss
|
||||||
|
|
||||||
|
def _delkeys_by_val_contains(x: Any, needles: List[str], ci: bool, prune_empty: bool, is_root: bool = False) -> tuple[Any, int]:
|
||||||
|
"""
|
||||||
|
Рекурсивно удаляет ключи словаря, если строковое представление их значения содержит needle.
|
||||||
|
- needles: список подстрок
|
||||||
|
- ci: регистронезависимый поиск (по умолчанию True)
|
||||||
|
- prune_empty: удалять пустые {} / [] из родителей (кроме корня)
|
||||||
|
Возвращает (новое_значение_или_None, удалённые_ключи_суммарно)
|
||||||
|
"""
|
||||||
|
removed = 0
|
||||||
|
if isinstance(x, dict):
|
||||||
|
out: Dict[str, Any] = {}
|
||||||
|
for k, v in x.items():
|
||||||
|
v2, rem2 = _delkeys_by_val_contains(v, needles, ci, prune_empty, False)
|
||||||
|
removed += rem2
|
||||||
|
sv = _stringify_for_template(v2)
|
||||||
|
cond = False
|
||||||
|
try:
|
||||||
|
nsv = _norm(sv, ci)
|
||||||
|
for nd in needles:
|
||||||
|
nds = _norm(nd, ci)
|
||||||
|
if nds and (nds in nsv):
|
||||||
|
cond = True
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
cond = False
|
||||||
|
if cond:
|
||||||
|
removed += 1
|
||||||
|
continue
|
||||||
|
out[k] = v2
|
||||||
|
if prune_empty and not is_root and len(out) == 0:
|
||||||
|
return None, removed # type: ignore[return-value]
|
||||||
|
return out, removed
|
||||||
|
if isinstance(x, list):
|
||||||
|
arr: List[Any] = []
|
||||||
|
for it in x:
|
||||||
|
it2, rem2 = _delkeys_by_val_contains(it, needles, ci, prune_empty, False)
|
||||||
|
removed += rem2
|
||||||
|
if it2 is None and prune_empty:
|
||||||
|
continue
|
||||||
|
arr.append(it2)
|
||||||
|
if prune_empty and not is_root and len(arr) == 0:
|
||||||
|
return None, removed # type: ignore[return-value]
|
||||||
|
return arr, removed
|
||||||
|
# scalar
|
||||||
|
return x, removed
|
||||||
|
|
||||||
|
pre_used = 0
|
||||||
|
pre_removed_total = 0
|
||||||
|
for ln in lines:
|
||||||
|
try:
|
||||||
|
# Опции
|
||||||
|
needles = [m.group(1) for m in _repp.finditer(r'(?is)\bdelKeyContains\s+"([^"]*)"', ln)]
|
||||||
|
mpos = _repp.search(r'(?is)\bdelpos\s*=\s*(prepend|append|-?\d+)\b', ln)
|
||||||
|
pos_spec = mpos.group(1).strip().lower() if mpos else None
|
||||||
|
mcase = _repp.search(r'(?is)\bcase\s*=\s*(ci|cs)\b', ln)
|
||||||
|
ci = True if not mcase else (mcase.group(1).strip().lower() == "ci")
|
||||||
|
prune = bool(_repp.search(r'(?is)\bpruneEmpty\b', ln))
|
||||||
|
|
||||||
|
# Очищаем директивы из текста строки → остаётся сам SEGMENT
|
||||||
|
s2 = _repp.sub(r'(?is)\bdelKeyContains\s+"[^"]*"', "", ln)
|
||||||
|
s2 = _repp.sub(r'(?is)\bdelpos\s*=\s*(prepend|append|-?\d+)\b', "", s2)
|
||||||
|
s2 = _repp.sub(r'(?is)\bcase\s*=\s*(ci|cs)\b', "", s2)
|
||||||
|
s2 = _repp.sub(r'(?is)\bpruneEmpty\b', "", s2)
|
||||||
|
seg = s2.strip()
|
||||||
|
if not seg:
|
||||||
|
continue
|
||||||
|
# Try to detect plain [[VAR:path]] to support node-local override without extra syntax
|
||||||
|
var_path = None
|
||||||
|
try:
|
||||||
|
mvar = _VAR_MACRO_RE.fullmatch(seg)
|
||||||
|
if mvar:
|
||||||
|
var_path = (mvar.group(1) or "").strip()
|
||||||
|
except Exception:
|
||||||
|
var_path = None
|
||||||
|
|
||||||
|
# Макросы и попытка распарсить JSON
|
||||||
|
resolved = render_template_simple(seg, render_ctx, render_ctx.get("OUT") or {})
|
||||||
|
obj = _pp_try_json(resolved)
|
||||||
|
base = obj if obj is not None else resolved
|
||||||
|
|
||||||
|
# Удаление ключей по contains, если задано
|
||||||
|
if needles:
|
||||||
|
try:
|
||||||
|
base2, remcnt = _delkeys_by_val_contains(base, needles, ci, prune, True)
|
||||||
|
except Exception:
|
||||||
|
base2, remcnt = base, 0
|
||||||
|
else:
|
||||||
|
base2, remcnt = base, 0
|
||||||
|
|
||||||
|
# If the segment was a pure [[VAR:path]] and we had filters,
|
||||||
|
# locally override this VAR for the rest of the node (so prompt_combine sees the filtered value)
|
||||||
|
try:
|
||||||
|
if var_path and needles:
|
||||||
|
pre_var_overrides[var_path] = base2
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
pre_segments_raw.append({"obj": base2, "pos": pos_spec})
|
||||||
|
pre_used += 1
|
||||||
|
pre_removed_total += int(remcnt or 0)
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Apply local VAR overrides onto render_ctx (node-local, no extra syntax)
|
||||||
|
try:
|
||||||
|
import copy as _copy
|
||||||
|
|
||||||
|
def _safe_deepcopy(x: Any) -> Any:
|
||||||
|
try:
|
||||||
|
return _copy.deepcopy(x)
|
||||||
|
except Exception:
|
||||||
|
# Fallback deep copy for dict/list; otherwise return as-is
|
||||||
|
try:
|
||||||
|
if isinstance(x, dict):
|
||||||
|
return {k: _safe_deepcopy(v) for k, v in x.items()}
|
||||||
|
if isinstance(x, list):
|
||||||
|
return [_safe_deepcopy(i) for i in x]
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
return json.loads(json.dumps(x))
|
||||||
|
except Exception:
|
||||||
|
return x
|
||||||
|
|
||||||
|
def _set_by_path(obj: Any, path: str, value: Any) -> None:
|
||||||
|
cur = obj
|
||||||
|
parts = [p.strip() for p in str(path).split(".") if p.strip()]
|
||||||
|
for i, part in enumerate(parts):
|
||||||
|
# list index?
|
||||||
|
idx = None
|
||||||
|
try:
|
||||||
|
idx = int(part)
|
||||||
|
except Exception:
|
||||||
|
idx = None
|
||||||
|
last = (i == len(parts) - 1)
|
||||||
|
if idx is not None:
|
||||||
|
if not isinstance(cur, list) or idx < 0 or idx >= len(cur):
|
||||||
|
return
|
||||||
|
if last:
|
||||||
|
cur[idx] = value
|
||||||
|
return
|
||||||
|
cur = cur[idx]
|
||||||
|
continue
|
||||||
|
# dict key
|
||||||
|
if not isinstance(cur, dict):
|
||||||
|
return
|
||||||
|
if last:
|
||||||
|
cur[part] = value
|
||||||
|
return
|
||||||
|
if part not in cur or not isinstance(cur[part], (dict, list)):
|
||||||
|
cur[part] = {}
|
||||||
|
cur = cur[part]
|
||||||
|
|
||||||
|
# Deep-copy only the top-level roots we are going to mutate (e.g., 'incoming' for 'incoming.*')
|
||||||
|
roots_to_copy: set = set()
|
||||||
|
for _p in (pre_var_overrides or {}).keys():
|
||||||
|
if "." in str(_p):
|
||||||
|
roots_to_copy.add(str(_p).split(".", 1)[0].strip())
|
||||||
|
for _root in roots_to_copy:
|
||||||
|
try:
|
||||||
|
if _root in render_ctx:
|
||||||
|
render_ctx[_root] = _safe_deepcopy(render_ctx[_root])
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
for _p, _v in (pre_var_overrides or {}).items():
|
||||||
|
_set_by_path(render_ctx, _p, _v)
|
||||||
|
pre_var_paths = set(pre_var_overrides.keys())
|
||||||
|
except Exception:
|
||||||
|
pre_var_paths = set()
|
||||||
|
# SSE: prompt_preprocess summary
|
||||||
|
try:
|
||||||
|
trace_fn = context.get("_trace")
|
||||||
|
if trace_fn:
|
||||||
|
await trace_fn({
|
||||||
|
"event": "prompt_preprocess",
|
||||||
|
"node_id": self.node_id,
|
||||||
|
"node_type": self.type_name,
|
||||||
|
"provider": provider,
|
||||||
|
"lines": len(lines),
|
||||||
|
"used": pre_used,
|
||||||
|
"removed_keys": pre_removed_total,
|
||||||
|
"ts": int(time.time() * 1000),
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
pre_segments_raw = []
|
||||||
|
|
||||||
# prompt_combine (DSL "&"): комбинируем сегменты в заданном порядке.
|
# prompt_combine (DSL "&"): комбинируем сегменты в заданном порядке.
|
||||||
# Расширения:
|
# Расширения:
|
||||||
@@ -2090,7 +2443,35 @@ class ProviderCallNode(Node):
|
|||||||
combine_raw = str(cfg.get("prompt_combine") or "").strip()
|
combine_raw = str(cfg.get("prompt_combine") or "").strip()
|
||||||
except Exception:
|
except Exception:
|
||||||
combine_raw = ""
|
combine_raw = ""
|
||||||
if combine_raw:
|
# Быстрая ветка: если есть адаптер — выполняем merge сегментов через него и коротко-замыкаем легаси-блок ниже
|
||||||
|
raw_segs_for_adapter = [s.strip() for s in combine_raw.split("&") if str(s or "").strip()]
|
||||||
|
if adapter and (raw_segs_for_adapter or (pre_segments_raw and len(pre_segments_raw) > 0)):
|
||||||
|
try:
|
||||||
|
pm_struct = adapter.combine_segments(
|
||||||
|
blocks_struct=blocks_struct,
|
||||||
|
pre_segments_raw=pre_segments_raw,
|
||||||
|
raw_segs=raw_segs_for_adapter,
|
||||||
|
render_ctx=render_ctx,
|
||||||
|
pre_var_paths=pre_var_paths,
|
||||||
|
render_template_simple_fn=render_template_simple,
|
||||||
|
var_macro_fullmatch_re=_VAR_MACRO_RE,
|
||||||
|
detect_vendor_fn=detect_vendor,
|
||||||
|
)
|
||||||
|
# обновим pm в контексте
|
||||||
|
try:
|
||||||
|
render_ctx["pm"] = pm_struct
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
except Exception as _e:
|
||||||
|
try:
|
||||||
|
print(f"TRACE adapter_combine_error: node={self.node_id} provider={provider} err={_e}")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# коротко-замкнём легаси-блок prompt_combine ниже
|
||||||
|
pre_segments_raw = []
|
||||||
|
combine_raw = ""
|
||||||
|
if combine_raw or (pre_segments_raw and len(pre_segments_raw) > 0):
|
||||||
raw_segs = [s.strip() for s in combine_raw.split("&") if str(s or "").strip()]
|
raw_segs = [s.strip() for s in combine_raw.split("&") if str(s or "").strip()]
|
||||||
|
|
||||||
def _try_json(s: str) -> Any:
|
def _try_json(s: str) -> Any:
|
||||||
@@ -2547,12 +2928,36 @@ class ProviderCallNode(Node):
|
|||||||
if provider in {"gemini", "gemini_image"}:
|
if provider in {"gemini", "gemini_image"}:
|
||||||
built: List[Dict[str, Any]] = []
|
built: List[Dict[str, Any]] = []
|
||||||
sys_texts: List[str] = []
|
sys_texts: List[str] = []
|
||||||
|
# Preprocess-inserted segments (prompt_preprocess)
|
||||||
|
for _pre in pre_segments_raw:
|
||||||
|
try:
|
||||||
|
_obj = _pre.get("obj")
|
||||||
|
items = _as_gemini_contents(_obj)
|
||||||
|
items = _filter_gemini(items)
|
||||||
|
built = _insert_items(built, items, _pre.get("pos"))
|
||||||
|
try:
|
||||||
|
sx = _extract_sys_text_from_obj(_obj)
|
||||||
|
if isinstance(sx, str) and sx.strip():
|
||||||
|
sys_texts.append(sx.strip())
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
for raw_seg in raw_segs:
|
for raw_seg in raw_segs:
|
||||||
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
||||||
if body_seg == "[[PROMPT]]":
|
if body_seg == "[[PROMPT]]":
|
||||||
items = _filter_gemini(list(blocks_struct.get("contents", []) or []))
|
items = _filter_gemini(list(blocks_struct.get("contents", []) or []))
|
||||||
built = _insert_items(built, items, pos_spec)
|
built = _insert_items(built, items, pos_spec)
|
||||||
continue
|
continue
|
||||||
|
m_pre = _VAR_MACRO_RE.fullmatch(body_seg)
|
||||||
|
if m_pre:
|
||||||
|
_p = (m_pre.group(1) or "").strip()
|
||||||
|
try:
|
||||||
|
if _p in pre_var_paths:
|
||||||
|
# Skip duplicate var segment - already inserted via prompt_preprocess (filtered)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
||||||
obj = _try_json(resolved)
|
obj = _try_json(resolved)
|
||||||
# provider guess + system extract for cross-provider combine
|
# provider guess + system extract for cross-provider combine
|
||||||
@@ -2585,12 +2990,36 @@ class ProviderCallNode(Node):
|
|||||||
elif provider in {"openai"}:
|
elif provider in {"openai"}:
|
||||||
built2: List[Dict[str, Any]] = []
|
built2: List[Dict[str, Any]] = []
|
||||||
sys_texts: List[str] = []
|
sys_texts: List[str] = []
|
||||||
|
# Preprocess-inserted segments (prompt_preprocess)
|
||||||
|
for _pre in pre_segments_raw:
|
||||||
|
try:
|
||||||
|
_obj = _pre.get("obj")
|
||||||
|
items = _as_openai_messages(_obj)
|
||||||
|
items = _filter_openai(items)
|
||||||
|
built2 = _insert_items(built2, items, _pre.get("pos"))
|
||||||
|
try:
|
||||||
|
sx = _extract_sys_text_from_obj(_obj)
|
||||||
|
if isinstance(sx, str) and sx.strip():
|
||||||
|
sys_texts.append(sx.strip())
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
for raw_seg in raw_segs:
|
for raw_seg in raw_segs:
|
||||||
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
||||||
if body_seg == "[[PROMPT]]":
|
if body_seg == "[[PROMPT]]":
|
||||||
items = _filter_openai(list(blocks_struct.get("messages", []) or []))
|
items = _filter_openai(list(blocks_struct.get("messages", []) or []))
|
||||||
built2 = _insert_items(built2, items, pos_spec)
|
built2 = _insert_items(built2, items, pos_spec)
|
||||||
continue
|
continue
|
||||||
|
m_pre = _VAR_MACRO_RE.fullmatch(body_seg)
|
||||||
|
if m_pre:
|
||||||
|
_p = (m_pre.group(1) or "").strip()
|
||||||
|
try:
|
||||||
|
if _p in pre_var_paths:
|
||||||
|
# Skip duplicate var segment - already inserted via prompt_preprocess (filtered)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
||||||
obj = _try_json(resolved)
|
obj = _try_json(resolved)
|
||||||
try:
|
try:
|
||||||
@@ -2622,12 +3051,36 @@ class ProviderCallNode(Node):
|
|||||||
else: # claude
|
else: # claude
|
||||||
built3: List[Dict[str, Any]] = []
|
built3: List[Dict[str, Any]] = []
|
||||||
sys_texts: List[str] = []
|
sys_texts: List[str] = []
|
||||||
|
# Preprocess-inserted segments (prompt_preprocess)
|
||||||
|
for _pre in pre_segments_raw:
|
||||||
|
try:
|
||||||
|
_obj = _pre.get("obj")
|
||||||
|
items = _as_claude_messages(_obj)
|
||||||
|
items = _filter_claude(items)
|
||||||
|
built3 = _insert_items(built3, items, _pre.get("pos"))
|
||||||
|
try:
|
||||||
|
sx = _extract_sys_text_from_obj(_obj)
|
||||||
|
if isinstance(sx, str) and sx.strip():
|
||||||
|
sys_texts.append(sx.strip())
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
for raw_seg in raw_segs:
|
for raw_seg in raw_segs:
|
||||||
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
body_seg, pos_spec = _split_pos_spec(raw_seg)
|
||||||
if body_seg == "[[PROMPT]]":
|
if body_seg == "[[PROMPT]]":
|
||||||
items = _filter_claude(list(blocks_struct.get("messages", []) or []))
|
items = _filter_claude(list(blocks_struct.get("messages", []) or []))
|
||||||
built3 = _insert_items(built3, items, pos_spec)
|
built3 = _insert_items(built3, items, pos_spec)
|
||||||
continue
|
continue
|
||||||
|
m_pre = _VAR_MACRO_RE.fullmatch(body_seg)
|
||||||
|
if m_pre:
|
||||||
|
_p = (m_pre.group(1) or "").strip()
|
||||||
|
try:
|
||||||
|
if _p in pre_var_paths:
|
||||||
|
# Skip duplicate var segment - already inserted via prompt_preprocess (filtered)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
resolved = render_template_simple(body_seg, render_ctx, render_ctx.get("OUT") or {})
|
||||||
obj = _try_json(resolved)
|
obj = _try_json(resolved)
|
||||||
try:
|
try:
|
||||||
@@ -2652,6 +3105,9 @@ class ProviderCallNode(Node):
|
|||||||
if isinstance(existing_sys, list):
|
if isinstance(existing_sys, list):
|
||||||
sys_blocks.extend(existing_sys)
|
sys_blocks.extend(existing_sys)
|
||||||
st0 = blocks_struct.get("system_text") or ""
|
st0 = blocks_struct.get("system_text") or ""
|
||||||
|
# Ensure PROMPT system_text from blocks is included as a Claude system block
|
||||||
|
if isinstance(st0, str) and st0.strip():
|
||||||
|
sys_blocks.append({"type": "text", "text": st0})
|
||||||
for s in sys_texts:
|
for s in sys_texts:
|
||||||
sys_blocks.append({"type": "text", "text": s})
|
sys_blocks.append({"type": "text", "text": s})
|
||||||
st = "\n\n".join([t for t in [st0] + sys_texts if isinstance(t, str) and t.strip()])
|
st = "\n\n".join([t for t in [st0] + sys_texts if isinstance(t, str) and t.strip()])
|
||||||
@@ -2671,8 +3127,11 @@ class ProviderCallNode(Node):
|
|||||||
else:
|
else:
|
||||||
# Prefer top-level system as plain string (proxy compatibility)
|
# Prefer top-level system as plain string (proxy compatibility)
|
||||||
pm_struct = {"messages": built3, "system_text": st}
|
pm_struct = {"messages": built3, "system_text": st}
|
||||||
if st:
|
# Prefer array of system blocks when possible; fallback to single text block
|
||||||
pm_struct["system"] = st
|
if sys_blocks:
|
||||||
|
pm_struct["system"] = sys_blocks
|
||||||
|
elif st:
|
||||||
|
pm_struct["system"] = [{"type": "text", "text": st}]
|
||||||
|
|
||||||
# SSE метрика
|
# SSE метрика
|
||||||
try:
|
try:
|
||||||
@@ -2695,49 +3154,41 @@ class ProviderCallNode(Node):
|
|||||||
# Единый JSON-фрагмент PROMPT для шаблонов: [[PROMPT]]
|
# Единый JSON-фрагмент PROMPT для шаблонов: [[PROMPT]]
|
||||||
prompt_fragment = ""
|
prompt_fragment = ""
|
||||||
try:
|
try:
|
||||||
if provider == "openai":
|
if adapter:
|
||||||
prompt_fragment = '"messages": ' + json.dumps(pm_struct.get("messages", []), ensure_ascii=False)
|
prompt_fragment = adapter.prompt_fragment(pm_struct, self.config or {})
|
||||||
elif provider == "gemini":
|
else:
|
||||||
parts = []
|
if provider == "openai":
|
||||||
contents = pm_struct.get("contents")
|
prompt_fragment = '"messages": ' + json.dumps(pm_struct.get("messages", []), ensure_ascii=False)
|
||||||
if contents is not None:
|
elif provider in {"gemini", "gemini_image"}:
|
||||||
parts.append('"contents": ' + json.dumps(contents, ensure_ascii=False))
|
parts = []
|
||||||
sysi = pm_struct.get("systemInstruction")
|
contents = pm_struct.get("contents")
|
||||||
# даже если пустой объект {}, это валидно
|
if contents is not None:
|
||||||
if sysi is not None:
|
parts.append('"contents": ' + json.dumps(contents, ensure_ascii=False))
|
||||||
parts.append('"systemInstruction": ' + json.dumps(sysi, ensure_ascii=False))
|
sysi = pm_struct.get("systemInstruction")
|
||||||
prompt_fragment = ", ".join(parts)
|
if sysi is not None:
|
||||||
elif provider == "gemini_image":
|
parts.append('"systemInstruction": ' + json.dumps(sysi, ensure_ascii=False))
|
||||||
# Используем ту же структуру PROMPT, что и для Gemini (generateContent)
|
prompt_fragment = ", ".join(parts)
|
||||||
parts = []
|
elif provider == "claude":
|
||||||
contents = pm_struct.get("contents")
|
parts = []
|
||||||
if contents is not None:
|
# Учитываем флаг совместимости: при claude_no_system не добавляем top-level "system"
|
||||||
parts.append('"contents": ' + json.dumps(contents, ensure_ascii=False))
|
|
||||||
sysi = pm_struct.get("systemInstruction")
|
|
||||||
if sysi is not None:
|
|
||||||
parts.append('"systemInstruction": ' + json.dumps(sysi, ensure_ascii=False))
|
|
||||||
prompt_fragment = ", ".join(parts)
|
|
||||||
elif provider == "claude":
|
|
||||||
parts = []
|
|
||||||
# Учитываем флаг совместимости: при claude_no_system не добавляем top-level "system"
|
|
||||||
claude_no_system = False
|
|
||||||
try:
|
|
||||||
claude_no_system = bool((self.config or {}).get("claude_no_system", False))
|
|
||||||
except Exception:
|
|
||||||
claude_no_system = False
|
claude_no_system = False
|
||||||
|
try:
|
||||||
|
claude_no_system = bool((self.config or {}).get("claude_no_system", False))
|
||||||
|
except Exception:
|
||||||
|
claude_no_system = False
|
||||||
|
|
||||||
if not claude_no_system:
|
if not claude_no_system:
|
||||||
# Предпочитаем массив блоков system, если он есть; иначе строковый system_text
|
# Предпочитаем массив блоков system, если он есть; иначе строковый system_text
|
||||||
sys_val = pm_struct.get("system", None)
|
sys_val = pm_struct.get("system", None)
|
||||||
if sys_val is None:
|
if sys_val is None:
|
||||||
sys_val = pm_struct.get("system_text")
|
sys_val = pm_struct.get("system_text")
|
||||||
if sys_val:
|
if sys_val:
|
||||||
parts.append('"system": ' + json.dumps(sys_val, ensure_ascii=False))
|
parts.append('"system": ' + json.dumps(sys_val, ensure_ascii=False))
|
||||||
|
|
||||||
msgs = pm_struct.get("messages")
|
msgs = pm_struct.get("messages")
|
||||||
if msgs is not None:
|
if msgs is not None:
|
||||||
parts.append('"messages": ' + json.dumps(msgs, ensure_ascii=False))
|
parts.append('"messages": ' + json.dumps(msgs, ensure_ascii=False))
|
||||||
prompt_fragment = ", ".join(parts)
|
prompt_fragment = ", ".join(parts)
|
||||||
except Exception: # noqa: BLE001
|
except Exception: # noqa: BLE001
|
||||||
prompt_fragment = ""
|
prompt_fragment = ""
|
||||||
render_ctx["PROMPT"] = prompt_fragment
|
render_ctx["PROMPT"] = prompt_fragment
|
||||||
@@ -2809,6 +3260,24 @@ class ProviderCallNode(Node):
|
|||||||
pass
|
pass
|
||||||
# SSE: http_req (как в Burp)
|
# SSE: http_req (как в Burp)
|
||||||
req_id = f"{self.node_id}-{int(time.time()*1000)}"
|
req_id = f"{self.node_id}-{int(time.time()*1000)}"
|
||||||
|
# Register original (untrimmed) request details for manual resend
|
||||||
|
try:
|
||||||
|
pipeline_id = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pipeline_id = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
register_http_request(req_id, {
|
||||||
|
"pipeline_id": pipeline_id,
|
||||||
|
"node_id": self.node_id,
|
||||||
|
"node_type": self.type_name,
|
||||||
|
"provider": provider,
|
||||||
|
"method": "POST",
|
||||||
|
"url": url,
|
||||||
|
"headers": dict(final_headers),
|
||||||
|
"body_json": payload, # untrimmed original payload
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
try:
|
try:
|
||||||
trace_fn = context.get("_trace")
|
trace_fn = context.get("_trace")
|
||||||
if trace_fn:
|
if trace_fn:
|
||||||
@@ -2836,7 +3305,26 @@ class ProviderCallNode(Node):
|
|||||||
st: Optional[int] = None
|
st: Optional[int] = None
|
||||||
async with build_client(timeout=timeout_sec) as client:
|
async with build_client(timeout=timeout_sec) as client:
|
||||||
body_bytes = json.dumps(payload, ensure_ascii=False).encode("utf-8")
|
body_bytes = json.dumps(payload, ensure_ascii=False).encode("utf-8")
|
||||||
resp = await client.post(url, content=body_bytes, headers=final_headers)
|
# Cooperative cancel pre-check (avoid starting new HTTP on abort)
|
||||||
|
try:
|
||||||
|
pipeline_id = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pipeline_id = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
if is_cancelled(pipeline_id) and get_cancel_mode(pipeline_id) == "abort":
|
||||||
|
try:
|
||||||
|
print(f"TRACE http_cancel_pre: {self.node_id} abort before request")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
raise ExecutionError("Cancelled by user (abort)")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def _do_post():
|
||||||
|
return await client.post(url, content=body_bytes, headers=final_headers)
|
||||||
|
|
||||||
|
# Await HTTP with cooperative cancel/abort handling
|
||||||
|
resp = await _await_coro_with_cancel(_do_post(), pipeline_id)
|
||||||
# Do not raise_for_status: keep body/logs on 4xx/5xx
|
# Do not raise_for_status: keep body/logs on 4xx/5xx
|
||||||
try:
|
try:
|
||||||
print("===== ProviderCall RESPONSE BEGIN =====")
|
print("===== ProviderCall RESPONSE BEGIN =====")
|
||||||
@@ -3166,13 +3654,19 @@ class RawForwardNode(Node):
|
|||||||
# Автодетекция вендора для базового URL если base_url не задан
|
# Автодетекция вендора для базового URL если base_url не задан
|
||||||
if not base_url:
|
if not base_url:
|
||||||
vendor = detect_vendor(raw_payload)
|
vendor = detect_vendor(raw_payload)
|
||||||
if vendor == "openai":
|
base_url = None
|
||||||
base_url = "https://api.openai.com"
|
try:
|
||||||
elif vendor == "claude":
|
base_url = _adapter_default_base_url_for(vendor)
|
||||||
base_url = "https://api.anthropic.com"
|
except Exception:
|
||||||
elif vendor == "gemini":
|
base_url = None
|
||||||
base_url = "https://generativelanguage.googleapis.com"
|
if not base_url:
|
||||||
else:
|
if vendor == "openai":
|
||||||
|
base_url = "https://api.openai.com"
|
||||||
|
elif vendor == "claude":
|
||||||
|
base_url = "https://api.anthropic.com"
|
||||||
|
elif vendor == "gemini":
|
||||||
|
base_url = "https://generativelanguage.googleapis.com"
|
||||||
|
if not base_url:
|
||||||
raise ExecutionError(
|
raise ExecutionError(
|
||||||
f"Node {self.node_id} ({self.type_name}): 'base_url' is not configured and vendor could not be detected."
|
f"Node {self.node_id} ({self.type_name}): 'base_url' is not configured and vendor could not be detected."
|
||||||
)
|
)
|
||||||
@@ -3288,6 +3782,31 @@ class RawForwardNode(Node):
|
|||||||
|
|
||||||
# SSE: http_req
|
# SSE: http_req
|
||||||
req_id = f"{self.node_id}-{int(time.time()*1000)}"
|
req_id = f"{self.node_id}-{int(time.time()*1000)}"
|
||||||
|
# Register original (untrimmed) request details for manual resend
|
||||||
|
try:
|
||||||
|
pipeline_id = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pipeline_id = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
reg_info: Dict[str, Any] = {
|
||||||
|
"pipeline_id": pipeline_id,
|
||||||
|
"node_id": self.node_id,
|
||||||
|
"node_type": self.type_name,
|
||||||
|
"method": method,
|
||||||
|
"url": url,
|
||||||
|
"headers": dict(headers),
|
||||||
|
}
|
||||||
|
if raw_payload is not None:
|
||||||
|
reg_info["body_json"] = raw_payload # original JSON body
|
||||||
|
# Always keep original textual/bytes preview for non-JSON
|
||||||
|
reg_info["body_text"] = body_text
|
||||||
|
try:
|
||||||
|
reg_info["body_bytes_len"] = int(len(body_bytes or b""))
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
register_http_request(req_id, reg_info)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
try:
|
try:
|
||||||
trace_fn = context.get("_trace")
|
trace_fn = context.get("_trace")
|
||||||
if trace_fn:
|
if trace_fn:
|
||||||
@@ -3309,7 +3828,26 @@ class RawForwardNode(Node):
|
|||||||
async with build_client(timeout=timeout_sec) as client:
|
async with build_client(timeout=timeout_sec) as client:
|
||||||
# Для GET/HEAD обычно не отправляем body
|
# Для GET/HEAD обычно не отправляем body
|
||||||
send_content = None if method in {"GET", "HEAD"} else body_bytes
|
send_content = None if method in {"GET", "HEAD"} else body_bytes
|
||||||
resp = await client.request(method, url, headers=headers, content=send_content)
|
# Cooperative cancel pre-check (avoid starting new HTTP on abort)
|
||||||
|
try:
|
||||||
|
pipeline_id = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pipeline_id = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
if is_cancelled(pipeline_id) and get_cancel_mode(pipeline_id) == "abort":
|
||||||
|
try:
|
||||||
|
print(f"TRACE http_cancel_pre: {self.node_id} abort before request")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
raise ExecutionError("Cancelled by user (abort)")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def _do_req():
|
||||||
|
return await client.request(method, url, headers=headers, content=send_content)
|
||||||
|
|
||||||
|
# Await HTTP with cooperative cancel/abort handling
|
||||||
|
resp = await _await_coro_with_cancel(_do_req(), pipeline_id)
|
||||||
|
|
||||||
# Ответ: лог/печать
|
# Ответ: лог/печать
|
||||||
try:
|
try:
|
||||||
@@ -3628,6 +4166,22 @@ async def _providercall_run_with_while(self, inputs, context):
|
|||||||
last_idx = -1
|
last_idx = -1
|
||||||
|
|
||||||
for i in range(max_iters):
|
for i in range(max_iters):
|
||||||
|
# Cancel check before starting next iteration
|
||||||
|
try:
|
||||||
|
pid = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pid = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
if is_cancelled(pid):
|
||||||
|
mode = get_cancel_mode(pid)
|
||||||
|
try:
|
||||||
|
print(f"TRACE while_cancel: {self.node_id} mode={mode} at i={i} (pre)")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
# Build loop-local context with cycleindex and WAS_ERROR
|
# Build loop-local context with cycleindex and WAS_ERROR
|
||||||
ctx2 = dict(context or {})
|
ctx2 = dict(context or {})
|
||||||
try:
|
try:
|
||||||
@@ -3651,6 +4205,13 @@ async def _providercall_run_with_while(self, inputs, context):
|
|||||||
if i == 0:
|
if i == 0:
|
||||||
cond = True
|
cond = True
|
||||||
else:
|
else:
|
||||||
|
# Inject previous iteration error flag so [[WAS_ERROR]] in while_expr refers to the last iteration
|
||||||
|
try:
|
||||||
|
vmap2 = dict(ctx2.get("vars") or {})
|
||||||
|
except Exception:
|
||||||
|
vmap2 = {}
|
||||||
|
vmap2["WAS_ERROR"] = bool(last_was_error)
|
||||||
|
ctx2["vars"] = vmap2
|
||||||
# Augment OUT with the last output of this node so [[OUTn]] / [[OUT:nX...]] can see it
|
# Augment OUT with the last output of this node so [[OUTn]] / [[OUT:nX...]] can see it
|
||||||
try:
|
try:
|
||||||
out_aug = dict(out_map or {})
|
out_aug = dict(out_map or {})
|
||||||
@@ -3695,7 +4256,7 @@ async def _providercall_run_with_while(self, inputs, context):
|
|||||||
# Single iteration run; reenter node.run with _in_while flag set
|
# Single iteration run; reenter node.run with _in_while flag set
|
||||||
try:
|
try:
|
||||||
inner_out = await self.run(inputs, ctx2)
|
inner_out = await self.run(inputs, ctx2)
|
||||||
except Exception as exc: # network or other runtime error
|
except BaseException as exc:
|
||||||
if ignore:
|
if ignore:
|
||||||
inner_out = {"result": {"error": str(exc)}, "response_text": ""}
|
inner_out = {"result": {"error": str(exc)}, "response_text": ""}
|
||||||
try:
|
try:
|
||||||
@@ -3769,6 +4330,22 @@ async def _rawforward_run_with_while(self, inputs, context):
|
|||||||
last_idx = -1
|
last_idx = -1
|
||||||
|
|
||||||
for i in range(max_iters):
|
for i in range(max_iters):
|
||||||
|
# Cancel check before starting next iteration
|
||||||
|
try:
|
||||||
|
pid = str((context.get("meta") or {}).get("id", "pipeline_editor"))
|
||||||
|
except Exception:
|
||||||
|
pid = "pipeline_editor"
|
||||||
|
try:
|
||||||
|
if is_cancelled(pid):
|
||||||
|
mode = get_cancel_mode(pid)
|
||||||
|
try:
|
||||||
|
print(f"TRACE while_cancel: {self.node_id} mode={mode} at i={i} (pre)")
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
ctx2 = dict(context or {})
|
ctx2 = dict(context or {})
|
||||||
try:
|
try:
|
||||||
vmap = dict(ctx2.get("vars") or {})
|
vmap = dict(ctx2.get("vars") or {})
|
||||||
@@ -3787,6 +4364,13 @@ async def _rawforward_run_with_while(self, inputs, context):
|
|||||||
if i == 0:
|
if i == 0:
|
||||||
cond = True
|
cond = True
|
||||||
else:
|
else:
|
||||||
|
# Inject previous iteration error flag so [[WAS_ERROR]] in while_expr refers to the last iteration
|
||||||
|
try:
|
||||||
|
vmap2 = dict(ctx2.get("vars") or {})
|
||||||
|
except Exception:
|
||||||
|
vmap2 = {}
|
||||||
|
vmap2["WAS_ERROR"] = bool(last_was_error)
|
||||||
|
ctx2["vars"] = vmap2
|
||||||
try:
|
try:
|
||||||
out_aug = dict(out_map or {})
|
out_aug = dict(out_map or {})
|
||||||
out_aug[self.node_id] = dict(last_out or {})
|
out_aug[self.node_id] = dict(last_out or {})
|
||||||
@@ -3829,7 +4413,7 @@ async def _rawforward_run_with_while(self, inputs, context):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
inner_out = await self.run(inputs, ctx2)
|
inner_out = await self.run(inputs, ctx2)
|
||||||
except Exception as exc:
|
except BaseException as exc:
|
||||||
if ignore:
|
if ignore:
|
||||||
inner_out = {"result": {"error": str(exc)}, "response_text": ""}
|
inner_out = {"result": {"error": str(exc)}, "response_text": ""}
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -594,6 +594,12 @@ def _tokenize_condition_expr(expr: str, context: Dict[str, Any], out_map: Dict[s
|
|||||||
while j < n and (expr[j].isalnum() or expr[j] in "._"):
|
while j < n and (expr[j].isalnum() or expr[j] in "._"):
|
||||||
j += 1
|
j += 1
|
||||||
word = expr[i:j]
|
word = expr[i:j]
|
||||||
|
lw = word.lower()
|
||||||
|
# Литералы: true/false/null (любая раскладка) → Python-константы
|
||||||
|
if re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", word) and lw in {"true", "false", "null"}:
|
||||||
|
tokens.append("True" if lw == "true" else ("False" if lw == "false" else "None"))
|
||||||
|
i = j
|
||||||
|
continue
|
||||||
# Поддержка «голых» идентификаторов из vars: cycleindex, WAS_ERROR и т.п.
|
# Поддержка «голых» идентификаторов из vars: cycleindex, WAS_ERROR и т.п.
|
||||||
# Если это простой идентификатор (без точек) и он есть в context.vars — биндим его значением.
|
# Если это простой идентификатор (без точек) и он есть в context.vars — биндим его значением.
|
||||||
try:
|
try:
|
||||||
@@ -752,17 +758,19 @@ def _safe_eval_bool(py_expr: str, bindings: Dict[str, Any]) -> bool:
|
|||||||
if isinstance(node.op, ast.Not):
|
if isinstance(node.op, ast.Not):
|
||||||
return (not val)
|
return (not val)
|
||||||
if isinstance(node, ast.BoolOp) and isinstance(node.op, tuple(allowed_boolops)):
|
if isinstance(node, ast.BoolOp) and isinstance(node.op, tuple(allowed_boolops)):
|
||||||
vals = [bool(eval_node(v)) for v in node.values]
|
# Короткое замыкание:
|
||||||
|
# AND — при первом False прекращаем и возвращаем False; иначе True
|
||||||
|
# OR — при первом True прекращаем и возвращаем True; иначе False
|
||||||
if isinstance(node.op, ast.And):
|
if isinstance(node.op, ast.And):
|
||||||
res = True
|
for v in node.values:
|
||||||
for v in vals:
|
if not bool(eval_node(v)):
|
||||||
res = res and v
|
return False
|
||||||
return res
|
return True
|
||||||
if isinstance(node.op, ast.Or):
|
if isinstance(node.op, ast.Or):
|
||||||
res = False
|
for v in node.values:
|
||||||
for v in vals:
|
if bool(eval_node(v)):
|
||||||
res = res or v
|
return True
|
||||||
return res
|
return False
|
||||||
if isinstance(node, ast.Compare):
|
if isinstance(node, ast.Compare):
|
||||||
left = eval_node(node.left)
|
left = eval_node(node.left)
|
||||||
for opnode, comparator in zip(node.ops, node.comparators):
|
for opnode, comparator in zip(node.ops, node.comparators):
|
||||||
|
|||||||
34
agentui/providers/adapters/__init__.py
Normal file
34
agentui/providers/adapters/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
"""
|
||||||
|
Пакет адаптеров провайдеров для ProviderCall.
|
||||||
|
|
||||||
|
Экспортируем:
|
||||||
|
- ProviderAdapter базовый класс
|
||||||
|
- Реализации: OpenAIAdapter, GeminiAdapter, GeminiImageAdapter, ClaudeAdapter
|
||||||
|
- Утилиты: default_base_url_for, insert_items, split_pos_spec
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .base import ( # [ProviderAdapter](agentui/providers/adapters/base.py:10)
|
||||||
|
ProviderAdapter,
|
||||||
|
default_base_url_for,
|
||||||
|
insert_items,
|
||||||
|
split_pos_spec,
|
||||||
|
)
|
||||||
|
from .openai import OpenAIAdapter # [OpenAIAdapter](agentui/providers/adapters/openai.py:39)
|
||||||
|
from .gemini import ( # [GeminiAdapter](agentui/providers/adapters/gemini.py:56)
|
||||||
|
GeminiAdapter,
|
||||||
|
GeminiImageAdapter, # [GeminiImageAdapter](agentui/providers/adapters/gemini.py:332)
|
||||||
|
)
|
||||||
|
from .claude import ClaudeAdapter # [ClaudeAdapter](agentui/providers/adapters/claude.py:56)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ProviderAdapter",
|
||||||
|
"OpenAIAdapter",
|
||||||
|
"GeminiAdapter",
|
||||||
|
"GeminiImageAdapter",
|
||||||
|
"ClaudeAdapter",
|
||||||
|
"default_base_url_for",
|
||||||
|
"insert_items",
|
||||||
|
"split_pos_spec",
|
||||||
|
]
|
||||||
148
agentui/providers/adapters/base.py
Normal file
148
agentui/providers/adapters/base.py
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderAdapter(ABC): # [ProviderAdapter.__init__()](agentui/providers/adapters/base.py:10)
|
||||||
|
"""
|
||||||
|
Базовый интерфейс адаптера провайдера для ProviderCall.
|
||||||
|
|
||||||
|
Задачи адаптера:
|
||||||
|
- blocks_struct_for_template: собрать pm_struct из унифицированных сообщений (Prompt Blocks)
|
||||||
|
- normalize_segment/filter_items: привести произвольный сегмент к целевой провайдерной структуре и отфильтровать пустое
|
||||||
|
- extract_system_text_from_obj: вытащить системный текст из произвольного сегмента (если он там есть)
|
||||||
|
- combine_segments: слить pre_segments (prompt_preprocess) и prompt_combine с blocks_struct → итоговый pm_struct
|
||||||
|
- prompt_fragment: собрать строку JSON-фрагмента для подстановки в [[PROMPT]]
|
||||||
|
- default_endpoint/default_base_url: дефолты путей и базовых URL
|
||||||
|
"""
|
||||||
|
|
||||||
|
name: str = "base"
|
||||||
|
|
||||||
|
# --- Дефолты HTTP ---
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def default_base_url(self) -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def default_endpoint(self, model: str) -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
# --- PROMPT: построение провайдерных структур ---
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def blocks_struct_for_template(
|
||||||
|
self,
|
||||||
|
unified_messages: List[Dict[str, Any]],
|
||||||
|
context: Dict[str, Any],
|
||||||
|
node_config: Dict[str, Any],
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Из унифицированных сообщений [{role, content}] (включая text+image) собрать pm_struct
|
||||||
|
для целевого провайдера. Результат должен быть совместим с текущей логикой [[PROMPT]].
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def normalize_segment(self, obj: Any) -> List[Dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Привести произвольный сегмент (dict/list/str/числа) к целевому массиву элементов
|
||||||
|
(например, messages для openai/claude или contents для gemini).
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def filter_items(self, items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Отфильтровать пустые элементы (пустые тексты и т.п.) согласно правилам провайдера.
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def extract_system_text_from_obj(self, obj: Any, render_ctx: Dict[str, Any]) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Вытащить системный текст из произвольного объекта фрагмента:
|
||||||
|
- OpenAI: messages[*] role=system
|
||||||
|
- Gemini: systemInstruction.parts[].text
|
||||||
|
- Claude: top-level system (string/blocks)
|
||||||
|
Возвращает строку или None.
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def combine_segments(
|
||||||
|
self,
|
||||||
|
blocks_struct: Dict[str, Any],
|
||||||
|
pre_segments_raw: List[Dict[str, Any]],
|
||||||
|
raw_segs: List[str],
|
||||||
|
render_ctx: Dict[str, Any],
|
||||||
|
pre_var_paths: set[str],
|
||||||
|
render_template_simple_fn, # (s, ctx, out_map) -> str
|
||||||
|
var_macro_fullmatch_re, # _VAR_MACRO_RE.fullmatch
|
||||||
|
detect_vendor_fn, # detect_vendor
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Слить blocks_struct c массивами pre_segments_raw и строковыми raw_segs (prompt_combine)
|
||||||
|
и вернуть итоговый pm_struct. Поведение должно повторять текущее (позиционирование, фильтр пустых,
|
||||||
|
сбор системного текста).
|
||||||
|
"""
|
||||||
|
...
|
||||||
|
|
||||||
|
    @abstractmethod
    def prompt_fragment(self, pm_struct: Dict[str, Any], node_config: Dict[str, Any]) -> str:
        """
        Build the JSON fragment string for [[PROMPT]] from the final pm_struct.
        """
        ...
|
||||||
|
|
||||||
|
|
||||||
|
# --- Общие утилиты для позиционирования и парсинга директив ---------------------
|
||||||
|
|
||||||
|
def insert_items(base: List[Any], items: List[Any], pos_spec: Optional[str]) -> List[Any]:
    """Insert *items* into *base* according to a position directive.

    ``pos_spec`` may be ``None``/"append" (extends *base* in place), "prepend"
    (returns a new list with *items* first), or an integer index string —
    negative values count from the end and the index is clamped to the valid
    range (returns a new list). Anything unparsable falls back to appending
    in place.
    """
    if not items:
        return base
    directive = str(pos_spec).lower() if pos_spec else "append"
    if directive == "append":
        base.extend(items)
        return base
    if directive == "prepend":
        return list(items) + base
    try:
        at = int(pos_spec)  # type: ignore[arg-type]
    except Exception:
        # Unparsable directive: behave like "append".
        base.extend(items)
        return base
    if at < 0:
        at += len(base)
    at = min(max(at, 0), len(base))
    return base[:at] + list(items) + base[at:]
|
||||||
|
|
||||||
|
|
||||||
|
def split_pos_spec(s: str) -> Tuple[str, Optional[str]]:
    """Split a trailing ``@pos=...`` directive off a segment string.

    Returns ``(body, pos_spec)`` where ``pos_spec`` is the lowercased
    directive value ("prepend", "append" or an integer string), or ``None``
    when no directive is present.
    """
    import re as _re

    text = str(s or "")
    match = _re.search(r"@pos\s*=\s*(prepend|append|-?\d+)\s*$", text, flags=_re.IGNORECASE)
    if match is None:
        return text.strip(), None
    return text[: match.start()].strip(), match.group(1).strip().lower()
|
||||||
|
|
||||||
|
|
||||||
|
# --- Дефолтные base_url по "вендору" (используется RawForward) ------------------
|
||||||
|
|
||||||
|
def default_base_url_for(vendor: str) -> Optional[str]:
    """Return the default API base URL for a vendor name (used by RawForward).

    Unknown or empty vendors yield ``None``. Matching is case-insensitive and
    ignores surrounding whitespace; "anthropic" aliases "claude" and
    "gemini_image" aliases "gemini".
    """
    key = (vendor or "").strip().lower()
    urls = {
        "openai": "https://api.openai.com",
        "claude": "https://api.anthropic.com",
        "anthropic": "https://api.anthropic.com",
        "gemini": "https://generativelanguage.googleapis.com",
        "gemini_image": "https://generativelanguage.googleapis.com",
    }
    return urls.get(key)
|
||||||
475
agentui/providers/adapters/claude.py
Normal file
475
agentui/providers/adapters/claude.py
Normal file
@@ -0,0 +1,475 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from agentui.providers.adapters.base import ( # [ProviderAdapter](agentui/providers/adapters/base.py:10)
|
||||||
|
ProviderAdapter,
|
||||||
|
insert_items,
|
||||||
|
split_pos_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_data_url(u: str) -> bool: # [_is_data_url()](agentui/providers/adapters/claude.py:14)
|
||||||
|
return isinstance(u, str) and u.strip().lower().startswith("data:")
|
||||||
|
|
||||||
|
|
||||||
|
def _split_data_url(u: str) -> tuple[str, str]: # [_split_data_url()](agentui/providers/adapters/claude.py:18)
|
||||||
|
"""
|
||||||
|
Возвращает (mime, b64) для data URL.
|
||||||
|
Поддерживаем форму: data:<mime>;base64,<b64>
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
header, b64 = u.split(",", 1)
|
||||||
|
mime = "application/octet-stream"
|
||||||
|
if header.startswith("data:"):
|
||||||
|
header2 = header[5:]
|
||||||
|
if ";base64" in header2:
|
||||||
|
mime = header2.split(";base64", 1)[0] or mime
|
||||||
|
elif ";" in header2:
|
||||||
|
mime = header2.split(";", 1)[0] or mime
|
||||||
|
elif header2:
|
||||||
|
mime = header2
|
||||||
|
return mime, b64
|
||||||
|
except Exception:
|
||||||
|
return "application/octet-stream", ""
|
||||||
|
|
||||||
|
|
||||||
|
def _try_json(s: str) -> Any: # [_try_json()](agentui/providers/adapters/claude.py:38)
|
||||||
|
try:
|
||||||
|
obj = json.loads(s)
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
obj = json.loads(s, strict=False) # type: ignore[call-arg]
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
for _ in range(2):
|
||||||
|
if isinstance(obj, str):
|
||||||
|
st = obj.strip()
|
||||||
|
if (st.startswith("{") and st.endswith("}")) or (st.startswith("[") and st.endswith("]")):
|
||||||
|
try:
|
||||||
|
obj = json.loads(st)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
break
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
class ClaudeAdapter(ProviderAdapter):  # [ClaudeAdapter](agentui/providers/adapters/claude.py:56)
    """Adapter that builds Anthropic (Claude) Messages-API payload fragments.

    Converts unified chat messages and foreign-vendor payloads (OpenAI
    ``messages``, Gemini ``contents``) into Claude ``messages``/``system``
    structures and renders the JSON fragment used by the [[PROMPT]] logic.
    """

    name = "claude"

    # --- HTTP defaults ---
    def default_base_url(self) -> str:
        """Default Anthropic API host."""
        return "https://api.anthropic.com"

    def default_endpoint(self, model: str) -> str:
        """Default Messages API endpoint; *model* is unused here."""
        return "/v1/messages"

    # --- PROMPT: building provider-specific structures ---

    def blocks_struct_for_template(
        self,
        unified_messages: List[Dict[str, Any]],
        context: Dict[str, Any],
        node_config: Dict[str, Any],
    ) -> Dict[str, Any]:
        """
        Build the Claude blocks structure from unified [{role, content}] messages.

        Compatible with the provider=='claude' branch of
        [ProviderCallNode._blocks_struct_for_template()](agentui/pipeline/executor.py:2022).

        Returns a dict with ``messages`` (Claude content blocks), ``system_text``
        (joined system text) and — unless ``claude_no_system`` is set —
        ``system`` as a plain string.
        """
        # Collect system messages as plain text.
        sys_msgs = []
        for m in (unified_messages or []):
            if m.get("role") == "system":
                c = m.get("content")
                if isinstance(c, list):
                    sys_msgs.append("\n".join([str(p.get("text") or "") for p in c if isinstance(p, dict) and p.get("type") == "text"]))
                else:
                    sys_msgs.append(str(c or ""))
        sys_text = "\n\n".join([s for s in sys_msgs if s]).strip()

        # Map the remaining messages to Claude content blocks (text + image).
        out_msgs = []
        for m in (unified_messages or []):
            if m.get("role") == "system":
                continue
            role = m.get("role")
            # Claude only accepts user/assistant roles; anything else becomes user.
            role = role if role in {"user", "assistant"} else "user"
            c = m.get("content")
            blocks: List[Dict[str, Any]] = []
            if isinstance(c, list):
                for p in c:
                    if not isinstance(p, dict):
                        continue
                    if p.get("type") == "text":
                        blocks.append({"type": "text", "text": str(p.get("text") or "")})
                    elif p.get("type") in {"image_url", "image"}:
                        url = str(p.get("url") or "")
                        if _is_data_url(url):
                            # data: URL -> embedded base64 image source
                            mime, b64 = _split_data_url(url)
                            blocks.append({"type": "image", "source": {"type": "base64", "media_type": mime, "data": b64}})
                        else:
                            blocks.append({"type": "image", "source": {"type": "url", "url": url}})
            else:
                blocks.append({"type": "text", "text": str(c or "")})
            out_msgs.append({"role": role, "content": blocks})

        claude_no_system = False
        try:
            claude_no_system = bool((node_config or {}).get("claude_no_system", False))
        except Exception:
            claude_no_system = False

        if claude_no_system:
            # Compatibility mode: fold the system text into a leading user message.
            if sys_text:
                out_msgs = [{"role": "user", "content": [{"type": "text", "text": sys_text}]}] + out_msgs
            return {
                "messages": out_msgs,
                "system_text": sys_text,
            }

        d = {
            "system_text": sys_text,
            "messages": out_msgs,
        }
        if sys_text:
            # Prefer system as a plain string (proxy compatibility)
            d["system"] = sys_text
        return d

    def normalize_segment(self, x: Any) -> List[Dict[str, Any]]:
        """
        Normalize an arbitrary segment (dict/list/str/other) into a list of
        Claude messages. Compatible with
        [_as_claude_messages()](agentui/pipeline/executor.py:2602).

        Accepts OpenAI-shaped dicts/lists (``messages``/``content``),
        Gemini-shaped dicts/lists (``contents``/``parts``), JSON strings, and
        falls back to a single user text message otherwise.
        """
        msgs: List[Dict[str, Any]] = []
        try:
            if isinstance(x, dict):
                # Dict with messages (OpenAI-like)
                if isinstance(x.get("messages"), list):
                    x = x.get("messages") or []
                    # fallthrough to list mapping below
                elif isinstance(x.get("contents"), list):
                    # Gemini -> Claude
                    for c in (x.get("contents") or []):
                        if not isinstance(c, dict):
                            continue
                        role_raw = str(c.get("role") or "user")
                        role = "assistant" if role_raw == "model" else ("user" if role_raw not in {"user", "assistant"} else role_raw)
                        parts = c.get("parts") or []
                        text = "\n".join([str(p.get("text")) for p in parts if isinstance(p, dict) and isinstance(p.get("text"), str)]).strip()
                        msgs.append({"role": role, "content": [{"type": "text", "text": text}]})
                    return msgs

            if isinstance(x, list):
                # Gemini contents list -> Claude messages
                if all(isinstance(c, dict) and "parts" in c for c in x):
                    for c in x:
                        role_raw = str(c.get("role") or "user")
                        role = "assistant" if role_raw == "model" else ("user" if role_raw not in {"user", "assistant"} else role_raw)
                        blocks: List[Dict[str, Any]] = []
                        for p in (c.get("parts") or []):
                            if isinstance(p, dict) and isinstance(p.get("text"), str):
                                txt = p.get("text").strip()
                                if txt:
                                    blocks.append({"type": "text", "text": txt})
                        msgs.append({"role": role, "content": blocks or [{"type": "text", "text": ""}]})
                    return msgs
                # OpenAI messages list -> Claude
                if all(isinstance(m, dict) and "content" in m for m in x):
                    out: List[Dict[str, Any]] = []
                    for m in x:
                        role = m.get("role", "user")
                        cont = m.get("content")
                        blocks: List[Dict[str, Any]] = []
                        if isinstance(cont, str):
                            blocks.append({"type": "text", "text": cont})
                        elif isinstance(cont, list):
                            for p in cont:
                                if not isinstance(p, dict):
                                    continue
                                if p.get("type") == "text":
                                    blocks.append({"type": "text", "text": str(p.get("text") or "")})
                                elif p.get("type") in {"image_url", "image"}:
                                    url = ""
                                    if isinstance(p.get("image_url"), dict):
                                        url = str((p.get("image_url") or {}).get("url") or "")
                                    elif "url" in p:
                                        url = str(p.get("url") or "")
                                    if url:
                                        blocks.append({"type": "image", "source": {"type": "url", "url": url}})
                        else:
                            # Unknown content shape: serialize as JSON text.
                            blocks.append({"type": "text", "text": json.dumps(cont, ensure_ascii=False)})
                        out.append({"role": role if role in {"user", "assistant"} else "user", "content": blocks})
                    return out
                # Fallback
                return [{"role": "user", "content": [{"type": "text", "text": json.dumps(x, ensure_ascii=False)}]}]

            if isinstance(x, str):
                # Strings that decode as JSON are normalized recursively.
                try_obj = _try_json(x)
                if try_obj is not None:
                    return self.normalize_segment(try_obj)
                return [{"role": "user", "content": [{"type": "text", "text": x}]}]
            return [{"role": "user", "content": [{"type": "text", "text": json.dumps(x, ensure_ascii=False)}]}]
        except Exception:
            # Last-resort fallback: stringified input as a single user message.
            return [{"role": "user", "content": [{"type": "text", "text": str(x)}]}]

    def filter_items(self, arr: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """
        Drop messages whose content has no non-blank text blocks.
        Compatible with [_filter_claude()](agentui/pipeline/executor.py:2820).

        NOTE(review): only ``type == "text"`` blocks survive — image blocks
        produced by blocks_struct_for_template()/normalize_segment() are
        dropped here; confirm this is intended.
        """
        out: List[Dict[str, Any]] = []
        for m in (arr or []):
            if not isinstance(m, dict):
                continue
            blocks = m.get("content")
            if isinstance(blocks, list):
                norm = []
                for b in blocks:
                    if isinstance(b, dict) and b.get("type") == "text":
                        txt = str(b.get("text") or "")
                        if txt.strip():
                            norm.append({"type": "text", "text": txt})
                if norm:
                    out.append({"role": m.get("role", "user"), "content": norm})
        return out

    def extract_system_text_from_obj(self, x: Any, render_ctx: Dict[str, Any]) -> Optional[str]:
        """
        Extract system text from an arbitrary fragment object.
        Behavior compatible with
        [_extract_sys_text_from_obj()](agentui/pipeline/executor.py:2676).

        Checks, in order: Gemini ``systemInstruction`` (dict/list/str), Claude
        top-level ``system`` (string or text blocks; only when no ``messages``
        list is present), and OpenAI ``messages`` with role=system. For a bare
        Gemini ``contents`` list, falls back to the incoming request snapshot
        in ``render_ctx``. Returns the joined text or None.
        """
        try:
            # Dict objects
            if isinstance(x, dict):
                # Gemini systemInstruction
                if "systemInstruction" in x:
                    si = x.get("systemInstruction")

                    def _parts_to_text(siobj: Any) -> str:
                        # Join non-blank part texts with newlines.
                        try:
                            parts = siobj.get("parts") or []
                            texts = [
                                str(p.get("text") or "")
                                for p in parts
                                if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip()
                            ]
                            return "\n".join([t for t in texts if t]).strip()
                        except Exception:
                            return ""

                    if isinstance(si, dict):
                        t = _parts_to_text(si)
                        if t:
                            return t
                    if isinstance(si, list):
                        texts = []
                        for p in si:
                            if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip():
                                texts.append(p.get("text").strip())
                        t = "\n".join(texts).strip()
                        if t:
                            return t
                    if isinstance(si, str) and si.strip():
                        return si.strip()
                # Claude system (string or blocks)
                if "system" in x and not ("messages" in x and isinstance(x.get("messages"), list)):
                    sysv = x.get("system")
                    if isinstance(sysv, str) and sysv.strip():
                        return sysv.strip()
                    if isinstance(sysv, list):
                        texts = [
                            str(b.get("text") or "")
                            for b in sysv
                            if isinstance(b, dict)
                            and (b.get("type") == "text")
                            and isinstance(b.get("text"), str)
                            and b.get("text").strip()
                        ]
                        t = "\n".join([t for t in texts if t]).strip()
                        if t:
                            return t
                # OpenAI messages with role=system
                if isinstance(x.get("messages"), list):
                    sys_msgs = []
                    for m in (x.get("messages") or []):
                        try:
                            if (str(m.get("role") or "").lower().strip() == "system"):
                                cont = m.get("content")
                                if isinstance(cont, str) and cont.strip():
                                    sys_msgs.append(cont.strip())
                                elif isinstance(cont, list):
                                    for p in cont:
                                        if isinstance(p, dict) and p.get("type") == "text" and isinstance(p.get("text"), str) and p.get("text").strip():
                                            sys_msgs.append(p.get("text").strip())
                        except Exception:
                            continue
                    if sys_msgs:
                        return "\n\n".join(sys_msgs).strip()

            # List objects
            if isinstance(x, list):
                # OpenAI messages list with role=system
                if all(isinstance(m, dict) and "role" in m for m in x):
                    sys_msgs = []
                    for m in x:
                        try:
                            if (str(m.get("role") or "").lower().strip() == "system"):
                                cont = m.get("content")
                                if isinstance(cont, str) and cont.strip():
                                    sys_msgs.append(cont.strip())
                                elif isinstance(cont, list):
                                    for p in cont:
                                        if isinstance(p, dict) and p.get("type") == "text" and isinstance(p.get("text"), str) and p.get("text").strip():
                                            sys_msgs.append(p.get("text").strip())
                        except Exception:
                            continue
                    if sys_msgs:
                        return "\n\n".join(sys_msgs).strip()
                # Gemini 'contents' list: try reading systemInstruction from the incoming snapshot
                if all(isinstance(c, dict) and "parts" in c for c in x):
                    try:
                        inc = (render_ctx.get("incoming") or {}).get("json") or {}
                        si = inc.get("systemInstruction")
                        if si is not None:
                            return self.extract_system_text_from_obj({"systemInstruction": si}, render_ctx)
                    except Exception:
                        pass
            return None
        except Exception:
            return None

    def combine_segments(
        self,
        blocks_struct: Dict[str, Any],
        pre_segments_raw: List[Dict[str, Any]],
        raw_segs: List[str],
        render_ctx: Dict[str, Any],
        pre_var_paths: set[str],
        render_template_simple_fn,
        var_macro_fullmatch_re,
        detect_vendor_fn,
    ) -> Dict[str, Any]:
        """
        Replicates the provider=='claude' branch of prompt_combine
        ([ProviderCallNode.run()](agentui/pipeline/executor.py:2998)).

        Merges pre-segments and raw string segments (with optional ``@pos=``
        directives) into the base ``blocks_struct`` messages, gathering system
        text along the way. ``[[PROMPT]]`` segments splice in the filtered
        base messages; segments that are pure var macros already inserted via
        prompt_preprocess (``pre_var_paths``) are skipped.
        """
        built3: List[Dict[str, Any]] = []
        sys_texts: List[str] = []

        # Node config (for claude_no_system) is passed via render_ctx['_node_config'];
        # see integration.
        node_cfg = {}
        try:
            nc = render_ctx.get("_node_config")
            if isinstance(nc, dict):
                node_cfg = nc
        except Exception:
            node_cfg = {}
        claude_no_system = False
        try:
            claude_no_system = bool(node_cfg.get("claude_no_system", False))
        except Exception:
            claude_no_system = False

        # Pre-segments
        for _pre in (pre_segments_raw or []):
            try:
                _obj = _pre.get("obj")
                items = self.normalize_segment(_obj)
                items = self.filter_items(items)
                built3 = insert_items(built3, items, _pre.get("pos"))
                try:
                    sx = self.extract_system_text_from_obj(_obj, render_ctx)
                    if isinstance(sx, str) and sx.strip():
                        sys_texts.append(sx.strip())
                except Exception:
                    pass
            except Exception:
                pass

        # Main segments
        for raw_seg in (raw_segs or []):
            body_seg, pos_spec = split_pos_spec(raw_seg)
            if body_seg == "[[PROMPT]]":
                items = self.filter_items(list(blocks_struct.get("messages", []) or []))
                built3 = insert_items(built3, items, pos_spec)
                continue
            m_pre = var_macro_fullmatch_re.fullmatch(body_seg)
            if m_pre:
                _p = (m_pre.group(1) or "").strip()
                try:
                    if _p in pre_var_paths:
                        # Skip duplicate var segment - already inserted via prompt_preprocess (filtered)
                        continue
                except Exception:
                    pass
            resolved = render_template_simple_fn(body_seg, render_ctx, render_ctx.get("OUT") or {})
            obj = _try_json(resolved)
            try:
                pg = detect_vendor_fn(obj if isinstance(obj, dict) else {})
                # NOTE(review): leftover debug print — consider routing through logging.
                print(f"DEBUG: prompt_combine seg provider_guess={pg} -> target=claude pos={pos_spec}")
            except Exception:
                pass
            items = self.normalize_segment(obj if obj is not None else resolved)
            items = self.filter_items(items)
            built3 = insert_items(built3, items, pos_spec)
            try:
                sx = self.extract_system_text_from_obj(obj, render_ctx) if obj is not None else None
                if isinstance(sx, str) and sx.strip():
                    sys_texts.append(sx.strip())
            except Exception:
                pass

        # No segments produced anything: fall back to the base PROMPT messages.
        if not built3:
            built3 = self.filter_items(list(blocks_struct.get("messages", []) or []))

        # Merge system blocks from PROMPT blocks + gathered sys_texts
        existing_sys = blocks_struct.get("system") or []
        sys_blocks: List[Dict[str, Any]] = []
        if isinstance(existing_sys, list):
            sys_blocks.extend(existing_sys)
        st0 = blocks_struct.get("system_text") or ""
        # Ensure PROMPT system_text from blocks is included as a Claude system block
        if isinstance(st0, str) and st0.strip():
            sys_blocks.append({"type": "text", "text": st0})
        for s in sys_texts:
            sys_blocks.append({"type": "text", "text": s})
        st = "\n\n".join([t for t in [st0] + sys_texts if isinstance(t, str) and t.strip()])

        if claude_no_system:
            # Prepend system text as a user message instead of top-level system
            if st:
                built3 = [{"role": "user", "content": [{"type": "text", "text": st}]}] + built3
            return {"messages": built3, "system_text": st}

        pm_struct = {"messages": built3, "system_text": st}
        # Prefer array of system blocks when possible; fallback to single text block
        if sys_blocks:
            pm_struct["system"] = sys_blocks
        elif st:
            pm_struct["system"] = [{"type": "text", "text": st}]
        return pm_struct

    def prompt_fragment(self, pm_struct: Dict[str, Any], node_config: Dict[str, Any]) -> str:
        """
        Build the JSON fragment for [[PROMPT]] from the final pm_struct.
        Compatible with the provider=='claude' branch in [[PROMPT]] building
        ([ProviderCallNode.run()](agentui/pipeline/executor.py:3125)).

        Returns comma-joined ``"system": ...`` / ``"messages": ...`` key-value
        pairs without surrounding braces (embedded into a larger JSON object
        by the caller — presumably; confirm against the template).
        """
        parts: List[str] = []
        # Respect the compatibility flag: with claude_no_system, no top-level "system".
        claude_no_system = False
        try:
            claude_no_system = bool((node_config or {}).get("claude_no_system", False))
        except Exception:
            claude_no_system = False

        if not claude_no_system:
            # Prefer the system block array when present; otherwise the system_text string.
            sys_val = pm_struct.get("system", None)
            if sys_val is None:
                sys_val = pm_struct.get("system_text")
            if sys_val:
                parts.append('"system": ' + json.dumps(sys_val, ensure_ascii=False))

        msgs = pm_struct.get("messages")
        if msgs is not None:
            parts.append('"messages": ' + json.dumps(msgs, ensure_ascii=False))
        return ", ".join(parts)
|
||||||
419
agentui/providers/adapters/gemini.py
Normal file
419
agentui/providers/adapters/gemini.py
Normal file
@@ -0,0 +1,419 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from agentui.providers.adapters.base import ( # [ProviderAdapter](agentui/providers/adapters/base.py:10)
|
||||||
|
ProviderAdapter,
|
||||||
|
insert_items,
|
||||||
|
split_pos_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_data_url(u: str) -> bool: # [_is_data_url()](agentui/providers/adapters/gemini.py:14)
|
||||||
|
return isinstance(u, str) and u.strip().lower().startswith("data:")
|
||||||
|
|
||||||
|
|
||||||
|
def _split_data_url(u: str) -> tuple[str, str]: # [_split_data_url()](agentui/providers/adapters/gemini.py:18)
|
||||||
|
"""
|
||||||
|
Возвращает (mime, b64) для data URL.
|
||||||
|
Поддерживаем форму: data:<mime>;base64,<b64>
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
header, b64 = u.split(",", 1)
|
||||||
|
mime = "application/octet-stream"
|
||||||
|
if header.startswith("data:"):
|
||||||
|
header2 = header[5:]
|
||||||
|
if ";base64" in header2:
|
||||||
|
mime = header2.split(";base64", 1)[0] or mime
|
||||||
|
elif ";" in header2:
|
||||||
|
mime = header2.split(";", 1)[0] or mime
|
||||||
|
elif header2:
|
||||||
|
mime = header2
|
||||||
|
return mime, b64
|
||||||
|
except Exception:
|
||||||
|
return "application/octet-stream", ""
|
||||||
|
|
||||||
|
|
||||||
|
def _try_json(s: str) -> Any: # [_try_json()](agentui/providers/adapters/gemini.py:38)
|
||||||
|
try:
|
||||||
|
obj = json.loads(s)
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
obj = json.loads(s, strict=False) # type: ignore[call-arg]
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
for _ in range(2):
|
||||||
|
if isinstance(obj, str):
|
||||||
|
st = obj.strip()
|
||||||
|
if (st.startswith("{") and st.endswith("}")) or (st.startswith("[") and st.endswith("]")):
|
||||||
|
try:
|
||||||
|
obj = json.loads(st)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
break
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
class GeminiAdapter(ProviderAdapter): # [GeminiAdapter.__init__()](agentui/providers/adapters/gemini.py:56)
|
||||||
|
name = "gemini"
|
||||||
|
|
||||||
|
# --- Дефолты HTTP ---
|
||||||
|
    def default_base_url(self) -> str:
        """Default Gemini API host (Google Generative Language API)."""
        return "https://generativelanguage.googleapis.com"
|
||||||
|
|
||||||
|
    def default_endpoint(self, model: str) -> str:
        """Default endpoint with a ``{{ model }}`` template placeholder.

        The placeholder is kept verbatim (as in the original code) —
        presumably rendered later by the template engine; confirm at the
        call site.
        """
        # endpoint with a model template (as in the original code)
        return "/v1beta/models/{{ model }}:generateContent"
|
||||||
|
|
||||||
|
# --- PROMPT: построение провайдерных структур ---
|
||||||
|
|
||||||
|
    def blocks_struct_for_template(
        self,
        unified_messages: List[Dict[str, Any]],
        context: Dict[str, Any],
        node_config: Dict[str, Any],
    ) -> Dict[str, Any]:
        """
        Build the Gemini blocks structure from unified [{role, content}] messages.

        Compatible with the provider in {'gemini','gemini_image'} branch of
        [ProviderCallNode._blocks_struct_for_template()](agentui/pipeline/executor.py:1981).

        Returns a dict with ``contents`` (role/parts entries), ``system_text``
        and, when system text exists, ``systemInstruction``.
        """
        def _text_from_msg(m: Dict[str, Any]) -> str:
            # Flatten a message's content to plain text (text parts only).
            c = m.get("content")
            if isinstance(c, list):
                texts = [str(p.get("text") or "") for p in c if isinstance(p, dict) and p.get("type") == "text"]
                return "\n".join([t for t in texts if t])
            return str(c or "")

        sys_text = "\n\n".join([_text_from_msg(m) for m in (unified_messages or []) if m.get("role") == "system"]).strip()

        contents: List[Dict[str, Any]] = []
        for m in (unified_messages or []):
            if m.get("role") == "system":
                continue
            # Gemini uses "model" for the assistant role; everything else is "user".
            role = "model" if m.get("role") == "assistant" else "user"
            c = m.get("content")
            parts: List[Dict[str, Any]] = []
            if isinstance(c, list):
                for p in c:
                    if not isinstance(p, dict):
                        continue
                    if p.get("type") == "text":
                        parts.append({"text": str(p.get("text") or "")})
                    elif p.get("type") in {"image_url", "image"}:
                        url = str(p.get("url") or "")
                        if _is_data_url(url):
                            # data: URL -> inline base64 image data
                            mime, b64 = _split_data_url(url)
                            parts.append({"inline_data": {"mime_type": mime, "data": b64}})
                        else:
                            # Non-data URLs are passed through as plain text.
                            parts.append({"text": url})
            else:
                parts.append({"text": str(c or "")})
            contents.append({"role": role, "parts": parts})

        d: Dict[str, Any] = {
            "contents": contents,
            "system_text": sys_text,
        }
        if sys_text:
            d["systemInstruction"] = {"parts": [{"text": sys_text}]}
        return d
|
||||||
|
|
||||||
|
    def normalize_segment(self, x: Any) -> List[Dict[str, Any]]:
        """
        Normalize an arbitrary segment into a list of Gemini ``contents``.
        Compatible with [_as_gemini_contents()](agentui/pipeline/executor.py:2521).

        Accepts Gemini-shaped input (returned as-is), OpenAI-shaped dicts/lists
        (mapped to role/parts), JSON strings (decoded then normalized), and
        falls back to a single user text part otherwise.
        """
        cnts: List[Dict[str, Any]] = []
        try:
            if isinstance(x, dict):
                if isinstance(x.get("contents"), list):
                    # Already Gemini-shaped: pass through (shallow copy).
                    return list(x.get("contents") or [])
                if isinstance(x.get("messages"), list):
                    # OpenAI → Gemini
                    for m in (x.get("messages") or []):
                        if not isinstance(m, dict):
                            continue
                        role_raw = str(m.get("role") or "user")
                        role = "model" if role_raw == "assistant" else "user"
                        cont = m.get("content")
                        parts: List[Dict[str, Any]] = []
                        if isinstance(cont, str):
                            parts = [{"text": cont}]
                        elif isinstance(cont, list):
                            for p in cont:
                                if not isinstance(p, dict):
                                    continue
                                if p.get("type") == "text":
                                    parts.append({"text": str(p.get("text") or "")})
                                elif p.get("type") in {"image_url", "image"}:
                                    # Gemini does not accept external image URLs as images — keep as a text link
                                    url = ""
                                    if isinstance(p.get("image_url"), dict):
                                        url = str((p.get("image_url") or {}).get("url") or "")
                                    elif "url" in p:
                                        url = str(p.get("url") or "")
                                    if url:
                                        parts.append({"text": url})
                        else:
                            parts = [{"text": json.dumps(cont, ensure_ascii=False)}]
                        cnts.append({"role": role, "parts": parts})
                    return cnts

            if isinstance(x, list):
                # Gemini contents list already
                if all(isinstance(c, dict) and "parts" in c for c in x):
                    return list(x)
                # OpenAI messages list -> Gemini
                if all(isinstance(m, dict) and "content" in m for m in x):
                    out: List[Dict[str, Any]] = []
                    for m in x:
                        role_raw = str(m.get("role") or "user")
                        role = "model" if role_raw == "assistant" else "user"
                        cont = m.get("content")
                        parts: List[Dict[str, Any]] = []
                        if isinstance(cont, str):
                            parts = [{"text": cont}]
                        elif isinstance(cont, list):
                            for p in cont:
                                if not isinstance(p, dict):
                                    continue
                                if p.get("type") == "text":
                                    parts.append({"text": str(p.get("text") or "")})
                                elif p.get("type") in {"image_url", "image"}:
                                    url = ""
                                    if isinstance(p.get("image_url"), dict):
                                        url = str((p.get("image_url") or {}).get("url") or "")
                                    elif "url" in p:
                                        url = str(p.get("url") or "")
                                    if url:
                                        parts.append({"text": url})
                        else:
                            parts = [{"text": json.dumps(cont, ensure_ascii=False)}]
                        out.append({"role": role, "parts": parts})
                    return out
                # Fallback
                return [{"role": "user", "parts": [{"text": json.dumps(x, ensure_ascii=False)}]}]

            if isinstance(x, str):
                # Strings that decode as JSON are normalized recursively.
                try_obj = _try_json(x)
                if try_obj is not None:
                    return self.normalize_segment(try_obj)
                return [{"role": "user", "parts": [{"text": x}]}]
            return [{"role": "user", "parts": [{"text": json.dumps(x, ensure_ascii=False)}]}]
        except Exception:
            # Last-resort fallback: stringified input as a single user part.
            return [{"role": "user", "parts": [{"text": str(x)}]}]
|
||||||
|
|
||||||
|
def filter_items(self, arr: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Совместимо с [_filter_gemini()](agentui/pipeline/executor.py:2782).
|
||||||
|
Сохраняем inline_data/inlineData как есть; текстовые части — только непустые.
|
||||||
|
"""
|
||||||
|
out: List[Dict[str, Any]] = []
|
||||||
|
for it in (arr or []):
|
||||||
|
if not isinstance(it, dict):
|
||||||
|
continue
|
||||||
|
parts = it.get("parts") or []
|
||||||
|
norm_parts = []
|
||||||
|
for p in parts:
|
||||||
|
if isinstance(p, dict):
|
||||||
|
t = p.get("text")
|
||||||
|
if isinstance(t, str) and t.strip():
|
||||||
|
norm_parts.append({"text": t})
|
||||||
|
elif "inline_data" in p or "inlineData" in p:
|
||||||
|
norm_parts.append(p) # изображения пропускаем как есть
|
||||||
|
if norm_parts:
|
||||||
|
out.append({"role": it.get("role", "user"), "parts": norm_parts})
|
||||||
|
return out
|
||||||
|
|
||||||
|
def extract_system_text_from_obj(self, x: Any, render_ctx: Dict[str, Any]) -> Optional[str]:
    """
    Extract a system-prompt text from an arbitrary segment object (Gemini flavour).

    Compatible with [_extract_sys_text_from_obj()](agentui/pipeline/executor.py:2676) for Gemini.

    Recognised shapes:
    - dict with Gemini ``systemInstruction`` (dict with parts, bare list of parts, or plain string);
    - dict with OpenAI-style ``messages`` containing role=system entries;
    - list of OpenAI-style messages (every item has a ``role`` key);
    - list of Gemini ``contents`` (every item has ``parts``) — in that case the
      systemInstruction is looked up in the incoming request snapshot instead.

    Returns the joined, stripped system text, or None when nothing was found.
    All errors are swallowed and reported as None (best-effort helper).
    """
    try:
        # Dict
        if isinstance(x, dict):
            if "systemInstruction" in x:
                si = x.get("systemInstruction")
                def _parts_to_text(siobj: Any) -> str:
                    # Join the non-blank text parts of a systemInstruction dict.
                    try:
                        parts = siobj.get("parts") or []
                        texts = [
                            str(p.get("text") or "")
                            for p in parts
                            if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip()
                        ]
                        return "\n".join([t for t in texts if t]).strip()
                    except Exception:
                        return ""
                if isinstance(si, dict):
                    t = _parts_to_text(si)
                    if t:
                        return t
                if isinstance(si, list):
                    # systemInstruction given directly as a list of parts.
                    texts = []
                    for p in si:
                        if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip():
                            texts.append(p.get("text").strip())
                    t = "\n".join(texts).strip()
                    if t:
                        return t
                if isinstance(si, str) and si.strip():
                    return si.strip()
            # OpenAI system messages inside "messages"
            if isinstance(x.get("messages"), list):
                sys_msgs = []
                for m in (x.get("messages") or []):
                    try:
                        if (str(m.get("role") or "").lower().strip() == "system"):
                            cont = m.get("content")
                            if isinstance(cont, str) and cont.strip():
                                sys_msgs.append(cont.strip())
                            elif isinstance(cont, list):
                                # Content as typed parts; only non-blank text parts matter.
                                for p in cont:
                                    if (
                                        isinstance(p, dict)
                                        and p.get("type") == "text"
                                        and isinstance(p.get("text"), str)
                                        and p.get("text").strip()
                                    ):
                                        sys_msgs.append(p.get("text").strip())
                    except Exception:
                        continue
                if sys_msgs:
                    return "\n\n".join(sys_msgs).strip()
        # List
        if isinstance(x, list):
            if all(isinstance(m, dict) and "role" in m for m in x):
                # OpenAI-style message list: collect every system message.
                sys_msgs = []
                for m in x:
                    try:
                        if (str(m.get("role") or "").lower().strip() == "system"):
                            cont = m.get("content")
                            if isinstance(cont, str) and cont.strip():
                                sys_msgs.append(cont.strip())
                            elif isinstance(cont, list):
                                for p in cont:
                                    if (
                                        isinstance(p, dict)
                                        and p.get("type") == "text"
                                        and isinstance(p.get("text"), str)
                                        and p.get("text").strip()
                                    ):
                                        sys_msgs.append(p.get("text").strip())
                    except Exception:
                        continue
                if sys_msgs:
                    return "\n\n".join(sys_msgs).strip()
            # Gemini contents list -> try to take it from the incoming snapshot
            if all(isinstance(c, dict) and "parts" in c for c in x):
                try:
                    inc = (render_ctx.get("incoming") or {}).get("json") or {}
                    si = inc.get("systemInstruction")
                    if si is not None:
                        return self.extract_system_text_from_obj({"systemInstruction": si}, render_ctx)
                except Exception:
                    pass
        return None
    except Exception:
        return None
|
||||||
|
|
||||||
|
def combine_segments(
    self,
    blocks_struct: Dict[str, Any],
    pre_segments_raw: List[Dict[str, Any]],
    raw_segs: List[str],
    render_ctx: Dict[str, Any],
    pre_var_paths: set[str],
    render_template_simple_fn,
    var_macro_fullmatch_re,
    detect_vendor_fn,
) -> Dict[str, Any]:
    """
    Assemble the final Gemini request blocks from preprocessing and prompt_combine segments.

    Replays the provider in {'gemini','gemini_image'} branch of prompt_combine
    ([ProviderCallNode.run()](agentui/pipeline/executor.py:2874)).

    Args:
        blocks_struct: baseline structure with "contents"/"systemInstruction"/"system_text".
        pre_segments_raw: pre-parsed segments, each shaped {"obj": ..., "pos": ...}.
        raw_segs: raw prompt_combine segment strings (may carry an @pos= suffix).
        render_ctx: template render context (incoming request snapshot, OUT map, ...).
        pre_var_paths: [[VAR:...]] paths already inserted during preprocessing —
            plain duplicates of these in raw_segs are skipped.
        render_template_simple_fn: callable resolving template macros in a segment.
        var_macro_fullmatch_re: compiled regex fullmatching a bare [[VAR:path]] segment.
        detect_vendor_fn: callable guessing a segment's vendor format (debug logging only).

    Returns:
        Dict with the merged "contents", "systemInstruction" and the baseline "system_text".
    """
    built: List[Dict[str, Any]] = []
    sys_texts: List[str] = []

    # 1) Pre-segments (from prompt_preprocess)
    for _pre in (pre_segments_raw or []):
        try:
            _obj = _pre.get("obj")
            items = self.normalize_segment(_obj)
            items = self.filter_items(items)
            built = insert_items(built, items, _pre.get("pos"))
            try:
                # Harvest any system text embedded in the segment object.
                sx = self.extract_system_text_from_obj(_obj, render_ctx)
                if isinstance(sx, str) and sx.strip():
                    sys_texts.append(sx.strip())
            except Exception:
                pass
        except Exception:
            # Best effort: a malformed pre-segment must not break the build.
            pass

    # 2) Main segments (prompt_combine)
    for raw_seg in (raw_segs or []):
        body_seg, pos_spec = split_pos_spec(raw_seg)
        if body_seg == "[[PROMPT]]":
            # The [[PROMPT]] macro expands to the filtered baseline contents.
            items = self.filter_items(list(blocks_struct.get("contents", []) or []))
            built = insert_items(built, items, pos_spec)
            continue
        m_pre = var_macro_fullmatch_re.fullmatch(body_seg)
        if m_pre:
            _p = (m_pre.group(1) or "").strip()
            try:
                # Skip plain [[VAR:path]] duplicates already handled by preprocessing.
                if _p in pre_var_paths:
                    continue
            except Exception:
                pass
        resolved = render_template_simple_fn(body_seg, render_ctx, render_ctx.get("OUT") or {})
        obj = _try_json(resolved)
        # debug provider guess
        try:
            pg = detect_vendor_fn(obj if isinstance(obj, dict) else {})
            print(f"DEBUG: prompt_combine seg provider_guess={pg} -> target=gemini pos={pos_spec}")
        except Exception:
            pass
        items = self.normalize_segment(obj if obj is not None else resolved)
        items = self.filter_items(items)
        built = insert_items(built, items, pos_spec)
        try:
            sx = self.extract_system_text_from_obj(obj, render_ctx) if obj is not None else None
            if isinstance(sx, str) and sx.strip():
                sys_texts.append(sx.strip())
        except Exception:
            pass

    # Nothing collected — fall back to the baseline contents.
    if not built:
        built = self.filter_items(list(blocks_struct.get("contents", []) or []))

    # Merge systemInstruction: PROMPT blocks + gathered sys_texts
    existing_si = blocks_struct.get("systemInstruction")
    parts = []
    if isinstance(existing_si, dict) and isinstance(existing_si.get("parts"), list):
        parts = list(existing_si.get("parts") or [])
    for s in sys_texts:
        parts.append({"text": s})
    new_si = {"parts": parts} if parts else existing_si
    return {"contents": built, "systemInstruction": new_si, "system_text": blocks_struct.get("system_text")}
|
||||||
|
|
||||||
|
def prompt_fragment(self, pm_struct: Dict[str, Any], node_config: Dict[str, Any]) -> str:
    """
    Render the [[PROMPT]] JSON fragment for a Gemini payload.

    Compatible with the provider in {'gemini','gemini_image'} branch of the
    [[PROMPT]] builder ([ProviderCallNode.run()](agentui/pipeline/executor.py:3103)).
    Emits `"contents": ...` and/or `"systemInstruction": ...` for whichever keys
    are present (not None) in *pm_struct*, joined by ", ".
    """
    pieces: List[str] = []
    for key in ("contents", "systemInstruction"):
        value = pm_struct.get(key)
        if value is not None:
            pieces.append(f'"{key}": ' + json.dumps(value, ensure_ascii=False))
    return ", ".join(pieces)
|
||||||
|
|
||||||
|
|
||||||
|
class GeminiImageAdapter(GeminiAdapter):  # [GeminiImageAdapter.__init__()](agentui/providers/adapters/gemini.py:332)
    """Gemini image-generation adapter: identical to GeminiAdapter apart from its registry name."""

    name = "gemini_image"

    # All logic is the same as for Gemini (generateContent), including defaults.
|
||||||
398
agentui/providers/adapters/openai.py
Normal file
398
agentui/providers/adapters/openai.py
Normal file
@@ -0,0 +1,398 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from agentui.providers.adapters.base import ( # [ProviderAdapter](agentui/providers/adapters/base.py:10)
|
||||||
|
ProviderAdapter,
|
||||||
|
insert_items,
|
||||||
|
split_pos_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _try_json(s: str) -> Any: # [_try_json()](agentui/providers/adapters/openai.py:16)
|
||||||
|
"""
|
||||||
|
Парсит JSON из строки. Пермиссивный режим и двукратная распаковка строк, как в старой логике.
|
||||||
|
Возвращает dict/list/примитив или None при неудаче.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
obj = json.loads(s)
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
obj = json.loads(s, strict=False) # type: ignore[call-arg]
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
# Если это строка, которая сама похожа на JSON — пробуем распаковать до 2 раз
|
||||||
|
for _ in range(2):
|
||||||
|
if isinstance(obj, str):
|
||||||
|
st = obj.strip()
|
||||||
|
if (st.startswith("{") and st.endswith("}")) or (st.startswith("[") and st.endswith("]")):
|
||||||
|
try:
|
||||||
|
obj = json.loads(st)
|
||||||
|
continue
|
||||||
|
except Exception:
|
||||||
|
break
|
||||||
|
break
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
class OpenAIAdapter(ProviderAdapter):  # [OpenAIAdapter.__init__()](agentui/providers/adapters/openai.py:39)
    """Provider adapter for the OpenAI Chat Completions API."""

    name = "openai"

    # --- HTTP defaults ---
    def default_base_url(self) -> str:
        # Base host used when the node config does not override base_url.
        return "https://api.openai.com"

    def default_endpoint(self, model: str) -> str:
        # The chat-completions path is model-independent for OpenAI.
        return "/v1/chat/completions"

    # --- PROMPT: building provider-specific structures ---
||||||
|
def blocks_struct_for_template(
    self,
    unified_messages: List[Dict[str, Any]],
    context: Dict[str, Any],
    node_config: Dict[str, Any],
) -> Dict[str, Any]:
    """
    Build the OpenAI-flavoured template structure from unified messages.

    Compatible with the provider=='openai' branch of
    [ProviderCallNode._blocks_struct_for_template()](agentui/pipeline/executor.py:1958).

    Returns {"messages": [...], "system_text": str}, where system_text is the
    concatenation of all system blocks (text only — images are skipped).
    `context` and `node_config` are accepted for interface parity; this branch
    does not read them.
    """
    def _map(m: Dict[str, Any]) -> Dict[str, Any]:
        # Convert one unified message into the OpenAI wire shape.
        c = m.get("content")
        if isinstance(c, list):
            parts = []
            for p in c:
                if isinstance(p, dict) and p.get("type") == "text":
                    parts.append({"type": "text", "text": str(p.get("text") or "")})
                elif isinstance(p, dict) and p.get("type") in {"image_url", "image"}:
                    # NOTE(review): assumes unified image parts carry a top-level
                    # "url" key (not a nested image_url dict) — confirm upstream.
                    url = str(p.get("url") or "")
                    parts.append({"type": "image_url", "image_url": {"url": url}})
            return {"role": m.get("role", "user"), "content": parts}
        return {"role": m.get("role", "user"), "content": str(c or "")}

    # system_text — join of all system blocks (text only, no images)
    sys_text = "\n\n".join(
        [
            str(m.get("content") or "")
            if not isinstance(m.get("content"), list)
            else "\n".join(
                [str(p.get("text") or "") for p in m.get("content") if isinstance(p, dict) and p.get("type") == "text"]
            )
            for m in (unified_messages or [])
            if m.get("role") == "system"
        ]
    ).strip()

    return {
        "messages": [_map(m) for m in (unified_messages or [])],
        "system_text": sys_text,
    }
|
||||||
|
|
||||||
|
def normalize_segment(self, x: Any) -> List[Dict[str, Any]]:
    """
    Normalize an arbitrary segment into a list of OpenAI chat messages.

    Compatible with [_as_openai_messages()](agentui/pipeline/executor.py:2451).
    - dict with "messages" (OpenAI): returned as-is (shallow copy);
    - dict/list in Gemini `contents` style: text parts of each entry are joined
      with newlines into one text message ("model" maps to "assistant", any
      other non-user/assistant role maps to "user"); images are NOT carried over;
    - list of OpenAI-like messages (each has "content"): parts are normalized
      (text and image_url/image parts only, other part types dropped);
    - strings: parsed as embedded JSON when possible (recursing on the result),
      otherwise wrapped as a single user message;
    - anything else: JSON-dumped into a single user message.
    On any unexpected error the raw segment is stringified into one user message.
    """
    msgs: List[Dict[str, Any]] = []
    try:
        # Dict inputs
        if isinstance(x, dict):
            if isinstance(x.get("messages"), list):
                return list(x.get("messages") or [])
            if isinstance(x.get("contents"), list):
                # Gemini -> OpenAI (text-only join)
                for c in (x.get("contents") or []):
                    if not isinstance(c, dict):
                        continue
                    role_raw = str(c.get("role") or "user")
                    role = "assistant" if role_raw == "model" else ("user" if role_raw not in {"user", "assistant"} else role_raw)
                    parts = c.get("parts") or []
                    text = "\n".join(
                        [str(p.get("text")) for p in parts if isinstance(p, dict) and isinstance(p.get("text"), str)]
                    ).strip()
                    msgs.append({"role": role, "content": text})
                return msgs

        # List inputs
        if isinstance(x, list):
            # Gemini contents list -> OpenAI messages
            if all(isinstance(c, dict) and "parts" in c for c in x):
                for c in x:
                    role_raw = str(c.get("role") or "user")
                    role = "assistant" if role_raw == "model" else ("user" if role_raw not in {"user", "assistant"} else role_raw)
                    parts = c.get("parts") or []
                    text = "\n".join(
                        [str(p.get("text")) for p in parts if isinstance(p, dict) and isinstance(p.get("text"), str)]
                    ).strip()
                    msgs.append({"role": role, "content": text})
                return msgs
            # OpenAI messages list already — normalize parts if needed
            if all(isinstance(m, dict) and "content" in m for m in x):
                out: List[Dict[str, Any]] = []
                for m in x:
                    role = m.get("role", "user")
                    cont = m.get("content")
                    if isinstance(cont, str):
                        out.append({"role": role, "content": cont})
                    elif isinstance(cont, list):
                        parts2: List[Dict[str, Any]] = []
                        for p in cont:
                            if not isinstance(p, dict):
                                continue
                            if p.get("type") == "text":
                                parts2.append({"type": "text", "text": str(p.get("text") or "")})
                            elif p.get("type") in {"image_url", "image"}:
                                # Accept both {"image_url": {"url": ...}} and a flat "url" key.
                                url = ""
                                if isinstance(p.get("image_url"), dict):
                                    url = str((p.get("image_url") or {}).get("url") or "")
                                elif "url" in p:
                                    url = str(p.get("url") or "")
                                if url:
                                    parts2.append({"type": "image_url", "image_url": {"url": url}})
                        out.append({"role": role, "content": parts2 if parts2 else ""})
                return out
            # Fallback: dump JSON as a single user message
            return [{"role": "user", "content": json.dumps(x, ensure_ascii=False)}]

        # Primitive inputs or embedded JSON string
        if isinstance(x, str):
            try_obj = _try_json(x)
            if try_obj is not None:
                return self.normalize_segment(try_obj)
            return [{"role": "user", "content": x}]
        # NOTE(review): a dict without "messages"/"contents" also lands here and is
        # JSON-dumped as one user message — presumably intentional; confirm with callers.
        return [{"role": "user", "content": json.dumps(x, ensure_ascii=False)}]
    except Exception:
        return [{"role": "user", "content": str(x)}]
|
||||||
|
|
||||||
|
def filter_items(self, arr: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Drop empty messages from an OpenAI-style message list.

    Mirrors [_filter_openai()](agentui/pipeline/executor.py:2801): string
    contents survive only when non-blank; list contents keep only their
    non-blank text parts (all other part types are dropped). Messages left
    with no usable content are removed.
    """
    kept: List[Dict[str, Any]] = []
    for msg in (arr or []):
        if not isinstance(msg, dict):
            continue
        role = msg.get("role", "user")
        content = msg.get("content")
        if isinstance(content, str):
            if content.strip():
                kept.append({"role": role, "content": content})
        elif isinstance(content, list):
            texts = [
                {"type": "text", "text": str(p.get("text") or "")}
                for p in content
                if isinstance(p, dict) and p.get("type") == "text" and str(p.get("text") or "").strip()
            ]
            if texts:
                kept.append({"role": role, "content": texts})
    return kept
|
||||||
|
|
||||||
|
def extract_system_text_from_obj(self, x: Any, render_ctx: Dict[str, Any]) -> Optional[str]:
    """
    Extract a system-prompt text from an arbitrary segment object.

    Compatible with [_extract_sys_text_from_obj()](agentui/pipeline/executor.py:2676).
    Understands:
    - Gemini: systemInstruction.parts[].text (dict, bare list of parts, or plain string);
    - Claude: top-level "system" (string or list of text blocks) — only when the
      object does NOT also carry an OpenAI "messages" list;
    - OpenAI: messages[*] with role=system (string content or parts[].text);
    - list shapes: an OpenAI messages list, or a Gemini contents list (in the
      latter case the systemInstruction is read from incoming.json in render_ctx).

    Returns the joined, stripped system text, or None when nothing was found.
    All errors are swallowed and reported as None (best-effort helper).
    """
    try:
        # Dict objects
        if isinstance(x, dict):
            # Gemini systemInstruction
            if "systemInstruction" in x:
                si = x.get("systemInstruction")

                def _parts_to_text(siobj: Any) -> str:
                    # Join the non-blank text parts of a systemInstruction dict.
                    try:
                        parts = siobj.get("parts") or []
                        texts = [
                            str(p.get("text") or "")
                            for p in parts
                            if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip()
                        ]
                        return "\n".join([t for t in texts if t]).strip()
                    except Exception:
                        return ""

                if isinstance(si, dict):
                    t = _parts_to_text(si)
                    if t:
                        return t
                if isinstance(si, list):
                    # systemInstruction given directly as a list of parts.
                    texts = []
                    for p in si:
                        if isinstance(p, dict) and isinstance(p.get("text"), str) and p.get("text").strip():
                            texts.append(p.get("text").strip())
                    t = "\n".join(texts).strip()
                    if t:
                        return t
                if isinstance(si, str) and si.strip():
                    return si.strip()
            # Claude system (string or blocks)
            if "system" in x and not ("messages" in x and isinstance(x.get("messages"), list)):
                sysv = x.get("system")
                if isinstance(sysv, str) and sysv.strip():
                    return sysv.strip()
                if isinstance(sysv, list):
                    texts = [
                        str(b.get("text") or "")
                        for b in sysv
                        if isinstance(b, dict) and (b.get("type") == "text") and isinstance(b.get("text"), str) and b.get("text").strip()
                    ]
                    t = "\n".join([t for t in texts if t]).strip()
                    if t:
                        return t
            # OpenAI messages with role=system
            if isinstance(x.get("messages"), list):
                sys_msgs = []
                for m in (x.get("messages") or []):
                    try:
                        if (str(m.get("role") or "").lower().strip() == "system"):
                            cont = m.get("content")
                            if isinstance(cont, str) and cont.strip():
                                sys_msgs.append(cont.strip())
                            elif isinstance(cont, list):
                                # Content as typed parts; only non-blank text parts matter.
                                for p in cont:
                                    if (
                                        isinstance(p, dict)
                                        and p.get("type") == "text"
                                        and isinstance(p.get("text"), str)
                                        and p.get("text").strip()
                                    ):
                                        sys_msgs.append(p.get("text").strip())
                    except Exception:
                        continue
                if sys_msgs:
                    return "\n\n".join(sys_msgs).strip()

        # List objects
        if isinstance(x, list):
            # OpenAI messages list with role=system
            if all(isinstance(m, dict) and "role" in m for m in x):
                sys_msgs = []
                for m in x:
                    try:
                        if (str(m.get("role") or "").lower().strip() == "system"):
                            cont = m.get("content")
                            if isinstance(cont, str) and cont.strip():
                                sys_msgs.append(cont.strip())
                            elif isinstance(cont, list):
                                for p in cont:
                                    if (
                                        isinstance(p, dict)
                                        and p.get("type") == "text"
                                        and isinstance(p.get("text"), str)
                                        and p.get("text").strip()
                                    ):
                                        sys_msgs.append(p.get("text").strip())
                    except Exception:
                        continue
                if sys_msgs:
                    return "\n\n".join(sys_msgs).strip()
            # Gemini 'contents' list: try to read systemInstruction from incoming JSON snapshot
            if all(isinstance(c, dict) and "parts" in c for c in x):
                try:
                    inc = (render_ctx.get("incoming") or {}).get("json") or {}
                    si = inc.get("systemInstruction")
                    if si is not None:
                        # Recurse into ourselves with the dict shape.
                        return self.extract_system_text_from_obj({"systemInstruction": si}, render_ctx)
                except Exception:
                    pass
        return None
    except Exception:
        return None
|
||||||
|
|
||||||
|
def combine_segments(
    self,
    blocks_struct: Dict[str, Any],
    pre_segments_raw: List[Dict[str, Any]],
    raw_segs: List[str],
    render_ctx: Dict[str, Any],
    pre_var_paths: set[str],
    render_template_simple_fn,
    var_macro_fullmatch_re,
    detect_vendor_fn,
) -> Dict[str, Any]:
    """
    Assemble the final OpenAI message list from preprocessing and prompt_combine segments.

    Replays the provider=='openai' branch of prompt_combine in
    [ProviderCallNode.run()](agentui/pipeline/executor.py:2936).

    Args:
        blocks_struct: baseline structure with "messages" and "system_text".
        pre_segments_raw: pre-parsed segments, each shaped {"obj": ..., "pos": ...}.
        raw_segs: raw prompt_combine segment strings (may carry an @pos= suffix).
        render_ctx: template render context (incoming request snapshot, OUT map, ...).
        pre_var_paths: [[VAR:...]] paths already inserted during preprocessing —
            plain duplicates of these in raw_segs are skipped.
        render_template_simple_fn: callable resolving template macros in a segment.
        var_macro_fullmatch_re: compiled regex fullmatching a bare [[VAR:path]] segment.
        detect_vendor_fn: callable guessing a segment's vendor format (debug logging only).

    Returns:
        Dict with the merged "messages" (system texts prepended as system
        messages) and a combined "system_text" kept for UI/debug.
    """
    built: List[Dict[str, Any]] = []
    sys_texts: List[str] = []

    # 1) Pre-segments (prompt_preprocess)
    for _pre in (pre_segments_raw or []):
        try:
            _obj = _pre.get("obj")
            items = self.normalize_segment(_obj)
            items = self.filter_items(items)
            built = insert_items(built, items, _pre.get("pos"))
            try:
                # Harvest any system text embedded in the segment object.
                sx = self.extract_system_text_from_obj(_obj, render_ctx)
                if isinstance(sx, str) and sx.strip():
                    sys_texts.append(sx.strip())
            except Exception:
                pass
        except Exception:
            # Best effort: a malformed pre-segment must not break the build.
            pass

    # 2) Main segments (prompt_combine)
    for raw_seg in (raw_segs or []):
        body_seg, pos_spec = split_pos_spec(raw_seg)
        if body_seg == "[[PROMPT]]":
            # The [[PROMPT]] macro expands to the filtered baseline messages.
            items = self.filter_items(list(blocks_struct.get("messages", []) or []))
            built = insert_items(built, items, pos_spec)
            continue
        # Hide duplicate plain [[VAR:path]] segments already inserted via pre_var_overrides
        m_pre = var_macro_fullmatch_re.fullmatch(body_seg)
        if m_pre:
            _p = (m_pre.group(1) or "").strip()
            try:
                if _p in pre_var_paths:
                    # Already inserted through prompt_preprocess with filtering — skip
                    continue
            except Exception:
                pass

        resolved = render_template_simple_fn(body_seg, render_ctx, render_ctx.get("OUT") or {})
        obj = _try_json(resolved)
        # debug provider guess
        try:
            pg = detect_vendor_fn(obj if isinstance(obj, dict) else {})
            print(f"DEBUG: prompt_combine seg provider_guess={pg} -> target=openai pos={pos_spec}")
        except Exception:
            pass
        items = self.normalize_segment(obj if obj is not None else resolved)
        items = self.filter_items(items)
        built = insert_items(built, items, pos_spec)
        try:
            sx = self.extract_system_text_from_obj(obj, render_ctx) if obj is not None else None
            if isinstance(sx, str) and sx.strip():
                sys_texts.append(sx.strip())
        except Exception:
            pass

    # If nothing was assembled — fall back to the baseline blocks
    if not built:
        built = self.filter_items(list(blocks_struct.get("messages", []) or []))

    # Prepend system messages gathered from the segments
    if sys_texts:
        sys_msgs = [{"role": "system", "content": s} for s in sys_texts if s]
        if sys_msgs:
            built = sys_msgs + built

    # keep system_text for UI/debug
    st0 = blocks_struct.get("system_text") or ""
    st = "\n\n".join([t for t in [st0] + sys_texts if isinstance(t, str) and t.strip()])
    return {"messages": built, "system_text": st}
|
||||||
|
|
||||||
|
def prompt_fragment(self, pm_struct: Dict[str, Any], node_config: Dict[str, Any]) -> str:
    """
    Render the [[PROMPT]] JSON fragment for an OpenAI payload.

    Compatible with the provider=='openai' branch of the [[PROMPT]] builder in
    [ProviderCallNode.run()](agentui/pipeline/executor.py:3103). Always emits a
    `"messages": ...` fragment, defaulting to an empty list.
    """
    messages = pm_struct.get("messages", [])
    encoded = json.dumps(messages, ensure_ascii=False)
    return '"messages": ' + encoded
|
||||||
32
agentui/providers/adapters/registry.py
Normal file
32
agentui/providers/adapters/registry.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from agentui.providers.adapters.base import ProviderAdapter, default_base_url_for as _default_base_url_for
|
||||||
|
from agentui.providers.adapters.openai import OpenAIAdapter
|
||||||
|
try:
|
||||||
|
from agentui.providers.adapters.gemini import GeminiAdapter, GeminiImageAdapter
|
||||||
|
except Exception:
|
||||||
|
GeminiAdapter = None # type: ignore
|
||||||
|
GeminiImageAdapter = None # type: ignore
|
||||||
|
try:
|
||||||
|
from agentui.providers.adapters.claude import ClaudeAdapter
|
||||||
|
except Exception:
|
||||||
|
ClaudeAdapter = None # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
def get_adapter(provider: str) -> Optional[ProviderAdapter]:
    """
    Resolve a provider name to a fresh adapter instance.

    Matching is case-insensitive and whitespace-tolerant. Unknown names — or
    providers whose adapter module failed to import (their class is None) —
    yield None.
    """
    key = (provider or "").strip().lower()
    if key == "openai":
        return OpenAIAdapter()
    # Optional adapters: the class name is bound to None when its import failed above.
    optional = {
        "gemini": GeminiAdapter,
        "gemini_image": GeminiImageAdapter,
        "claude": ClaudeAdapter,
    }
    cls = optional.get(key)
    if cls:
        return cls()  # type: ignore[operator]
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def default_base_url_for(vendor: str) -> Optional[str]:
    """Return the default base URL for *vendor*.

    Thin re-export: delegates to the base module's implementation so callers
    can import this helper from the registry without touching `base` directly.
    """
    return _default_base_url_for(vendor)
|
||||||
1343
docs/VARIABLES.md
1343
docs/VARIABLES.md
File diff suppressed because it is too large
Load Diff
@@ -30,8 +30,8 @@
|
|||||||
{
|
{
|
||||||
"id": "n5",
|
"id": "n5",
|
||||||
"type": "SetVars",
|
"type": "SetVars",
|
||||||
"pos_x": 12,
|
"pos_x": 300,
|
||||||
"pos_y": 780,
|
"pos_y": 720,
|
||||||
"config": {
|
"config": {
|
||||||
"variables": [
|
"variables": [
|
||||||
{
|
{
|
||||||
@@ -80,9 +80,11 @@
|
|||||||
"passthrough_headers": true,
|
"passthrough_headers": true,
|
||||||
"extra_headers": "{\"connection\": \"close\"}",
|
"extra_headers": "{\"connection\": \"close\"}",
|
||||||
"_origId": "n3",
|
"_origId": "n3",
|
||||||
"while_expr": "([[OUT3]] contains \"Stream failed to\") || ([[OUT3]] contains \"gemini-2.5-pro\")",
|
"while_expr": "([[OUT3]] contains \"Stream failed to\") || ([[OUT3]] contains \"gemini-2.5-pro\") ) || [[WAS_ERROR]]",
|
||||||
"ignore_errors": false,
|
"ignore_errors": true,
|
||||||
"while_max_iters": 50
|
"while_max_iters": 50,
|
||||||
|
"override_path": "",
|
||||||
|
"base_url": ""
|
||||||
},
|
},
|
||||||
"in": {
|
"in": {
|
||||||
"depends": "n5.done"
|
"depends": "n5.done"
|
||||||
@@ -91,8 +93,8 @@
|
|||||||
{
|
{
|
||||||
"id": "n4",
|
"id": "n4",
|
||||||
"type": "ProviderCall",
|
"type": "ProviderCall",
|
||||||
"pos_x": 792,
|
"pos_x": 780,
|
||||||
"pos_y": 624,
|
"pos_y": 672,
|
||||||
"config": {
|
"config": {
|
||||||
"provider": "gemini",
|
"provider": "gemini",
|
||||||
"provider_configs": {
|
"provider_configs": {
|
||||||
@@ -132,10 +134,12 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"_origId": "n4",
|
"_origId": "n4",
|
||||||
"prompt_combine": "[[VAR:incoming.json.contents]] & [[PROMPT]]@pos=-1",
|
"prompt_preprocess": "[[VAR:incoming.json.contents]] delKeyContains \"Okie!\"",
|
||||||
"while_expr": "([[OUT3]] contains \"Stream failed to\") || ([[OUT3]] contains \"gemini-2.5-pro\")",
|
"prompt_combine": "[[VAR:incoming.json.contents]] & [[PROMPT]]@pos=append",
|
||||||
"ignore_errors": false,
|
"while_expr": "([[OUT4]] contains \"Stream failed to\") || ([[OUT4]] contains \"gemini-2.5-pro\") || ([[WAS_ERROR]] == true)",
|
||||||
"while_max_iters": 50
|
"ignore_errors": true,
|
||||||
|
"while_max_iters": 50,
|
||||||
|
"sleep_ms": 555555000
|
||||||
},
|
},
|
||||||
"in": {
|
"in": {
|
||||||
"depends": "n3.done"
|
"depends": "n3.done"
|
||||||
@@ -145,7 +149,7 @@
|
|||||||
"id": "n7",
|
"id": "n7",
|
||||||
"type": "ProviderCall",
|
"type": "ProviderCall",
|
||||||
"pos_x": 1080,
|
"pos_x": 1080,
|
||||||
"pos_y": 624,
|
"pos_y": 600,
|
||||||
"config": {
|
"config": {
|
||||||
"provider": "gemini",
|
"provider": "gemini",
|
||||||
"provider_configs": {
|
"provider_configs": {
|
||||||
@@ -187,8 +191,8 @@
|
|||||||
"_origId": "n7",
|
"_origId": "n7",
|
||||||
"prompt_combine": "[[VAR:incoming.json.contents]] & [[PROMPT]]@pos=-1",
|
"prompt_combine": "[[VAR:incoming.json.contents]] & [[PROMPT]]@pos=-1",
|
||||||
"claude_no_system": true,
|
"claude_no_system": true,
|
||||||
"while_expr": "([[OUT7]] contains \"Stream failed to\") || ([[OUT7]] contains \"gemini-2.5-pro\")",
|
"while_expr": "([[OUT7]] contains \"Stream failed to\") || ([[OUT7]] contains \"gemini-2.5-pro\") || [[WAS_ERROR]] == true",
|
||||||
"ignore_errors": false,
|
"ignore_errors": true,
|
||||||
"while_max_iters": 50
|
"while_max_iters": 50
|
||||||
},
|
},
|
||||||
"in": {
|
"in": {
|
||||||
|
|||||||
@@ -1463,4 +1463,142 @@ input[type="number"] {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/* --- Canvas preview sanitization (напоминание): хинты/лейблы/чекбоксы скрыты в превью --- */
|
/* --- Canvas preview sanitization (напоминание): хинты/лейблы/чекбоксы скрыты в превью --- */
|
||||||
/* Секции summary (headers/template) остаются видимыми */
|
/* Секции summary (headers/template) остаются видимыми */
|
||||||
|
/* --- Logs panel: base layout ------------------------------------------------- */
|
||||||
|
#logs-list { --log-border: #1f2b3b; }
|
||||||
|
#logs-list .logs-row {
|
||||||
|
padding: 8px 10px;
|
||||||
|
border-bottom: 1px solid var(--log-border);
|
||||||
|
background: #0f141a;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: background-color .15s ease, box-shadow .15s ease, opacity .2s ease;
|
||||||
|
}
|
||||||
|
#logs-list .logs-row:hover { background: #111821; }
|
||||||
|
#logs-list .logs-row.selected {
|
||||||
|
outline: 0;
|
||||||
|
box-shadow: inset 0 0 0 2px color-mix(in srgb, var(--accent-2) 40%, transparent);
|
||||||
|
}
|
||||||
|
#logs-list .logs-row.dim { opacity: .70; }
|
||||||
|
#logs-list .logs-row .title { font-size: 13px; }
|
||||||
|
#logs-list .logs-row .sub {
|
||||||
|
font-size: 11px;
|
||||||
|
opacity: .85;
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* --- HTTP status styling ----------------------------------------------------- */
|
||||||
|
/* Shimmer animation for pending HTTP rows */
|
||||||
|
@keyframes logs-shimmer {
|
||||||
|
0% { background-position: -200% 0; }
|
||||||
|
100% { background-position: 200% 0; }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Pending request (no response yet): blue accent shimmer using --accent-2 */
|
||||||
|
#logs-list .logs-row.kind-http.http-pending {
|
||||||
|
border-left: 3px solid var(--accent-2);
|
||||||
|
background:
|
||||||
|
linear-gradient(90deg,
|
||||||
|
color-mix(in srgb, var(--accent-2) 10%, transparent) 0%,
|
||||||
|
color-mix(in srgb, var(--accent-2) 20%, transparent) 50%,
|
||||||
|
color-mix(in srgb, var(--accent-2) 10%, transparent) 100%);
|
||||||
|
background-size: 200% 100%;
|
||||||
|
animation: logs-shimmer 1.4s linear infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Success/Failure borders for completed HTTP rows */
|
||||||
|
#logs-list .logs-row.kind-http.http-ok { border-left: 3px solid #10b981; } /* emerald-500 */
|
||||||
|
#logs-list .logs-row.kind-http.http-err { border-left: 3px solid #ef4444; } /* red-500 */
|
||||||
|
|
||||||
|
/* --- Node sleep pulse -------------------------------------------------------- */
|
||||||
|
@keyframes logs-sleep-pulse {
|
||||||
|
0% { box-shadow: inset 0 0 0 0 rgba(245,158,11, 0.00); }
|
||||||
|
50% { box-shadow: inset 0 0 0 2px rgba(245,158,11, 0.35); }
|
||||||
|
100% { box-shadow: inset 0 0 0 0 rgba(245,158,11, 0.00); }
|
||||||
|
}
|
||||||
|
#logs-list .logs-row.kind-node.ev-sleep {
|
||||||
|
border-left: 3px dashed #f59e0b; /* amber-500 */
|
||||||
|
animation: logs-sleep-pulse 1.8s ease-in-out infinite;
|
||||||
|
}
|
||||||
|
/* --- Node "water" running effect (blue→violet gradient, slow, saturated) ----- */
|
||||||
|
@keyframes node-water {
|
||||||
|
0% { background-position: 0% 0%; }
|
||||||
|
50% { background-position: 100% 100%; }
|
||||||
|
100% { background-position: 0% 0%; }
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Применяется, когда нода исполняется (класс .node-running вешается из SSE) */
|
||||||
|
#drawflow .drawflow-node.node-running .title-box,
|
||||||
|
#drawflow .drawflow-node.node-running .box {
|
||||||
|
/* База: наш стандартный фон ноды, сверху — насыщенный градиент воды */
|
||||||
|
background:
|
||||||
|
linear-gradient(125deg,
|
||||||
|
color-mix(in srgb, #60a5fa 78%, transparent) 0%,
|
||||||
|
color-mix(in srgb, #7c3aed 56%, transparent) 50%,
|
||||||
|
color-mix(in srgb, #60a5fa 78%, transparent) 100%),
|
||||||
|
var(--node);
|
||||||
|
background-size: 360% 360%;
|
||||||
|
animation: node-water 5.0s ease-in-out infinite; /* медленнее и гуще, «водная гладь» */
|
||||||
|
border-color: color-mix(in srgb, var(--accent-2) 55%, #7c3aed 45%);
|
||||||
|
/* Лёгкое свечение, чтобы подчеркнуть активность, без ядовитости */
|
||||||
|
box-shadow:
|
||||||
|
0 0 0 2px color-mix(in srgb, var(--accent-2) 30%, transparent),
|
||||||
|
0 0 16px rgba(96,165,250,.18),
|
||||||
|
inset 0 0 22px rgba(167,139,250,.12);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Безопасность: при успехе/ошибке временные классы могут перебить рамку */
|
||||||
|
#drawflow .drawflow-node.node-ok .title-box,
|
||||||
|
#drawflow .drawflow-node.node-ok .box {
|
||||||
|
border-color: #10b981 !important; /* emerald */
|
||||||
|
box-shadow:
|
||||||
|
0 0 0 2px color-mix(in srgb, #10b981 35%, transparent),
|
||||||
|
0 0 12px rgba(16,185,129,.18);
|
||||||
|
background-image: none; /* убрать «воду» после окончания */
|
||||||
|
}
|
||||||
|
|
||||||
|
#drawflow .drawflow-node.node-err .title-box,
|
||||||
|
#drawflow .drawflow-node.node-err .box {
|
||||||
|
border-color: #ef4444 !important; /* red */
|
||||||
|
box-shadow:
|
||||||
|
0 0 0 2px color-mix(in srgb, #ef4444 35%, transparent),
|
||||||
|
0 0 12px rgba(239,68,68,.18);
|
||||||
|
background-image: none; /* убрать «воду» после ошибки */
|
||||||
|
}
|
||||||
|
/* --- Water overlay: full-node coverage with fade-out on stop ------------------ */
|
||||||
|
/* База: прозрачный градиент-оверлей на ВСЕЙ .drawflow_content_node,
|
||||||
|
который плавно меняет прозрачность. Когда нода активна (.node-running) —
|
||||||
|
поднимаем непрозрачность и двигаем «волну». При снятии .node-running
|
||||||
|
оверлей сам «затухает» благодаря transition на opacity. */
|
||||||
|
#drawflow .drawflow-node .drawflow_content_node {
|
||||||
|
position: relative;
|
||||||
|
overflow: hidden;
|
||||||
|
z-index: 0; /* чтобы ::before можно было поднять поверх */
|
||||||
|
}
|
||||||
|
#drawflow .drawflow-node .drawflow_content_node::before {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
inset: 0;
|
||||||
|
z-index: 1; /* поверх содержимого ноды, но без кликов */
|
||||||
|
pointer-events: none;
|
||||||
|
border-radius: 10px;
|
||||||
|
background: linear-gradient(125deg,
|
||||||
|
color-mix(in srgb, #60a5fa 78%, transparent) 0%,
|
||||||
|
color-mix(in srgb, #7c3aed 56%, transparent) 50%,
|
||||||
|
color-mix(in srgb, #60a5fa 78%, transparent) 100%);
|
||||||
|
background-size: 360% 360%;
|
||||||
|
opacity: 0; /* по умолчанию невидим */
|
||||||
|
transition: opacity 1.4s ease-in-out; /* «затухание» при остановке */
|
||||||
|
}
|
||||||
|
#drawflow .drawflow-node.node-running .drawflow_content_node::before {
|
||||||
|
opacity: .42; /* насыщенно, но читаемо; плавно исчезает при снятии класса */
|
||||||
|
animation: node-water 5.0s ease-in-out infinite; /* медленная водная гладь */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Отключаем прежнюю «водную» анимацию на частях, оставляя оверлей на всю ноду */
|
||||||
|
#drawflow .drawflow-node.node-running .title-box,
|
||||||
|
#drawflow .drawflow-node.node-running .box {
|
||||||
|
background: var(--node) !important;
|
||||||
|
animation: none !important;
|
||||||
|
}
|
||||||
@@ -32,6 +32,34 @@
|
|||||||
header .actions { display: flex; gap: 8px; }
|
header .actions { display: flex; gap: 8px; }
|
||||||
button { background: #1f2937; border: 1px solid #334155; color: #e5e7eb; padding: 6px 10px; border-radius: 8px; cursor: pointer; }
|
button { background: #1f2937; border: 1px solid #334155; color: #e5e7eb; padding: 6px 10px; border-radius: 8px; cursor: pointer; }
|
||||||
button:hover { background: #273246; }
|
button:hover { background: #273246; }
|
||||||
|
|
||||||
|
/* Split STOP button styling */
|
||||||
|
.chip-btn.split {
|
||||||
|
position: relative;
|
||||||
|
display: inline-flex;
|
||||||
|
padding: 0;
|
||||||
|
overflow: hidden;
|
||||||
|
border-radius: 8px;
|
||||||
|
}
|
||||||
|
.chip-btn.split .seg {
|
||||||
|
padding: 6px 10px;
|
||||||
|
display: inline-block;
|
||||||
|
line-height: 1.2;
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
.chip-btn.split .seg-left {
|
||||||
|
border-right: 1px solid rgba(255,255,255,0.08);
|
||||||
|
}
|
||||||
|
.chip-btn.split.hover-left .seg-left {
|
||||||
|
background: color-mix(in srgb, #f59e0b 28%, transparent);
|
||||||
|
}
|
||||||
|
.chip-btn.split.hover-right .seg-right {
|
||||||
|
background: color-mix(in srgb, #ef4444 28%, transparent);
|
||||||
|
}
|
||||||
|
.chip-btn.split.is-busy {
|
||||||
|
opacity: .7;
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
#container { display: grid; grid-template-columns: 260px 1fr 360px; height: calc(100vh - 52px); }
|
#container { display: grid; grid-template-columns: 260px 1fr 360px; height: calc(100vh - 52px); }
|
||||||
#sidebar { border-right: 1px solid var(--border); padding: 12px; background: var(--panel); overflow: auto; }
|
#sidebar { border-right: 1px solid var(--border); padding: 12px; background: var(--panel); overflow: auto; }
|
||||||
#canvas { position: relative; }
|
#canvas { position: relative; }
|
||||||
@@ -93,7 +121,11 @@
|
|||||||
<button class="chip-btn" id="btn-scheme" title="Показать мини‑схему">СХЕМА</button>
|
<button class="chip-btn" id="btn-scheme" title="Показать мини‑схему">СХЕМА</button>
|
||||||
<button class="chip-btn" id="btn-tidy" title="Авто‑раскладка графа (повторный клик — отмена)">РАСКЛАДКА</button>
|
<button class="chip-btn" id="btn-tidy" title="Авто‑раскладка графа (повторный клик — отмена)">РАСКЛАДКА</button>
|
||||||
<button class="chip-btn" id="btn-logs" title="Журнал HTTP запросов/ответов">ЛОГИ</button>
|
<button class="chip-btn" id="btn-logs" title="Журнал HTTP запросов/ответов">ЛОГИ</button>
|
||||||
<button class="chip-btn" id="btn-cancel" title="Прервать текущее исполнение пайплайна">СТОП ⏹</button>
|
<!-- Split STOP button: left=graceful, right=abort -->
|
||||||
|
<button class="chip-btn split" id="btn-cancel" title="СТОП: левая половина — мягкая (ждать), правая — жёсткая (обрыв)">
|
||||||
|
<span class="seg seg-left" aria-label="мягкая">СТ ⏹</span>
|
||||||
|
<span class="seg seg-right" aria-label="жёсткая">ОП</span>
|
||||||
|
</button>
|
||||||
<a class="chip-btn" href="/" role="button">ДОМОЙ</a>
|
<a class="chip-btn" href="/" role="button">ДОМОЙ</a>
|
||||||
</div>
|
</div>
|
||||||
<!-- Danmaku overlay layer -->
|
<!-- Danmaku overlay layer -->
|
||||||
@@ -178,8 +210,14 @@
|
|||||||
<div id="logs-list" style="border:1px solid #2b3646;border-radius:8px;overflow:auto;background:#0f141a"></div>
|
<div id="logs-list" style="border:1px solid #2b3646;border-radius:8px;overflow:auto;background:#0f141a"></div>
|
||||||
<div id="logs-detail" style="display:flex;flex-direction:column;gap:8px">
|
<div id="logs-detail" style="display:flex;flex-direction:column;gap:8px">
|
||||||
<div>
|
<div>
|
||||||
<strong>Request</strong>
|
<div style="display:flex;align-items:center;gap:8px;justify-content:space-between">
|
||||||
<pre id="logs-req" style="min-height:120px;max-height:36vh;overflow:auto;white-space:pre-wrap;word-break:break-word;overflow-wrap:anywhere"></pre>
|
<strong>Request</strong>
|
||||||
|
<div class="logs-req-actions" style="display:flex;gap:8px">
|
||||||
|
<button id="logs-send" title="Отправить отредактированный запрос">Отправить</button>
|
||||||
|
<button id="logs-revert" title="Вернуть оригинальный запрос">Вернуть</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<pre id="logs-req" contenteditable="true" tabindex="0" style="min-height:120px;max-height:36vh;overflow:auto;white-space:pre-wrap;word-break:break-word;overflow-wrap:anywhere"></pre>
|
||||||
</div>
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<strong>Response</strong>
|
<strong>Response</strong>
|
||||||
@@ -293,7 +331,7 @@
|
|||||||
const NODE_IO = {
|
const NODE_IO = {
|
||||||
// depends: используется только для порядка выполнения (зависимости), данные не читаются
|
// depends: используется только для порядка выполнения (зависимости), данные не читаются
|
||||||
// Провода не переносят данные; OUT/vars берутся из контекста и снапшота.
|
// Провода не переносят данные; OUT/vars берутся из контекста и снапшота.
|
||||||
SetVars: { inputs: [], outputs: ['done'] },
|
SetVars: { inputs: ['depends'], outputs: ['done'] },
|
||||||
If: { inputs: ['depends'], outputs: ['true','false'] },
|
If: { inputs: ['depends'], outputs: ['true','false'] },
|
||||||
ProviderCall:{ inputs: ['depends'], outputs: ['done'] },
|
ProviderCall:{ inputs: ['depends'], outputs: ['done'] },
|
||||||
RawForward: { inputs: ['depends'], outputs: ['done'] },
|
RawForward: { inputs: ['depends'], outputs: ['done'] },
|
||||||
@@ -1293,6 +1331,7 @@
|
|||||||
<label>headers (JSON)</label><textarea id="f-headers" rows="4">${escText(cfg.headers||'{}')}</textarea>
|
<label>headers (JSON)</label><textarea id="f-headers" rows="4">${escText(cfg.headers||'{}')}</textarea>
|
||||||
<label>template (JSON)</label>
|
<label>template (JSON)</label>
|
||||||
<textarea id="f-template" rows="10">${escText(cfg.template||'{}')}</textarea>
|
<textarea id="f-template" rows="10">${escText(cfg.template||'{}')}</textarea>
|
||||||
|
<label id="row-claude-no-system" style="display:${((data.provider||'openai').toLowerCase()==='claude') ? 'inline-flex' : 'none'}; align-items:center; gap:6px"><input id="f-claude-no-system" type="checkbox" ${(data.claude_no_system===true)?'checked':''}> claude_no_system</label>
|
||||||
<div style="margin-top:6px">
|
<div style="margin-top:6px">
|
||||||
<details class="help">
|
<details class="help">
|
||||||
<summary title="Подсказка по шаблону">?</summary>
|
<summary title="Подсказка по шаблону">?</summary>
|
||||||
@@ -1312,6 +1351,15 @@
|
|||||||
<div class="hint">
|
<div class="hint">
|
||||||
Пример: <code>[[VAR:incoming.json.contents]] & [[PROMPT]]</code>. Пусто — выключено. Итог автоматически приводится к структуре выбранного провайдера (messages/contents/system).
|
Пример: <code>[[VAR:incoming.json.contents]] & [[PROMPT]]</code>. Пусто — выключено. Итог автоматически приводится к структуре выбранного провайдера (messages/contents/system).
|
||||||
</div>
|
</div>
|
||||||
|
<label>prompt_preprocess (pre-merge DSL)</label>
|
||||||
|
<textarea id="f-prompt-preprocess" rows="3">${escText(data.prompt_preprocess || '')}</textarea>
|
||||||
|
<div class="hint">
|
||||||
|
Каждая строка: <code>SEGMENT [delKeyContains "строка"] [delpos=prepend|append|N|-1] [case=ci|cs] [pruneEmpty]</code>.
|
||||||
|
По умолчанию: case=ci, pruneEmpty=false, без delpos → append. Примеры:
|
||||||
|
<br/><code>[[VAR:incoming.json.contents]] delKeyContains "Текст" delpos=-1</code>
|
||||||
|
<br/><code>[[VAR:incoming.json.messages]] delKeyContains "debug" case=cs</code>
|
||||||
|
<br/>SEGMENT поддерживает макросы [[...]] и {{ ... }}. Выполняется ДО prompt_combine.
|
||||||
|
</div>
|
||||||
`;
|
`;
|
||||||
html += `
|
html += `
|
||||||
<div class="group-title" style="margin-top:16px">Prompt Blocks</div>
|
<div class="group-title" style="margin-top:16px">Prompt Blocks</div>
|
||||||
@@ -1465,6 +1513,7 @@
|
|||||||
if (inp.id === 'f-baseurl') cfg.base_url = inp.value;
|
if (inp.id === 'f-baseurl') cfg.base_url = inp.value;
|
||||||
if (inp.id === 'f-endpoint') cfg.endpoint = inp.value;
|
if (inp.id === 'f-endpoint') cfg.endpoint = inp.value;
|
||||||
if (inp.id === 'f-headers') cfg.headers = inp.value;
|
if (inp.id === 'f-headers') cfg.headers = inp.value;
|
||||||
|
if (inp.id === 'f-claude-no-system') d.claude_no_system = !!inp.checked;
|
||||||
if (inp.id === 'f-provider') d.provider = inp.value; // select changes provider
|
if (inp.id === 'f-provider') d.provider = inp.value; // select changes provider
|
||||||
if (inp.id === 'f-prompt-combine') {
|
if (inp.id === 'f-prompt-combine') {
|
||||||
const val = String(inp.value || '').trim();
|
const val = String(inp.value || '').trim();
|
||||||
@@ -1474,6 +1523,14 @@
|
|||||||
delete d.prompt_combine;
|
delete d.prompt_combine;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (inp.id === 'f-prompt-preprocess') {
|
||||||
|
const val = String(inp.value || '').trim();
|
||||||
|
if (val) {
|
||||||
|
d.prompt_preprocess = inp.value;
|
||||||
|
} else {
|
||||||
|
delete d.prompt_preprocess;
|
||||||
|
}
|
||||||
|
}
|
||||||
// Sleep controls (seconds + enable checkbox)
|
// Sleep controls (seconds + enable checkbox)
|
||||||
if (inp.id === 'f-sleep-en') {
|
if (inp.id === 'f-sleep-en') {
|
||||||
const secEl = document.getElementById('f-sleep-sec');
|
const secEl = document.getElementById('f-sleep-sec');
|
||||||
@@ -1556,6 +1613,7 @@
|
|||||||
if (inp.id === 'f-model') d.model = inp.value;
|
if (inp.id === 'f-model') d.model = inp.value;
|
||||||
if (inp.id === 'f-extra') d.extra_headers = inp.value;
|
if (inp.id === 'f-extra') d.extra_headers = inp.value;
|
||||||
if (inp.id === 'f-override') d.override_path = inp.value;
|
if (inp.id === 'f-override') d.override_path = inp.value;
|
||||||
|
if (inp.id === 'f-baseurl') d.base_url = inp.value;
|
||||||
if (inp.id === 'f-pass') d.passthrough_headers = inp.checked;
|
if (inp.id === 'f-pass') d.passthrough_headers = inp.checked;
|
||||||
// Sleep controls (seconds + enable checkbox)
|
// Sleep controls (seconds + enable checkbox)
|
||||||
if (inp.id === 'f-sleep-en') {
|
if (inp.id === 'f-sleep-en') {
|
||||||
@@ -1802,6 +1860,10 @@
|
|||||||
if (el) el.__data = d;
|
if (el) el.__data = d;
|
||||||
}
|
}
|
||||||
} catch (e) {}
|
} catch (e) {}
|
||||||
|
try {
|
||||||
|
const rowCns = document.getElementById('row-claude-no-system');
|
||||||
|
if (rowCns) rowCns.style.display = (d.provider === 'claude' ? 'inline-flex' : 'none');
|
||||||
|
} catch (_){}
|
||||||
try { console.debug('[ProviderCall] provider switched to', d.provider, cfg); } catch (e) {}
|
try { console.debug('[ProviderCall] provider switched to', d.provider, cfg); } catch (e) {}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -1867,6 +1929,71 @@
|
|||||||
const elN = document.querySelector(`#node-${id}`);
|
const elN = document.querySelector(`#node-${id}`);
|
||||||
if (elN) elN.__data = JSON.parse(JSON.stringify(ncheck.data || {}));
|
if (elN) elN.__data = JSON.parse(JSON.stringify(ncheck.data || {}));
|
||||||
}
|
}
|
||||||
|
// JSON5 validation for headers/extra_headers (normalize to strict JSON)
|
||||||
|
try {
|
||||||
|
function __attachJsonValidation(el, opts) {
|
||||||
|
if (!el) return;
|
||||||
|
const wantObject = !!(opts && opts.wantObject);
|
||||||
|
const normalize = !!(opts && opts.normalize !== false);
|
||||||
|
const good = () => { try { el.style.borderColor=''; el.title=''; } catch(_){} };
|
||||||
|
const bad = (msg) => { try { el.style.borderColor = '#e11d48'; el.title = msg || 'Invalid JSON'; } catch(_){} };
|
||||||
|
const parseAndMark = () => {
|
||||||
|
try {
|
||||||
|
const txt = String(el.value || '').trim();
|
||||||
|
if (!txt) { good(); return; }
|
||||||
|
let obj = JSON5.parse(txt);
|
||||||
|
if (wantObject && (typeof obj !== 'object' || obj === null || Array.isArray(obj))) { bad('Ожидается JSON-объект { ... }'); return; }
|
||||||
|
if (normalize) {
|
||||||
|
try { el.value = JSON.stringify(obj, null, 2); } catch(_){}
|
||||||
|
}
|
||||||
|
good();
|
||||||
|
} catch (e) {
|
||||||
|
bad('Ошибка JSON: ' + (e && e.message ? e.message : 'parse error'));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
el.addEventListener('blur', parseAndMark);
|
||||||
|
el.addEventListener('input', () => { try { el.style.borderColor=''; el.title=''; } catch(_){} });
|
||||||
|
}
|
||||||
|
if (type === 'ProviderCall') {
|
||||||
|
__attachJsonValidation(document.getElementById('f-headers'), { wantObject: true, normalize: true });
|
||||||
|
} else if (type === 'RawForward') {
|
||||||
|
__attachJsonValidation(document.getElementById('f-extra'), { wantObject: true, normalize: true });
|
||||||
|
}
|
||||||
|
} catch (_) {}
|
||||||
|
// JSON5 validation for template (macro-aware)
|
||||||
|
try {
|
||||||
|
(function(){
|
||||||
|
const tplEl = document.getElementById('f-template');
|
||||||
|
if (tplEl) {
|
||||||
|
const good = () => { try { tplEl.style.borderColor=''; tplEl.title=''; } catch(_){} };
|
||||||
|
const bad = (msg) => { try { tplEl.style.borderColor='#e11d48'; tplEl.title=(msg||'Invalid JSON template'); } catch(_){} };
|
||||||
|
const parseAndMark = () => {
|
||||||
|
try {
|
||||||
|
let txt = String(tplEl.value || '').trim();
|
||||||
|
if (!txt) { good(); return; }
|
||||||
|
let s = txt;
|
||||||
|
// Neutralize templating macros so JSON5.parse won't choke:
|
||||||
|
// 1) Replace any {{ ... }} with a scalar value
|
||||||
|
s = s.replace(/{{[\s\S]*?}}/g, '0');
|
||||||
|
// 2) Replace [[PROMPT]] with a dummy property to keep object shape valid
|
||||||
|
s = s.replace(/\[\[\s*PROMPT\s*\]\]/g, '"__PROMPT__":true');
|
||||||
|
// Tolerant parse (JSON5 supports unquoted keys, trailing commas, etc.)
|
||||||
|
const obj = JSON5.parse(s);
|
||||||
|
if (typeof obj !== 'object' || obj === null || Array.isArray(obj)) {
|
||||||
|
bad('Шаблон должен быть JSON-объектом');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
good();
|
||||||
|
} catch (e) {
|
||||||
|
bad('Ошибка JSON шаблона: ' + (e && e.message ? e.message : 'parse error'));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
tplEl.addEventListener('blur', parseAndMark);
|
||||||
|
tplEl.addEventListener('input', () => { try { tplEl.style.borderColor=''; tplEl.title=''; } catch(_){} });
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
} catch (_) {}
|
||||||
|
|
||||||
// Prompt Manager UI for ProviderCall
|
// Prompt Manager UI for ProviderCall
|
||||||
if (type === 'ProviderCall') { PM.setupProviderCallPMUI(editor, id); }
|
if (type === 'ProviderCall') { PM.setupProviderCallPMUI(editor, id); }
|
||||||
}
|
}
|
||||||
@@ -2536,26 +2663,79 @@
|
|||||||
loadPipeline();
|
loadPipeline();
|
||||||
refreshPresets();
|
refreshPresets();
|
||||||
|
|
||||||
// Wire manual cancel button
|
// Wire split STOP button (left=graceful, right=abort)
|
||||||
try {
|
try {
|
||||||
const btnCancel = document.getElementById('btn-cancel');
|
const btnSplit = document.getElementById('btn-cancel');
|
||||||
if (btnCancel) {
|
if (btnSplit) {
|
||||||
btnCancel.addEventListener('click', async () => {
|
const leftSeg = btnSplit.querySelector('.seg-left');
|
||||||
|
const rightSeg = btnSplit.querySelector('.seg-right');
|
||||||
|
|
||||||
|
function sideFromEvent(ev) {
|
||||||
|
const r = btnSplit.getBoundingClientRect();
|
||||||
|
const x = (ev.touches && ev.touches[0] ? ev.touches[0].clientX : ev.clientX) - r.left;
|
||||||
|
return (x < r.width / 2) ? 'left' : 'right';
|
||||||
|
}
|
||||||
|
|
||||||
|
function setHover(side) {
|
||||||
|
btnSplit.classList.toggle('hover-left', side === 'left');
|
||||||
|
btnSplit.classList.toggle('hover-right', side === 'right');
|
||||||
|
}
|
||||||
|
|
||||||
|
async function postCancel(url, side) {
|
||||||
try {
|
try {
|
||||||
btnCancel.disabled = true;
|
btnSplit.classList.add('is-busy');
|
||||||
btnCancel.textContent = 'СТОП…';
|
if (side === 'left' && leftSeg) leftSeg.textContent = 'СТ…';
|
||||||
const res = await fetch('/admin/cancel', { method: 'POST' });
|
if (side === 'right' && rightSeg) rightSeg.textContent = 'ОП…';
|
||||||
|
const res = await fetch(url, { method: 'POST' });
|
||||||
if (res.ok) {
|
if (res.ok) {
|
||||||
status('Отмена исполнения запрошена');
|
const mode = url.includes('/abort') ? 'обрыв' : 'мягкая отмена';
|
||||||
|
status('Отмена исполнения: ' + mode + ' запрошена');
|
||||||
} else {
|
} else {
|
||||||
status('Ошибка запроса отмены: ' + res.status);
|
status('Ошибка запроса отмены: ' + res.status);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
status('Ошибка запроса отмены');
|
status('Ошибка запроса отмены');
|
||||||
} finally {
|
} finally {
|
||||||
setTimeout(()=>{ try { btnCancel.disabled = false; btnCancel.textContent = 'СТОП ⏹'; } catch(_){} }, 600);
|
setTimeout(()=>{
|
||||||
|
try {
|
||||||
|
btnSplit.classList.remove('is-busy');
|
||||||
|
if (leftSeg) leftSeg.textContent = 'СТ ⏹';
|
||||||
|
if (rightSeg) rightSeg.textContent = 'ОП';
|
||||||
|
} catch(_){}
|
||||||
|
}, 600);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
btnSplit.addEventListener('mousemove', (ev) => {
|
||||||
|
setHover(sideFromEvent(ev));
|
||||||
|
}, { passive: true });
|
||||||
|
|
||||||
|
btnSplit.addEventListener('mouseleave', () => {
|
||||||
|
btnSplit.classList.remove('hover-left','hover-right');
|
||||||
|
}, { passive: true });
|
||||||
|
|
||||||
|
btnSplit.addEventListener('click', async (ev) => {
|
||||||
|
const side = sideFromEvent(ev);
|
||||||
|
if (side === 'left') {
|
||||||
|
await postCancel('/admin/cancel', 'left');
|
||||||
|
} else {
|
||||||
|
await postCancel('/admin/cancel/abort', 'right');
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Touch support
|
||||||
|
btnSplit.addEventListener('touchstart', (ev) => {
|
||||||
|
setHover(sideFromEvent(ev));
|
||||||
|
}, { passive: true });
|
||||||
|
btnSplit.addEventListener('touchend', async (ev) => {
|
||||||
|
const side = sideFromEvent(ev);
|
||||||
|
btnSplit.classList.remove('hover-left','hover-right');
|
||||||
|
if (side === 'left') {
|
||||||
|
await postCancel('/admin/cancel', 'left');
|
||||||
|
} else {
|
||||||
|
await postCancel('/admin/cancel/abort', 'right');
|
||||||
|
}
|
||||||
|
}, { passive: false });
|
||||||
}
|
}
|
||||||
} catch(_) {}
|
} catch(_) {}
|
||||||
|
|
||||||
@@ -3557,6 +3737,59 @@ const __busyFav = (function(){
|
|||||||
const dataPre = document.getElementById('logs-data');
|
const dataPre = document.getElementById('logs-data');
|
||||||
const logsDetail = document.getElementById('logs-detail');
|
const logsDetail = document.getElementById('logs-detail');
|
||||||
|
|
||||||
|
// Manual resend editor state (edited/original per-log)
|
||||||
|
const btnReqSend = document.getElementById('logs-send');
|
||||||
|
const btnReqRevert = document.getElementById('logs-revert');
|
||||||
|
const __reqOriginalById = new Map();
|
||||||
|
const __reqEditedById = new Map();
|
||||||
|
|
||||||
|
function getSelectedLog() {
|
||||||
|
try { return selectedLogId ? logsById.get(selectedLogId) : null; } catch(_) { return null; }
|
||||||
|
}
|
||||||
|
function isHttpLog(it) {
|
||||||
|
try { return !!(it && (it.kind === 'http' || it.req)); } catch(_) { return false; }
|
||||||
|
}
|
||||||
|
function updateReqButtons() {
|
||||||
|
try {
|
||||||
|
const it = getSelectedLog();
|
||||||
|
const en = isHttpLog(it);
|
||||||
|
if (btnReqSend) btnReqSend.disabled = !en;
|
||||||
|
if (btnReqRevert) btnReqRevert.disabled = !en;
|
||||||
|
} catch(_) {}
|
||||||
|
}
|
||||||
|
if (reqPre) {
|
||||||
|
try { reqPre.setAttribute('contenteditable','true'); } catch(_) {}
|
||||||
|
reqPre.addEventListener('input', () => {
|
||||||
|
try { if (selectedLogId) __reqEditedById.set(selectedLogId, reqPre.innerText); } catch(_){}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (btnReqSend) btnReqSend.addEventListener('click', async () => {
|
||||||
|
const it = getSelectedLog();
|
||||||
|
if (!isHttpLog(it)) return;
|
||||||
|
const reqText = (reqPre && reqPre.innerText!=null) ? reqPre.innerText : '';
|
||||||
|
const body = { req_id: it.id, request_text: reqText, prefer_registry_original: true };
|
||||||
|
try {
|
||||||
|
const res = await fetch('/admin/http/manual-send', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
});
|
||||||
|
let j = null; try { j = await res.json(); } catch(_){}
|
||||||
|
try { status('Manual send: ' + (res.ok ? 'ok' : ('error ' + res.status)) + (j && j.req_id ? (' • new req=' + j.req_id) : '')); } catch(_){}
|
||||||
|
} catch (e) {
|
||||||
|
try { status('Manual send error'); } catch(_){}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (btnReqRevert) btnReqRevert.addEventListener('click', () => {
|
||||||
|
const it = getSelectedLog();
|
||||||
|
if (!isHttpLog(it)) return;
|
||||||
|
try {
|
||||||
|
const orig = __reqOriginalById.get(it.id) || (it && it.req ? buildReqText(it.req) : '');
|
||||||
|
__reqEditedById.delete(it.id);
|
||||||
|
if (reqPre) reqPre.textContent = orig || '';
|
||||||
|
} catch(_) {}
|
||||||
|
});
|
||||||
|
|
||||||
// Простая изоляция выделения для Request/Response/Data: без pointer-events, без «замков»
|
// Простая изоляция выделения для Request/Response/Data: без pointer-events, без «замков»
|
||||||
(function simpleLogsSelectionIsolation(){
|
(function simpleLogsSelectionIsolation(){
|
||||||
const detail = logsDetail || document.getElementById('logs-detail');
|
const detail = logsDetail || document.getElementById('logs-detail');
|
||||||
@@ -3623,7 +3856,7 @@ const __busyFav = (function(){
|
|||||||
const btnLogsClose = document.getElementById('logs-close');
|
const btnLogsClose = document.getElementById('logs-close');
|
||||||
const btnLogsClear = document.getElementById('logs-clear');
|
const btnLogsClear = document.getElementById('logs-clear');
|
||||||
function isLogsOpen(){ return panelLogs && panelLogs.style.display !== 'none'; }
|
function isLogsOpen(){ return panelLogs && panelLogs.style.display !== 'none'; }
|
||||||
function openLogs(){ if (panelLogs) { panelLogs.style.display='block'; panelLogs.setAttribute('aria-hidden','false'); try { if (logsDetail) logsDetail.setAttribute('data-active-pre','logs-req'); } catch(_){} renderLogsList(); renderLogsDetail(selectedLogId); } }
|
function openLogs(){ if (panelLogs) { panelLogs.style.display='block'; panelLogs.setAttribute('aria-hidden','false'); try { if (logsDetail) logsDetail.setAttribute('data-active-pre','logs-req'); } catch(_){} renderLogsList(); renderLogsDetail(selectedLogId); try { updateReqButtons && updateReqButtons(); } catch(_){} } }
|
||||||
function closeLogs(){ if (panelLogs) { panelLogs.style.display='none'; panelLogs.setAttribute('aria-hidden','true'); } }
|
function closeLogs(){ if (panelLogs) { panelLogs.style.display='none'; panelLogs.setAttribute('aria-hidden','true'); } }
|
||||||
if (btnLogsOpen) btnLogsOpen.addEventListener('click', () => { if (isLogsOpen()) { closeLogs(); } else { openLogs(); } });
|
if (btnLogsOpen) btnLogsOpen.addEventListener('click', () => { if (isLogsOpen()) { closeLogs(); } else { openLogs(); } });
|
||||||
if (btnLogsClose) btnLogsClose.addEventListener('click', closeLogs);
|
if (btnLogsClose) btnLogsClose.addEventListener('click', closeLogs);
|
||||||
@@ -3710,10 +3943,15 @@ const __busyFav = (function(){
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (item.kind === 'http') {
|
if (item.kind === 'http') {
|
||||||
st = (item.res && item.res.status!=null) ? ` • ${item.res.status}` : '';
|
const hasResp = !!(item.res && item.res.status != null);
|
||||||
|
const stc = hasResp ? Number(item.res.status) : 0;
|
||||||
|
const ok = hasResp && stc >= 200 && stc < 400;
|
||||||
|
// Показать индикатор ожидания для активного HTTP
|
||||||
|
st = hasResp ? ` • ${stc}` : ' • …';
|
||||||
classes.push('kind-http');
|
classes.push('kind-http');
|
||||||
const stc = (item.res && typeof item.res.status === 'number') ? item.res.status : 0;
|
if (!hasResp) classes.push('http-pending');
|
||||||
if (stc >= 200 && stc < 400) classes.push('http-ok'); else if (stc) classes.push('http-err');
|
else if (ok) classes.push('http-ok');
|
||||||
|
else classes.push('http-err');
|
||||||
} else if (item.kind === 'node') {
|
} else if (item.kind === 'node') {
|
||||||
const ev = (item.ev ? String(item.ev) : '').toLowerCase();
|
const ev = (item.ev ? String(item.ev) : '').toLowerCase();
|
||||||
const dur = (item.duration_ms!=null) ? ` (${fmtMs(item.duration_ms)})` : '';
|
const dur = (item.duration_ms!=null) ? ` (${fmtMs(item.duration_ms)})` : '';
|
||||||
@@ -3787,7 +4025,15 @@ const __busyFav = (function(){
|
|||||||
|
|
||||||
if (it.kind === 'http' || (!it.kind && it.req)) {
|
if (it.kind === 'http' || (!it.kind && it.req)) {
|
||||||
// HTTP logs: show full structure, trim only base64 values; render images in Data
|
// HTTP logs: show full structure, trim only base64 values; render images in Data
|
||||||
reqPre.textContent = buildReqText(it && it.req);
|
try {
|
||||||
|
const __origTxt = buildReqText(it && it.req);
|
||||||
|
try { __reqOriginalById.set(it.id, __origTxt); } catch(_){}
|
||||||
|
const __editedTxt = (typeof __reqEditedById !== 'undefined' && __reqEditedById && __reqEditedById.get) ? __reqEditedById.get(it.id) : null;
|
||||||
|
reqPre.textContent = (typeof __editedTxt === 'string') ? __editedTxt : __origTxt;
|
||||||
|
} catch(_) {
|
||||||
|
reqPre.textContent = buildReqText(it && it.req);
|
||||||
|
}
|
||||||
|
try { updateReqButtons && updateReqButtons(); } catch(_){}
|
||||||
if (it && it.res) {
|
if (it && it.res) {
|
||||||
const raw = it.res.body_text || '';
|
const raw = it.res.body_text || '';
|
||||||
let shown = raw;
|
let shown = raw;
|
||||||
@@ -4153,9 +4399,19 @@ const __busyFav = (function(){
|
|||||||
const data = JSON.parse(e.data);
|
const data = JSON.parse(e.data);
|
||||||
// Special handling for manual cancel notification
|
// Special handling for manual cancel notification
|
||||||
if (data && data.event === 'cancelled') {
|
if (data && data.event === 'cancelled') {
|
||||||
try { status('Исполнение остановлено пользователем'); } catch(_){}
|
try { status('Исполнение остановлено пользователем'); } catch(_) {}
|
||||||
try { __busyFav.reset(); } catch(_){}
|
try { __busyFav.reset(); } catch(_) {}
|
||||||
}
|
// Завершаем все висящие HTTP-записи (убираем анимацию ожидания)
|
||||||
|
try {
|
||||||
|
const now = Date.now();
|
||||||
|
for (const it of logs) {
|
||||||
|
if (it && it.kind === 'http' && !it.res) {
|
||||||
|
it.res = { status: 0, headers: {}, body_text: 'Cancelled by user (abort)', ts: now, data_preview: 'cancelled' };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isLogsOpen()) { renderLogsList(); if (selectedLogId) renderLogsDetail(selectedLogId); }
|
||||||
|
} catch (_) {}
|
||||||
|
}
|
||||||
handleTraceEvent(data);
|
handleTraceEvent(data);
|
||||||
handleLogEvent(data);
|
handleLogEvent(data);
|
||||||
} catch (_) {
|
} catch (_) {
|
||||||
|
|||||||
199
tests/test_cancel_modes.py
Normal file
199
tests/test_cancel_modes.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
from agentui.pipeline.executor import PipelineExecutor, ExecutionError
|
||||||
|
from agentui.common.cancel import request_cancel, clear_cancel
|
||||||
|
import agentui.providers.http_client as hc
|
||||||
|
import agentui.pipeline.executor as ex
|
||||||
|
from tests.utils import ctx as _ctx
|
||||||
|
|
||||||
|
|
||||||
|
class DummyResponse:
|
||||||
|
def __init__(self, status: int, json_obj: Dict[str, Any]) -> None:
|
||||||
|
self.status_code = status
|
||||||
|
self._json = json_obj
|
||||||
|
self.headers = {}
|
||||||
|
try:
|
||||||
|
self.content = json.dumps(json_obj, ensure_ascii=False).encode("utf-8")
|
||||||
|
except Exception:
|
||||||
|
self.content = b"{}"
|
||||||
|
try:
|
||||||
|
self.text = json.dumps(json_obj, ensure_ascii=False)
|
||||||
|
except Exception:
|
||||||
|
self.text = "{}"
|
||||||
|
|
||||||
|
def json(self) -> Dict[str, Any]:
|
||||||
|
return self._json
|
||||||
|
|
||||||
|
|
||||||
|
class DummyClient:
|
||||||
|
"""
|
||||||
|
Async client with artificial delay to simulate in-flight HTTP that can be cancelled.
|
||||||
|
Provides .post() and .request() compatible with executor usage.
|
||||||
|
"""
|
||||||
|
def __init__(self, delay: float = 0.3, status_code: int = 200) -> None:
|
||||||
|
self._delay = delay
|
||||||
|
self._status = status_code
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc_type, exc, tb):
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def post(self, url: str, content: bytes, headers: Dict[str, str]):
|
||||||
|
# Artificial delay to allow cancel/abort to happen while awaiting
|
||||||
|
await asyncio.sleep(self._delay)
|
||||||
|
try:
|
||||||
|
payload = json.loads(content.decode("utf-8"))
|
||||||
|
except Exception:
|
||||||
|
payload = {"_raw": content.decode("utf-8", errors="ignore")}
|
||||||
|
return DummyResponse(self._status, {"echo": payload})
|
||||||
|
|
||||||
|
async def request(self, method: str, url: str, headers: Dict[str, str], content: bytes | None):
|
||||||
|
return await self.post(url, content or b"{}", headers)
|
||||||
|
|
||||||
|
|
||||||
|
def _patch_http_client(delay: float = 0.3):
|
||||||
|
"""
|
||||||
|
Patch both providers.http_client.build_client and executor.build_client
|
||||||
|
to return our DummyClient with a given delay.
|
||||||
|
"""
|
||||||
|
orig_hc = hc.build_client
|
||||||
|
orig_ex = ex.build_client
|
||||||
|
hc.build_client = lambda timeout=60.0: DummyClient(delay=delay) # type: ignore[assignment]
|
||||||
|
ex.build_client = lambda timeout=60.0: DummyClient(delay=delay) # type: ignore[assignment]
|
||||||
|
return orig_hc, orig_ex
|
||||||
|
|
||||||
|
|
||||||
|
def _restore_http_client(orig_hc, orig_ex) -> None:
|
||||||
|
hc.build_client = orig_hc
|
||||||
|
ex.build_client = orig_ex
|
||||||
|
|
||||||
|
|
||||||
|
def test_graceful_cancel_while_providercall():
|
||||||
|
"""
|
||||||
|
Expectation:
|
||||||
|
- Cancel(mode=graceful) during in-flight HTTP should NOT interrupt the current request.
|
||||||
|
- While-wrapper should stop before starting next iteration.
|
||||||
|
- Final CYCLEINDEX__n2 == 0 (only first iteration finished), WAS_ERROR__n2 is False/absent.
|
||||||
|
"""
|
||||||
|
async def main():
|
||||||
|
p = {
|
||||||
|
"id": "p_cancel_soft",
|
||||||
|
"name": "ProviderCall graceful cancel",
|
||||||
|
"loop_mode": "dag",
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "n2",
|
||||||
|
"type": "ProviderCall",
|
||||||
|
"config": {
|
||||||
|
"provider": "openai",
|
||||||
|
"while_expr": "cycleindex < 5",
|
||||||
|
"while_max_iters": 10,
|
||||||
|
# ignore_errors not needed for graceful (no interruption of in-flight)
|
||||||
|
"provider_configs": {
|
||||||
|
"openai": {
|
||||||
|
"base_url": "http://dummy.local",
|
||||||
|
"headers": "{}",
|
||||||
|
"template": "{}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"in": {}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
pid = p["id"]
|
||||||
|
orig_hc, orig_ex = _patch_http_client(delay=0.3)
|
||||||
|
try:
|
||||||
|
ctx = _ctx()
|
||||||
|
exr = PipelineExecutor(p)
|
||||||
|
task = asyncio.create_task(exr.run(ctx))
|
||||||
|
# Give the node time to start HTTP, then request graceful cancel
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
request_cancel(pid, mode="graceful")
|
||||||
|
out = await task
|
||||||
|
finally:
|
||||||
|
_restore_http_client(orig_hc, orig_ex)
|
||||||
|
try:
|
||||||
|
clear_cancel(pid)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
assert isinstance(out, dict)
|
||||||
|
vars_map = out.get("vars") or {}
|
||||||
|
assert isinstance(vars_map, dict)
|
||||||
|
# Only first iteration should have finished; last index = 0
|
||||||
|
assert vars_map.get("CYCLEINDEX__n2") == 0
|
||||||
|
# No error expected on graceful (we didn't interrupt the in-flight HTTP)
|
||||||
|
assert vars_map.get("WAS_ERROR__n2") in (False, None)
|
||||||
|
|
||||||
|
asyncio.run(main())
|
||||||
|
|
||||||
|
|
||||||
|
def test_abort_cancel_inflight_providercall():
|
||||||
|
"""
|
||||||
|
Expectation:
|
||||||
|
- Cancel(mode=abort) during in-flight HTTP cancels the await with ExecutionError.
|
||||||
|
- While-wrapper with ignore_errors=True converts it into {"result":{"error":...}}.
|
||||||
|
- Final CYCLEINDEX__n2 == 0 and WAS_ERROR__n2 == True; error mentions 'Cancelled by user (abort)'.
|
||||||
|
"""
|
||||||
|
async def main():
|
||||||
|
p = {
|
||||||
|
"id": "p_cancel_abort",
|
||||||
|
"name": "ProviderCall abort cancel",
|
||||||
|
"loop_mode": "dag",
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "n2",
|
||||||
|
"type": "ProviderCall",
|
||||||
|
"config": {
|
||||||
|
"provider": "openai",
|
||||||
|
"while_expr": "cycleindex < 5",
|
||||||
|
"while_max_iters": 10,
|
||||||
|
"ignore_errors": True, # convert cancellation exception into error payload
|
||||||
|
"provider_configs": {
|
||||||
|
"openai": {
|
||||||
|
"base_url": "http://dummy.local",
|
||||||
|
"headers": "{}",
|
||||||
|
"template": "{}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"in": {}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
pid = p["id"]
|
||||||
|
orig_hc, orig_ex = _patch_http_client(delay=0.3)
|
||||||
|
try:
|
||||||
|
ctx = _ctx()
|
||||||
|
exr = PipelineExecutor(p)
|
||||||
|
task = asyncio.create_task(exr.run(ctx))
|
||||||
|
# Let HTTP start, then trigger hard abort
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
request_cancel(pid, mode="abort")
|
||||||
|
out = await task
|
||||||
|
finally:
|
||||||
|
_restore_http_client(orig_hc, orig_ex)
|
||||||
|
try:
|
||||||
|
clear_cancel(pid)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
assert isinstance(out, dict)
|
||||||
|
vars_map = out.get("vars") or {}
|
||||||
|
assert isinstance(vars_map, dict)
|
||||||
|
# First iteration was started; after abort it is considered errored and loop stops
|
||||||
|
assert vars_map.get("CYCLEINDEX__n2") == 0
|
||||||
|
assert vars_map.get("WAS_ERROR__n2") is True
|
||||||
|
|
||||||
|
# Error propagated into node's result (ignore_errors=True path)
|
||||||
|
res = out.get("result") or {}
|
||||||
|
assert isinstance(res, dict)
|
||||||
|
err = res.get("error")
|
||||||
|
assert isinstance(err, str) and "Cancelled by user (abort)" in err
|
||||||
|
|
||||||
|
asyncio.run(main())
|
||||||
Reference in New Issue
Block a user