sync: UI animations, select styling, TLS verify flag via proxy second line, brand spacing

This commit is contained in:
2025-09-14 11:54:28 +03:00
parent 11a0535712
commit 81014d26f8
11 changed files with 3633 additions and 1459 deletions

View File

@@ -1,6 +1,7 @@
from typing import Dict, Optional
from typing import Dict, Optional, Union
from pathlib import Path
from urllib.parse import quote
import os
def _parse_proxy_line(line: str) -> Optional[str]:
@@ -39,6 +40,9 @@ def _read_proxy_from_file() -> Optional[str]:
line = raw.strip()
if not line or line.startswith("#"):
continue
# поддержим дополнительные ключи вида key=value в этом же файле (разберём ниже)
if "=" in line:
continue
url = _parse_proxy_line(line)
if url:
return url
@@ -59,3 +63,138 @@ def build_httpx_proxies() -> Optional[Dict[str, str]]:
}
def _read_kv_from_proxy_file() -> Dict[str, str]:
"""
Поддержка дополнительных опций в proxy.txt:
ca=/полный/путь/к/burp-ca.pem
verify=false # отключить проверку сертификатов (для отладки)
"""
out: Dict[str, str] = {}
p = Path("proxy.txt")
if not p.exists():
return out
try:
for raw in p.read_text(encoding="utf-8").splitlines():
line = raw.strip()
if not line or line.startswith("#"):
continue
if "=" not in line:
continue
k, v = line.split("=", 1)
out[k.strip().lower()] = v.strip()
except Exception:
return out
return out
def get_tls_verify() -> Union[bool, str]:
    """Resolve the value for httpx.AsyncClient(verify=...).

    Resolution order (first hit wins):
      1. AGENTUI_VERIFY env var set to a falsy token -> False
      2. AGENTUI_CA env var pointing at an existing file -> that path (str)
      3. proxy.txt key verify=0/false/no/off -> False
      4. proxy.txt key ca=<existing path> -> that path (str)
      5. a bare true/false-style line in proxy.txt after the proxy URL line
      6. proxy-ca.pem in the project root -> that path (str)
      7. default -> True (strict verification)

    proxy.txt example with the bare second-line flag after the URL:
        http:127.0.0.1:8888
        false
    """
    falsy = ("0", "false", "no", "off")

    # 1) Environment variables take precedence over everything else.
    env_verify = os.getenv("AGENTUI_VERIFY")
    if env_verify is not None and env_verify.strip().lower() in falsy:
        return False
    env_ca = os.getenv("AGENTUI_CA")
    if env_ca:
        ca_path = Path(env_ca).expanduser()
        if ca_path.exists():
            return str(ca_path)

    # 2) key=value options from proxy.txt.
    kv = _read_kv_from_proxy_file()
    if kv.get("verify", "").lower() in falsy:
        return False
    if "ca" in kv:
        ca_path = Path(kv["ca"]).expanduser()
        if ca_path.exists():
            return str(ca_path)

    # 2.1) Keyless flag: a bare true/false line after the proxy URL line.
    try:
        cfg = Path("proxy.txt")
        if cfg.exists():
            rows = [r.strip() for r in cfg.read_text(encoding="utf-8").splitlines()]

            def _is_bare(row: str) -> bool:
                # A "bare" line: non-empty, not a comment, not key=value.
                return bool(row) and not row.startswith("#") and "=" not in row

            url_idx = next((i for i, r in enumerate(rows) if _is_bare(r)), None)
            if url_idx is not None:
                # Scan the bare lines that follow the URL for a flag token.
                for row in rows[url_idx + 1:]:
                    if not _is_bare(row):
                        continue
                    token = row.lower()
                    if token in ("1", "true", "yes", "on"):
                        return True
                    if token in ("0", "false", "no", "off"):
                        return False
                    # Not flag-like: ignore and keep scanning.
    except Exception:
        pass

    # 3) Default CA bundle in the project root.
    default_ca = Path("proxy-ca.pem")
    if default_ca.exists():
        return str(default_ca)

    # 4) Strict verification by default.
    return True
def is_verify_explicit() -> bool:
    """Report whether the user EXPLICITLY configured the TLS verify policy.

    When True, the HTTP client must not override the policy with its own
    default. Sources that count as explicit:
      - env vars AGENTUI_VERIFY / AGENTUI_CA
      - proxy.txt keys verify=... / ca=...
      - a bare true/false-style line after the proxy URL in proxy.txt
      - a proxy-ca.pem file in the project root
    """
    if os.getenv("AGENTUI_VERIFY") is not None:
        return True
    if os.getenv("AGENTUI_CA"):
        return True
    kv = _read_kv_from_proxy_file()
    if "verify" in kv or "ca" in kv:
        return True
    # A bare flag line right after the URL counts as an explicit choice.
    try:
        cfg = Path("proxy.txt")
        if cfg.exists():
            rows = [r.strip() for r in cfg.read_text(encoding="utf-8").splitlines()]

            def _is_bare(row: str) -> bool:
                # Non-empty, not a comment, not key=value.
                return bool(row) and not row.startswith("#") and "=" not in row

            url_idx = next((i for i, r in enumerate(rows) if _is_bare(r)), None)
            if url_idx is not None:
                # Only the FIRST bare line after the URL is inspected.
                for row in rows[url_idx + 1:]:
                    if not _is_bare(row):
                        continue
                    if row.lower() in ("1", "0", "true", "false", "yes", "no", "on", "off"):
                        return True
                    break
    except Exception:
        pass
    if Path("proxy-ca.pem").exists():
        return True
    return False

View File

@@ -291,7 +291,32 @@ class PipelineExecutor:
ctx["store"] = dict(self._store)
except Exception:
ctx["store"] = {}
# Pipeline meta (includes http_timeout_sec) available to nodes via context.meta
try:
ctx["meta"] = {
"id": self.pipeline_id,
"loop_mode": self.loop_mode,
"loop_max_iters": self.loop_max_iters,
"loop_time_budget_ms": self.loop_time_budget_ms,
"clear_var_store": self.clear_var_store,
"http_timeout_sec": float(self.pipeline.get("http_timeout_sec", 60) or 60),
# v1: стратегия извлечения текста для [[OUTx]]
"text_extract_strategy": str(self.pipeline.get("text_extract_strategy", "auto") or "auto"),
"text_extract_json_path": str(self.pipeline.get("text_extract_json_path", "") or ""),
"text_join_sep": str(self.pipeline.get("text_join_sep", "\n") or "\n"),
# v2: коллекция пресетов парсинга для выбора в нодах
"text_extract_presets": list(self.pipeline.get("text_extract_presets", []) or []),
}
except Exception:
pass
# Прокидываем traceфункцию в контекст, чтобы ноды могли посылать события (SSE лог)
try:
if trace is not None:
ctx["_trace"] = trace
except Exception:
pass
inputs: Dict[str, Any] = {}
for name, source in (ndef.get("in") or {}).items():
if isinstance(source, list):
@@ -301,7 +326,13 @@ class PipelineExecutor:
if trace is not None:
try:
await trace({"event": "node_start", "node_id": ndef["id"], "wave": wave_num, "ts": int(time.time() * 1000)})
await trace({
"event": "node_start",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
})
except Exception:
pass
@@ -318,6 +349,7 @@ class PipelineExecutor:
await trace({
"event": "node_error",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
"error": str(exc),
@@ -332,6 +364,7 @@ class PipelineExecutor:
await trace({
"event": "node_done",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
"duration_ms": dur_ms,
@@ -615,7 +648,32 @@ class PipelineExecutor:
ctx["store"] = dict(self._store)
except Exception:
ctx["store"] = {}
# Pipeline meta (includes http_timeout_sec) available to nodes via context.meta
try:
ctx["meta"] = {
"id": self.pipeline_id,
"loop_mode": self.loop_mode,
"loop_max_iters": self.loop_max_iters,
"loop_time_budget_ms": self.loop_time_budget_ms,
"clear_var_store": self.clear_var_store,
"http_timeout_sec": float(self.pipeline.get("http_timeout_sec", 60) or 60),
# v1: стратегия извлечения текста для [[OUTx]]
"text_extract_strategy": str(self.pipeline.get("text_extract_strategy", "auto") or "auto"),
"text_extract_json_path": str(self.pipeline.get("text_extract_json_path", "") or ""),
"text_join_sep": str(self.pipeline.get("text_join_sep", "\n") or "\n"),
# v2: коллекция пресетов парсинга для выбора в нодах
"text_extract_presets": list(self.pipeline.get("text_extract_presets", []) or []),
}
except Exception:
pass
# Прокидываем traceфункцию в контекст, чтобы ноды могли посылать события (SSE лог)
try:
if trace is not None:
ctx["_trace"] = trace
except Exception:
pass
inputs: Dict[str, Any] = {}
for name, source in (ndef.get("in") or {}).items():
if isinstance(source, list):
@@ -625,7 +683,13 @@ class PipelineExecutor:
if trace is not None:
try:
await trace({"event": "node_start", "node_id": ndef["id"], "wave": wave_num, "ts": int(time.time() * 1000)})
await trace({
"event": "node_start",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
})
except Exception:
pass
@@ -642,6 +706,7 @@ class PipelineExecutor:
await trace({
"event": "node_error",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
"error": str(exc),
@@ -656,6 +721,7 @@ class PipelineExecutor:
await trace({
"event": "node_done",
"node_id": ndef["id"],
"node_type": node.type_name,
"wave": wave_num,
"ts": int(time.time() * 1000),
"duration_ms": dur_ms,
@@ -1100,7 +1166,173 @@ class SetVarsNode(Node):
else:
resolved = render_template_simple(str(raw_val or ""), context, out_map)
result[name] = resolved
# Событие «vars_set» в SSEлог (+ превью значений)
try:
trace_fn = context.get("_trace")
if trace_fn:
def _pv(val: Any) -> str:
try:
if isinstance(val, str):
s = val
else:
s = json.dumps(val, ensure_ascii=False)
except Exception:
try:
s = str(val)
except Exception:
s = "<unrepresentable>"
s = s if isinstance(s, str) else str(s)
return (s[:300] + "") if len(s) > 300 else s
values_preview = {k: _pv(v) for k, v in (result or {}).items()}
await trace_fn({
"event": "vars_set",
"node_id": self.node_id,
"node_type": self.type_name,
"vars": list(result.keys()),
"count": len(result),
"values_preview": values_preview,
})
except Exception:
pass
return {"vars": result}
# --- v1: Нормализованный экстрактор текста для OUTx -----------------------------
def _json_path_extract(obj: Any, path: str) -> Any:
if not path:
return None
nodes = [obj]
for raw_seg in str(path).split("."):
seg = (raw_seg or "").strip()
if not seg:
continue
next_nodes = []
# Поддержка "*": разворачиваем dict.values() или элементы списка
if seg == "*":
for n in nodes:
if isinstance(n, dict):
next_nodes.extend(list(n.values()))
elif isinstance(n, list):
next_nodes.extend(list(n))
nodes = next_nodes
if not nodes:
break
continue
# Числовой индекс для списков
idx = None
try:
idx = int(seg)
except Exception:
idx = None
for n in nodes:
if idx is not None and isinstance(n, list):
if 0 <= idx < len(n):
next_nodes.append(n[idx])
elif idx is None and isinstance(n, dict):
if seg in n:
next_nodes.append(n[seg])
nodes = next_nodes
if not nodes:
break
if not nodes:
return None
return nodes if len(nodes) > 1 else nodes[0]
def _stringify_join(values: Any, join_sep: str = "\n") -> str:
def _flatten(x):
if isinstance(x, list):
for it in x:
yield from _flatten(it)
else:
yield x
arr = list(_flatten(values if isinstance(values, list) else [values]))
out: List[str] = []
for v in arr:
if isinstance(v, str):
if v:
out.append(v)
elif isinstance(v, (dict, list)):
try:
t = _deep_find_text(v) # type: ignore[name-defined]
if isinstance(t, str) and t:
out.append(t)
except Exception:
pass
else:
try:
sv = str(v)
if sv:
out.append(sv)
except Exception:
pass
if not out:
return ""
return (join_sep or "\n").join(out)
def _extract_text_for_out(data: Any, strategy: str = "auto", provider_hint: str = "", json_path: str = "", join_sep: str = "\n") -> str:
s = (strategy or "auto").lower().strip() or "auto"
prov = (provider_hint or "").lower().strip()
# Принудительные стратегии
if s == "deep":
try:
t = _deep_find_text(data) # type: ignore[name-defined]
return t if isinstance(t, str) else (str(t) if t is not None else "")
except Exception:
return ""
if s == "jsonpath":
try:
vals = _json_path_extract(data, json_path or "")
return _stringify_join(vals, join_sep)
except Exception:
return ""
if s in {"openai", "gemini", "claude"}:
try:
if s == "openai" and isinstance(data, dict):
msg = (data.get("choices") or [{}])[0].get("message") or {}
c = msg.get("content")
return c if isinstance(c, str) else (str(c) if c is not None else "")
if s == "gemini" and isinstance(data, dict):
cand0 = (data.get("candidates") or [{}])[0]
content = cand0.get("content") or {}
parts0 = (content.get("parts") or [{}])[0]
t = parts0.get("text")
return t if isinstance(t, str) else (str(t) if t is not None else "")
if s == "claude" and isinstance(data, dict):
blocks = data.get("content") or []
texts = [b.get("text") for b in blocks if isinstance(b, dict) and isinstance(b.get("text"), str)]
if texts:
return "\n".join(texts)
return ""
except Exception:
return ""
return ""
# auto: если есть подсказка — пробуем сначала её ветку, потом общий
if prov in {"openai", "gemini", "claude"}:
forced = _extract_text_for_out(data, prov, prov, json_path, join_sep)
if forced:
return forced
# Универсальный best-effort: знает openai/gemini/claude
try:
t = _best_text_from_outputs({"result": data}) # type: ignore[name-defined]
if isinstance(t, str) and t:
return t
if t is not None:
t2 = str(t)
if t2:
return t2
except Exception:
pass
# Последний шанс — глубокий поиск
try:
t = _deep_find_text(data) # type: ignore[name-defined]
return t if isinstance(t, str) else (str(t) if t is not None else "")
except Exception:
return ""
class ProviderCallNode(Node):
type_name = "ProviderCall"
@@ -1283,6 +1515,31 @@ class ProviderCallNode(Node):
async def run(self, inputs: Dict[str, Any], context: Dict[str, Any]) -> Dict[str, Any]:
provider = (self.config.get("provider") or "openai").lower()
# Optional sleep before execution (UI-configured, milliseconds)
try:
sleep_ms = int(self.config.get("sleep_ms") or 0)
except Exception:
sleep_ms = 0
if sleep_ms > 0:
# Emit SSE event to highlight node as "sleeping" on UI
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "node_sleep",
"node_id": self.node_id,
"node_type": self.type_name,
"sleep_ms": int(sleep_ms),
"ts": int(time.time() * 1000),
})
except Exception:
pass
# Non-blocking pause
try:
await asyncio.sleep(max(0.0, sleep_ms / 1000.0))
except Exception:
pass
# Support provider-specific configs stored in UI as provider_configs.{provider}
prov_cfg: Dict[str, Any] = {}
try:
@@ -1406,9 +1663,35 @@ class ProviderCallNode(Node):
print("===== ProviderCall REQUEST END =====")
except Exception:
pass
# SSE: http_req (как в Burp)
req_id = f"{self.node_id}-{int(time.time()*1000)}"
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "http_req",
"node_id": self.node_id,
"node_type": self.type_name,
"provider": provider,
"req_id": req_id,
"method": "POST",
"url": url,
"headers": final_headers,
"body_text": json.dumps(payload, ensure_ascii=False, indent=2),
})
except Exception:
pass
async with build_client() as client:
resp = await client.post(url, json=payload, headers=final_headers)
# Timeout from run-meta (seconds); fallback to 60
try:
timeout_sec = float((context.get("meta") or {}).get("http_timeout_sec", 60) or 60)
except Exception:
timeout_sec = 60.0
st: Optional[int] = None
async with build_client(timeout=timeout_sec) as client:
body_bytes = json.dumps(payload, ensure_ascii=False).encode("utf-8")
resp = await client.post(url, content=body_bytes, headers=final_headers)
# Do not raise_for_status: keep body/logs on 4xx/5xx
try:
print("===== ProviderCall RESPONSE BEGIN =====")
@@ -1430,33 +1713,107 @@ class ProviderCallNode(Node):
print(body_text)
print("===== ProviderCall RESPONSE END =====")
except Exception:
body_text = "<resp.text decode error>"
pass
try:
data = resp.json()
except Exception:
data = {"error": "Failed to decode JSON from upstream", "text": resp.text}
# SSE: http_resp
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "http_resp",
"node_id": self.node_id,
"node_type": self.type_name,
"provider": provider,
"req_id": req_id,
"status": int(resp.status_code),
"headers": dict(resp.headers),
"body_text": body_text if isinstance(body_text, str) else str(body_text),
})
except Exception:
pass
try:
st = int(resp.status_code)
except Exception:
st = None
# Извлекаем текст best-effort
text = None
if provider == "openai":
try:
text = data.get("choices", [{}])[0].get("message", {}).get("content")
except Exception: # noqa: BLE001
text = None
elif provider == "gemini":
try:
text = data.get("candidates", [{}])[0].get("content", {}).get("parts", [{}])[0].get("text")
except Exception: # noqa: BLE001
text = None
elif provider == "claude":
try:
blocks = data.get("content") or []
texts = [b.get("text") for b in blocks if isinstance(b, dict) and b.get("type") == "text"]
text = "\n".join([t for t in texts if isinstance(t, str)])
except Exception: # noqa: BLE001
text = None
# Извлечение текста: приоритет — пресет ноды -> кастомн. поля ноды -> глобальные meta
try:
meta_opts = (context.get("meta") or {})
presets = meta_opts.get("text_extract_presets") or []
cfg = self.config or {}
strategy = None
json_path = None
join_sep = None
# 1) Пресет по id
pid = str(cfg.get("text_extract_preset_id") or "").strip()
if pid:
try:
pr = next((x for x in presets if isinstance(x, dict) and str(x.get("id", "")) == pid), None)
except Exception:
pr = None
if pr:
strategy = str(pr.get("strategy") or "auto")
json_path = str(pr.get("json_path") or "")
join_sep = str(pr.get("join_sep") or "\n")
# 2) Кастомные поля ноды (если задано что-то из набора)
if not strategy and (cfg.get("text_extract_strategy") is not None or cfg.get("text_extract_json_path") is not None or cfg.get("text_join_sep") is not None):
strategy = str(cfg.get("text_extract_strategy") or "auto")
json_path = str(cfg.get("text_extract_json_path") or "")
join_sep = str(cfg.get("text_join_sep") or "\n")
# 3) Глобальные метаданные запуска
if not strategy:
strategy = str(meta_opts.get("text_extract_strategy", "auto") or "auto")
json_path = str(meta_opts.get("text_extract_json_path", "") or "")
join_sep = str(meta_opts.get("text_join_sep", "\n") or "\n")
except Exception:
strategy, json_path, join_sep = "auto", "", "\n"
text = _extract_text_for_out(data, strategy, provider, json_path, join_sep)
# Подробный SSE-лог завершения ProviderCall
try:
trace_fn = context.get("_trace")
if trace_fn:
# Compute destination macros for extracted text
try:
to_path = f"OUT.{self.node_id}.response_text"
macro_braces = f"{{{{ OUT.{self.node_id}.response_text }}}}"
alias_macro = ""
m = re.match(r"^n(\d+)$", str(self.node_id))
if m:
alias_macro = f"[[OUT{int(m.group(1))}]]"
except Exception:
to_path = f"OUT.{self.node_id}.response_text"
macro_braces = f"{{{{ OUT.{self.node_id}.response_text }}}}"
alias_macro = ""
await trace_fn({
"event": "provider_done",
"node_id": self.node_id,
"node_type": self.type_name,
"provider": provider,
"url": url,
"req_id": req_id,
"status": int(st) if st is not None else None,
"text_len": int(len(text or "")),
"extracted_text": text or "",
"strategy": strategy,
"json_path": json_path or "",
"join_sep": join_sep or "\n",
"to_path": to_path,
"to_macro_outx": alias_macro,
"to_macro_braces": macro_braces,
"ts": int(time.time() * 1000),
})
except Exception:
pass
return {"result": data, "response_text": text or ""}
@@ -1464,20 +1821,51 @@ class RawForwardNode(Node):
type_name = "RawForward"
async def run(self, inputs: Dict[str, Any], context: Dict[str, Any]) -> Dict[str, Any]:
incoming = context.get("incoming", {})
"""
Прямой форвард входящего запроса на апстрим:
- base_url/override_path/extra_headers поддерживают макросы [[...]] и {{ ... }}
- По умолчанию пробрасываем входящие заголовки (кроме Host/Content-Length), затем применяем extra_headers поверх.
- Тело берём из incoming.json (если есть), иначе пытаемся использовать текст/байты.
- Эмитим SSE события http_req/http_resp для панели логов.
"""
incoming = context.get("incoming", {}) or {}
raw_payload = incoming.get("json")
# Optional sleep before forwarding (UI-configured, milliseconds)
try:
sleep_ms = int(self.config.get("sleep_ms") or 0)
except Exception:
sleep_ms = 0
if sleep_ms > 0:
# Notify UI to show "sleep" state for this node
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "node_sleep",
"node_id": self.node_id,
"node_type": self.type_name,
"sleep_ms": int(sleep_ms),
"ts": int(time.time() * 1000),
})
except Exception:
pass
try:
await asyncio.sleep(max(0.0, sleep_ms / 1000.0))
except Exception:
pass
# Конфиг с поддержкой макросов
base_url: Optional[str] = self.config.get("base_url")
override_path: Optional[str] = self.config.get("override_path")
# Разрешаем макросы в конфиге RawForward (base_url, override_path)
out_map_for_macros = context.get("OUT") or {}
if base_url:
base_url = render_template_simple(str(base_url), context, out_map_for_macros)
if override_path:
override_path = render_template_simple(str(override_path), context, out_map_for_macros)
# Если base_url не указан, включаем автодетекцию
# Автодетекция вендора для базового URL если base_url не задан
if not base_url:
vendor = detect_vendor(raw_payload)
if vendor == "openai":
@@ -1487,25 +1875,21 @@ class RawForwardNode(Node):
elif vendor == "gemini":
base_url = "https://generativelanguage.googleapis.com"
else:
raise ExecutionError(f"Node {self.node_id} ({self.type_name}): 'base_url' is not configured and vendor could not be detected.")
raise ExecutionError(
f"Node {self.node_id} ({self.type_name}): 'base_url' is not configured and vendor could not be detected."
)
# Гарантируем наличие схемы у base_url
if not base_url.startswith(("http://", "https://")):
base_url = "http://" + base_url
if not str(base_url).startswith(("http://", "https://")):
base_url = "http://" + str(base_url)
path = override_path or incoming.get("path") or "/"
query = incoming.get("query")
if query:
path_with_qs = f"{path}?{query}"
else:
path_with_qs = path
url = urljoin(base_url.rstrip("/") + "/", path_with_qs.lstrip("/"))
path_with_qs = f"{path}?{query}" if query else str(path)
url = urljoin(str(base_url).rstrip("/") + "/", str(path_with_qs).lstrip("/"))
# Заголовки: passthrough + extra_headers(JSON)
passthrough_headers: bool = bool(self.config.get("passthrough_headers", True))
extra_headers_json: str = self.config.get("extra_headers") or "{}"
# Макросы в extra_headers
try:
extra_headers_src = render_template_simple(extra_headers_json, context, out_map_for_macros) if extra_headers_json else "{}"
extra_headers = json.loads(extra_headers_src) if extra_headers_src else {}
@@ -1517,40 +1901,114 @@ class RawForwardNode(Node):
headers: Dict[str, str] = {}
if passthrough_headers:
inc_headers = incoming.get("headers") or {}
# Копируем все заголовки, кроме Host и Content-Length
for k, v in inc_headers.items():
if k.lower() not in ['host', 'content-length']:
# Не пробрасываем управляющие заголовки, которые рассчитает httpx/сервер
if k and k.lower() not in {"host", "content-length"}:
headers[k] = v
# Убедимся, что Content-Type на месте, если его не было
if 'content-type' not in {k.lower() for k in headers}:
headers['Content-Type'] = 'application/json'
# extra_headers перекрывают проброс
try:
headers.update(extra_headers)
except Exception:
pass
# Нормализация: защитимся от повторного наличия host/content-length после update
for k in list(headers.keys()):
kl = k.lower()
if kl in {"host", "content-length"}:
try:
headers.pop(k, None)
except Exception:
pass
headers.update(extra_headers)
# Метод
method = str(incoming.get("method") or "POST").upper().strip() or "POST"
# Brute request/response logging (FULL, no masking)
# Тело запроса
body_text = ""
body_bytes: bytes = b""
if raw_payload is not None:
# JSON — сериализуем явно как UTF8
try:
body_text = json.dumps(raw_payload, ensure_ascii=False, indent=2)
except Exception:
try:
body_text = str(raw_payload)
except Exception:
body_text = ""
try:
body_bytes = json.dumps(raw_payload, ensure_ascii=False).encode("utf-8")
except Exception:
body_bytes = (body_text or "").encode("utf-8", errors="ignore")
# Если Content-Type не задан — выставим JSON
if "content-type" not in {k.lower() for k in headers.keys()}:
headers["Content-Type"] = "application/json"
else:
# Попробуем текст/байты из incoming
raw_bytes = incoming.get("body_bytes") or incoming.get("bytes")
raw_text = incoming.get("body_text") or incoming.get("text")
if isinstance(raw_bytes, (bytes, bytearray)):
body_bytes = bytes(raw_bytes)
try:
body_text = body_bytes.decode("utf-8", errors="ignore")
except Exception:
body_text = ""
elif raw_text is not None:
body_text = str(raw_text)
try:
body_bytes = body_text.encode("utf-8")
except Exception:
body_bytes = (body_text or "").encode("utf-8", errors="ignore")
else:
body_text = ""
body_bytes = b""
# Таймаут из метаданных запуска
try:
timeout_sec = float((context.get("meta") or {}).get("http_timeout_sec", 60) or 60)
except Exception:
timeout_sec = 60.0
# Диагностический вывод полной заявки/ответа (без маскировки)
try:
print("===== RawForward REQUEST BEGIN =====")
print(f"node={self.node_id} type={self.type_name}")
print(f"Method: {method}")
print(f"URL: {url}")
try:
print("Headers:")
print(json.dumps(headers, ensure_ascii=False, indent=2))
except Exception:
print(f"Headers(raw): {headers}")
try:
print("Body JSON:")
print(json.dumps(raw_payload, ensure_ascii=False, indent=2))
except Exception:
print(f"Body(raw): {raw_payload}")
print("Body:")
print(body_text)
print("===== RawForward REQUEST END =====")
except Exception:
pass
async with build_client() as client:
resp = await client.post(url, json=raw_payload, headers=headers)
# SSE: http_req
req_id = f"{self.node_id}-{int(time.time()*1000)}"
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "http_req",
"node_id": self.node_id,
"node_type": self.type_name,
"req_id": req_id,
"method": method,
"url": url,
"headers": headers,
"body_text": body_text,
})
except Exception:
pass
# Response logging
# Отправка
async with build_client(timeout=timeout_sec) as client:
# Для GET/HEAD обычно не отправляем body
send_content = None if method in {"GET", "HEAD"} else body_bytes
resp = await client.request(method, url, headers=headers, content=send_content)
# Ответ: лог/печать
try:
print("===== RawForward RESPONSE BEGIN =====")
print(f"node={self.node_id} type={self.type_name}")
@@ -1564,23 +2022,118 @@ class RawForwardNode(Node):
except Exception:
print("Headers(raw): <unavailable>")
try:
body_text = resp.text
resp_text = resp.text
except Exception:
body_text = "<resp.text decode error>"
resp_text = "<resp.text decode error>"
print("Body Text:")
print(body_text)
print(resp_text)
print("===== RawForward RESPONSE END =====")
except Exception:
resp_text = "<resp.text decode error>"
# SSE: http_resp
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "http_resp",
"node_id": self.node_id,
"node_type": self.type_name,
"req_id": req_id,
"status": int(resp.status_code),
"headers": dict(resp.headers),
"body_text": resp_text if isinstance(resp_text, str) else str(resp_text),
})
except Exception:
pass
# Decode JSON if possible, otherwise return text
# Разбор результата
try:
data = resp.json()
except Exception:
data = {"error": "Failed to decode JSON from upstream", "text": resp.text}
return {"result": data}
data = {"error": "Failed to decode JSON from upstream", "text": resp_text}
# Извлечение текста: приоритет — пресет ноды -> кастомные поля ноды -> глобальные meta.
try:
meta_opts = (context.get("meta") or {})
presets = meta_opts.get("text_extract_presets") or []
cfg = self.config or {}
strategy = None
json_path = None
join_sep = None
# 1) Пресет по id, выбранный в ноде
pid = str(cfg.get("text_extract_preset_id") or "").strip()
if pid:
try:
pr = next((x for x in presets if isinstance(x, dict) and str(x.get("id", "")) == pid), None)
except Exception:
pr = None
if pr:
strategy = str(pr.get("strategy") or "auto")
json_path = str(pr.get("json_path") or "")
join_sep = str(pr.get("join_sep") or "\n")
# 2) Кастомные поля ноды (если что-то задано)
if not strategy and (cfg.get("text_extract_strategy") is not None or cfg.get("text_extract_json_path") is not None or cfg.get("text_join_sep") is not None):
strategy = str(cfg.get("text_extract_strategy") or "auto")
json_path = str(cfg.get("text_extract_json_path") or "")
join_sep = str(cfg.get("text_join_sep") or "\n")
# 3) Глобальные метаданные запуска (дефолт)
if not strategy:
strategy = str(meta_opts.get("text_extract_strategy", "auto") or "auto")
json_path = str(meta_opts.get("text_extract_json_path", "") or "")
join_sep = str(meta_opts.get("text_join_sep", "\n") or "\n")
except Exception:
strategy, json_path, join_sep = "auto", "", "\n"
# Подсказка о провайдере: используем detect_vendor(data) как hint для auto
try:
prov_hint = detect_vendor(data if isinstance(data, dict) else {}) # type: ignore[arg-type]
except Exception:
prov_hint = "unknown"
best_text = _extract_text_for_out(data, strategy, prov_hint, json_path, join_sep)
# Подробный SSE-лог завершения RawForward
try:
trace_fn = context.get("_trace")
if trace_fn:
# Compute destination macros for extracted text and include req_id
try:
to_path = f"OUT.{self.node_id}.response_text"
macro_braces = f"{{{{ OUT.{self.node_id}.response_text }}}}"
alias_macro = ""
m = re.match(r"^n(\d+)$", str(self.node_id))
if m:
alias_macro = f"[[OUT{int(m.group(1))}]]"
except Exception:
to_path = f"OUT.{self.node_id}.response_text"
macro_braces = f"{{{{ OUT.{self.node_id}.response_text }}}}"
alias_macro = ""
await trace_fn({
"event": "rawforward_done",
"node_id": self.node_id,
"node_type": self.type_name,
"method": method,
"url": url,
"req_id": req_id,
"status": int(resp.status_code) if 'resp' in locals() else None,
"text_len": int(len(best_text or "")),
"extracted_text": best_text or "",
"strategy": strategy,
"json_path": json_path or "",
"join_sep": join_sep or "\n",
"to_path": to_path,
"to_macro_outx": alias_macro,
"to_macro_braces": macro_braces,
"ts": int(time.time() * 1000),
})
except Exception:
pass
return {"result": data, "response_text": best_text or ""}
class ReturnNode(Node):
type_name = "Return"
@@ -1655,7 +2208,23 @@ class ReturnNode(Node):
else:
# неизвестное значение — безопасный дефолт
result = fmt_openai(text)
# Подробный SSE-лог Return
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "return_detail",
"node_id": self.node_id,
"node_type": self.type_name,
"target": target,
"text_len": int(len(text or "")),
"template_used": str(template),
"ts": int(time.time() * 1000),
})
except Exception:
pass
return {"result": result, "response_text": text}
@@ -1683,6 +2252,21 @@ class IfNode(Node):
print(f"TRACE if: {self.node_id} expr={expr!r} expanded={expanded!r} result={str(res).lower()}")
except Exception:
pass
# Подробный SSE-лог по If
try:
trace_fn = context.get("_trace")
if trace_fn:
await trace_fn({
"event": "if_result",
"node_id": self.node_id,
"node_type": self.type_name,
"expr": expr,
"expanded": expanded,
"result": bool(res),
"ts": int(time.time() * 1000),
})
except Exception:
pass
return {"result": res, "true": res, "false": (not res)}

View File

@@ -1,14 +1,44 @@
from __future__ import annotations
import httpx
from typing import Optional, Dict
from agentui.config import build_httpx_proxies
from typing import Optional, Dict, Union
import os
from agentui.config import build_httpx_proxies, get_tls_verify, is_verify_explicit
def _mask_proxy(url: str) -> str:
"""Маскируем часть с логином/паролем в URL прокси, чтобы не утекла в логи."""
try:
if "://" in url and "@" in url:
prefix, rest = url.split("://", 1)
auth, host = rest.split("@", 1)
return f"{prefix}://***@{host}"
return url
except Exception:
return "<masked>"
def build_client(timeout: float = 60.0) -> httpx.AsyncClient:
    """Build a configured httpx.AsyncClient (proxies + TLS verify policy).

    - Proxies come from build_httpx_proxies() (env / proxy.txt).
    - TLS verify comes from get_tls_verify(); when a proxy is active and
      the user did NOT explicitly pick a verify policy or CA bundle,
      verification is relaxed to False so intercepting proxies (e.g.
      Burp) work out of the box.
    - With AGENTUI_DEBUG enabled, the effective settings are printed
      with the proxy credentials masked.

    Fix: the previous version constructed a throwaway AsyncClient
    (without verify=) and immediately replaced it with the configured
    one, leaking the first client; only one client is created now.
    """
    proxies: Optional[Dict[str, str]] = build_httpx_proxies()
    verify: Union[bool, str] = get_tls_verify()
    explicit = is_verify_explicit()
    # Default to insecure only when a proxy is set AND the user has not
    # explicitly configured verify/CA.
    if proxies and (verify is True) and (not explicit):
        verify = False
    if os.getenv("AGENTUI_DEBUG", "").lower() in ("1", "true", "on", "yes"):
        masked = {k: _mask_proxy(v) for k, v in (proxies or {}).items()}
        print("[agentui.http_client] proxies=", masked, " verify=", verify)
    # httpx understands socks:// and socks5:// schemes when the [socks]
    # extra is installed.
    return httpx.AsyncClient(
        timeout=timeout,
        proxies=proxies,
        follow_redirects=True,
        verify=verify,
    )