sync: UI animations, select styling, TLS verify flag via proxy second line, brand spacing

This commit is contained in:
2025-09-27 18:46:52 +03:00
parent 135c393eda
commit 2abfbb4b1a
52 changed files with 8029 additions and 1408 deletions

View File

@@ -1,10 +1,12 @@
from fastapi import FastAPI, Request, HTTPException, Query, Header
import logging
from logging.handlers import RotatingFileHandler
import json
from urllib.parse import urlsplit, urlunsplit, parse_qsl, urlencode, unquote
from fastapi.responses import JSONResponse, HTMLResponse, StreamingResponse
from fastapi.responses import JSONResponse, HTMLResponse, StreamingResponse, FileResponse
from fastapi.staticfiles import StaticFiles
import os
import hashlib
import time
from pydantic import BaseModel, Field
from typing import Any, Dict, List, Literal, Optional
from agentui.pipeline.executor import PipelineExecutor
@@ -12,6 +14,7 @@ from agentui.pipeline.defaults import default_pipeline
from agentui.pipeline.storage import load_pipeline, save_pipeline, list_presets, load_preset, save_preset, load_var_store
from agentui.common.vendors import detect_vendor
from agentui.common.cancel import request_cancel, clear_cancel, is_cancelled
from agentui.pipeline.templating import render_template_simple
class UnifiedParams(BaseModel):
@@ -175,35 +178,7 @@ def build_macro_context(u: UnifiedChatRequest, incoming: Optional[Dict[str, Any]
}
def jinja_render(template: str, ctx: Dict[str, Any]) -> str:
# To avoid pulling Jinja2 into the MVP: a simple {{ key.path }} replacement
def get_value(path: str, data: Dict[str, Any]) -> Any:
cur: Any = data
for part in path.split('.'):
if isinstance(cur, dict):
cur = cur.get(part, "")
else:
return ""
return cur if isinstance(cur, (str, int, float)) else ""
out = template
import re
for m in re.findall(r"\{\{\s*([^}]+)\s*\}\}", template):
expr = m.strip()
# support simple default filter: {{ path|default(value) }}
default_match = re.match(r"([^|]+)\|\s*default\((.*)\)", expr)
if default_match:
path = default_match.group(1).strip()
fallback = default_match.group(2).strip()
# strip quotes if present
if (fallback.startswith("\"") and fallback.endswith("\"")) or (fallback.startswith("'") and fallback.endswith("'")):
fallback = fallback[1:-1]
raw_val = get_value(path, ctx)
val = str(raw_val) if raw_val not in (None, "") else str(fallback)
else:
val = str(get_value(expr, ctx))
out = out.replace("{{ "+m+" }}", val).replace("{{"+m+"}}", val)
return out
# jinja_render removed (duplication). Use agentui.pipeline.templating.render_template_simple instead.
async def execute_pipeline_echo(u: UnifiedChatRequest) -> Dict[str, Any]:
@@ -211,7 +186,7 @@ async def execute_pipeline_echo(u: UnifiedChatRequest) -> Dict[str, Any]:
macro_ctx = build_macro_context(u)
# PromptTemplate
prompt_template = "System: {{ system }}\nUser: {{ chat.last_user }}"
rendered_prompt = jinja_render(prompt_template, macro_ctx)
rendered_prompt = render_template_simple(prompt_template, macro_ctx, {})
# LLMInvoke (echo, since the MVP has no real provider)
llm_response_text = f"[echo by {u.model}]\n" + rendered_prompt
# Append a human-readable trace of the pipeline execution to the echo (if available)
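# Worked example (hypothetical values): with u.model = "gpt-test", system prompt
# "Be brief" and last user message "Hi", the rendered prompt is
# "System: Be brief\nUser: Hi" and the echo becomes
# "[echo by gpt-test]\nSystem: Be brief\nUser: Hi".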
@@ -274,10 +249,7 @@ def create_app() -> FastAPI:
if not logger.handlers:
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
file_handler = RotatingFileHandler("agentui.log", maxBytes=1_000_000, backupCount=3, encoding="utf-8")
file_handler.setLevel(logging.INFO)
logger.addHandler(stream_handler)
logger.addHandler(file_handler)
# --- Simple in-process SSE hub (subscriptions per browser tab) ---
import asyncio as _asyncio
@@ -362,6 +334,77 @@ def create_app() -> FastAPI:
except Exception: # noqa: BLE001
pass
async def _run_pipeline_for_payload(request: Request, payload: Dict[str, Any], raw: Optional[bytes] = None) -> JSONResponse:
# Unified handler: log the incoming request, normalize it, run PipelineExecutor, fall back to echo, log the response
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
# Diagnostic INFO log to validate the refactoring
try:
logger.info(
"%s",
json.dumps(
{
"event": "unified_handler",
"vendor": unified.vendor_format,
"model": unified.model,
"pipeline_id": pipeline.get("id", "pipeline_editor"),
},
ensure_ascii=False,
),
)
except Exception:
pass
# Mark pipeline start for UI and measure total active time
t0 = time.perf_counter()
try:
await _trace_hub.publish({
"event": "pipeline_start",
"pipeline_id": pipeline.get("id", "pipeline_editor"),
"ts": int(time.time() * 1000),
})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
# Mark pipeline end for UI
t1 = time.perf_counter()
try:
await _trace_hub.publish({
"event": "pipeline_done",
"pipeline_id": pipeline.get("id", "pipeline_editor"),
"ts": int(time.time() * 1000),
"duration_ms": int((t1 - t0) * 1000),
})
except Exception:
pass
await _log_response(request, 200, result)
return JSONResponse(result)
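# Usage sketch (hypothetical host/port and model name): every vendor route below
# funnels into this handler, e.g.
#   httpx.post("http://127.0.0.1:8000/v1/messages",
#              json={"model": "demo", "messages": [{"role": "user", "content": "Hi"}]})
# While it runs, the SSE hub publishes pipeline_start and pipeline_done events,
# the latter carrying duration_ms measured via time.perf_counter().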
@app.get("/")
async def index() -> HTMLResponse:
html = (
@@ -383,33 +426,7 @@ def create_app() -> FastAPI:
payload = json.loads(raw or b"{}")
except Exception: # noqa: BLE001
raise HTTPException(status_code=400, detail="Invalid JSON")
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False # no streaming, per MVP requirements
# context for the pipeline
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
# Google AI Studio совместимые роуты (Gemini):
# POST /v1beta/models/{model}:generateContent?key=...
@@ -421,34 +438,10 @@ def create_app() -> FastAPI:
payload = json.loads(raw or b"{}")
except Exception: # noqa: BLE001
raise HTTPException(status_code=400, detail="Invalid JSON")
# Make sure the model is present in the payload
if not isinstance(payload, dict):
raise HTTPException(status_code=400, detail="Invalid payload type")
payload = {**payload, "model": model}
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
@app.post("/v1/models/{model}:generateContent")
async def gemini_generate_content_v1(model: str, request: Request, key: Optional[str] = Query(default=None)) -> JSONResponse: # noqa: ARG001
@@ -460,30 +453,7 @@ def create_app() -> FastAPI:
if not isinstance(payload, dict):
raise HTTPException(status_code=400, detail="Invalid payload type")
payload = {**payload, "model": model}
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
# Catch-all for cases where the colon in the path is encoded as %3A
@app.post("/v1beta/models/{rest_of_path:path}")
@@ -500,30 +470,7 @@ def create_app() -> FastAPI:
if not isinstance(payload, dict):
raise HTTPException(status_code=400, detail="Invalid payload type")
payload = {**payload, "model": model}
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
@app.post("/v1/models/{rest_of_path:path}")
async def gemini_generate_content_v1_catchall(rest_of_path: str, request: Request, key: Optional[str] = Query(default=None)) -> JSONResponse: # noqa: ARG001
@@ -539,30 +486,7 @@ def create_app() -> FastAPI:
if not isinstance(payload, dict):
raise HTTPException(status_code=400, detail="Invalid payload type")
payload = {**payload, "model": model}
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
# Anthropic Claude messages endpoint compatibility
@app.post("/v1/messages")
@@ -574,37 +498,114 @@ def create_app() -> FastAPI:
raise HTTPException(status_code=400, detail="Invalid JSON")
if not isinstance(payload, dict):
raise HTTPException(status_code=400, detail="Invalid payload type")
# Mark as Anthropic; pass the version from the header into the payload for detection
if anthropic_version:
payload = {**payload, "anthropic_version": anthropic_version}
else:
payload = {**payload, "anthropic_version": payload.get("anthropic_version", "2023-06-01")}
await _log_request(request, raw_body=raw, parsed=payload)
unified = normalize_to_unified(payload)
unified.stream = False
incoming = {
"method": request.method,
"url": _sanitize_url(str(request.url)),
"path": request.url.path,
"query": request.url.query,
"headers": dict(request.headers),
"json": payload,
}
macro_ctx = build_macro_context(unified, incoming=incoming)
pipeline = load_pipeline()
executor = PipelineExecutor(pipeline)
async def _trace(evt: Dict[str, Any]) -> None:
try:
base = {"pipeline_id": pipeline.get("id", "pipeline_editor")}
await _trace_hub.publish({**base, **evt})
except Exception:
pass
last = await executor.run(macro_ctx, trace=_trace)
result = last.get("result") or await execute_pipeline_echo(unified)
await _log_response(request, 200, result)
return JSONResponse(result)
return await _run_pipeline_for_payload(request, payload, raw)
app.mount("/ui", StaticFiles(directory="static", html=True), name="ui")
# NOTE: endpoints must not be declared under /ui/* after StaticFiles(/ui) is mounted,
# because the mount intercepts every path under /ui. A separate /ui_version path is used instead.
@app.get("/ui_version")
async def ui_version() -> JSONResponse:
try:
static_dir = os.path.abspath("static")
editor_path = os.path.join(static_dir, "editor.html")
js_ser_path = os.path.join(static_dir, "js", "serialization.js")
js_pm_path = os.path.join(static_dir, "js", "pm-ui.js")
def md5p(p: str):
try:
with open(p, "rb") as f:
return hashlib.md5(f.read()).hexdigest()
except Exception:
return None
payload = {
"cwd": os.path.abspath("."),
"static_dir": static_dir,
"files": {
"editor.html": md5p(editor_path),
"js/serialization.js": md5p(js_ser_path),
"js/pm-ui.js": md5p(js_pm_path),
},
"ts": int(time.time()),
}
return JSONResponse(payload, headers={"Cache-Control": "no-store"})
except Exception as e:
return JSONResponse({"error": str(e)}, status_code=500, headers={"Cache-Control": "no-store"})
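# Quick check (assuming the default local port): the md5 of each tracked static
# file (None for unreadable files) confirms the browser is served the same
# editor.html / js builds as the ones on disk:
#   httpx.get("http://127.0.0.1:8000/ui_version").json()["files"]["editor.html"]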
# --- Favicon and PWA icons at root -----------------------------------------
FAV_DIR = "favicon_io_saya"
@app.get("/favicon.ico")
async def _favicon_ico():
p = f"{FAV_DIR}/favicon.ico"
try:
return FileResponse(p, media_type="image/x-icon")
except Exception:
raise HTTPException(status_code=404, detail="favicon not found")
@app.get("/apple-touch-icon.png")
async def _apple_touch_icon():
p = f"{FAV_DIR}/apple-touch-icon.png"
try:
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="apple-touch-icon not found")
@app.get("/favicon-32x32.png")
async def _favicon_32():
p = f"{FAV_DIR}/favicon-32x32.png"
try:
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="favicon-32x32 not found")
@app.get("/favicon-16x16.png")
async def _favicon_16():
p = f"{FAV_DIR}/favicon-16x16.png"
try:
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="favicon-16x16 not found")
@app.get("/android-chrome-192x192.png")
async def _android_192():
p = f"{FAV_DIR}/android-chrome-192x192.png"
try:
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="android-chrome-192x192 not found")
@app.get("/android-chrome-512x512.png")
async def _android_512():
p = f"{FAV_DIR}/android-chrome-512x512.png"
try:
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="android-chrome-512x512 not found")
@app.get("/site.webmanifest")
async def _site_manifest():
p = f"{FAV_DIR}/site.webmanifest"
try:
return FileResponse(p, media_type="application/manifest+json")
except Exception:
raise HTTPException(status_code=404, detail="site.webmanifest not found")
# Custom APNG favicon for "busy" state in UI
@app.get("/saya1.png")
async def _apng_busy_icon():
p = f"{FAV_DIR}/saya1.png"
try:
# APNG served as image/png is acceptable for browsers
return FileResponse(p, media_type="image/png")
except Exception:
raise HTTPException(status_code=404, detail="saya1.png not found")
# Variable store API (per-pipeline)
@app.get("/admin/vars")
async def get_vars() -> JSONResponse:
@@ -640,7 +641,37 @@ def create_app() -> FastAPI:
# Admin API for the pipeline
@app.get("/admin/pipeline")
async def get_pipeline() -> JSONResponse:
return JSONResponse(load_pipeline())
p = load_pipeline()
# Diagnostic log of the meta key composition (to confirm the DRY refactoring)
try:
meta_keys = [
"id","name","parallel_limit","loop_mode","loop_max_iters","loop_time_budget_ms","clear_var_store",
"http_timeout_sec","text_extract_strategy","text_extract_json_path","text_join_sep","text_extract_presets"
]
present = [k for k in meta_keys if k in p]
meta_preview = {k: p.get(k) for k in present if k != "text_extract_presets"}
presets_count = 0
try:
presets = p.get("text_extract_presets")
if isinstance(presets, list):
presets_count = len(presets)
except Exception:
presets_count = 0
logger.info(
"%s",
json.dumps(
{
"event": "admin_get_pipeline_meta",
"keys": present,
"presets_count": presets_count,
"meta_preview": meta_preview,
},
ensure_ascii=False,
),
)
except Exception:
pass
return JSONResponse(p)
@app.post("/admin/pipeline")
async def set_pipeline(request: Request) -> JSONResponse:
@@ -652,6 +683,37 @@ def create_app() -> FastAPI:
# basic validation
if not isinstance(pipeline, dict) or "nodes" not in pipeline:
raise HTTPException(status_code=400, detail="Invalid pipeline format")
# Diagnostic log of incoming meta keys before saving
try:
meta_keys = [
"id","name","parallel_limit","loop_mode","loop_max_iters","loop_time_budget_ms","clear_var_store",
"http_timeout_sec","text_extract_strategy","text_extract_json_path","text_join_sep","text_extract_presets"
]
present = [k for k in meta_keys if k in pipeline]
meta_preview = {k: pipeline.get(k) for k in present if k != "text_extract_presets"}
presets_count = 0
try:
presets = pipeline.get("text_extract_presets")
if isinstance(presets, list):
presets_count = len(presets)
except Exception:
presets_count = 0
logger.info(
"%s",
json.dumps(
{
"event": "admin_set_pipeline_meta",
"keys": present,
"presets_count": presets_count,
"meta_preview": meta_preview,
},
ensure_ascii=False,
),
)
except Exception:
pass
save_pipeline(pipeline)
return JSONResponse({"ok": True})
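A minimal round-trip sketch against these admin routes (local host/port assumed; the pipeline body is illustrative):

import httpx

pipeline = {"id": "pipeline_editor", "name": "Demo", "nodes": []}
httpx.post("http://127.0.0.1:8000/admin/pipeline", json=pipeline)
# normalize_pipeline fills the remaining meta keys with defaults on save,
# so the read-back includes parallel_limit, http_timeout_sec, etc.
meta = httpx.get("http://127.0.0.1:8000/admin/pipeline").json()
print(meta["parallel_limit"], meta["http_timeout_sec"])  # 8 60 (defaults)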

View File

@@ -86,6 +86,41 @@ def _read_kv_from_proxy_file() -> Dict[str, str]:
return out
return out
def _read_second_bare_flag_from_proxy() -> Optional[bool]:
"""
Читает «вторую голую строку» после URL в proxy.txt и интерпретирует как флаг verify:
true/1/yes/on -> True
false/0/no/off -> False
Возвращает None, если строка отсутствует или не распознана.
"""
try:
p = Path("proxy.txt")
if not p.exists():
return None
lines = [ln.strip() for ln in p.read_text(encoding="utf-8").splitlines()]
# find the first "URL" line (no '=', not empty, not a comment)
idx_url = -1
for i, ln in enumerate(lines):
if not ln or ln.startswith("#") or "=" in ln:
continue
idx_url = i
break
if idx_url >= 0:
# look for the next bare line
for j in range(idx_url + 1, len(lines)):
ln = lines[j].strip()
if not ln or ln.startswith("#") or "=" in ln:
continue
low = ln.lower()
if low in ("1", "true", "yes", "on"):
return True
if low in ("0", "false", "no", "off"):
return False
# if it does not look like a flag, treat it as absent
break
except Exception:
return None
return None
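# Example proxy.txt (values hypothetical): the first bare line is the proxy URL,
# the second bare line is the verify flag; key=value lines are parsed separately
# by _read_kv_from_proxy_file:
#   socks5://127.0.0.1:1080
#   false
# With this file, get_tls_verify() returns False (TLS verification disabled).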
def get_tls_verify() -> Union[bool, str]:
"""
Returns the value for the httpx.AsyncClient(verify=...) parameter:
@@ -119,31 +154,11 @@ def get_tls_verify() -> Union[bool, str]:
if path.exists():
return str(path)
# 2.1) Additionally: support a second keyless line with true/false
try:
p = Path("proxy.txt")
if p.exists():
lines = [ln.strip() for ln in p.read_text(encoding="utf-8").splitlines()]
# find the first "URL" line (no '=', not empty, not a comment)
idx_url = -1
for i, ln in enumerate(lines):
if not ln or ln.startswith("#") or "=" in ln:
continue
idx_url = i
break
if idx_url >= 0:
# look for the next bare line
for j in range(idx_url + 1, len(lines)):
ln = lines[j].strip()
if not ln or ln.startswith("#") or "=" in ln:
continue
low = ln.lower()
if low in ("1", "true", "yes", "on"):
return True
if low in ("0", "false", "no", "off"):
return False
# if it does not look like a flag, ignore it and continue
except Exception:
pass
second = _read_second_bare_flag_from_proxy()
if second is True:
return True
if second is False:
return False
# 3) Default file in the project root
default_ca = Path("proxy-ca.pem")
@@ -173,26 +188,9 @@ def is_verify_explicit() -> bool:
if "verify" in kv or "ca" in kv:
return True
# The second bare line counts as an explicit flag
try:
p = Path("proxy.txt")
if p.exists():
lines = [ln.strip() for ln in p.read_text(encoding="utf-8").splitlines()]
idx_url = -1
for i, ln in enumerate(lines):
if not ln or ln.startswith("#") or "=" in ln:
continue
idx_url = i
break
if idx_url >= 0:
for j in range(idx_url + 1, len(lines)):
ln = lines[j].strip()
if not ln or ln.startswith("#") or "=" in ln:
continue
if ln.lower() in ("1", "0", "true", "false", "yes", "no", "on", "off"):
return True
break
except Exception:
pass
second = _read_second_bare_flag_from_proxy()
if second is not None:
return True
if Path("proxy-ca.pem").exists():
return True

File diff suppressed because it is too large

View File

@@ -11,17 +11,93 @@ PRESETS_DIR = Path("presets")
VARS_DIR = Path(".agentui") / "vars"
# DRY normalization of the pipeline meta: a single source of defaults and types
def normalize_pipeline(pipeline: Dict[str, Any]) -> Dict[str, Any]:
"""
Normalizes the pipeline's top-level keys to a consistent shape and fills in defaults.
Safe against missing keys and invalid types.
"""
if not isinstance(pipeline, dict):
pipeline = {}
out: Dict[str, Any] = dict(pipeline)
def _to_int(v, d):
try:
n = int(v)
return n if n > 0 else d
except Exception:
return d
def _to_float(v, d):
try:
n = float(v)
return n if n > 0 else d
except Exception:
return d
# Base fields
out["id"] = str(out.get("id") or "pipeline_editor")
out["name"] = str(out.get("name") or "Edited Pipeline")
out["parallel_limit"] = _to_int(out.get("parallel_limit"), 8)
out["loop_mode"] = str(out.get("loop_mode") or "dag")
out["loop_max_iters"] = _to_int(out.get("loop_max_iters"), 1000)
out["loop_time_budget_ms"] = _to_int(out.get("loop_time_budget_ms"), 10000)
out["clear_var_store"] = bool(out.get("clear_var_store", True))
out["http_timeout_sec"] = _to_float(out.get("http_timeout_sec"), 60)
# Global text extraction options for [[OUTx]]
out["text_extract_strategy"] = str(out.get("text_extract_strategy") or "auto")
out["text_extract_json_path"] = str(out.get("text_extract_json_path") or "")
# Support alternative casings of text_join_sep
join_sep = out.get("text_join_sep")
if join_sep is None:
for k in list(out.keys()):
if isinstance(k, str) and k.lower() == "text_join_sep":
join_sep = out.get(k)
break
out["text_join_sep"] = str(join_sep or "\n")
# Parsing presets
presets = out.get("text_extract_presets")
norm_presets: List[Dict[str, Any]] = []
if isinstance(presets, list):
for i, it in enumerate(presets):
if not isinstance(it, dict):
continue
norm_presets.append({
"id": str(it.get("id") or f"p{i}"),
"name": str(it.get("name") or it.get("json_path") or "Preset"),
"strategy": str(it.get("strategy") or "auto"),
"json_path": str(it.get("json_path") or ""),
"join_sep": str(it.get("join_sep") or "\n"),
})
out["text_extract_presets"] = norm_presets
# Nodes must be a list
try:
nodes = out.get("nodes") or []
if not isinstance(nodes, list):
nodes = []
out["nodes"] = nodes
except Exception:
out["nodes"] = []
return out
def load_pipeline() -> Dict[str, Any]:
if PIPELINE_FILE.exists():
try:
return json.loads(PIPELINE_FILE.read_text(encoding="utf-8"))
except Exception:
pass
return default_pipeline()
if PIPELINE_FILE.exists():
try:
data = json.loads(PIPELINE_FILE.read_text(encoding="utf-8"))
return normalize_pipeline(data)
except Exception:
pass
return normalize_pipeline(default_pipeline())
def save_pipeline(pipeline: Dict[str, Any]) -> None:
PIPELINE_FILE.write_text(json.dumps(pipeline, ensure_ascii=False, indent=2), encoding="utf-8")
norm = normalize_pipeline(pipeline or {})
PIPELINE_FILE.write_text(json.dumps(norm, ensure_ascii=False, indent=2), encoding="utf-8")
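# Round-trip sketch (illustrative values): normalize_pipeline runs on both save
# and load, so malformed input is coerced before it reaches disk:
#   p = normalize_pipeline({"nodes": "oops", "parallel_limit": "-3"})
#   p["parallel_limit"]  # -> 8 (non-positive values fall back to the default)
#   p["nodes"]           # -> [] (non-list nodes are replaced with an empty list)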
def list_presets() -> List[str]:

View File

@@ -35,6 +35,11 @@ _BARE_MACRO_RE = re.compile(r"\[\[\s*([A-Za-z_][A-Za-z0-9_]*(?:\.[^\]]+?)?)\s*\]
# Parse the expression up to the nearest '}}', allowing '}' inside (e.g. in JSON literals)
_BRACES_RE = re.compile(r"\{\{\s*(.*?)\s*\}\}", re.DOTALL)
# Shorthand syntax: img(mime?)[[...]] → data:<mime>;base64,<resolved_inner_macro>
# Example: img()[[OUT1]] → data:image/png;base64,{{resolved OUT1}}
# img(jpeg)[[OUT:n1.result...]] → data:image/jpeg;base64,{{resolved}}
_IMG_WRAPPER_RE = re.compile(r"(?is)img\(\s*([^)]+?)?\s*\)\s*\[\[\s*(.+?)\s*\]\]")
def _split_path(path: str) -> List[str]:
return [p.strip() for p in str(path).split(".") if str(p).strip()]
@@ -164,12 +169,21 @@ def _best_text_from_outputs(node_out: Any) -> str:
# Gemini
try:
if isinstance(base, dict):
cand0 = (base.get("candidates") or [{}])[0]
content = cand0.get("content") or {}
parts0 = (content.get("parts") or [{}])[0]
t = parts0.get("text")
if isinstance(t, str):
return t
cands = base.get("candidates") or []
texts: List[str] = []
for cand in cands:
try:
content = cand.get("content") or {}
parts = content.get("parts") or []
for p in parts:
if isinstance(p, dict):
t = p.get("text")
if isinstance(t, str) and t.strip():
texts.append(t.strip())
except Exception:
continue
if texts:
return "\n".join(texts)
except Exception:
pass
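# Illustration of the broadened extraction (response shape hypothetical):
#   {"candidates": [
#       {"content": {"parts": [{"text": "Hello"}]}},
#       {"content": {"parts": [{"text": "World"}]}},
#   ]}
# now yields "Hello\nWorld"; previously only the first part of the first
# candidate ("Hello") was returned.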
@@ -203,6 +217,47 @@ def render_template_simple(template: str, context: Dict[str, Any], out_map: Dict
return ""
s = str(template)
# 0) Shorthand syntax: img(mime?)[[...]] → data:<mime>;base64,<resolved>
# Runs before the expansion of regular [[...]] macros, so any square-bracket macro can be used inside.
def _normalize_mime(m: str) -> str:
mm = (m or "").strip().lower()
if not mm:
return "image/png"
if "/" in mm:
return mm
return {
"png": "image/png",
"jpg": "image/jpeg",
"jpeg": "image/jpeg",
"webp": "image/webp",
"gif": "image/gif",
"svg": "image/svg+xml",
"bmp": "image/bmp",
"tif": "image/tiff",
"tiff": "image/tiff",
}.get(mm, mm)
def _repl_imgwrap(m: re.Match) -> str:
mime_raw = m.group(1) or ""
inner = m.group(2) or ""
mime = _normalize_mime(mime_raw)
try:
val = _resolve_square_macro_value(inner, context, out_map)
except Exception:
val = ""
if isinstance(val, (dict, list, bool)) or val is None:
val = _stringify_for_template(val)
else:
val = str(val)
return f"data:{mime};base64,{val}"
# Handle multiple occurrences: repeat until none remain (to cover cascading macros)
while True:
ns, cnt = _IMG_WRAPPER_RE.subn(_repl_imgwrap, s)
s = ns
if cnt == 0:
break
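# Worked example of step 0 (macro value hypothetical): "img(jpeg)[[OUT:n1.result]]"
# with the inner macro resolving to "QUJDRA==" renders as
# "data:image/jpeg;base64,QUJDRA=="; a bare img()[[...]] defaults to image/png,
# and short names like "webp" expand via the table in _normalize_mime.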
# 1) Macros [[VAR:...]] / [[OUT:...]] / [[STORE:...]]
def repl_var(m: re.Match) -> str:
path = m.group(1).strip()
@@ -539,8 +594,18 @@ def _tokenize_condition_expr(expr: str, context: Dict[str, Any], out_map: Dict[s
while j < n and (expr[j].isalnum() or expr[j] in "._"):
j += 1
word = expr[i:j]
# Word-form logical operators are not supported (use &&, ||, !)
tokens.append(word)
# Support bare identifiers from vars: cycleindex, WAS_ERROR, and the like.
# If the token is a plain identifier (no dots) and present in context.vars, bind it to its value.
try:
vmap = context.get("vars") or {}
except Exception:
vmap = {}
if re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", word) and isinstance(vmap, dict) and word in vmap:
name = add_binding(vmap.get(word))
tokens.append(name)
else:
# Word-form logical operators are not supported (use &&, ||, !)
tokens.append(word)
i = j
continue
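# Illustration (vars hypothetical): with context["vars"] = {"cycleindex": 2}, the
# condition "cycleindex < 3" now binds cycleindex to its value (2) instead of
# leaving a bare word; identifiers absent from vars keep the previous behaviour.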

View File

@@ -33,12 +33,25 @@ def build_client(timeout: float = 60.0) -> httpx.AsyncClient:
print("[agentui.http_client] proxies=", masked, " verify=", verify)
# httpx natively understands socks:// and socks5:// schemes when the [socks] extra is installed
client = httpx.AsyncClient(
timeout=timeout,
proxies=proxies,
follow_redirects=True,
verify=verify,
)
try:
client = httpx.AsyncClient(
timeout=timeout,
proxies=proxies,
follow_redirects=True,
verify=verify,
)
except TypeError:
if proxies:
try:
masked = {k: _mask_proxy(v) for k, v in proxies.items()}
except Exception:
masked = proxies
print(f"[agentui.http_client] WARNING: proxies not supported in httpx.AsyncClient, skipping proxies={masked}")
client = httpx.AsyncClient(
timeout=timeout,
follow_redirects=True,
verify=verify,
)
return client
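A usage sketch, assuming the module is importable as agentui.http_client (inferred from the log prefix above) and using a placeholder URL:

import asyncio
from agentui.http_client import build_client  # import path inferred from the log prefix

async def main() -> None:
    # On httpx releases without the proxies= kwarg, build_client falls back to a
    # proxy-less client, so this works (minus proxying) on both API generations.
    client = build_client(timeout=30.0)
    try:
        resp = await client.get("https://example.com/")  # placeholder URL
        print(resp.status_code)
    finally:
        await client.aclose()

asyncio.run(main())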