sync: UI animations, select styling, TLS verify flag via proxy second line, brand spacing
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -11,17 +11,93 @@ PRESETS_DIR = Path("presets")
|
||||
VARS_DIR = Path(".agentui") / "vars"
|
||||
|
||||
|
||||
# DRY нормализация meta/пайплайна: единый источник дефолтов и типов
|
||||
def normalize_pipeline(pipeline: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize top-level pipeline keys to a consistent shape, filling defaults.

    Safe against missing keys and wrong value types: any unusable value is
    replaced by its default instead of raising.

    Args:
        pipeline: Raw pipeline mapping (anything non-dict is treated as empty).

    Returns:
        A new dict (shallow copy of the input) with all known top-level keys
        coerced to their expected types and defaults filled in.
    """
    if not isinstance(pipeline, dict):
        pipeline = {}
    out: Dict[str, Any] = dict(pipeline)

    def _to_int(v, d):
        # Positive int or the default; non-numeric / non-positive fall back.
        try:
            n = int(v)
            return n if n > 0 else d
        except Exception:
            return d

    def _to_float(v, d):
        # Positive float or the default; non-numeric / non-positive fall back.
        try:
            n = float(v)
            return n if n > 0 else d
        except Exception:
            return d

    # Basic fields
    out["id"] = str(out.get("id") or "pipeline_editor")
    out["name"] = str(out.get("name") or "Edited Pipeline")
    out["parallel_limit"] = _to_int(out.get("parallel_limit"), 8)
    out["loop_mode"] = str(out.get("loop_mode") or "dag")
    out["loop_max_iters"] = _to_int(out.get("loop_max_iters"), 1000)
    out["loop_time_budget_ms"] = _to_int(out.get("loop_time_budget_ms"), 10000)
    out["clear_var_store"] = bool(out.get("clear_var_store", True))
    out["http_timeout_sec"] = _to_float(out.get("http_timeout_sec"), 60)

    # Global text-extraction options for [[OUTx]] macros
    out["text_extract_strategy"] = str(out.get("text_extract_strategy") or "auto")
    out["text_extract_json_path"] = str(out.get("text_extract_json_path") or "")
    # Accept alternative (case-insensitive) spellings of text_join_sep
    join_sep = out.get("text_join_sep")
    if join_sep is None:
        for k in list(out.keys()):
            if isinstance(k, str) and k.lower() == "text_join_sep":
                join_sep = out.get(k)
                break
    out["text_join_sep"] = str(join_sep or "\n")

    # Parsing presets: keep only dict entries, coercing each field
    presets = out.get("text_extract_presets")
    norm_presets: List[Dict[str, Any]] = []
    if isinstance(presets, list):
        for i, it in enumerate(presets):
            if not isinstance(it, dict):
                continue
            norm_presets.append({
                "id": str(it.get("id") or f"p{i}"),
                "name": str(it.get("name") or it.get("json_path") or "Preset"),
                "strategy": str(it.get("strategy") or "auto"),
                "json_path": str(it.get("json_path") or ""),
                "join_sep": str(it.get("join_sep") or "\n"),
            })
    out["text_extract_presets"] = norm_presets

    # Nodes must always be a list
    try:
        nodes = out.get("nodes") or []
        if not isinstance(nodes, list):
            nodes = []
        out["nodes"] = nodes
    except Exception:
        out["nodes"] = []

    return out
|
||||
|
||||
|
||||
def load_pipeline() -> Dict[str, Any]:
    """Load the pipeline JSON from disk, normalized; fall back to defaults.

    Read/parse errors are deliberately swallowed so the editor always starts
    with a usable pipeline; both the loaded and the default pipeline go
    through normalize_pipeline() so callers see a consistent shape.

    Returns:
        The normalized pipeline dict.
    """
    if PIPELINE_FILE.exists():
        try:
            data = json.loads(PIPELINE_FILE.read_text(encoding="utf-8"))
            return normalize_pipeline(data)
        except Exception:
            # Corrupt/unreadable file: fall through to the default pipeline.
            pass
    return normalize_pipeline(default_pipeline())
|
||||
|
||||
|
||||
def save_pipeline(pipeline: Dict[str, Any]) -> None:
    """Normalize *pipeline* and persist it as pretty-printed UTF-8 JSON.

    Normalizing before writing keeps the on-disk file in the canonical shape
    that load_pipeline() produces; a falsy argument saves the normalized
    empty pipeline (all defaults).
    """
    norm = normalize_pipeline(pipeline or {})
    PIPELINE_FILE.write_text(json.dumps(norm, ensure_ascii=False, indent=2), encoding="utf-8")
|
||||
|
||||
|
||||
def list_presets() -> List[str]:
|
||||
|
||||
@@ -35,6 +35,11 @@ _BARE_MACRO_RE = re.compile(r"\[\[\s*([A-Za-z_][A-Za-z0-9_]*(?:\.[^\]]+?)?)\s*\]
|
||||
# Разбираем выражение до ближайшего '}}', допускаем '}' внутри (например в JSON-литералах)
|
||||
_BRACES_RE = re.compile(r"\{\{\s*(.*?)\s*\}\}", re.DOTALL)
|
||||
|
||||
# Сокращённый синтаксис: img(mime?)[[...]] → data:<mime>;base64,<resolved_inner_macro>
|
||||
# Пример: img()[[OUT1]] → data:image/png;base64,{{resolved OUT1}}
|
||||
# img(jpeg)[[OUT:n1.result...]] → data:image/jpeg;base64,{{resolved}}
|
||||
_IMG_WRAPPER_RE = re.compile(r"(?is)img\(\s*([^)]+?)?\s*\)\s*\[\[\s*(.+?)\s*\]\]")
|
||||
|
||||
|
||||
def _split_path(path: str) -> List[str]:
|
||||
return [p.strip() for p in str(path).split(".") if str(p).strip()]
|
||||
@@ -164,12 +169,21 @@ def _best_text_from_outputs(node_out: Any) -> str:
|
||||
# Gemini
|
||||
try:
|
||||
if isinstance(base, dict):
|
||||
cand0 = (base.get("candidates") or [{}])[0]
|
||||
content = cand0.get("content") or {}
|
||||
parts0 = (content.get("parts") or [{}])[0]
|
||||
t = parts0.get("text")
|
||||
if isinstance(t, str):
|
||||
return t
|
||||
cands = base.get("candidates") or []
|
||||
texts: List[str] = []
|
||||
for cand in cands:
|
||||
try:
|
||||
content = cand.get("content") or {}
|
||||
parts = content.get("parts") or []
|
||||
for p in parts:
|
||||
if isinstance(p, dict):
|
||||
t = p.get("text")
|
||||
if isinstance(t, str) and t.strip():
|
||||
texts.append(t.strip())
|
||||
except Exception:
|
||||
continue
|
||||
if texts:
|
||||
return "\n".join(texts)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -203,6 +217,47 @@ def render_template_simple(template: str, context: Dict[str, Any], out_map: Dict
|
||||
return ""
|
||||
s = str(template)
|
||||
|
||||
# 0) Сокращённый синтаксис: img(mime?)[[...]] → data:<mime>;base64,<resolved>
|
||||
# Выполняем до развёртки обычных [[...]] макросов, чтобы внутри можно было использовать любой квадратный макрос.
|
||||
def _normalize_mime(m: str) -> str:
|
||||
mm = (m or "").strip().lower()
|
||||
if not mm:
|
||||
return "image/png"
|
||||
if "/" in mm:
|
||||
return mm
|
||||
return {
|
||||
"png": "image/png",
|
||||
"jpg": "image/jpeg",
|
||||
"jpeg": "image/jpeg",
|
||||
"webp": "image/webp",
|
||||
"gif": "image/gif",
|
||||
"svg": "image/svg+xml",
|
||||
"bmp": "image/bmp",
|
||||
"tif": "image/tiff",
|
||||
"tiff": "image/tiff",
|
||||
}.get(mm, mm)
|
||||
|
||||
def _repl_imgwrap(m: re.Match) -> str:
|
||||
mime_raw = m.group(1) or ""
|
||||
inner = m.group(2) or ""
|
||||
mime = _normalize_mime(mime_raw)
|
||||
try:
|
||||
val = _resolve_square_macro_value(inner, context, out_map)
|
||||
except Exception:
|
||||
val = ""
|
||||
if isinstance(val, (dict, list, bool)) or val is None:
|
||||
val = _stringify_for_template(val)
|
||||
else:
|
||||
val = str(val)
|
||||
return f"data:{mime};base64,{val}"
|
||||
|
||||
# Поддерживаем много вхождений — повторяем до исчерпания (на случай каскадных макросов)
|
||||
while True:
|
||||
ns, cnt = _IMG_WRAPPER_RE.subn(_repl_imgwrap, s)
|
||||
s = ns
|
||||
if cnt == 0:
|
||||
break
|
||||
|
||||
# 1) Макросы [[VAR:...]] / [[OUT:...]] / [[STORE:...]]
|
||||
def repl_var(m: re.Match) -> str:
|
||||
path = m.group(1).strip()
|
||||
@@ -539,8 +594,18 @@ def _tokenize_condition_expr(expr: str, context: Dict[str, Any], out_map: Dict[s
|
||||
while j < n and (expr[j].isalnum() or expr[j] in "._"):
|
||||
j += 1
|
||||
word = expr[i:j]
|
||||
# Логические в словах не поддерживаем (используйте &&, ||, !)
|
||||
tokens.append(word)
|
||||
# Поддержка «голых» идентификаторов из vars: cycleindex, WAS_ERROR и т.п.
|
||||
# Если это простой идентификатор (без точек) и он есть в context.vars — биндим его значением.
|
||||
try:
|
||||
vmap = context.get("vars") or {}
|
||||
except Exception:
|
||||
vmap = {}
|
||||
if re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", word) and isinstance(vmap, dict) and word in vmap:
|
||||
name = add_binding(vmap.get(word))
|
||||
tokens.append(name)
|
||||
else:
|
||||
# Логические в словах не поддерживаем (используйте &&, ||, !)
|
||||
tokens.append(word)
|
||||
i = j
|
||||
continue
|
||||
|
||||
|
||||
Reference in New Issue
Block a user