sync: UI animations, select styling, TLS verify flag via proxy second line, brand spacing

2025-09-27 18:46:52 +03:00
parent 135c393eda
commit 2abfbb4b1a
52 changed files with 8029 additions and 1408 deletions

tests/__init__.py Normal file

@@ -0,0 +1 @@
# Make tests a package so imports like "from tests.utils import ..." work.


@@ -1,25 +1,9 @@
 import asyncio
 import json
 from agentui.pipeline.executor import PipelineExecutor, ExecutionError, Node, NODE_REGISTRY
+from tests.utils import pp as _pp, base_ctx as _base_ctx
-# Helper to pretty print short JSON safely
-def _pp(obj, max_len=800):
-    try:
-        s = json.dumps(obj, ensure_ascii=False, indent=2)
-    except Exception:
-        s = str(obj)
-    if len(s) > max_len:
-        return s[:max_len] + "...<truncated>"
-    return s
-def _base_ctx(vendor="openai"):
-    return {
-        "model": "gpt-x",
-        "vendor_format": vendor,
-        "params": {"temperature": 0.1},
-        "chat": {"last_user": "hi"},
-        "OUT": {},
-    }
 async def scenario_if_single_quotes_ok():
     print("\n=== SCENARIO 1: If with single quotes ===")


@@ -1,33 +1,8 @@
 import asyncio
 import json
 from agentui.pipeline.executor import PipelineExecutor
 from agentui.pipeline.storage import clear_var_store
+from tests.utils import pp as _pp, ctx as _ctx
-def _pp(obj, max_len=800):
-    try:
-        s = json.dumps(obj, ensure_ascii=False, indent=2)
-    except Exception:
-        s = str(obj)
-    if len(s) > max_len:
-        return s[:max_len] + "...<truncated>"
-    return s
-def _ctx(vendor="openai", incoming=None, params=None):
-    return {
-        "model": "gpt-x",
-        "vendor_format": vendor,
-        "params": params or {"temperature": 0.25},
-        "chat": {"last_user": "Привет"},
-        "OUT": {},
-        "incoming": incoming or {
-            "method": "POST",
-            "url": "http://localhost/test",
-            "path": "/test",
-            "query": "",
-            "headers": {"x": "X-HEADER"},
-            "json": {},
-        },
-    }
 async def scenario_bare_vars_and_braces():
     print("\n=== MACROS 1: Bare [[NAME]] and {{ NAME }} + unquoted numbers/objects ===")
@@ -63,6 +38,7 @@ async def scenario_bare_vars_and_braces():
out = await PipelineExecutor(p).run(_ctx())
print("OUT:", _pp(out))
async def scenario_var_path_and_defaults():
print("\n=== MACROS 2: [[VAR:path]] и {{ ...|default(...) }} (вложенные и JSON-литералы) ===")
incoming = {
@@ -101,6 +77,7 @@ async def scenario_var_path_and_defaults():
out = await PipelineExecutor(p).run(_ctx(incoming=incoming, params={"temperature": 0.2}))
print("OUT:", _pp(out))
async def scenario_out_macros_full_and_short():
print("\n=== MACROS 3: [[OUT:nX...]] и короткая форма [[OUTx]] ===")
p = {
@@ -142,6 +119,7 @@ async def scenario_out_macros_full_and_short():
out = await PipelineExecutor(p).run(_ctx())
print("OUT:", _pp(out))
async def scenario_store_macros_two_runs():
print("\n=== MACROS 4: [[STORE:key]] и {{ STORE.key }} между запусками (clear_var_store=False) ===")
pid = "p_macros_4_store"
@@ -198,6 +176,7 @@ async def scenario_store_macros_two_runs():
out2 = await PipelineExecutor(p2).run(_ctx())
print("RUN2:", _pp(out2))
async def scenario_pm_prompt_blocks_to_provider_structs():
print("\n=== MACROS 5: Prompt Blocks ([[PROMPT]]) → provider-structures (OpenAI) ===")
# Проверяем, что [[PROMPT]] со списком блоков превращается в "messages":[...]
@@ -232,6 +211,7 @@ async def scenario_pm_prompt_blocks_to_provider_structs():
out = await PipelineExecutor(p).run(_ctx())
print("OUT:", _pp(out))
def run_all():
async def main():
await scenario_bare_vars_and_braces()
@@ -242,5 +222,6 @@ def run_all():
print("\n=== MACROS VARS SUITE: DONE ===")
asyncio.run(main())
if __name__ == "__main__":
run_all()
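
The suite above exercises the macro forms ([[NAME]], [[VAR:path]], [[OUT:nX...]], [[STORE:key]] and their {{ ... }} twins). As a reading aid, here is a minimal sketch of how a [[VAR:path]]-style dotted lookup over the run context could behave; resolve_var is illustrative only, not the executor's actual implementation:

from typing import Any

# Hypothetical helper: walk a dotted path such as "incoming.json.messages.0.role"
# through nested dicts/lists, returning a default when any step is missing.
def resolve_var(ctx: dict, path: str, default: Any = None) -> Any:
    cur: Any = ctx
    for part in path.split("."):
        if isinstance(cur, dict) and part in cur:
            cur = cur[part]
        elif isinstance(cur, list) and part.isdigit() and int(part) < len(cur):
            cur = cur[int(part)]
        else:
            return default
    return cur

demo_ctx = {"incoming": {"json": {"messages": [{"role": "user", "content": "hi"}]}}}
assert resolve_var(demo_ctx, "incoming.json.messages.0.role") == "user"
assert resolve_var(demo_ctx, "incoming.json.missing", default="n/a") == "n/a"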


@@ -0,0 +1,249 @@
import asyncio
import json
from typing import Any, Dict, List
from agentui.pipeline.executor import PipelineExecutor
import agentui.providers.http_client as hc
from tests.utils import ctx as _ctx, pp as _pp
# Captures all outbound ProviderCall HTTP requests (one request per pipeline run)
CAPTURED: List[Dict[str, Any]] = []
class DummyResponse:
def __init__(self, status_code: int = 200, body: Dict[str, Any] | None = None):
self.status_code = status_code
self._json = body if body is not None else {"ok": True}
self.headers = {}
try:
self.content = json.dumps(self._json, ensure_ascii=False).encode("utf-8")
except Exception:
self.content = b"{}"
try:
self.text = json.dumps(self._json, ensure_ascii=False)
except Exception:
self.text = "{}"
def json(self) -> Any:
return self._json
class DummyClient:
def __init__(self, capture: List[Dict[str, Any]], status_code: int = 200):
self._capture = capture
self._status = status_code
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
return False
async def post(self, url: str, content: bytes, headers: Dict[str, str]):
try:
payload = json.loads(content.decode("utf-8"))
except Exception:
payload = {"_raw": content.decode("utf-8", errors="ignore")}
rec = {"url": url, "headers": headers, "payload": payload}
self._capture.append(rec)
        # Echo the payload back so the extractor has something to parse, without tying the response to any vendor format
return DummyResponse(self._status, {"echo": rec})
    # RawForward may use .request; delegate to post() so those calls are captured too
async def request(self, method: str, url: str, headers: Dict[str, str], content: bytes | None):
return await self.post(url, content or b"{}", headers)
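# Usage sketch (illustrative only, not part of the commit): the dummy client
# records each POST into its capture list and echoes the payload back.
async def _demo_dummy_client() -> None:
    captured: List[Dict[str, Any]] = []
    async with DummyClient(captured) as client:
        resp = await client.post("http://mock.local/v1/demo", b'{"a": 1}', {})
    assert captured[-1]["payload"] == {"a": 1}
    assert resp.json()["echo"]["url"] == "http://mock.local/v1/demo"
# e.g. asyncio.run(_demo_dummy_client())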
def _patch_http_client():
"""Monkeypatch build_client used by ProviderCall to our dummy."""
hc.build_client = lambda timeout=60.0: DummyClient(CAPTURED, 200) # type: ignore[assignment]
    # Also patch the symbol imported into executor so ProviderCall uses the DummyClient
import agentui.pipeline.executor as ex # type: ignore
ex.build_client = lambda timeout=60.0: DummyClient(CAPTURED, 200) # type: ignore
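# Note (added for clarity): executor does "from ... import build_client", which
# copies the function reference into its own namespace at import time, so
# rebinding hc.build_client alone would leave executor's copy pointing at the
# real client; that is why both names are patched above.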
def _mk_pipeline(provider: str, prompt_combine: str) -> Dict[str, Any]:
"""Build a minimal ProviderCall-only pipeline for a given provider and combine spec."""
provider = provider.lower().strip()
if provider not in {"openai", "gemini", "claude"}:
raise AssertionError(f"Unsupported provider in test: {provider}")
base_url = "http://mock.local"
if provider == "openai":
endpoint = "/v1/chat/completions"
template = '{ "model": "{{ model }}", [[PROMPT]] }'
elif provider == "gemini":
endpoint = "/v1beta/models/{{ model }}:generateContent"
template = '{ "model": "{{ model }}", [[PROMPT]] }'
else: # claude
endpoint = "/v1/messages"
template = '{ "model": "{{ model }}", [[PROMPT]] }'
p = {
"id": f"p_prompt_combine_{provider}",
"name": f"prompt_combine to {provider}",
"loop_mode": "dag",
"nodes": [
{
"id": "n1",
"type": "ProviderCall",
"config": {
"provider": provider,
"provider_configs": {
provider: {
"base_url": base_url,
"endpoint": endpoint,
"headers": "{}",
"template": template,
}
},
# Key under test:
"prompt_combine": prompt_combine,
# Prompt Blocks (PROMPT)
"blocks": [
{"id": "b1", "name": "sys", "role": "system", "prompt": "Ты — Narrator-chan.", "enabled": True, "order": 0},
{"id": "b2", "name": "user", "role": "user", "prompt": "как лела", "enabled": True, "order": 1},
],
},
"in": {},
}
],
}
return p
def _ctx_with_incoming(incoming_json: Dict[str, Any], vendor: str = "openai") -> Dict[str, Any]:
base = _ctx(vendor=vendor)
inc = dict(base["incoming"])
inc["json"] = incoming_json
base["incoming"] = inc
return base
async def scenario_openai_target_from_gemini_contents():
print("\n=== PROMPT_COMBINE 1: target=openai, incoming=gemini.contents & PROMPT ===")
_patch_http_client()
CAPTURED.clear()
# Incoming JSON in Gemini shape
incoming_json = {
"contents": [
{"role": "user", "parts": [{"text": "Прив"}]},
{"role": "model", "parts": [{"text": "И тебе привет!"}]},
]
}
p = _mk_pipeline("openai", "[[VAR:incoming.json.contents]] & [[PROMPT]]")
out = await PipelineExecutor(p).run(_ctx_with_incoming(incoming_json, vendor="gemini"))
print("PIPE OUT:", _pp(out))
assert CAPTURED, "No HTTP request captured"
req = CAPTURED[-1]
payload = req["payload"]
# Validate OpenAI body
assert "messages" in payload, "OpenAI payload must contain messages"
msgs = payload["messages"]
# Expected: 2 (converted Gemini) + 2 (PROMPT blocks system+user) = 4
assert isinstance(msgs, list) and len(msgs) == 4
roles = [m.get("role") for m in msgs]
# Gemini model -> OpenAI assistant
assert "assistant" in roles and "user" in roles
    # PROMPT system+user are present (without @pos the system message may not be first; we only check existence)
assert any(m.get("role") == "system" for m in msgs), "System message from PROMPT must be present"
async def scenario_gemini_target_from_openai_messages():
print("\n=== PROMPT_COMBINE 2: target=gemini, incoming=openai.messages & PROMPT ===")
_patch_http_client()
CAPTURED.clear()
incoming_json = {
"messages": [
{"role": "system", "content": "Системный-тест из входящего"},
{"role": "user", "content": "Its just me.."},
{"role": "assistant", "content": "Reply from model"},
]
}
p = _mk_pipeline("gemini", "[[VAR:incoming.json.messages]] & [[PROMPT]]")
out = await PipelineExecutor(p).run(_ctx_with_incoming(incoming_json, vendor="openai"))
print("PIPE OUT:", _pp(out))
assert CAPTURED, "No HTTP request captured"
payload = CAPTURED[-1]["payload"]
# Validate Gemini body
assert "contents" in payload, "Gemini payload must contain contents"
cnts = payload["contents"]
assert isinstance(cnts, list)
# PROMPT system goes to systemInstruction, user block goes to contents
assert "systemInstruction" in payload, "Gemini payload must contain systemInstruction when system text exists"
si = payload["systemInstruction"]
# SystemInstruction.parts[].text must include both incoming system and PROMPT system merged
si_texts = []
try:
for prt in si.get("parts", []):
t = prt.get("text")
if isinstance(t, str) and t.strip():
si_texts.append(t.strip())
except Exception:
pass
joined = "\n".join(si_texts)
assert "Системный-тест из входящего" in joined, "Incoming system must be merged into systemInstruction"
assert "Narrator-chan" in joined, "PROMPT system must be merged into systemInstruction"
async def scenario_claude_target_from_openai_messages():
print("\n=== PROMPT_COMBINE 3: target=claude, incoming=openai.messages & PROMPT ===")
_patch_http_client()
CAPTURED.clear()
incoming_json = {
"messages": [
{"role": "system", "content": "Системный-тест CLAUDE"},
{"role": "user", "content": "Прив"},
{"role": "assistant", "content": "Привет!"},
]
}
p = _mk_pipeline("claude", "[[VAR:incoming.json.messages]] & [[PROMPT]]")
out = await PipelineExecutor(p).run(_ctx_with_incoming(incoming_json, vendor="openai"))
print("PIPE OUT:", _pp(out))
assert CAPTURED, "No HTTP request captured"
payload = CAPTURED[-1]["payload"]
# Validate Claude body
assert "messages" in payload, "Claude payload must contain messages"
assert "system" in payload, "Claude payload must contain system blocks"
sys_blocks = payload["system"]
    # system must be an array of blocks with type=text
assert isinstance(sys_blocks, list) and any(isinstance(b, dict) and b.get("type") == "text" for b in sys_blocks)
sys_text_join = "\n".join([b.get("text") for b in sys_blocks if isinstance(b, dict) and isinstance(b.get("text"), str)])
assert "Системный-тест CLAUDE" in sys_text_join, "Incoming system should be present"
assert "Narrator-chan" in sys_text_join, "PROMPT system should be present"
async def scenario_prepend_positioning_openai():
print("\n=== PROMPT_COMBINE 4: target=openai, PROMPT@pos=prepend & incoming.contents ===")
_patch_http_client()
CAPTURED.clear()
incoming_json = {
"contents": [
{"role": "user", "parts": [{"text": "A"}]},
{"role": "model", "parts": [{"text": "B"}]},
]
}
# Put PROMPT first; ensure system message becomes first in messages
p = _mk_pipeline("openai", "[[PROMPT]]@pos=prepend & [[VAR:incoming.json.contents]]")
out = await PipelineExecutor(p).run(_ctx_with_incoming(incoming_json, vendor="gemini"))
print("PIPE OUT:", _pp(out))
assert CAPTURED, "No HTTP request captured"
payload = CAPTURED[-1]["payload"]
msgs = payload.get("messages", [])
assert isinstance(msgs, list) and len(msgs) >= 2
first = msgs[0]
# Expect first to be system (from PROMPT) due to prepend
assert first.get("role") == "system", f"Expected system as first message, got {first}"
def test_prompt_combine_all():
async def main():
await scenario_openai_target_from_gemini_contents()
await scenario_gemini_target_from_openai_messages()
await scenario_claude_target_from_openai_messages()
await scenario_prepend_positioning_openai()
print("\n=== PROMPT_COMBINE: DONE ===")
asyncio.run(main())
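
The assertions above encode an assumed cross-vendor conversion: Gemini's "model" role maps to OpenAI's "assistant", and parts[].text fragments collapse into a single content string. A minimal sketch of that direction (the executor's real converter is not part of this diff):

from typing import Any, Dict, List

# Hypothetical converter mirroring what the tests assert; not the project's code.
def gemini_contents_to_openai_messages(contents: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    role_map = {"model": "assistant", "user": "user"}
    messages: List[Dict[str, Any]] = []
    for item in contents:
        parts = [p.get("text", "") for p in item.get("parts", []) if isinstance(p, dict)]
        messages.append({
            "role": role_map.get(item.get("role", "user"), "user"),
            "content": "\n".join(t for t in parts if t),
        })
    return messages

msgs = gemini_contents_to_openai_messages([
    {"role": "user", "parts": [{"text": "Прив"}]},
    {"role": "model", "parts": [{"text": "И тебе привет!"}]},
])
assert [m["role"] for m in msgs] == ["user", "assistant"]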


@@ -0,0 +1,23 @@
# Pytest wrapper for the existing scenario files, which launch themselves via run_all()/run_checks().
# Allows running all tests with a single command: python -m pytest -q
# The source files are not modified; we simply call their public functions from pytest tests.
def test_executor_iterative():
    # tests/test_executor_iterative.py provides run_checks() (it calls asyncio.run itself)
from tests.test_executor_iterative import run_checks
run_checks()
def test_edge_cases():
    # tests/test_edge_cases.py provides run_all() (it calls asyncio.run itself)
from tests.test_edge_cases import run_all
run_all()
def test_macros_and_vars():
    # tests/test_macros_vars.py provides run_all() (it calls asyncio.run itself)
from tests.test_macros_vars import run_all
run_all()
def test_while_nodes():
    # our new scenario suite; its run_all() drives asyncio.run itself
from tests.test_while_nodes import run_all
run_all()

tests/test_while_nodes.py Normal file

@@ -0,0 +1,134 @@
import asyncio
from agentui.pipeline.executor import PipelineExecutor
from tests.utils import ctx as _ctx
async def scenario_providercall_while_ignore():
# ProviderCall with while loop and ignore_errors enabled.
# No base_url is provided to force ExecutionError inside node.run();
# wrapper will catch it and expose {"error": "..."} plus vars.
p = {
"id": "p_pc_while_ignore",
"name": "ProviderCall while+ignore",
"loop_mode": "dag",
"nodes": [
{
"id": "n2",
"type": "ProviderCall",
"config": {
"provider": "openai",
# while: 3 iterations (0,1,2)
"while_expr": "cycleindex < 3",
"while_max_iters": 10,
"ignore_errors": True,
# no base_url / provider_configs to trigger error safely
},
"in": {}
}
]
}
out = await PipelineExecutor(p).run(_ctx())
assert isinstance(out, dict)
    # The executor also merges .vars into STORE, but here we assert on the node's own out.
vars_map = out.get("vars") or {}
assert isinstance(vars_map, dict)
# Final iteration index should be 2
assert vars_map.get("WAS_ERROR__n2") is True
assert vars_map.get("CYCLEINDEX__n2") == 2
async def scenario_rawforward_while_ignore():
# RawForward with while loop and ignore_errors enabled.
# No base_url and incoming.json is a plain string -> detect_vendor=unknown -> ExecutionError,
# wrapper catches and returns {"error": "..."} with vars set.
p = {
"id": "p_rf_while_ignore",
"name": "RawForward while+ignore",
"loop_mode": "dag",
"nodes": [
{
"id": "n1",
"type": "RawForward",
"config": {
"while_expr": "cycleindex < 2",
"while_max_iters": 10,
"ignore_errors": True,
# no base_url; vendor detect will fail on plain text
},
"in": {}
}
]
}
ctx = _ctx()
# Provide incoming as plain text-like JSON so detect_vendor returns unknown
ctx["incoming"] = {
"method": "POST",
"url": "http://example.local/test",
"path": "/test",
"query": "",
"headers": {"content-type": "text/plain"},
"json": "raw-plain-body-simulated"
}
out = await PipelineExecutor(p).run(ctx)
assert isinstance(out, dict)
vars_map = out.get("vars") or {}
assert isinstance(vars_map, dict)
# Final iteration index should be 1 (0 and 1)
assert vars_map.get("WAS_ERROR__n1") is True
assert vars_map.get("CYCLEINDEX__n1") == 1
async def scenario_providercall_while_with_out_macro():
# SetVars -> ProviderCall while uses OUT from n1 in expression
# Expression: ([[OUT:n1.vars.MSG]] contains "123") && (cycleindex < 2)
# Ignore errors to bypass real HTTP
p = {
"id": "p_pc_while_out_macro",
"name": "ProviderCall while with OUT macro",
"loop_mode": "iterative",
"nodes": [
{
"id": "n1",
"type": "SetVars",
"config": {
"variables": [
{"id": "v1", "name": "MSG", "mode": "string", "value": "abc123xyz"}
]
},
"in": {}
},
{
"id": "n2",
"type": "ProviderCall",
"config": {
"provider": "openai",
"while_expr": "([[OUT:n1.vars.MSG]] contains \"123\") && (cycleindex < 2)",
"while_max_iters": 10,
"ignore_errors": True
},
"in": {
"depends": "n1.done"
}
}
]
}
out = await PipelineExecutor(p).run(_ctx())
assert isinstance(out, dict)
vars_map = out.get("vars") or {}
assert isinstance(vars_map, dict)
# Since MSG contains "123" and cycleindex < 2, two iterations (0,1)
assert vars_map.get("WAS_ERROR__n2") is True
assert vars_map.get("CYCLEINDEX__n2") == 1
def run_all():
async def main():
await scenario_providercall_while_ignore()
await scenario_rawforward_while_ignore()
await scenario_providercall_while_with_out_macro()
print("\n=== WHILE_NODES: DONE ===")
asyncio.run(main())
if __name__ == "__main__":
run_all()
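
The WAS_ERROR__<id> / CYCLEINDEX__<id> assertions above imply a per-node while wrapper: evaluate while_expr with the current cycleindex, run the node body, and with ignore_errors enabled record the failure instead of raising. A schematic sketch of those semantics, with evaluate and the control flow assumed from the assertions rather than taken from the executor:

# Schematic only; NOT the executor's real implementation.
async def run_with_while(node_id, body, evaluate, max_iters=10, ignore_errors=False):
    out, vars_map, i = {}, {}, 0
    while i < max_iters and evaluate({"cycleindex": i}):
        try:
            out = await body(i)
        except Exception as e:
            if not ignore_errors:
                raise
            out = {"error": str(e)}
            vars_map[f"WAS_ERROR__{node_id}"] = True
        vars_map[f"CYCLEINDEX__{node_id}"] = i
        i += 1
    out["vars"] = vars_map
    return out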

tests/utils.py Normal file

@@ -0,0 +1,52 @@
from __future__ import annotations
import json
from typing import Any, Dict, Optional
def pp(obj: Any, max_len: int = 800) -> str:
"""
Pretty-print JSON-like objects in tests with length guard.
"""
try:
s = json.dumps(obj, ensure_ascii=False, indent=2)
except Exception:
s = str(obj)
if len(s) > max_len:
return s[:max_len] + "...<truncated>"
return s
def base_ctx(vendor: str = "openai") -> Dict[str, Any]:
"""
Base context used by edge-case tests (mirrors previous _base_ctx).
"""
return {
"model": "gpt-x",
"vendor_format": vendor,
"params": {"temperature": 0.1},
"chat": {"last_user": "hi"},
"OUT": {},
}
def ctx(vendor: str = "openai", incoming: Optional[Dict[str, Any]] = None, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
General context used by macros/vars tests (mirrors previous _ctx).
"""
return {
"model": "gpt-x",
"vendor_format": vendor,
"params": params or {"temperature": 0.25},
"chat": {"last_user": "Привет"},
"OUT": {},
"incoming": incoming
or {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {"x": "X-HEADER"},
"json": {},
},
}
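
A quick smoke check of these helpers (illustrative usage only; mirrors how the test files above consume them):

if __name__ == "__main__":
    # Build a context, then pretty-print it with truncation applied.
    c = ctx(vendor="gemini", params={"temperature": 0.5})
    print(pp(c, max_len=200))
    assert base_ctx()["params"]["temperature"] == 0.1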