sync: UI animations, select styling, TLS verify flag via the proxy's second line, brand spacing

2025-09-27 18:46:52 +03:00
parent 135c393eda
commit 2abfbb4b1a
52 changed files with 8029 additions and 1408 deletions


@@ -1,48 +0,0 @@
{
"VAL": 2,
"snapshot": {
"incoming": null,
"params": {},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"vars": {
"VAL": 2
}
},
"n2": {
"result": {
"id": "ret_mock_123",
"object": "chat.completion",
"model": "gpt-x",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "2"
},
"finish_reason": "stop"
}
],
"usage": {
"prompt_tokens": 0,
"completion_tokens": 1,
"total_tokens": 0
}
},
"response_text": "2"
}
},
"OUT_TEXT": {
"n1": "",
"n2": "2"
},
"LAST_NODE": "n2",
"OUT1": "",
"OUT2": "2",
"EXEC_TRACE": "n1(SetVars) -> n2(Return)"
}
}


@@ -1,48 +0,0 @@
{
"TXT": "A | B | C",
"snapshot": {
"incoming": null,
"params": {},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"vars": {
"TXT": "A | B | C"
}
},
"n2": {
"result": {
"id": "ret_mock_123",
"object": "chat.completion",
"model": "gpt-x",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "A | B | C"
},
"finish_reason": "stop"
}
],
"usage": {
"prompt_tokens": 0,
"completion_tokens": 5,
"total_tokens": 0
}
},
"response_text": "A | B | C"
}
},
"OUT_TEXT": {
"n1": "A | B | C",
"n2": "A | B | C"
},
"LAST_NODE": "n2",
"OUT1": "A | B | C",
"OUT2": "A | B | C",
"EXEC_TRACE": "n1(SetVars) -> n2(Return)"
}
}
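
The two deleted golden files above pin down the common snapshot shape: per-node results in OUT, per-node text in OUT_TEXT, plus the LAST_NODE, OUT1/OUT2 shortcuts and an EXEC_TRACE string. A minimal Python sketch of how such fixtures might be loaded and checked follows; the file path, run_flow and both helper names are hypothetical, not taken from this repo.

import json
from pathlib import Path

def load_golden(path: str) -> dict:
    # Read a golden snapshot fixture like the two JSON files above.
    return json.loads(Path(path).read_text(encoding="utf-8"))

def assert_snapshot_matches(actual: dict, golden: dict) -> None:
    # Compare only the fields the fixtures pin down: trace, last node, per-node text.
    a, g = actual["snapshot"], golden["snapshot"]
    assert a["EXEC_TRACE"] == g["EXEC_TRACE"]
    assert a["LAST_NODE"] == g["LAST_NODE"]
    assert a["OUT_TEXT"] == g["OUT_TEXT"]

# Hypothetical usage:
# assert_snapshot_matches(run_flow(...), load_golden("tests/golden/setvars_return.json"))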


@@ -0,0 +1,40 @@
{
"WAS_ERROR__n2": true,
"CYCLEINDEX__n2": 2,
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {
"x": "X-HEADER"
},
"json": {}
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n2": {
"result": {
"error": "Node n2 (ProviderCall) requires 'base_url' in config"
},
"response_text": "",
"vars": {
"WAS_ERROR__n2": true,
"CYCLEINDEX__n2": 2
}
}
},
"OUT_TEXT": {
"n2": "Node n2 (ProviderCall) requires 'base_url' in config"
},
"LAST_NODE": "n2",
"OUT2": "Node n2 (ProviderCall) requires 'base_url' in config",
"EXEC_TRACE": "n2(ProviderCall)"
}
}


@@ -0,0 +1,48 @@
{
"MSG": "abc123xyz",
"WAS_ERROR__n2": true,
"CYCLEINDEX__n2": 1,
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {
"x": "X-HEADER"
},
"json": {}
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"vars": {
"MSG": "abc123xyz"
}
},
"n2": {
"result": {
"error": "Node n2 (ProviderCall) requires 'base_url' in config"
},
"response_text": "",
"vars": {
"WAS_ERROR__n2": true,
"CYCLEINDEX__n2": 1
}
}
},
"OUT_TEXT": {
"n1": "abc123xyz",
"n2": "Node n2 (ProviderCall) requires 'base_url' in config"
},
"LAST_NODE": "n2",
"OUT1": "abc123xyz",
"OUT2": "Node n2 (ProviderCall) requires 'base_url' in config",
"EXEC_TRACE": "n1(SetVars) -> n2(ProviderCall)"
}
}
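
Both error fixtures above follow the same convention: the node's error message lands in OUT[&lt;node&gt;].result.error and OUT_TEXT[&lt;node&gt;], while WAS_ERROR__&lt;node&gt; and CYCLEINDEX__&lt;node&gt; are exported both at the top level and in that node's vars. A hedged sketch of recording that shape (the helper name is hypothetical):

def record_node_error(snapshot: dict, node_id: str, message: str, cycle_index: int) -> dict:
    # Mirror the error layout of the fixtures: message into result.error and
    # OUT_TEXT, flags into the node's exported vars.
    err_vars = {f"WAS_ERROR__{node_id}": True, f"CYCLEINDEX__{node_id}": cycle_index}
    snapshot.setdefault("OUT", {})[node_id] = {
        "result": {"error": message},
        "response_text": "",
        "vars": err_vars,
    }
    snapshot.setdefault("OUT_TEXT", {})[node_id] = message
    snapshot["LAST_NODE"] = node_id
    return err_vars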


@@ -0,0 +1,105 @@
{
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {
"x": "X-HEADER"
},
"json": {
"messages": [
{
"role": "system",
"content": "Системный-тест CLAUDE"
},
{
"role": "user",
"content": "Прив"
},
{
"role": "assistant",
"content": "Привет!"
}
]
}
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"result": {
"echo": {
"url": "http://mock.local/v1/messages",
"headers": {
"Content-Type": "application/json"
},
"payload": {
"model": "gpt-x",
"system": [
{
"type": "text",
"text": "Ты — Narrator-chan."
},
{
"type": "text",
"text": "Системный-тест CLAUDE"
}
],
"messages": [
{
"role": "user",
"content": [
{
"type": "text",
"text": "Системный-тест CLAUDE"
}
]
},
{
"role": "user",
"content": [
{
"type": "text",
"text": "Прив"
}
]
},
{
"role": "assistant",
"content": [
{
"type": "text",
"text": "Привет!"
}
]
},
{
"role": "user",
"content": [
{
"type": "text",
"text": "как лела"
}
]
}
]
}
}
},
"response_text": "http://mock.local/v1/messages"
}
},
"OUT_TEXT": {
"n1": "http://mock.local/v1/messages"
},
"LAST_NODE": "n1",
"OUT1": "http://mock.local/v1/messages",
"EXEC_TRACE": "n1(ProviderCall)"
}
}
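
The echo above shows an OpenAI-style chat being re-shaped into an Anthropic Messages-style body: system text becomes a list of typed text blocks and each turn's content is wrapped in a text-block array. A rough Python sketch of that mapping, assuming plain string contents come in; the function name is hypothetical and it does not claim to reproduce the exact merge behaviour echoed above.

def to_anthropic_payload(model: str, base_system: str, messages: list) -> dict:
    # System turns are folded into the "system" block list; user/assistant
    # turns keep their role but get text-block content.
    system_blocks = [{"type": "text", "text": base_system}]
    out = []
    for m in messages:
        if m["role"] == "system":
            system_blocks.append({"type": "text", "text": m["content"]})
            continue
        out.append({"role": m["role"],
                    "content": [{"type": "text", "text": m["content"]}]})
    return {"model": model, "system": system_blocks, "messages": out}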


@@ -0,0 +1,101 @@
{
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {
"x": "X-HEADER"
},
"json": {
"messages": [
{
"role": "system",
"content": "Системный-тест из входящего"
},
{
"role": "user",
"content": "Its just me.."
},
{
"role": "assistant",
"content": "Reply from model"
}
]
}
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"result": {
"echo": {
"url": "http://mock.local/v1beta/models/gpt-x:generateContent",
"headers": {
"Content-Type": "application/json"
},
"payload": {
"model": "gpt-x",
"contents": [
{
"role": "user",
"parts": [
{
"text": "Системный-тест из входящего"
}
]
},
{
"role": "user",
"parts": [
{
"text": "Its just me.."
}
]
},
{
"role": "model",
"parts": [
{
"text": "Reply from model"
}
]
},
{
"role": "user",
"parts": [
{
"text": "как лела"
}
]
}
],
"systemInstruction": {
"parts": [
{
"text": "Ты — Narrator-chan."
},
{
"text": "Системный-тест из входящего"
}
]
}
}
}
},
"response_text": "http://mock.local/v1beta/models/gpt-x:generateContent"
}
},
"OUT_TEXT": {
"n1": "http://mock.local/v1beta/models/gpt-x:generateContent"
},
"LAST_NODE": "n1",
"OUT1": "http://mock.local/v1beta/models/gpt-x:generateContent",
"EXEC_TRACE": "n1(ProviderCall)"
}
}
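
Here the same OpenAI-style history is echoed in Gemini generateContent shape: assistant turns become role "model", text is wrapped in parts, and system text is carried in systemInstruction.parts. A minimal sketch of that direction (hypothetical helper, plain-string contents assumed):

def to_gemini_payload(model: str, base_system: str, messages: list) -> dict:
    # assistant -> "model", everything else -> "user"; system text collects
    # into systemInstruction.parts alongside the base prompt.
    system_parts = [{"text": base_system}]
    contents = []
    for m in messages:
        if m["role"] == "system":
            system_parts.append({"text": m["content"]})
            continue
        role = "model" if m["role"] == "assistant" else "user"
        contents.append({"role": role, "parts": [{"text": m["content"]}]})
    return {"model": model, "contents": contents,
            "systemInstruction": {"parts": system_parts}}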


@@ -0,0 +1,79 @@
{
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://localhost/test",
"path": "/test",
"query": "",
"headers": {
"x": "X-HEADER"
},
"json": {
"contents": [
{
"role": "user",
"parts": [
{
"text": "A"
}
]
},
{
"role": "model",
"parts": [
{
"text": "B"
}
]
}
]
}
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "gemini",
"system": "",
"OUT": {
"n1": {
"result": {
"echo": {
"url": "http://mock.local/v1/chat/completions",
"headers": {
"Content-Type": "application/json"
},
"payload": {
"model": "gpt-x",
"messages": [
{
"role": "system",
"content": "Ты — Narrator-chan."
},
{
"role": "user",
"content": "как лела"
},
{
"role": "user",
"content": "A"
},
{
"role": "assistant",
"content": "B"
}
]
}
}
},
"response_text": "http://mock.local/v1/chat/completions"
}
},
"OUT_TEXT": {
"n1": "http://mock.local/v1/chat/completions"
},
"LAST_NODE": "n1",
"OUT1": "http://mock.local/v1/chat/completions",
"EXEC_TRACE": "n1(ProviderCall)"
}
}
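
This fixture goes the other way: a Gemini-style contents/parts request (vendor_format "gemini") is echoed as an OpenAI chat/completions body, with role "model" mapped back to "assistant" and the parts flattened to plain strings. A small sketch of that flattening (hypothetical helper):

def gemini_to_openai_messages(contents: list) -> list:
    # Join each turn's parts into one string and rename "model" to "assistant".
    messages = []
    for c in contents:
        role = "assistant" if c.get("role") == "model" else "user"
        text = "".join(p.get("text", "") for p in c.get("parts", []))
        messages.append({"role": role, "content": text})
    return messages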


@@ -0,0 +1,40 @@
{
"WAS_ERROR__n1": true,
"CYCLEINDEX__n1": 1,
"snapshot": {
"incoming": {
"method": "POST",
"url": "http://example.local/test",
"path": "/test",
"query": "",
"headers": {
"content-type": "text/plain"
},
"json": "raw-plain-body-simulated"
},
"params": {
"temperature": 0.25
},
"model": "gpt-x",
"vendor_format": "openai",
"system": "",
"OUT": {
"n1": {
"result": {
"error": "Node n1 (RawForward): 'base_url' is not configured and vendor could not be detected."
},
"response_text": "",
"vars": {
"WAS_ERROR__n1": true,
"CYCLEINDEX__n1": 1
}
}
},
"OUT_TEXT": {
"n1": "Node n1 (RawForward): 'base_url' is not configured and vendor could not be detected."
},
"LAST_NODE": "n1",
"OUT1": "Node n1 (RawForward): 'base_url' is not configured and vendor could not be detected.",
"EXEC_TRACE": "n1(RawForward)"
}
}
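
The RawForward failure above is triggered by a plain-text body: with no base_url configured, the node would have to infer the vendor from the payload shape, and a raw string offers nothing to key on. A hedged sketch of that kind of shape-based detection (hypothetical; the repo's actual heuristic is not shown in this diff):

from typing import Optional

def detect_vendor_format(body: object) -> Optional[str]:
    # Only JSON objects with a recognizable top-level key can be classified;
    # a raw string like "raw-plain-body-simulated" returns None.
    if isinstance(body, dict):
        if "messages" in body:
            return "openai"
        if "contents" in body:
            return "gemini"
    return None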

File diff suppressed because one or more lines are too long