upload
This commit is contained in:
@@ -0,0 +1,77 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from types import SimpleNamespace
|
||||
from uuid import uuid4
|
||||
|
||||
from app.services.capability_service import CapabilityService
|
||||
|
||||
|
||||
def test_build_capability_payload_stores_rich_action_context():
    """The payload built from an OpenAPI action keeps rich context for the LLM.

    Verifies that method/path/raw_spec survive into ``action_context``, that
    required inputs merge path params and request-body fields, and that
    content-type hints are extracted from the x-content-type markers.
    """
    # SimpleNamespace stands in for an Action ORM row; only attribute access
    # is needed by _build_capability_payload.
    action = SimpleNamespace(
        id=uuid4(),
        operation_id="sendCampaignEmail",
        # method mimics an HttpMethod enum member (only .value is read).
        method=SimpleNamespace(value="POST"),
        path="/v1/campaigns/{campaign_id}/emails/send",
        base_url="https://api.example.com",
        summary="Send campaign email",
        description="Send email for selected users",
        tags=["campaign", "email"],
        source_filename="crm.yaml",
        parameters_schema={
            "type": "object",
            "required": ["campaign_id"],
            "properties": {
                "campaign_id": {"type": "string", "x-parameter-location": "path"},
                "segment_id": {"type": "string", "x-parameter-location": "query"},
            },
        },
        request_body_schema={
            "type": "object",
            "required": ["subject", "template_id"],
            "properties": {
                "subject": {"type": "string"},
                "template_id": {"type": "string"},
            },
            "x-content-type": "application/json",
        },
        response_schema={
            "type": "object",
            "properties": {"delivery_id": {"type": "string"}},
            "x-content-type": "application/json",
        },
        raw_spec={
            "deprecated": False,
            "security": [{"BearerAuth": []}],
            "requestBody": {
                "content": {
                    "application/json": {
                        "schema": {"type": "object"},
                    }
                }
            },
            "responses": {
                "200": {
                    "content": {
                        "application/json": {
                            "schema": {"type": "object"},
                        }
                    }
                }
            },
        },
    )

    payload = CapabilityService._build_capability_payload(action)
    llm_payload = payload["llm_payload"]
    action_context = llm_payload["action_context"]
    hints = llm_payload["openapi_hints"]

    assert payload["name"] == "sendCampaignEmail"
    # Capability description falls back to the action summary.
    assert payload["description"] == "Send campaign email"
    assert action_context["method"] == "POST"
    assert action_context["path"] == "/v1/campaigns/{campaign_id}/emails/send"
    assert action_context["raw_spec"]["responses"]["200"] is not None
    # Required inputs merge parameters_schema and request_body_schema fields.
    assert action_context["input_signals"]["required_inputs"] == ["campaign_id", "subject", "template_id"]
    assert hints["request_content_types"] == ["application/json"]
    assert "200" in hints["response_status_codes"]
|
||||
|
||||
@@ -0,0 +1,693 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import copy
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
|
||||
from app.models import Action, HttpMethod
|
||||
from app.models.capability import Capability
|
||||
from app.models.execution import (
|
||||
ExecutionRun,
|
||||
ExecutionRunStatus,
|
||||
ExecutionStepRun,
|
||||
ExecutionStepStatus,
|
||||
)
|
||||
from app.models.pipeline import Pipeline, PipelineStatus
|
||||
from app.services.execution_service import ExecutionService, StepExecutionError
|
||||
|
||||
|
||||
class FakeSession:
    """In-memory stand-in for an async DB session used by ExecutionService.

    Records are looked up by ``(model class, primary key)``. Added
    ``ExecutionStepRun`` objects are indexed by step number so tests can
    inspect per-step state after a run; commits are merely counted.
    """

    def __init__(self, initial: dict[tuple[type[Any], Any], Any] | None = None) -> None:
        # Pre-seeded records keyed by (model, pk), mirroring session.get().
        self._store = initial or {}
        self.step_runs_by_step: dict[int, ExecutionStepRun] = {}
        self.commit_calls = 0

    async def get(self, model: type[Any], key: Any) -> Any:
        """Mimic ``AsyncSession.get``: returns None when the record is absent."""
        return self._store.get((model, key))

    def add(self, obj: Any) -> None:
        # Only step runs are tracked; anything else is silently ignored.
        if isinstance(obj, ExecutionStepRun):
            self.step_runs_by_step[obj.step] = obj

    def add_all(self, items: list[Any]) -> None:
        for item in items:
            self.add(item)

    async def commit(self) -> None:
        # Count commits so tests can assert that persistence was attempted.
        self.commit_calls += 1

    async def refresh(self, _obj: Any) -> None:
        # No-op: objects are plain in-memory instances, nothing to reload.
        return None
|
||||
|
||||
|
||||
class FakeContextStore:
    """In-memory test double for the run-context store.

    Keeps a single context value and records a deep-copied snapshot of every
    save so tests can assert on intermediate contexts.
    """

    def __init__(self, initial: Any = None) -> None:
        self._context = initial
        self.saved_contexts: list[dict[str, Any]] = []

    async def load_context(self, _run_id) -> dict[str, Any]:
        """Return a deep copy of the stored context; {} when it is not a dict."""
        if not isinstance(self._context, dict):
            return {}
        return copy.deepcopy(self._context)

    async def save_context(self, _run_id, context: dict[str, Any]) -> None:
        """Deep-copy *context*, store it, and record the snapshot."""
        snapshot = copy.deepcopy(context)
        self._context = snapshot
        self.saved_contexts.append(snapshot)
|
||||
|
||||
|
||||
def _build_action(action_id) -> Action:
    """Build a minimal GET Action fixture with the given id."""
    return Action(
        id=action_id,
        method=HttpMethod.GET,
        path="/resource",
        base_url="https://api.example.com",
    )
|
||||
|
||||
|
||||
def _build_capability(capability_id, action_id) -> Capability:
    """Build a Capability fixture bound to *action_id* (may be None)."""
    return Capability(
        id=capability_id,
        action_id=action_id,
        # Derive a unique, readable name from the capability id.
        name=f"cap_{capability_id.hex[:8]}",
    )
|
||||
|
||||
|
||||
def _build_node(step: int, capability_id, action_id, *, external_inputs: list[str] | None = None) -> dict[str, Any]:
|
||||
return {
|
||||
"step": step,
|
||||
"name": f"Step {step}",
|
||||
"external_inputs": external_inputs or [],
|
||||
"endpoints": [
|
||||
{
|
||||
"capability_id": str(capability_id),
|
||||
"action_id": str(action_id),
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def test_topological_sort_linear_graph():
    """A simple chain 1 -> 2 -> 3 must come back in dependency order."""
    edges = [
        {"from_step": 1, "to_step": 2, "type": "users"},
        {"from_step": 2, "to_step": 3, "type": "segments"},
    ]

    ordered = ExecutionService._topological_sort(steps=[1, 2, 3], edges=edges)

    assert ordered == [1, 2, 3]
|
||||
|
||||
|
||||
def test_extract_value_from_output_by_edge_type():
    """The edge type selects the matching key from a step's output dict."""
    step_output = {"users": [{"id": 1}]}

    extracted = ExecutionService._extract_value_from_output(step_output, "users")

    assert extracted == [{"id": 1}]
|
||||
|
||||
|
||||
def test_build_request_payload_uses_path_params_and_defaults():
    """Path params are substituted into the URL and query defaults applied."""
    parameters_schema = {
        "type": "object",
        "required": ["user_id"],
        "properties": {
            "user_id": {
                "type": "string",
                "x-parameter-location": "path",
            },
            "limit": {
                "type": "integer",
                "x-parameter-location": "query",
                "default": 10,
            },
        },
    }
    action = Action(
        method=HttpMethod.GET,
        path="/users/{user_id}",
        base_url="https://api.example.com",
        parameters_schema=parameters_schema,
    )

    service = ExecutionService(session=None)  # type: ignore[arg-type]
    request_payload = service._build_request_payload(
        action=action,
        resolved_inputs={"user_id": "abc"},
    )

    # "abc" is interpolated into the path; "limit" falls back to its default.
    assert request_payload["url"] == "https://api.example.com/users/abc"
    assert request_payload["query_params"] == {"limit": 10}
    assert request_payload["missing_required"] == []
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_action_from_node_uses_capability_action_id():
    """The capability's current action_id wins over a stale id stored on the node."""
    primary_action_id = uuid4()
    stale_action_id = uuid4()
    capability_id = uuid4()

    capability = _build_capability(capability_id, primary_action_id)
    action = _build_action(primary_action_id)
    session = FakeSession(
        {
            (Capability, capability_id): capability,
            (Action, primary_action_id): action,
        }
    )
    service = ExecutionService(session=session)  # type: ignore[arg-type]

    # The node carries a stale action_id on purpose.
    node = _build_node(step=1, capability_id=capability_id, action_id=stale_action_id)
    resolved_capability_id, resolved_action = await service._get_action_from_node(node)

    assert resolved_capability_id == capability_id
    assert resolved_action.id == primary_action_id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_action_from_node_raises_for_invalid_or_missing_bindings():
    """Each broken capability binding raises StepExecutionError with a clear message.

    Covers, in order: a non-UUID capability_id, a capability missing from the
    store, a capability without an action_id, and a capability whose action_id
    points at a missing Action.
    """
    service = ExecutionService(session=FakeSession())  # type: ignore[arg-type]

    # Case 1: capability_id is not a parseable UUID.
    with pytest.raises(StepExecutionError, match="valid capability_id"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": "invalid"}]}
        )

    # Case 2: capability_id is valid but no such record exists.
    missing_capability_id = uuid4()
    with pytest.raises(StepExecutionError, match=f"Capability not found: {missing_capability_id}"):
        await service._get_action_from_node(
            {
                "step": 1,
                "endpoints": [{"capability_id": str(missing_capability_id)}],
            }
        )

    # Case 3: capability exists but has no bound action_id.
    capability_id = uuid4()
    capability_without_action = _build_capability(capability_id, None)
    session = FakeSession({(Capability, capability_id): capability_without_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Capability does not have action_id: {capability_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )

    # Case 4: capability's action_id references an Action that is absent.
    missing_action_id = uuid4()
    capability_with_missing_action = _build_capability(capability_id, missing_action_id)
    session = FakeSession({(Capability, capability_id): capability_with_missing_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Action not found for capability {capability_id}: {missing_action_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )
|
||||
|
||||
|
||||
def test_resolve_node_inputs_prefers_edge_values_over_step_outputs():
    """A persisted edge value shadows the raw upstream step output."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]

    resolved, missing = service._resolve_node_inputs(
        node={"step": 2, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        # Step output says id=1, but the edge value (id=42) must win.
        step_outputs={"1": {"users": [{"id": 1}]}},
        edge_values={"1:2:users": [{"id": 42}]},
        run_inputs={},
    )

    assert missing == []
    assert resolved == {"users": [{"id": 42}]}
|
||||
|
||||
|
||||
def test_resolve_node_inputs_normalizes_array_suffix_edge_types():
    """An edge type like ``users[]`` also populates the bare ``users`` key."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]

    resolved, missing = service._resolve_node_inputs(
        node={"step": 3, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 3, "type": "users[]"}],
        step_outputs={"1": {"users": [{"id": 1}]}},
        edge_values={},
        run_inputs={},
    )

    expected_users = [{"id": 1}]
    assert missing == []
    assert resolved["users[]"] == expected_users
    assert resolved["users"] == expected_users
|
||||
|
||||
|
||||
def test_resolve_node_inputs_maps_user_hotel_pairs_to_segments():
    """A ``user_hotel_pairs`` edge is satisfied from the upstream ``segments`` output."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    pairs = [
        {"segment_id": "seg_1", "hotel_id": "hotel_001", "user_ids": ["usr_001"]},
    ]

    resolved, missing = service._resolve_node_inputs(
        node={"step": 4, "external_inputs": []},
        incoming_edges=[{"from_step": 3, "to_step": 4, "type": "user_hotel_pairs"}],
        step_outputs={"3": {"segments": pairs}},
        edge_values={},
        run_inputs={},
    )

    # Both the edge-typed key and the original output key are populated.
    assert missing == []
    assert resolved["user_hotel_pairs"] == pairs
    assert resolved["segments"] == pairs
|
||||
|
||||
|
||||
def test_resolve_node_inputs_maps_empty_user_hotel_pairs_to_assignments():
    """An empty edge value still maps through to the ``assignments`` alias."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]

    resolved, missing = service._resolve_node_inputs(
        node={"step": 5, "external_inputs": []},
        incoming_edges=[{"from_step": 4, "to_step": 5, "type": "user_hotel_pairs"}],
        step_outputs={"4": {"assignments": []}},
        edge_values={"4:5:user_hotel_pairs": []},
        run_inputs={},
    )

    # Empty lists are valid values, not "missing" inputs.
    assert missing == []
    assert resolved["user_hotel_pairs"] == []
    assert resolved["assignments"] == []
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_execute_run_linear_pipeline_succeeds_and_persists_context():
    """Happy path: a two-step linear pipeline succeeds end to end.

    Also checks that a corrupt initial context (non-dict step_outputs, list
    edge_values) is discarded, and that step outputs and edge values are
    persisted to the context store after the run.
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()

    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)

    pipeline = Pipeline(
        id=pipeline_id,
        name="Linear pipeline",
        nodes=[
            # Step 1 consumes the external run input "seed".
            _build_node(1, capability_1_id, action_1_id, external_inputs=["seed"]),
            _build_node(2, capability_2_id, action_2_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={"seed": "abc"},
    )

    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    # Deliberately malformed initial context: the service must ignore it.
    context_store = FakeContextStore(initial={"step_outputs": "bad", "edge_values": []})
    service = ExecutionService(session=session, context_store=context_store)  # type: ignore[arg-type]

    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        if action.id == action_1_id:
            # Step 1 must have received the external run input.
            assert request_payload["resolved_inputs"]["seed"] == "abc"
            return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}
        return {"status_code": 200, "body": {"ok": True}}, {"ok": True}

    service._call_action = fake_call_action  # type: ignore[method-assign]

    await service.execute_run(run_id)

    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["total_steps"] == 2
    assert run.summary["succeeded_steps"] == 2
    assert run.summary["failed_steps"] == 0
    assert run.summary["skipped_steps"] == 0
    assert run.summary["final_output_step"] == 2
    assert run.summary["final_output"] == {"ok": True}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SUCCEEDED
    # The last saved context must contain both the edge value and step output.
    assert context_store.saved_contexts[-1]["edge_values"]["1:2:users"] == [{"id": 1}]
    assert context_store.saved_contexts[-1]["step_outputs"]["1"] == {"users": [{"id": 1}]}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_execute_run_is_fail_fast_and_marks_remaining_as_skipped():
    """When step 2 of a 3-step chain fails, step 3 is skipped (fail-fast).

    The run ends PARTIAL_FAILED and the summary's final output comes from the
    last step that actually succeeded (step 1).
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()

    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)

    pipeline = Pipeline(
        id=pipeline_id,
        name="Fail fast pipeline",
        nodes=[
            _build_node(1, capability_1_id, action_1_id),
            _build_node(2, capability_2_id, action_2_id),
            _build_node(3, capability_3_id, action_3_id),
        ],
        edges=[
            {"from_step": 1, "to_step": 2, "type": "users"},
            {"from_step": 2, "to_step": 3, "type": "segments"},
        ],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )

    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        # Only the second action blows up; the first succeeds.
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200}, {"users": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]

    await service.execute_run(run_id)

    assert run.status == ExecutionRunStatus.PARTIAL_FAILED
    assert run.summary is not None
    assert run.summary["total_steps"] == 3
    assert run.summary["succeeded_steps"] == 1
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    # Final output falls back to the last successful step.
    assert run.summary["final_output_step"] == 1
    assert run.summary["final_output"] == {"users": [1]}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[3].status == ExecutionStepStatus.SKIPPED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_node_executes_sequential_chain():
    """A node with two endpoints runs them in order, piping output 1 -> input 2.

    The first endpoint's ``users_list`` output must satisfy the second
    action's required ``usersList`` parameter (camelCase/snake_case mapping),
    and the step run must record a two-entry endpoints trace.
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()

    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    action_2 = Action(
        id=action_2_id,
        method=HttpMethod.GET,
        path="/segments/build",
        base_url="https://api.example.com",
        parameters_schema={
            "type": "object",
            "required": ["usersList"],
            "properties": {
                "usersList": {
                    "type": "array",
                    "x-parameter-location": "query",
                }
            },
        },
    )

    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)

    # Single pipeline node that chains two endpoints internally.
    multi_endpoint_node = {
        "step": 1,
        "name": "Multi endpoint node",
        "external_inputs": [],
        "endpoints": [
            {
                "capability_id": str(capability_1_id),
                "action_id": str(action_1_id),
            },
            {
                "capability_id": str(capability_2_id),
                "action_id": str(action_2_id),
            },
        ],
    }

    pipeline = Pipeline(
        id=pipeline_id,
        name="Multi endpoint chain",
        nodes=[multi_endpoint_node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )

    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    call_order: list[Any] = []

    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        call_order.append(action.id)
        if action.id == action_1_id:
            return {"status_code": 200, "body": {"users_list": [{"id": 1}]}}, {"users_list": [{"id": 1}]}
        # Second endpoint must have received the first endpoint's output.
        assert request_payload["resolved_inputs"]["usersList"] == [{"id": 1}]
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]

    await service.execute_run(run_id)

    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    assert call_order == [action_1_id, action_2_id]
    # The step run is attributed to the node's first endpoint.
    assert session.step_runs_by_step[1].capability_id == capability_1_id
    assert session.step_runs_by_step[1].action_id == action_1_id
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["status"] == "succeeded"
    assert trace[1]["status"] == "succeeded"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_failure_stops_pipeline():
    """If the second endpoint of a multi-endpoint node fails, the whole step fails.

    The run ends FAILED with no successful steps, the downstream node is
    skipped, and the endpoints trace records succeeded-then-failed.
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()

    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)

    multi_endpoint_node = {
        "step": 1,
        "name": "Fail on second endpoint",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(capability_1_id), "action_id": str(action_1_id)},
            {"capability_id": str(capability_2_id), "action_id": str(action_2_id)},
        ],
    }

    pipeline = Pipeline(
        id=pipeline_id,
        name="Failing multi-endpoint pipeline",
        nodes=[
            multi_endpoint_node,
            _build_node(2, capability_3_id, action_3_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "segments"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )

    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        # The second endpoint of node 1 raises; everything else succeeds.
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]

    await service.execute_run(run_id)

    # No step succeeded, so the run is FAILED (not PARTIAL_FAILED).
    assert run.status == ExecutionRunStatus.FAILED
    assert run.summary is not None
    assert run.summary["succeeded_steps"] == 0
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SKIPPED
    failed_trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(failed_trace) == 2
    assert failed_trace[0]["status"] == "succeeded"
    assert failed_trace[1]["status"] == "failed"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_chain_supports_composite_endpoint():
    """A node may chain an atomic endpoint into a COMPOSITE capability.

    The atomic endpoint is executed via ``_call_action``; the composite one
    (action_id=None) is dispatched to ``_execute_composite_capability`` with
    the atomic output as its resolved inputs.
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    atomic_capability_id = uuid4()
    composite_capability_id = uuid4()

    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    atomic_capability = _build_capability(atomic_capability_id, action_1_id)
    composite_capability = Capability(
        id=composite_capability_id,
        # Composite capabilities carry a recipe instead of a bound action.
        action_id=None,
        type="COMPOSITE",
        name="composite_cap",
        input_schema={
            "type": "object",
            "required": ["users"],
            "properties": {
                "users": {"type": "array"},
            },
        },
        recipe={"version": 1, "steps": [{"step": 1, "capability_id": str(atomic_capability_id), "inputs": {}}]},
    )

    node = {
        "step": 1,
        "name": "Atomic then composite",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(atomic_capability_id), "action_id": str(action_1_id)},
            {"capability_id": str(composite_capability_id), "action_id": None},
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="mixed chain pipeline",
        nodes=[node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )

    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, atomic_capability_id): atomic_capability,
            (Capability, composite_capability_id): composite_capability,
            (Action, action_1_id): action_1,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        # Only the atomic action should go through the HTTP path.
        assert action.id == action_1_id
        return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}

    async def fake_execute_composite_capability(
        *,
        capability: Capability,
        resolved_inputs: dict[str, Any],
        run_inputs: dict[str, Any],
    ):
        # The composite receives the atomic endpoint's output as its input.
        assert capability.id == composite_capability_id
        assert resolved_inputs["users"] == [{"id": 1}]
        assert run_inputs == {}
        return {"capability_type": "COMPOSITE", "status_code": 200}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    service._execute_composite_capability = fake_execute_composite_capability  # type: ignore[method-assign]

    await service.execute_run(run_id)

    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["capability_id"] == str(atomic_capability_id)
    assert trace[1]["capability_id"] == str(composite_capability_id)
    assert trace[1]["capability_type"] == "COMPOSITE"
|
||||
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from uuid import uuid4
|
||||
|
||||
from app.api.executions.get_execution import _build_step_run_response
|
||||
from app.models.execution import ExecutionStepRun, ExecutionStepStatus
|
||||
|
||||
|
||||
def _build_step_run(
    *,
    request_snapshot,
    response_snapshot,
) -> ExecutionStepRun:
    """Build a succeeded step-run fixture with the given request/response snapshots."""
    now = datetime.now(timezone.utc)
    step_run = ExecutionStepRun(
        run_id=uuid4(),
        step=1,
        status=ExecutionStepStatus.SUCCEEDED,
    )
    # Remaining fields are set after construction, mirroring ORM behaviour.
    step_run.name = "Step 1"
    step_run.request_snapshot = request_snapshot
    step_run.response_snapshot = response_snapshot
    step_run.created_at = now
    step_run.updated_at = now
    return step_run
|
||||
|
||||
|
||||
def test_build_step_run_response_for_post_sets_accepted_and_output_payloads():
    """POST steps expose the request JSON body as accepted_payload; method is upper-cased."""
    request_body = {"subject": "Hi", "message": "Hello"}
    step_run = _build_step_run(
        request_snapshot={"method": "post", "json_body": request_body},
        response_snapshot={"status_code": 200, "body": {"sent": 1}},
    )

    response = _build_step_run_response(step_run)

    assert response.method == "POST"
    assert response.status_code == 200
    assert response.accepted_payload == request_body
    assert response.output_payload == {"sent": 1}
|
||||
|
||||
|
||||
def test_build_step_run_response_for_get_keeps_accepted_payload_none():
    """GET steps carry no accepted payload; string status codes are coerced to int."""
    step_run = _build_step_run(
        request_snapshot={"method": "GET", "query_params": {"limit": 20}},
        # status_code arrives as a string and an empty-string body is kept as-is.
        response_snapshot={"status_code": "204", "body": ""},
    )

    response = _build_step_run_response(step_run)

    assert response.method == "GET"
    assert response.status_code == 204
    assert response.accepted_payload is None
    assert response.output_payload == ""
|
||||
|
||||
|
||||
def test_build_step_run_response_handles_missing_snapshots():
    """Absent snapshots must degrade to all-None fields rather than raising."""
    step_run = _build_step_run(request_snapshot=None, response_snapshot=None)

    response = _build_step_run_response(step_run)

    assert response.method is None
    assert response.status_code is None
    assert response.accepted_payload is None
    assert response.output_payload is None
|
||||
@@ -0,0 +1,11 @@
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
import pytest
|
||||
from app.main import app
|
||||
|
||||
@pytest.mark.asyncio
async def test_ping():
    """Smoke-test the /api/ping health endpoint."""
    # ASGITransport calls the app in-process — the modern httpx approach,
    # no running server required.
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        response = await ac.get("/api/ping")
    assert response.status_code == 200
    assert response.json() == {"status": "ok"}
|
||||
@@ -0,0 +1,123 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
from app.models.capability import Capability, CapabilityType
|
||||
from app.services.pipeline_service import PipelineService
|
||||
from app.services.semantic_selection import SelectedCapability
|
||||
|
||||
|
||||
def _build_capability(*, name: str, required_inputs: list[str] | None = None) -> Capability:
    """Build an ATOMIC capability fixture.

    When *required_inputs* is given, each name becomes a required string
    property in the generated input schema; otherwise input_schema is None.
    """
    cap_id = uuid4()
    action_id = uuid4()

    input_schema = None
    if required_inputs is not None:
        properties = {field: {"type": "string"} for field in required_inputs}
        input_schema = {
            "type": "object",
            "required": required_inputs,
            "properties": properties,
        }

    return Capability(
        id=cap_id,
        action_id=action_id,
        type=CapabilityType.ATOMIC,
        name=name,
        input_schema=input_schema,
        output_schema={"type": "object"},
    )
|
||||
|
||||
|
||||
def _select(capability: Capability) -> SelectedCapability:
    """Wrap *capability* as a full-score, high-confidence selection."""
    return SelectedCapability(
        capability=capability,
        score=1.0,
        confidence_tier="high",
    )
|
||||
|
||||
|
||||
def test_extract_required_inputs_from_node_merges_all_endpoints():
    """Required inputs from every endpoint are merged, deduplicated, order kept."""
    service = PipelineService(session=None)  # type: ignore[arg-type]
    first_endpoint = {
        "input_type": {
            "type": "object",
            "required": ["users", "campaignId"],
        }
    }
    second_endpoint = {
        "input_type": {
            "type": "object",
            "required": ["segments", "users"],
        }
    }
    node = {"step": 1, "endpoints": [first_endpoint, second_endpoint]}

    merged = service._extract_required_inputs_from_node(node)

    # "users" is required by both endpoints but must appear only once,
    # in first-seen order.
    assert merged == ["users", "campaignId", "segments"]
||||
|
||||
|
||||
def test_normalize_workflow_preserves_multi_endpoint_nodes():
    """A node with several endpoints keeps all of them after normalization,
    and each endpoint is enriched with its capability's backing action_id."""
    capability_a = _build_capability(name="Get users", required_inputs=["users"])
    capability_b = _build_capability(name="Build segments", required_inputs=["users"])
    selected = [_select(capability_a), _select(capability_b)]
    service = PipelineService(session=None)  # type: ignore[arg-type]

    raw_graph = {
        "nodes": [
            {
                "step": 1,
                "name": "Composite-like node",
                "endpoints": [
                    {
                        "capability_id": str(capability_a.id),
                    },
                    {
                        "capability_id": str(capability_b.id),
                    },
                ],
            }
        ],
        "edges": [],
    }

    nodes, edges, issues = service._normalize_workflow(raw_graph, selected)

    assert issues == []
    assert edges == []
    assert len(nodes) == 1
    endpoints = nodes[0]["endpoints"]
    # Both endpoints survive, in their original order.
    assert len(endpoints) == 2
    assert endpoints[0]["capability_id"] == str(capability_a.id)
    assert endpoints[1]["capability_id"] == str(capability_b.id)
    # Normalization resolves each capability reference to its action.
    assert endpoints[0]["action_id"] == str(capability_a.action_id)
    assert endpoints[1]["action_id"] == str(capability_b.action_id)
||||
|
||||
|
||||
def test_normalize_workflow_flags_invalid_endpoint_capability_refs():
    """Endpoints referencing unknown capabilities are dropped and reported
    via the "graph:invalid_capability_ref" issue; valid endpoints survive."""
    capability = _build_capability(name="Get users", required_inputs=["users"])
    selected = [_select(capability)]
    service = PipelineService(session=None)  # type: ignore[arg-type]

    raw_graph = {
        "nodes": [
            {
                "step": 1,
                "name": "Node with invalid endpoint",
                "endpoints": [
                    # Random UUID that matches no selected capability.
                    {"capability_id": str(uuid4())},
                    {"capability_id": str(capability.id)},
                ],
            }
        ],
        "edges": [],
    }

    nodes, _edges, issues = service._normalize_workflow(raw_graph, selected)

    assert "graph:invalid_capability_ref" in issues
    assert len(nodes) == 1
    # Only the endpoint with a resolvable capability reference is kept.
    assert len(nodes[0]["endpoints"]) == 1
    assert nodes[0]["endpoints"][0]["capability_id"] == str(capability.id)
||||
@@ -0,0 +1,41 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from types import SimpleNamespace
|
||||
|
||||
from app.services.semantic_selection import SemanticSelectionService
|
||||
|
||||
|
||||
def test_score_maps_ru_users_query_to_en_capability_tokens():
    """A Russian "get users" query must score well against an English-named
    capability, proving cross-language token expansion works."""
    service = SemanticSelectionService()
    tokens = service._tokenize("Хочу получить пользователей")
    expanded = service._expand_tokens(tokens)
    capability = SimpleNamespace(
        name="get_users",
        description="Get users list",
    )

    score = service._score_capability(tokens, expanded, capability)

    assert score >= 0.45
||||
|
||||
|
||||
def test_score_uses_capability_action_context_tokens():
    """Tokens from llm_payload's action_context_brief (method, path, tags,
    summary) contribute to scoring even when name/description are generic."""
    service = SemanticSelectionService()
    query_tokens = service._tokenize("Отправь email по кампании")
    query_tokens_expanded = service._expand_tokens(query_tokens)
    capability = SimpleNamespace(
        # Deliberately generic name/description that do not match the query.
        name="execute_action",
        description="General API action",
        llm_payload={
            "action_context_brief": {
                "method": "POST",
                "path": "/v1/campaigns/emails/send",
                "tags": ["campaign", "email"],
                "summary": "Send campaign emails",
            }
        },
    )

    score = service._score_capability(query_tokens, query_tokens_expanded, capability)

    # Any positive score proves the action context was tokenized and matched.
    assert score > 0.0
||||
@@ -0,0 +1,248 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
import pytest
|
||||
from httpx import ASGITransport, AsyncClient, Response
|
||||
|
||||
from app.core.database.session import get_session
|
||||
from app.main import app
|
||||
from app.models import Pipeline, PipelineStatus, User, UserRole
|
||||
from app.utils.token_manager import get_current_user
|
||||
|
||||
|
||||
class FakeSession:
    """Minimal async stand-in for a database session.

    Serves a single preloaded ``Pipeline`` from ``get`` and records whether
    ``commit`` was called so tests can assert that changes were persisted.
    """

    def __init__(self, pipeline: Pipeline | None):
        # The one pipeline this "database" knows about (None = empty DB).
        self.pipeline = pipeline
        # Flipped to True by commit(); asserted by the success-path test.
        self.committed = False

    async def get(self, model, key: UUID):
        """Return the stored pipeline only for a matching Pipeline lookup."""
        if model is Pipeline and self.pipeline and key == self.pipeline.id:
            return self.pipeline
        return None

    async def commit(self):
        """Mark the session committed and bump updated_at like a real flush."""
        self.committed = True
        if self.pipeline is not None:
            self.pipeline.updated_at = datetime.now(timezone.utc)

    async def refresh(self, _obj):
        """No-op: in-memory objects are always current."""
        return None
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def clear_dependency_overrides():
    """Reset FastAPI dependency overrides around every test in this module,
    so one test's fake session/user can never leak into the next."""
    app.dependency_overrides.clear()
    yield
    app.dependency_overrides.clear()
||||
|
||||
|
||||
def _build_user(*, user_id: UUID, role: UserRole = UserRole.USER) -> User:
    """Construct an active test user whose email is derived from its id."""
    account = User(
        id=user_id,
        email=f"{user_id}@example.com",
        hashed_password="hashed",
        role=role,
        is_active=True,
    )
    account.created_at = datetime.now(timezone.utc)
    account.updated_at = datetime.now(timezone.utc)
    return account
||||
|
||||
|
||||
def _build_pipeline(*, pipeline_id: UUID, owner_id: UUID) -> Pipeline:
    """Create a two-node DRAFT pipeline owned by *owner_id*.

    Node 1 carries deliberately stale connection data ([99]/[98] point at
    nonexistent steps): the graph PATCH endpoint is expected to recompute
    connections from the submitted edges, so tests can assert the stale
    values are normalized away.
    """
    pipeline = Pipeline(
        id=pipeline_id,
        name="Travel pipeline",
        description=None,
        user_prompt=None,
        nodes=[
            {
                "step": 1,
                "name": "Get users",
                "description": None,
                # Stale on purpose — see docstring.
                "input_connected_from": [99],
                "output_connected_to": [98],
                "input_data_type_from_previous": [],
                "external_inputs": [],
                "endpoints": [],
            },
            {
                "step": 2,
                "name": "Segment users",
                "description": None,
                "input_connected_from": [],
                "output_connected_to": [],
                "input_data_type_from_previous": [],
                "external_inputs": [],
                "endpoints": [],
            },
        ],
        edges=[],
        status=PipelineStatus.DRAFT,
        created_by=owner_id,
    )
    pipeline.created_at = datetime.now(timezone.utc)
    pipeline.updated_at = datetime.now(timezone.utc)
    return pipeline
||||
|
||||
|
||||
async def _patch_graph(pipeline_id: UUID, payload: dict) -> Response:
    """Issue a PATCH to the pipeline graph endpoint via the in-process ASGI app."""
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        response = await client.patch(
            f"/api/v1/pipelines/{pipeline_id}/graph", json=payload
        )
    return response
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_patch_graph_success_for_owner_normalizes_connections():
    """The owner PATCHes edges; the response reflects recomputed connections."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    fake_session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def override_session():
        yield fake_session

    async def override_user():
        # Authenticate as the pipeline's owner.
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    response = await _patch_graph(
        pipeline_id,
        {
            "nodes": fake_session.pipeline.nodes,
            "edges": [{"from_step": 1, "to_step": 2, "type": "users"}],
        },
    )

    assert response.status_code == 200
    payload = response.json()
    assert payload["pipeline_id"] == str(pipeline_id)
    assert payload["edges"] == [{"from_step": 1, "to_step": 2, "type": "users"}]
    # Connections are derived from the submitted edges, replacing the stale
    # [99]/[98] values baked into the fixture pipeline.
    assert payload["nodes"][0]["output_connected_to"] == [2]
    assert payload["nodes"][1]["input_connected_from"] == [1]
    assert payload["nodes"][1]["input_data_type_from_previous"] == [
        {"from_step": 1, "type": "users"}
    ]
    assert isinstance(payload["updated_at"], str)
    # The change must actually have been persisted through the session.
    assert fake_session.committed is True
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_patch_graph_returns_404_for_non_owner():
    """A user who does not own the pipeline gets 404 (existence not revealed)."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def override_session():
        yield session

    async def override_user():
        # Authenticated as a different, random user — not the owner.
        return _build_user(user_id=uuid4())

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    body = {
        "nodes": session.pipeline.nodes,
        "edges": [{"from_step": 1, "to_step": 2, "type": "users"}],
    }
    response = await _patch_graph(pipeline_id, body)

    assert response.status_code == 404
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_patch_graph_rejects_cycle():
    """A 1 -> 2 -> 1 edge loop is rejected with a 422 validation error."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    fake_session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    response = await _patch_graph(
        pipeline_id,
        {
            "nodes": fake_session.pipeline.nodes,
            # These two edges form a cycle between steps 1 and 2.
            "edges": [
                {"from_step": 1, "to_step": 2, "type": "users"},
                {"from_step": 2, "to_step": 1, "type": "segments"},
            ],
        },
    )

    assert response.status_code == 422
    payload = response.json()
    assert payload["code"] == "VALIDATION_FAILED"
    assert "graph: cycle" in payload["details"]["errors"]
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_patch_graph_rejects_edge_to_missing_node():
    """An edge pointing at a nonexistent step (999) yields a 422 whose error
    message names the offending edge."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    fake_session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    response = await _patch_graph(
        pipeline_id,
        {
            "nodes": fake_session.pipeline.nodes,
            # Step 999 does not exist in the two-node fixture pipeline.
            "edges": [{"from_step": 1, "to_step": 999, "type": "users"}],
        },
    )

    assert response.status_code == 422
    payload = response.json()
    assert payload["code"] == "VALIDATION_FAILED"
    assert "graph: edge_to_missing_node:1->999" in payload["details"]["errors"]
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_patch_graph_rejects_duplicate_edge_triplets():
    """Two identical (from, to, type) edges trigger a 422 duplicate-edge error."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def override_session():
        yield session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    duplicate_edge = {"from_step": 1, "to_step": 2, "type": "users"}
    response = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            # The same (from_step, to_step, type) triplet submitted twice.
            "edges": [duplicate_edge, dict(duplicate_edge)],
        },
    )

    assert response.status_code == 422
    payload = response.json()
    assert payload["code"] == "VALIDATION_FAILED"
    assert "graph: duplicate_edge:1->2:users" in payload["details"]["errors"]
||||
Reference in New Issue
Block a user