This commit is contained in:
2026-03-17 18:32:44 +03:00
commit efcd4a8dfd
209 changed files with 33355 additions and 0 deletions
+26
View File
@@ -0,0 +1,26 @@
FROM python:3.12-slim

WORKDIR /app

# System build dependencies for compiling wheels that link against libpq.
# update + install + list cleanup in ONE layer so stale apt metadata is never cached.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy the manifest alone first: the dependency layer stays cached until
# requirements.txt itself changes, not on every source edit.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Runtime environment
ENV PYTHONPATH=/app \
    PYTHONUNBUFFERED=1

# Drop root: create an unprivileged system user and hand it the app directory.
# Port 8000 is above 1024, so no extra capabilities are required to bind.
RUN groupadd --system app && useradd --system --gid app --home /app app \
    && chown -R app:app /app
USER app

# Documentation only — publish with `docker run -p` / orchestrator config.
EXPOSE 8000

# Start command. NOTE: --reload removed — it is a development feature that
# watches the filesystem and restarts workers; it must not run in a production image.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
View File
View File
+3
View File
@@ -0,0 +1,3 @@
from app.api.actions.router import router
__all__ = ["router"]
+49
View File
@@ -0,0 +1,49 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.actions.dependencies import get_active_action_or_404
from app.core.database.session import get_session
from app.models import User
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])
@router.delete("/{action_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_action(
    action_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Soft-delete an action visible to the current user.

    The row is retained and flagged via ``is_deleted`` rather than removed,
    so history stays inspectable. Returns 204 on success; 404 when the action
    is missing, already deleted, not successfully ingested, or owned by a
    different non-admin user (the dependency raises in all those cases).
    """
    # Correlation id injected by tracing middleware; may be absent in tests.
    trace_id = getattr(request.state, "traceId", None)
    try:
        action = await get_active_action_or_404(session, action_id, current_user)
    except HTTPException:
        # Audit the rejection before propagating the error to the client.
        log_business_event(
            "action_delete_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            action_id=str(action_id),
            reason="action_not_found_or_forbidden",
        )
        raise
    action.is_deleted = True
    await session.commit()
    # Refresh so the in-memory object reflects DB-side defaults (e.g. updated_at).
    await session.refresh(action)
    log_business_event(
        "action_deleted",
        trace_id=trace_id,
        user_id=str(current_user.id),
        action_id=str(action.id),
    )
    return Response(status_code=status.HTTP_204_NO_CONTENT)
+21
View File
@@ -0,0 +1,21 @@
from __future__ import annotations
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, ActionIngestStatus, User, UserRole
async def get_active_action_or_404(
    session: AsyncSession,
    action_id: UUID,
    current_user: User,
) -> Action:
    """Load an action the caller may act on, or raise 404.

    Visible actions are not soft-deleted and were ingested successfully.
    Admins may touch any visible action; other users only their own. Both
    "missing" and "forbidden" map to 404 so existence is never leaked.
    """
    action = await session.get(Action, action_id)
    is_visible = (
        action is not None
        and not action.is_deleted
        and action.ingest_status == ActionIngestStatus.SUCCEEDED
    )
    if is_visible:
        is_admin = current_user.role == UserRole.ADMIN
        if is_admin or action.user_id == current_user.id:
            return action
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Action not found")
+47
View File
@@ -0,0 +1,47 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.actions.dependencies import get_active_action_or_404
from app.core.database.session import get_session
from app.models import User
from app.schemas.action_sch import ActionDetailResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])
@router.get("/{action_id}", response_model=ActionDetailResponse)
async def get_action(
    action_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Fetch a single action visible to the current user.

    Returns the detail representation; 404 when the action is missing,
    soft-deleted, not successfully ingested, or owned by another
    non-admin user (the shared dependency enforces all of that).
    """
    # Correlation id injected by tracing middleware; may be absent in tests.
    trace_id = getattr(request.state, "traceId", None)
    try:
        action = await get_active_action_or_404(session, action_id, current_user)
    except HTTPException:
        # Audit the rejection before propagating the 404 to the client.
        log_business_event(
            "action_fetch_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            action_id=str(action_id),
            reason="action_not_found_or_forbidden",
        )
        raise
    log_business_event(
        "action_fetched",
        trace_id=trace_id,
        user_id=str(current_user.id),
        action_id=str(action.id),
        # method may legitimately be NULL for failed extractions.
        action_method=action.method.value if action.method is not None else None,
        action_path=action.path,
    )
    # FastAPI serializes the ORM object through ActionDetailResponse.
    return action
+92
View File
@@ -0,0 +1,92 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, File, HTTPException, Request, UploadFile, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Action, ActionIngestStatus, User
from app.schemas.capability_sch import ActionIngestWithCapabilitiesResponse
from app.services.capability_service import CapabilityService
from app.services.openapi_service import OpenAPIService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])
@router.post("/ingest", response_model=ActionIngestWithCapabilitiesResponse, status_code=status.HTTP_201_CREATED)
async def ingest_actions(
    request: Request,
    file: UploadFile = File(...),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Ingest an uploaded OpenAPI document into Action rows plus capabilities.

    Parses the file, persists one Action per extracted operation — both
    successful and failed extractions are stored (failed rows keep the
    failure status for later inspection) — then derives capabilities from
    the succeeded ones, all in a single transaction. Raises 400 for an
    invalid document or when no supported operations are found.
    """
    trace_id = getattr(request.state, "traceId", None)
    # NOTE(review): the whole upload is buffered in memory — confirm an
    # upstream request-size limit exists for large documents.
    payload = await file.read()
    try:
        document = OpenAPIService.load_document(payload)
        ingestion_result = OpenAPIService.extract_actions_with_failures(document, source_filename=file.filename)
    except ValueError as exc:
        # Parsing/extraction problems surface as ValueError from the service.
        log_business_event(
            "actions_ingest_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            source_filename=file.filename,
            file_size_bytes=len(payload),
            reason="invalid_openapi_document",
            details=str(exc),
        )
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    action_payloads = ingestion_result["succeeded"] + ingestion_result["failed"]
    if not action_payloads:
        log_business_event(
            "actions_ingest_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            source_filename=file.filename,
            file_size_bytes=len(payload),
            reason="no_supported_operations",
        )
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No supported HTTP operations found in OpenAPI file")
    actions = [Action(user_id=current_user.id, **action_payload) for action_payload in action_payloads]
    session.add_all(actions)
    # Flush (not commit) so actions get primary keys the capability service can reference.
    await session.flush()
    succeeded_actions = [action for action in actions if action.ingest_status == ActionIngestStatus.SUCCEEDED]
    failed_actions = [action for action in actions if action.ingest_status == ActionIngestStatus.FAILED]
    capability_service = CapabilityService(session)
    capabilities = await capability_service.create_from_actions(
        succeeded_actions,
        owner_user_id=current_user.id,
        refresh=False,
    )
    await session.commit()
    # NOTE(review): one refresh round-trip per row — fine for typical files,
    # but O(n) queries for very large specs; consider a bulk re-select.
    for action in actions:
        await session.refresh(action)
    for capability in capabilities:
        await session.refresh(capability)
    log_business_event(
        "actions_ingested",
        trace_id=trace_id,
        user_id=str(current_user.id),
        source_filename=file.filename,
        file_size_bytes=len(payload),
        succeeded_count=len(succeeded_actions),
        failed_count=len(failed_actions),
        created_capabilities_count=len(capabilities),
    )
    return ActionIngestWithCapabilitiesResponse(
        succeeded_count=len(succeeded_actions),
        failed_count=len(failed_actions),
        created_capabilities_count=len(capabilities),
        succeeded_actions=succeeded_actions,
        failed_actions=failed_actions,
        capabilities=capabilities,
    )
+79
View File
@@ -0,0 +1,79 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy import or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Action, ActionIngestStatus, HttpMethod, User, UserRole
from app.schemas.action_sch import ActionListItemResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])
@router.get("/", response_model=list[ActionListItemResponse], include_in_schema=False)
async def list_actions(
    request: Request,
    method: HttpMethod | None = Query(default=None),
    owner_id: UUID | None = Query(default=None),
    source_filename: str | None = Query(default=None),
    search: str | None = Query(default=None, min_length=1),
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List visible actions with optional filters and pagination.

    Hidden from the OpenAPI schema (include_in_schema=False). Only
    non-deleted, successfully-ingested actions are returned. Admins may
    filter by any owner via ``owner_id``; other users always see their own.
    """
    trace_id = getattr(request.state, "traceId", None)
    # SQLAlchemy's generative API is order-independent here: .where() clauses
    # appended after .limit()/.offset() still render before LIMIT in the SQL.
    query = (
        select(Action)
        .where(Action.is_deleted.is_(False))
        .where(Action.ingest_status == ActionIngestStatus.SUCCEEDED)
        .order_by(Action.created_at.desc())
        .limit(limit)
        .offset(offset)
    )
    if current_user.role == UserRole.ADMIN:
        # Admins see everything unless they explicitly narrow to one owner.
        if owner_id is not None:
            query = query.where(Action.user_id == owner_id)
    else:
        # Non-admins are scoped to their own rows; owner_id is ignored.
        query = query.where(Action.user_id == current_user.id)
    if method is not None:
        query = query.where(Action.method == method)
    if source_filename:
        query = query.where(Action.source_filename == source_filename)
    if search:
        # Case-insensitive substring match across id/path/summary.
        search_pattern = f"%{search}%"
        query = query.where(
            or_(
                Action.operation_id.ilike(search_pattern),
                Action.path.ilike(search_pattern),
                Action.summary.ilike(search_pattern),
            )
        )
    result = await session.execute(query)
    actions = list(result.scalars().all())
    log_business_event(
        "actions_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        method=method.value if method is not None else None,
        owner_id=str(owner_id) if owner_id is not None else None,
        source_filename=source_filename,
        search=search,
        limit=limit,
        offset=offset,
        result_count=len(actions),
    )
    return actions
+13
View File
@@ -0,0 +1,13 @@
from fastapi import APIRouter
from app.api.actions.delete_action import router as delete_action_router
from app.api.actions.get_action import router as get_action_router
from app.api.actions.ingest_actions import router as ingest_actions_router
from app.api.actions.list_actions import router as list_actions_router
# Aggregate router for all /v1/actions endpoints; sub-routers each carry one handler.
router = APIRouter(prefix="/v1/actions", tags=["Actions"])
router.include_router(ingest_actions_router)
router.include_router(list_actions_router)
router.include_router(get_action_router)
router.include_router(delete_action_router)
+84
View File
@@ -0,0 +1,84 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.auth_sch import LoginIn
from app.utils.business_logger import log_business_event
from app.utils.hashing import verify_password
from app.utils.token_manager import create_access_token
router = APIRouter(prefix="/v1/auth", tags=["Auth"])
@router.post("/login", status_code=status.HTTP_200_OK)
async def login(
    data: LoginIn,
    request: Request,
    session: AsyncSession = Depends(get_session),
):
    """Authenticate by email/password and issue a JWT access token.

    Returns the token, its TTL, and a camelCase user summary. Raises 401
    for unknown email or wrong password (identical client-facing message
    for both, so accounts cannot be enumerated from the response body)
    and 423 for deactivated accounts.
    """
    # Emails are stored and compared case-insensitively.
    email = data.email.strip().lower()
    trace_id = getattr(request.state, "traceId", None)
    result = await session.execute(select(User).where(func.lower(User.email) == email))
    user = result.scalar_one_or_none()
    if user is None:
        # NOTE(review): no password hash is verified on this path, so response
        # timing differs between unknown-email and wrong-password cases —
        # consider verifying a dummy hash if enumeration via timing matters.
        log_business_event(
            "auth_login_failed",
            trace_id=trace_id,
            email=email,
            reason="user_not_found",
        )
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail={"message": "Invalid email or password"},
        )
    if not verify_password(data.password, user.hashed_password):
        log_business_event(
            "auth_login_failed",
            trace_id=trace_id,
            email=email,
            reason="invalid_password",
        )
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail={"message": "Invalid email or password"},
        )
    if not user.is_active:
        # Deactivated accounts pass the password check but are locked out.
        log_business_event(
            "auth_login_blocked",
            trace_id=trace_id,
            user_id=str(user.id),
            email=user.email,
            reason="user_inactive",
        )
        raise HTTPException(
            status_code=status.HTTP_423_LOCKED,
            detail={"message": "User account is deactivated"},
        )
    token, expires_in = create_access_token(sub=str(user.id), role=user.role.value)
    log_business_event(
        "auth_login_succeeded",
        trace_id=trace_id,
        user_id=str(user.id),
        email=user.email,
        role=user.role.value,
    )
    # camelCase keys form the frontend-facing API contract.
    return {
        "accessToken": token,
        "expiresIn": expires_in,
        "user": {
            "id": str(user.id),
            "email": user.email,
            "fullName": user.full_name,
            "role": user.role.value,
            "isActive": user.is_active,
            "createdAt": user.created_at.isoformat(),
        },
    }
+72
View File
@@ -0,0 +1,72 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.auth_sch import RegisterIn
from app.utils.business_logger import log_business_event
from app.utils.hashing import hash_password
from app.utils.token_manager import create_access_token
router = APIRouter(prefix="/v1/auth", tags=["Auth"])
@router.post("/register", status_code=status.HTTP_201_CREATED)
async def register(
    data: RegisterIn,
    request: Request,
    session: AsyncSession = Depends(get_session),
):
    """Create a new user account and immediately issue an access token.

    Emails are normalized to lowercase and must be unique (409 on
    duplicate). New accounts are created active with the USER role and
    receive the same camelCase response shape as /login.
    """
    email = data.email.strip().lower()
    trace_id = getattr(request.state, "traceId", None)
    # NOTE(review): check-then-insert is racy under concurrent registrations —
    # presumably a unique index on email backs this up at the DB level; confirm,
    # and handle IntegrityError from the commit below if so.
    result = await session.execute(select(User).where(func.lower(User.email) == email))
    existing_user = result.scalar_one_or_none()
    if existing_user is not None:
        log_business_event(
            "auth_register_failed",
            trace_id=trace_id,
            email=email,
            reason="email_already_exists",
        )
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail={"message": "Email already exists. Please login."},
        )
    user = User(
        email=email,
        full_name=data.full_name,
        hashed_password=hash_password(data.password),
        role=UserRole.USER,
        is_active=True,
    )
    session.add(user)
    await session.commit()
    # Refresh to load server-side defaults (id, created_at) used below.
    await session.refresh(user)
    token, expires_in = create_access_token(sub=str(user.id), role=user.role.value)
    log_business_event(
        "auth_register_succeeded",
        trace_id=trace_id,
        user_id=str(user.id),
        email=user.email,
        role=user.role.value,
    )
    # camelCase keys form the frontend-facing API contract.
    return {
        "accessToken": token,
        "expiresIn": expires_in,
        "user": {
            "id": str(user.id),
            "email": user.email,
            "fullName": user.full_name,
            "role": user.role.value,
            "isActive": user.is_active,
            "createdAt": user.created_at.isoformat(),
        },
    }
+1
View File
@@ -0,0 +1 @@
@@ -0,0 +1,72 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.capability_sch import CapabilityResponse, CreateCompositeCapabilityRequest
from app.services.capability_service import (
CapabilityService,
CompositeRecipeValidationError,
)
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])
@router.post(
    "/composite",
    response_model=CapabilityResponse,
    status_code=status.HTTP_201_CREATED,
)
async def create_composite_capability(
    payload: CreateCompositeCapabilityRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Create a composite (multi-step recipe) capability owned by the caller.

    The service validates the recipe against capabilities visible to the
    user (admins see all). Validation failures roll the transaction back
    and surface as 422 with the collected error list.
    """
    trace_id = getattr(request.state, "traceId", None)
    capability_service = CapabilityService(session)
    try:
        capability = await capability_service.create_validated_composite_capability(
            owner_user_id=current_user.id,
            name=payload.name,
            description=payload.description,
            input_schema=payload.input_schema,
            output_schema=payload.output_schema,
            recipe=payload.recipe.model_dump(mode="python"),
            include_all=current_user.role == UserRole.ADMIN,
        )
        await session.commit()
        await session.refresh(capability)
        # NOTE(review): the recipe is dumped a second time purely for logging;
        # reusing a single dump would avoid the duplicate serialization.
        recipe_dump = payload.recipe.model_dump(mode="python")
        recipe_steps = recipe_dump.get("steps") if isinstance(recipe_dump, dict) else None
        log_business_event(
            "composite_capability_created",
            trace_id=trace_id,
            user_id=str(current_user.id),
            capability_id=str(capability.id),
            capability_name=capability.name,
            recipe_steps_count=len(recipe_steps) if isinstance(recipe_steps, list) else None,
        )
        return capability
    except CompositeRecipeValidationError as exc:
        # Undo any partial writes made before validation failed.
        await session.rollback()
        log_business_event(
            "composite_capability_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            capability_name=payload.name,
            reason="validation_failed",
            errors_count=len(exc.errors),
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Composite recipe validation failed",
                "errors": exc.errors,
            },
        ) from exc
@@ -0,0 +1,25 @@
from __future__ import annotations
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Capability, User, UserRole
from app.services.capability_service import CapabilityService
async def get_capability_or_404(
    session: AsyncSession,
    capability_id: UUID,
    current_user: User,
) -> Capability:
    """Load a capability visible to the current user, or raise 404.

    Admins may see any capability; regular users only their own (the
    service applies the ownership filter via include_all/owner_user_id).
    Missing and inaccessible capabilities are both reported as 404 so
    existence is never leaked.
    """
    service = CapabilityService(session)
    is_admin = current_user.role == UserRole.ADMIN
    capability = await service.get_capability(
        capability_id,
        owner_user_id=current_user.id,
        include_all=is_admin,
    )
    if capability is not None:
        return capability
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Capability not found")
@@ -0,0 +1,46 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.capabilities.dependencies import get_capability_or_404
from app.core.database.session import get_session
from app.models import User
from app.schemas.capability_sch import CapabilityResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])
@router.get("/{capability_id}", response_model=CapabilityResponse)
async def get_capability(
    capability_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Fetch a single capability visible to the current user.

    404 when the capability does not exist or the caller may not see it
    (ownership is enforced by the shared dependency; admins see all).
    """
    # Correlation id injected by tracing middleware; may be absent in tests.
    trace_id = getattr(request.state, "traceId", None)
    try:
        capability = await get_capability_or_404(session, capability_id, current_user)
    except HTTPException:
        # Audit the rejection before propagating the 404 to the client.
        log_business_event(
            "capability_fetch_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            capability_id=str(capability_id),
            reason="capability_not_found_or_forbidden",
        )
        raise
    log_business_event(
        "capability_fetched",
        trace_id=trace_id,
        user_id=str(current_user.id),
        capability_id=str(capability.id),
        # type may be an enum or a plain string depending on the model; handle both.
        capability_type=capability.type.value if hasattr(capability.type, "value") else str(capability.type),
    )
    return capability
@@ -0,0 +1,55 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.capability_sch import CapabilityResponse
from app.services.capability_service import CapabilityService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])
@router.get("/", response_model=list[CapabilityResponse])
async def list_capabilities(
    request: Request,
    action_id: UUID | None = Query(default=None),
    owner_id: UUID | None = Query(default=None),
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List capabilities visible to the caller, optionally filtered by action/owner.

    Visibility rules: an admin with no owner_id sees everything
    (include_all); an admin with owner_id sees that owner's capabilities;
    a regular user always sees only their own — their owner_id filter is
    silently ignored.
    """
    trace_id = getattr(request.state, "traceId", None)
    capability_service = CapabilityService(session)
    # The service takes a list of action ids; wrap the single optional filter.
    action_ids = [action_id] if action_id is not None else None
    include_all = current_user.role == UserRole.ADMIN
    # Admins may impersonate any owner filter; everyone else is pinned to self.
    owner_user_id = owner_id if include_all and owner_id is not None else current_user.id
    capabilities = await capability_service.get_capabilities(
        action_ids=action_ids,
        owner_user_id=owner_user_id,
        include_all=include_all and owner_id is None,
        limit=limit,
        offset=offset,
    )
    log_business_event(
        "capabilities_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        owner_id=str(owner_user_id) if owner_user_id is not None else None,
        action_id=str(action_id) if action_id is not None else None,
        include_all=include_all and owner_id is None,
        limit=limit,
        offset=offset,
        result_count=len(capabilities),
    )
    return capabilities
+13
View File
@@ -0,0 +1,13 @@
from fastapi import APIRouter
from app.api.capabilities.create_composite_capability import (
router as create_composite_capability_router,
)
from app.api.capabilities.get_capability import router as get_capability_router
from app.api.capabilities.list_capabilities import router as list_capabilities_router
# Aggregate router for all /v1/capabilities endpoints. The literal "/composite"
# path is registered before the dynamic "/{capability_id}" route.
router = APIRouter(prefix="/v1/capabilities", tags=["Capabilities"])
router.include_router(list_capabilities_router)
router.include_router(create_composite_capability_router)
router.include_router(get_capability_router)
+3
View File
@@ -0,0 +1,3 @@
from app.api.executions.router import router
__all__ = ["router"]
+168
View File
@@ -0,0 +1,168 @@
from __future__ import annotations
from typing import Any
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import ExecutionRun, ExecutionStepRun, Pipeline, User, UserRole
from app.schemas.execution_sch import ExecutionRunDetailResponse, ExecutionStepRunResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Executions"])
KNOWN_HTTP_METHODS = {"GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"}
REQUEST_BODY_METHODS = {"POST", "PUT", "PATCH"}
def _extract_method(request_snapshot: dict[str, Any] | None) -> str | None:
if not isinstance(request_snapshot, dict):
return None
method_raw = request_snapshot.get("method")
if not isinstance(method_raw, str):
return None
method = method_raw.upper()
if method in KNOWN_HTTP_METHODS:
return method
return None
def _extract_status_code(response_snapshot: dict[str, Any] | None) -> int | None:
if not isinstance(response_snapshot, dict):
return None
status_code_raw = response_snapshot.get("status_code")
if isinstance(status_code_raw, int):
return status_code_raw
if isinstance(status_code_raw, str) and status_code_raw.isdigit():
return int(status_code_raw)
return None
def _extract_accepted_payload(
    *,
    method: str | None,
    request_snapshot: dict[str, Any] | None,
) -> Any:
    """Return the JSON request body for body-carrying methods (POST/PUT/PATCH).

    For any other method, or when the snapshot is missing/not a dict, returns
    None. A body-carrying method with no "json_body" key also yields None.
    """
    if method in REQUEST_BODY_METHODS and isinstance(request_snapshot, dict):
        return request_snapshot.get("json_body")
    return None
def _extract_output_payload(response_snapshot: dict[str, Any] | None) -> Any:
if not isinstance(response_snapshot, dict):
return None
return response_snapshot.get("body")
def _build_step_run_response(step_run: ExecutionStepRun) -> ExecutionStepRunResponse:
    """Map an ExecutionStepRun ORM row to its response schema, enriching it
    with fields derived from the stored request/response snapshots
    (method, status_code, accepted_payload, output_payload)."""
    # status may be an enum or a plain string depending on how it was stored.
    status_value = step_run.status.value if hasattr(step_run.status, "value") else step_run.status
    base = ExecutionStepRunResponse(
        step=step_run.step,
        name=step_run.name,
        capability_id=step_run.capability_id,
        action_id=step_run.action_id,
        status=status_value,
        resolved_inputs=step_run.resolved_inputs,
        request_snapshot=step_run.request_snapshot,
        response_snapshot=step_run.response_snapshot,
        error=step_run.error,
        started_at=step_run.started_at,
        finished_at=step_run.finished_at,
        duration_ms=step_run.duration_ms,
        created_at=step_run.created_at,
        updated_at=step_run.updated_at,
    )
    # Snapshots are free-form JSON; only trust them when they are dicts.
    request_snapshot = base.request_snapshot if isinstance(base.request_snapshot, dict) else None
    response_snapshot = base.response_snapshot if isinstance(base.response_snapshot, dict) else None
    method = _extract_method(request_snapshot)
    status_code = _extract_status_code(response_snapshot)
    accepted_payload = _extract_accepted_payload(method=method, request_snapshot=request_snapshot)
    output_payload = _extract_output_payload(response_snapshot)
    # model_copy keeps the validated base immutable-by-convention and layers
    # the derived fields on top.
    return base.model_copy(
        update={
            "method": method,
            "status_code": status_code,
            "accepted_payload": accepted_payload,
            "output_payload": output_payload,
        }
    )
@router.get("/{run_id}", response_model=ExecutionRunDetailResponse)
async def get_execution(
    run_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Fetch one execution run with all of its step runs.

    Access: admins see any run; other users see runs they initiated, or —
    when a run has no initiator recorded — runs of pipelines they created.
    Both "missing" and "forbidden" map to 404 so run existence is not leaked.
    """
    trace_id = getattr(request.state, "traceId", None)
    run = await session.get(ExecutionRun, run_id)
    if run is None:
        log_business_event(
            "execution_fetch_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            run_id=str(run_id),
            reason="run_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Execution run not found")
    if current_user.role != UserRole.ADMIN:
        is_owner = run.initiated_by == current_user.id
        # Fallback ownership: runs without an initiator belong to the
        # pipeline's creator.
        if not is_owner and run.initiated_by is None:
            pipeline = await session.get(Pipeline, run.pipeline_id)
            is_owner = pipeline is not None and pipeline.created_by == current_user.id
        if not is_owner:
            log_business_event(
                "execution_fetch_rejected",
                trace_id=trace_id,
                user_id=str(current_user.id),
                run_id=str(run.id),
                pipeline_id=str(run.pipeline_id),
                reason="run_not_found_or_forbidden",
            )
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Execution run not found")
    # created_at is a tiebreaker in case two step rows share a step number.
    step_query = (
        select(ExecutionStepRun)
        .where(ExecutionStepRun.run_id == run.id)
        .order_by(ExecutionStepRun.step.asc(), ExecutionStepRun.created_at.asc())
    )
    step_result = await session.execute(step_query)
    step_runs = list(step_result.scalars().all())
    log_business_event(
        "execution_fetched",
        trace_id=trace_id,
        user_id=str(current_user.id),
        run_id=str(run.id),
        pipeline_id=str(run.pipeline_id),
        result_status=run.status.value,
        step_count=len(step_runs),
    )
    return ExecutionRunDetailResponse(
        id=run.id,
        pipeline_id=run.pipeline_id,
        status=run.status.value,
        inputs=run.inputs or {},
        summary=run.summary,
        error=run.error,
        started_at=run.started_at,
        finished_at=run.finished_at,
        created_at=run.created_at,
        updated_at=run.updated_at,
        steps=[
            _build_step_run_response(step_run)
            for step_run in step_runs
        ],
    )
@@ -0,0 +1,50 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import ExecutionRun, Pipeline, User, UserRole
from app.schemas.execution_sch import ExecutionRunListItemResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Executions"])
@router.get("/", response_model=list[ExecutionRunListItemResponse])
async def list_executions(
    request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List execution runs visible to the caller, newest first.

    Admins see every run. Other users see runs they initiated plus —
    for runs with no recorded initiator — runs of pipelines they created
    (mirrors the ownership rule in get_execution).
    """
    trace_id = getattr(request.state, "traceId", None)
    query = select(ExecutionRun).order_by(ExecutionRun.created_at.desc())
    if current_user.role != UserRole.ADMIN:
        # Join to Pipeline only when we need the creator-fallback ownership check.
        query = query.join(Pipeline, Pipeline.id == ExecutionRun.pipeline_id).where(
            or_(
                ExecutionRun.initiated_by == current_user.id,
                and_(
                    ExecutionRun.initiated_by.is_(None),
                    Pipeline.created_by == current_user.id,
                ),
            )
        )
    query = query.limit(limit).offset(offset)
    result = await session.execute(query)
    runs = list(result.scalars().all())
    log_business_event(
        "executions_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        limit=limit,
        offset=offset,
        result_count=len(runs),
    )
    return runs
+9
View File
@@ -0,0 +1,9 @@
from fastapi import APIRouter
from app.api.executions.get_execution import router as get_execution_router
from app.api.executions.list_executions import router as list_executions_router
# Aggregate router for all /v1/executions endpoints.
router = APIRouter(prefix="/v1/executions", tags=["Executions"])
router.include_router(list_executions_router)
router.include_router(get_execution_router)
View File
+7
View File
@@ -0,0 +1,7 @@
from fastapi import APIRouter
router = APIRouter()
@router.get("/ping")
async def ping():
    """Liveness probe: always reports that the service is reachable."""
    payload = {"status": "ok"}
    return payload
+3
View File
@@ -0,0 +1,3 @@
from app.api.pipelines.router import router
__all__ = ["router"]
+114
View File
@@ -0,0 +1,114 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import PipelineGenerateRequest, PipelineGenerateResponse
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.services.pipeline_service import PipelineService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/generate", response_model=PipelineGenerateResponse)
async def generate_pipeline(
    payload: PipelineGenerateRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Turn a chat message into a generated pipeline within a dialog.

    Flow: append the user message to the dialog, run the generation
    service (optionally continuing from the dialog's last pipeline), then
    append the assistant reply. Dialog access problems map to 403/404;
    an unreachable Ollama backend degrades to a "cannot_build" response
    instead of a 5xx.
    """
    trace_id = getattr(request.state, "traceId", None)
    log_business_event(
        "pipeline_prompt_received",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        message_len=len(payload.message),
        capability_ids_count=len(payload.capability_ids or []),
    )
    service = PipelineService(session)
    dialog_service = PipelineDialogService(session)
    try:
        await dialog_service.append_user_message(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
            content=payload.message,
        )
        dialog = await dialog_service.get_dialog(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_prompt_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=detail,
        )
        # NOTE(review): dispatching on the substring "denied" in the error
        # message is fragile — distinct exception subclasses (NotFound vs
        # Forbidden) would make this robust to message wording changes.
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    try:
        result = await service.generate(
            dialog_id=payload.dialog_id,
            message=payload.message,
            user_id=current_user.id,
            capability_ids=payload.capability_ids,
            previous_pipeline_id=dialog.last_pipeline_id,
        )
    except Exception as exc:
        # NOTE(review): substring-matching "ollama" in arbitrary exception text
        # is fragile; a dedicated OllamaUnavailableError would be safer.
        if "ollama" in str(exc).lower():
            # Degrade gracefully: return a user-facing "cannot build" reply
            # (Russian, per the product's chat language) instead of a 500.
            message_ru = "Не удалось обратиться к локальной модели Ollama. Проверьте OLLAMA_HOST/OLLAMA_MODEL и повторите запрос."
            result = {
                "status": "cannot_build",
                "message_ru": message_ru,
                "chat_reply_ru": message_ru,
                "pipeline_id": None,
                "nodes": [],
                "edges": [],
                "missing_requirements": ["ollama_unavailable"],
                "context_summary": None,
            }
        else:
            raise
    response_payload = PipelineGenerateResponse(**result)
    try:
        await dialog_service.append_assistant_message(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
            content=response_payload.chat_reply_ru or response_payload.message_ru,
            assistant_payload=response_payload.model_dump(mode="json", exclude_none=True),
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_prompt_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=detail,
        )
        # Same fragile string-based status mapping as above (see NOTE).
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    log_business_event(
        "pipeline_prompt_processed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        result_status=response_payload.status,
        pipeline_id=str(response_payload.pipeline_id) if response_payload.pipeline_id else None,
    )
    return response_payload
@@ -0,0 +1,78 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import (
PipelineDialogHistoryResponse,
PipelineDialogMessageResponse,
)
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.get("/dialogs/{dialog_id}/history", response_model=PipelineDialogHistoryResponse)
async def get_pipeline_dialog_history(
    dialog_id: UUID,
    request: Request,
    limit: int = Query(default=30, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Return a paginated message history for one pipeline dialog.

    Access errors from the dialog service map to 403 (access denied) or
    404 (dialog not found), mirroring the handling in generate_pipeline.
    """
    trace_id = getattr(request.state, "traceId", None)
    dialog_service = PipelineDialogService(session)
    try:
        dialog, messages = await dialog_service.get_history(
            dialog_id=dialog_id,
            user_id=current_user.id,
            limit=limit,
            offset=offset,
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_dialog_history_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(dialog_id),
            reason=detail,
        )
        # NOTE(review): status chosen by substring-matching the error message;
        # distinct exception types would make this robust to wording changes.
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    response = PipelineDialogHistoryResponse(
        dialog_id=dialog.id,
        title=dialog.title,
        messages=[
            PipelineDialogMessageResponse(
                id=message.id,
                role=message.role.value,
                content=message.content,
                assistant_payload=message.assistant_payload,
                created_at=message.created_at,
            )
            for message in messages
        ],
    )
    log_business_event(
        "pipeline_dialog_history_viewed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(dialog.id),
        limit=limit,
        offset=offset,
        message_count=len(response.messages),
    )
    return response
+52
View File
@@ -0,0 +1,52 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import PipelineDialogListItemResponse
from app.services.pipeline_dialog_service import PipelineDialogService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.get("/dialogs", response_model=list[PipelineDialogListItemResponse])
async def list_pipeline_dialogs(
    request: Request,
    limit: int = Query(default=20, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List the current user's pipeline dialogs with limit/offset paging."""
    trace_id = getattr(request.state, "traceId", None)
    service = PipelineDialogService(session)
    dialogs = await service.list_dialogs(
        user_id=current_user.id,
        limit=limit,
        offset=offset,
    )
    items: list[PipelineDialogListItemResponse] = []
    for dialog in dialogs:
        items.append(
            PipelineDialogListItemResponse(
                dialog_id=dialog.id,
                title=dialog.title,
                last_status=dialog.last_status,
                last_pipeline_id=dialog.last_pipeline_id,
                last_message_preview=dialog.last_message_preview,
                created_at=dialog.created_at,
                updated_at=dialog.updated_at,
            )
        )
    log_business_event(
        "pipeline_dialogs_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        limit=limit,
        offset=offset,
        result_count=len(items),
    )
    return items
+54
View File
@@ -0,0 +1,54 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import DialogResetRequest, DialogResetResponse
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.services.pipeline_service import PipelineService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/dialog/reset", response_model=DialogResetResponse)
async def reset_pipeline_dialog(
    payload: DialogResetRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Reset a pipeline dialog's state.

    Access is checked first via ``PipelineDialogService.get_dialog``; the
    actual reset is delegated to ``PipelineService.reset_dialog``.

    Raises:
        HTTPException: 403 when the access error text mentions "denied",
            404 for any other ``DialogAccessError``.
    """
    trace_id = getattr(request.state, "traceId", None)
    dialog_service = PipelineDialogService(session)
    try:
        # Ownership/access check only; the returned dialog object is unused.
        await dialog_service.get_dialog(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_dialog_reset_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=detail,
        )
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    service = PipelineService(session)
    result = await service.reset_dialog(payload.dialog_id)
    log_business_event(
        "pipeline_dialog_reset",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        # reset_dialog's return shape is not visible here; guard accordingly.
        result_status=result.get("status") if isinstance(result, dict) else None,
    )
    return DialogResetResponse(**result)
+17
View File
@@ -0,0 +1,17 @@
from fastapi import APIRouter
from app.api.pipelines.generate import router as generate_router
from app.api.pipelines.get_dialog_history import router as get_dialog_history_router
from app.api.pipelines.list_dialogs import router as list_dialogs_router
from app.api.pipelines.reset_dialog import router as reset_dialog_router
from app.api.pipelines.run import router as run_router
from app.api.pipelines.update_graph import router as update_graph_router
router = APIRouter(prefix="/v1/pipelines", tags=["Pipelines"])
router.include_router(generate_router)
router.include_router(list_dialogs_router)
router.include_router(get_dialog_history_router)
router.include_router(reset_dialog_router)
router.include_router(run_router)
router.include_router(update_graph_router)
+83
View File
@@ -0,0 +1,83 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Pipeline, User, UserRole
from app.schemas.execution_sch import RunPipelineRequest, RunPipelineResponse
from app.services.execution_service import ExecutionService, ExecutionServiceError
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/{pipeline_id}/run", response_model=RunPipelineResponse, status_code=status.HTTP_202_ACCEPTED)
async def run_pipeline(
    pipeline_id: UUID,
    payload: RunPipelineRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Create an execution run for a pipeline and start it in the background.

    Responds 202 with the new run's identifiers; execution itself proceeds
    asynchronously via ``ExecutionService.start_background_execution``.

    Raises:
        HTTPException: 404 when the pipeline is missing or (for non-admins)
            not owned by the caller; 404/400 mapped from
            ``ExecutionServiceError`` by its message text.
    """
    trace_id = getattr(request.state, "traceId", None)
    pipeline = await session.get(Pipeline, pipeline_id)
    if pipeline is None:
        log_business_event(
            "pipeline_run_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    # Non-owners get the same 404 as a missing pipeline so the endpoint does
    # not leak which pipeline ids exist.
    if current_user.role != UserRole.ADMIN and pipeline.created_by != current_user.id:
        log_business_event(
            "pipeline_run_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_owned",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    service = ExecutionService(session)
    try:
        run = await service.create_run(
            pipeline_id=pipeline_id,
            inputs=payload.inputs,
            initiated_by=current_user.id,
        )
    except ExecutionServiceError as exc:
        message = str(exc)
        log_business_event(
            "pipeline_run_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason=message,
        )
        # NOTE(review): message-text dispatch is fragile; prefer typed errors.
        if "not found" in message.lower():
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message) from exc
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) from exc
    # Fire-and-forget: the run is persisted first, then executed out of band.
    ExecutionService.start_background_execution(run.id)
    log_business_event(
        "pipeline_run_started",
        trace_id=trace_id,
        user_id=str(current_user.id),
        pipeline_id=str(run.pipeline_id),
        run_id=str(run.id),
        inputs_count=len(payload.inputs or {}),
    )
    return RunPipelineResponse(
        run_id=run.id,
        pipeline_id=run.pipeline_id,
        status=run.status.value,
    )
+205
View File
@@ -0,0 +1,205 @@
from __future__ import annotations
from collections import defaultdict
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Pipeline, User, UserRole
from app.schemas.pipeline_chat_sch import (
PipelineGraphUpdateRequest,
PipelineGraphUpdateResponse,
)
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
def _graph_has_cycle(steps: set[int], edges: list[dict[str, int | str]]) -> bool:
adjacency: dict[int, set[int]] = {step: set() for step in steps}
for edge in edges:
src = edge["from_step"]
dst = edge["to_step"]
if isinstance(src, int) and isinstance(dst, int):
adjacency.setdefault(src, set()).add(dst)
visiting: set[int] = set()
visited: set[int] = set()
def dfs(step: int) -> bool:
if step in visiting:
return True
if step in visited:
return False
visiting.add(step)
for neighbor in adjacency.get(step, set()):
if dfs(neighbor):
return True
visiting.remove(step)
visited.add(step)
return False
return any(dfs(step) for step in adjacency)
def _sync_node_connections(
nodes: list[dict[str, object]],
edges: list[dict[str, int | str]],
) -> None:
incoming_by_step: dict[int, set[int]] = defaultdict(set)
outgoing_by_step: dict[int, set[int]] = defaultdict(set)
incoming_types_by_step: dict[int, set[tuple[int, str]]] = defaultdict(set)
for edge in edges:
src = edge.get("from_step")
dst = edge.get("to_step")
edge_type = edge.get("type")
if not isinstance(src, int) or not isinstance(dst, int) or not isinstance(edge_type, str):
continue
outgoing_by_step[src].add(dst)
incoming_by_step[dst].add(src)
incoming_types_by_step[dst].add((src, edge_type))
for node in nodes:
step = node.get("step")
if not isinstance(step, int):
node["input_connected_from"] = []
node["output_connected_to"] = []
node["input_data_type_from_previous"] = []
continue
node["input_connected_from"] = sorted(incoming_by_step.get(step, set()))
node["output_connected_to"] = sorted(outgoing_by_step.get(step, set()))
node["input_data_type_from_previous"] = [
{"from_step": src, "type": edge_type}
for src, edge_type in sorted(incoming_types_by_step.get(step, set()))
]
@router.patch("/{pipeline_id}/graph", response_model=PipelineGraphUpdateResponse)
async def update_pipeline_graph(
    pipeline_id: UUID,
    payload: PipelineGraphUpdateRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Replace a pipeline's node/edge graph after structural validation.

    Validation rejects: non-integer or duplicate node steps, edges with
    non-integer endpoints, edges pointing at unknown nodes, self-loops, empty
    edge types, duplicate (src, dst, type) edges, and any directed cycle.
    On success, per-node connection fields are recomputed and persisted.

    Raises:
        HTTPException: 404 when the pipeline is missing or (for non-admins)
            not owned by the caller; 422 with the deduplicated error list when
            the graph is invalid.
    """
    trace_id = getattr(request.state, "traceId", None)
    pipeline = await session.get(Pipeline, pipeline_id)
    if pipeline is None:
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    # Non-owners receive the same 404 as a missing pipeline to avoid leaking
    # which pipeline ids exist.
    if current_user.role != UserRole.ADMIN and pipeline.created_by != current_user.id:
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_owned",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    nodes = [node.model_dump(mode="json") for node in payload.nodes]
    edges = [edge.model_dump(mode="json") for edge in payload.edges]
    validation_errors: list[str] = []
    # Pass 1: collect the set of valid, unique node steps.
    steps: set[int] = set()
    for node in nodes:
        step = node.get("step")
        if not isinstance(step, int):
            validation_errors.append("graph: invalid_step")
            continue
        if step in steps:
            validation_errors.append(f"graph: duplicate_node_step:{step}")
            continue
        steps.add(step)
    # Pass 2: normalize edges, checking endpoints against the valid steps.
    normalized_edges: list[dict[str, int | str]] = []
    seen_edges: set[tuple[int, int, str]] = set()
    for edge in edges:
        src = edge.get("from_step")
        dst = edge.get("to_step")
        edge_type = str(edge.get("type") or "").strip()
        if not isinstance(src, int) or not isinstance(dst, int):
            validation_errors.append("graph: invalid_edge_reference")
            continue
        if src not in steps or dst not in steps:
            validation_errors.append(f"graph: edge_to_missing_node:{src}->{dst}")
            continue
        if src == dst:
            validation_errors.append(f"graph: self_loop:{src}")
            continue
        if not edge_type:
            validation_errors.append("graph: invalid_edge_type")
            continue
        edge_key = (src, dst, edge_type)
        if edge_key in seen_edges:
            validation_errors.append(
                f"graph: duplicate_edge:{src}->{dst}:{edge_type}"
            )
            continue
        seen_edges.add(edge_key)
        normalized_edges.append({"from_step": src, "to_step": dst, "type": edge_type})
    # Cycle check runs only on edges that passed the structural checks.
    if normalized_edges and _graph_has_cycle(steps, normalized_edges):
        validation_errors.append("graph: cycle")
    if validation_errors:
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="invalid_graph",
            errors=sorted(set(validation_errors)),
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Invalid pipeline graph",
                "errors": sorted(set(validation_errors)),
            },
        )
    # Keep each node's derived connection fields consistent with the edges.
    _sync_node_connections(nodes, normalized_edges)
    pipeline.nodes = nodes
    pipeline.edges = normalized_edges
    await session.commit()
    await session.refresh(pipeline)
    log_business_event(
        "pipeline_graph_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        pipeline_id=str(pipeline.id),
        nodes_count=len(nodes),
        edges_count=len(normalized_edges),
    )
    return PipelineGraphUpdateResponse(
        pipeline_id=pipeline.id,
        nodes=pipeline.nodes,
        edges=pipeline.edges,
        updated_at=pipeline.updated_at,
    )
+54
View File
@@ -0,0 +1,54 @@
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.delete("/{user_id}", status_code=status.HTTP_200_OK)
async def delete_user(
    user_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Deactivate (soft-delete) a user account.

    Admins may deactivate any account; other users only their own.
    """
    trace_id = getattr(request.state, "traceId", None)

    def _log_rejected(reason: str) -> None:
        # Shared audit entry for every rejection path.
        log_business_event(
            "user_deactivation_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            target_user_id=str(user_id),
            reason=reason,
        )

    is_allowed = current_user.role == UserRole.ADMIN or current_user.id == user_id
    if not is_allowed:
        _log_rejected("forbidden")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Нет доступа")
    target = await session.get(User, user_id)
    if target is None:
        _log_rejected("target_user_not_found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    # Soft delete: the row is kept, only the active flag is cleared.
    target.is_active = False
    await session.commit()
    log_business_event(
        "user_deactivated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        target_user_id=str(target.id),
    )
    return {"message": "Пользователь успешно деактивирован"}
+21
View File
@@ -0,0 +1,21 @@
from fastapi import APIRouter, Depends, Request
from app.models import User
from app.schemas.users_sch import UserResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.get("/me", response_model=UserResponse)
async def get_me(
    request: Request,
    current_user: User = Depends(get_current_user),
):
    """Return the authenticated user's own profile."""
    log_business_event(
        "user_profile_viewed",
        trace_id=getattr(request.state, "traceId", None),
        user_id=str(current_user.id),
    )
    return current_user
+27
View File
@@ -0,0 +1,27 @@
from fastapi import APIRouter, Depends, Request
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.business_logger import log_business_event
from app.utils.token_manager import check_permissions
from app.schemas.users_sch import UserResponse
router = APIRouter(tags=["Users"])
@router.get("/", response_model=list[UserResponse])
async def list_users(
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(check_permissions([UserRole.ADMIN])),
):
    """Return every user account; restricted to admins via check_permissions."""
    users = (await session.execute(select(User))).scalars().all()
    log_business_event(
        "users_listed",
        trace_id=getattr(request.state, "traceId", None),
        user_id=str(current_user.id),
        result_count=len(users),
    )
    return users
+51
View File
@@ -0,0 +1,51 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.users_sch import UserResponse, UserUpdateMe
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.patch("/me", response_model=UserResponse)
async def update_me(
    data: UserUpdateMe,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Partially update the authenticated user's own profile.

    Only fields explicitly set in the request body are applied
    (``exclude_unset=True``). Changing the email to one already used by
    another account is rejected with 409.
    """
    trace_id = getattr(request.state, "traceId", None)
    if data.email and data.email != current_user.email:
        # NOTE(review): check-then-commit is racy under concurrent requests;
        # a unique constraint on users.email should back this up — confirm.
        stmt = select(User).where(User.email == data.email)
        result = await session.execute(stmt)
        if result.scalar_one_or_none():
            log_business_event(
                "user_profile_update_rejected",
                trace_id=trace_id,
                user_id=str(current_user.id),
                reason="email_already_exists",
                requested_email=data.email,
            )
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Пользователь с таким email уже существует",
            )
    update_data = data.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        setattr(current_user, key, value)
    await session.commit()
    await session.refresh(current_user)
    log_business_event(
        "user_profile_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        updated_fields=sorted(update_data.keys()),
    )
    return current_user
+44
View File
@@ -0,0 +1,44 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.users_sch import PasswordUpdate
from app.utils.business_logger import log_business_event
from app.utils.hashing import hash_password, verify_password
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.patch("/me/password", status_code=status.HTTP_200_OK)
async def update_password(
    data: PasswordUpdate,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Change the caller's password after verifying the current one."""
    trace_id = getattr(request.state, "traceId", None)
    current_password_ok = verify_password(data.old_password, current_user.hashed_password)
    if not current_password_ok:
        log_business_event(
            "user_password_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            reason="invalid_current_password",
        )
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Неверный текущий пароль",
        )
    # Store only the hash of the new password.
    current_user.hashed_password = hash_password(data.new_password)
    await session.commit()
    log_business_event(
        "user_password_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
    )
    return {"message": "Пароль успешно обновлен"}
+51
View File
@@ -0,0 +1,51 @@
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.users_sch import UserResponse, UserUpdate
from app.utils.business_logger import log_business_event
from app.utils.token_manager import check_permissions
router = APIRouter(tags=["Users"])
@router.patch("/{user_id}", response_model=UserResponse)
async def update_user(
    user_id: UUID,
    data: UserUpdate,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(check_permissions([UserRole.ADMIN])),
):
    """Admin-only partial update of an arbitrary user account.

    Only fields explicitly set in the request body are applied
    (``exclude_unset=True``).

    Raises:
        HTTPException: 404 when the target user does not exist, 409 when the
            requested email already belongs to another account.
    """
    trace_id = getattr(request.state, "traceId", None)
    user = await session.get(User, user_id)
    if user is None:
        log_business_event(
            "user_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            target_user_id=str(user_id),
            reason="target_user_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    update_data = data.model_dump(exclude_unset=True)
    # Mirror the duplicate-email guard used by PATCH /me so a conflicting
    # email yields a clean 409 instead of surfacing as a database error.
    new_email = update_data.get("email")
    if new_email and new_email != user.email:
        result = await session.execute(select(User).where(User.email == new_email))
        if result.scalar_one_or_none():
            log_business_event(
                "user_update_rejected",
                trace_id=trace_id,
                user_id=str(current_user.id),
                target_user_id=str(user_id),
                reason="email_already_exists",
                requested_email=new_email,
            )
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Пользователь с таким email уже существует",
            )
    for key, value in update_data.items():
        setattr(user, key, value)
    await session.commit()
    await session.refresh(user)
    log_business_event(
        "user_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        target_user_id=str(user.id),
        updated_fields=sorted(update_data.keys()),
    )
    return user
+135
View File
@@ -0,0 +1,135 @@
import asyncio
import os
from sqlalchemy import select, text
# Important: import all ORM models before create_all() so SQLAlchemy metadata is complete.
from app.models import (
Action,
Base,
Capability,
DialogMessageRole,
ExecutionRun,
ExecutionStepRun,
Pipeline,
PipelineDialog,
PipelineDialogMessage,
User,
UserRole,
)
from app.core.database.session import SessionLocal, engine
from app.utils.hashing import hash_password
async def init_db():
    """Create ORM tables, apply idempotent schema-drift fixes, seed the admin.

    Safe to run repeatedly: the DDL uses IF (NOT) EXISTS guards, and the admin
    user is only created when ``ADMIN_EMAIL``/``ADMIN_PASSWORD`` are set and no
    user with that email exists yet.
    """
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
        # Best-effort schema drift handling without requiring Alembic.
        # Use DO blocks so missing tables don't abort the whole transaction (and roll back create_all()).
        await conn.execute(
            text(
                """
                DO $$
                DECLARE
                    cap_constraint_name TEXT;
                    admin_user_id UUID;
                BEGIN
                    IF to_regclass('public.actions') IS NOT NULL THEN
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS is_deleted BOOLEAN NOT NULL DEFAULT FALSE;
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS ingest_status VARCHAR(32) NOT NULL DEFAULT 'SUCCEEDED';
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS ingest_error TEXT;
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS user_id UUID REFERENCES users(id) ON DELETE CASCADE;
                        CREATE INDEX IF NOT EXISTS ix_actions_method_path ON actions (method, path);
                        CREATE INDEX IF NOT EXISTS ix_actions_is_deleted ON actions (is_deleted);
                        CREATE INDEX IF NOT EXISTS ix_actions_ingest_status ON actions (ingest_status);
                        CREATE INDEX IF NOT EXISTS ix_actions_user_id ON actions (user_id);
                    END IF;
                    IF to_regclass('public.capabilities') IS NOT NULL THEN
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS type VARCHAR(50) DEFAULT 'ATOMIC';
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS recipe JSONB;
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS user_id UUID REFERENCES users(id) ON DELETE CASCADE;
                        ALTER TABLE capabilities ALTER COLUMN action_id DROP NOT NULL;
                        CREATE INDEX IF NOT EXISTS ix_capabilities_type ON capabilities (type);
                        CREATE INDEX IF NOT EXISTS ix_capabilities_user_id ON capabilities (user_id);
                        FOR cap_constraint_name IN
                            SELECT c.conname
                            FROM pg_constraint c
                            JOIN pg_class t ON t.oid = c.conrelid
                            JOIN pg_namespace ns ON ns.oid = t.relnamespace
                            WHERE ns.nspname = 'public'
                              AND t.relname = 'capabilities'
                              AND c.contype = 'u'
                              AND array_length(c.conkey, 1) = 1
                              AND c.conkey[1] = (
                                  SELECT a.attnum
                                  FROM pg_attribute a
                                  WHERE a.attrelid = t.oid
                                    AND a.attname = 'action_id'
                                    AND a.attnum > 0
                                    AND NOT a.attisdropped
                                  LIMIT 1
                              )
                        LOOP
                            EXECUTE format('ALTER TABLE capabilities DROP CONSTRAINT IF EXISTS %I', cap_constraint_name);
                        END LOOP;
                        CREATE UNIQUE INDEX IF NOT EXISTS uq_capabilities_user_action
                            ON capabilities (user_id, action_id)
                            WHERE action_id IS NOT NULL;
                    END IF;
                    IF to_regclass('public.users') IS NOT NULL THEN
                        SELECT id
                        INTO admin_user_id
                        FROM users
                        WHERE role::text = 'ADMIN'
                        ORDER BY created_at ASC
                        LIMIT 1;
                        IF admin_user_id IS NOT NULL THEN
                            IF to_regclass('public.actions') IS NOT NULL THEN
                                UPDATE actions SET user_id = admin_user_id WHERE user_id IS NULL;
                            END IF;
                            IF to_regclass('public.capabilities') IS NOT NULL THEN
                                UPDATE capabilities SET user_id = admin_user_id WHERE user_id IS NULL;
                            END IF;
                        END IF;
                    END IF;
                    IF to_regclass('public.pipeline_dialogs') IS NOT NULL THEN
                        CREATE INDEX IF NOT EXISTS ix_pipeline_dialogs_user_updated_at_desc
                            ON pipeline_dialogs (user_id, updated_at DESC);
                    END IF;
                    IF to_regclass('public.pipeline_dialog_messages') IS NOT NULL THEN
                        CREATE INDEX IF NOT EXISTS ix_pipeline_dialog_messages_dialog_created_at_asc
                            ON pipeline_dialog_messages (dialog_id, created_at ASC);
                    END IF;
                END $$;
                """
            )
        )
    async with SessionLocal() as session:
        # Seed the first admin only when both credentials are configured.
        admin_email = os.getenv("ADMIN_EMAIL")
        admin_password = os.getenv("ADMIN_PASSWORD")
        admin_fullname = os.getenv("ADMIN_FULLNAME", "System Admin")
        if admin_email and admin_password:
            result = await session.execute(
                select(User).where(User.email == admin_email)
            )
            existing_admin = result.scalar_one_or_none()
            if not existing_admin:
                new_admin = User(
                    email=admin_email,
                    hashed_password=hash_password(admin_password),
                    full_name=admin_fullname,
                    role=UserRole.ADMIN,
                    is_active=True
                )
                session.add(new_admin)
                await session.commit()
if __name__ == "__main__":
asyncio.run(init_db())
+22
View File
@@ -0,0 +1,22 @@
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
import os
# A full DSN in DATABASE_URL wins; otherwise one is assembled from the
# individual DB_* variables with local-development defaults.
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    DB_HOST = os.getenv("DB_HOST", "localhost")
    DB_PORT = os.getenv("DB_PORT", "5432")
    DB_NAME = os.getenv("DB_NAME", "postgres")
    DB_USER = os.getenv("DB_USER", "postgres")
    DB_PASSWORD = os.getenv("DB_PASSWORD", "postgres")
    DATABASE_URL = f"postgresql+asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"

# pool_pre_ping revalidates pooled connections before use, so stale
# connections after a database restart are replaced instead of erroring.
engine = create_async_engine(DATABASE_URL, pool_pre_ping=True)
# expire_on_commit=False keeps ORM objects readable after commit without a
# lazy refresh (required for comfortable async usage).
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency that yields one AsyncSession per request."""
    async with SessionLocal() as session:
        yield session
+100
View File
@@ -0,0 +1,100 @@
from __future__ import annotations
import json
import logging
import os
from datetime import datetime, timezone
from typing import Any
from app.utils.log_context import get_log_context
# Service identifier stamped onto every log line; overridable per deployment.
SERVICE_NAME = os.getenv("APP_SERVICE_NAME", "backend-api")

# Attribute names present on every LogRecord (captured from a throwaway
# instance); anything on a record beyond these is treated as caller-supplied
# "extra" data and serialized into the JSON payload.
LOG_RECORD_RESERVED_FIELDS = set(
    logging.LogRecord(
        name="",
        level=0,
        pathname="",
        lineno=0,
        msg="",
        args=(),
        exc_info=None,
    ).__dict__.keys()
) | {"message", "asctime"}
def _normalize_extra_value(value: Any) -> Any:
if isinstance(value, (str, int, float, bool)) or value is None:
return value
if isinstance(value, (list, tuple)):
return [_normalize_extra_value(item) for item in value]
if isinstance(value, dict):
normalized: dict[str, Any] = {}
for key, nested_value in value.items():
normalized[str(key)] = _normalize_extra_value(nested_value)
return normalized
return str(value)
class JsonFormatter(logging.Formatter):
    """Formats records as single-line JSON objects for log aggregation."""

    def format(self, record: logging.LogRecord) -> str:
        # Base envelope; the timestamp is generated here (UTC with a trailing
        # "Z") rather than derived from record.created.
        payload: dict[str, Any] = {
            "timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "service_name": SERVICE_NAME,
        }
        # Well-known structured fields are emitted first, in a fixed order,
        # and only when actually set on the record.
        for key in (
            "event",
            "trace_id",
            "path",
            "method",
            "status_code",
            "duration_ms",
            "user_id",
            "email",
            "role",
            "dialog_id",
            "pipeline_id",
            "run_id",
            "result_status",
            "message_len",
            "capability_ids_count",
            "reason",
        ):
            value = getattr(record, key, None)
            if value is not None:
                payload[key] = value
        # Any remaining non-reserved record attributes become extra fields,
        # coerced to JSON-safe values.
        for key, value in record.__dict__.items():
            if key in LOG_RECORD_RESERVED_FIELDS or key in payload:
                continue
            payload[key] = _normalize_extra_value(value)
        if record.exc_info:
            payload["exception"] = self.formatException(record.exc_info)
        # ensure_ascii keeps output 7-bit safe; non-ASCII text is \u-escaped.
        return json.dumps(payload, ensure_ascii=True)
class RequestContextFilter(logging.Filter):
    """Copies ambient request context onto records missing those attributes."""

    def filter(self, record: logging.LogRecord) -> bool:
        context = get_log_context()
        for key in context:
            # Explicit record attributes win over ambient context.
            if getattr(record, key, None) is None:
                setattr(record, key, context[key])
        # Never suppress a record; this filter only enriches it.
        return True
def configure_logging() -> None:
    """Install a single JSON stdout handler on the root logger.

    The level comes from ``LOG_LEVEL`` (default ``INFO``). Unknown level names
    fall back to INFO instead of raising ``ValueError`` at process startup,
    which the previous ``setLevel(level)`` call did for a typo'd value.
    """
    level = os.getenv("LOG_LEVEL", "INFO").upper()
    # getLevelNamesMapping() lists valid level names (Python 3.11+; the
    # deployment image pins python:3.12).
    if level not in logging.getLevelNamesMapping():
        level = "INFO"
    root_logger = logging.getLogger()
    # Replace any pre-existing handlers so every record goes through the
    # JSON formatter exactly once.
    root_logger.handlers.clear()
    root_logger.setLevel(level)
    handler = logging.StreamHandler()
    handler.setFormatter(JsonFormatter())
    handler.addFilter(RequestContextFilter())
    root_logger.addHandler(handler)
+188
View File
@@ -0,0 +1,188 @@
import sys
import asyncio
import os
import uuid
import logging
from time import perf_counter
from contextlib import asynccontextmanager
from prometheus_fastapi_instrumentator import Instrumentator
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from app.api.ping.router import router as health_router
from app.api.actions.router import router as actions_router
from app.api.capabilities.router import router as capabilities_router
from app.api.executions.router import router as executions_router
from app.api.pipelines.router import router as pipelines_router
from app.utils.error_handlers import (
validation_exception_handler,
http_exception_handler,
unhandled_exception_handler,
)
from app.utils.log_context import clear_log_context, set_request_context
from app.core.logging import configure_logging
from app.core.database.init import init_db
try:
from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend
from redis import asyncio as aioredis
except ModuleNotFoundError:
FastAPICache = None
RedisBackend = None
aioredis = None
try:
from app.api.auth.register import router as auth_router
from app.api.auth.login import router as login_router
except ModuleNotFoundError as exc:
auth_router = None
login_router = None
print(f"Auth routes are disabled: {exc}")
try:
from app.api.users.get_me import router as get_me_router
from app.api.users.list_users import router as list_users_router
from app.api.users.update_me import router as update_me_router
from app.api.users.update_user import router as update_user_router
from app.api.users.update_password import router as update_password_router
from app.api.users.delete_user import router as delete_user_router
except ModuleNotFoundError as exc:
get_me_router = None
list_users_router = None
update_me_router = None
update_user_router = None
update_password_router = None
delete_user_router = None
print(f"User routes are disabled: {exc}")
# NOTE(review): the selector event loop policy is forced on Windows —
# presumably for compatibility with the async DB/Redis clients; confirm.
if sys.platform == "win32":
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

# Configure JSON logging before any module emits records.
configure_logging()
http_logger = logging.getLogger("app.http")
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App startup/shutdown: initialize the database and optional Redis cache."""
    try:
        await init_db()
    except Exception as e:
        # Startup proceeds even if DB init fails so the failure is visible in
        # logs instead of crash-looping the container.
        print(f"Database initialization error: {e}")
    # REDIS_URL wins; otherwise build one from REDIS_HOST/REDIS_PORT.
    redis_host = os.getenv("REDIS_HOST", "localhost")
    redis_port = os.getenv("REDIS_PORT", "6379")
    redis_url = os.getenv("REDIS_URL", f"redis://{redis_host}:{redis_port}")
    redis = None
    # Cache is optional: enabled only when fastapi-cache2 + redis imported.
    if FastAPICache and RedisBackend and aioredis:
        try:
            redis = aioredis.from_url(redis_url, encoding="utf8", decode_responses=True)
            FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache")
            print(f"Redis initialized successfully at {redis_url}!")
        except Exception as e:
            print(f"Redis initialization error: {e}")
    else:
        print("fastapi-cache2 is not installed; Redis cache is disabled.")
    yield
    if redis:
        # NOTE(review): redis-py 5+ deprecates close() in favour of aclose();
        # confirm the installed client version.
        await redis.close()
# redirect_slashes=False: /path and /path/ are treated as distinct routes.
app = FastAPI(lifespan=lifespan, redirect_slashes=False)


@app.middleware("http")
async def add_trace_id(request, call_next):
    """Attach a trace id to every request and emit structured access logs."""
    # Honour an inbound X-Trace-Id so traces can span services; mint one otherwise.
    trace_id = request.headers.get("X-Trace-Id") or str(uuid.uuid4())
    request.state.traceId = trace_id
    set_request_context(
        trace_id=trace_id,
        path=request.url.path,
        method=request.method,
    )
    started_at = perf_counter()
    try:
        try:
            response = await call_next(request)
        except Exception:
            # Log the failure with timing, then let the registered exception
            # handlers produce the HTTP response.
            duration_ms = int((perf_counter() - started_at) * 1000)
            http_logger.exception(
                "http_request_failed",
                extra={
                    "event": "http_request_failed",
                    "trace_id": trace_id,
                    "method": request.method,
                    "path": request.url.path,
                    "duration_ms": duration_ms,
                },
            )
            raise
        duration_ms = int((perf_counter() - started_at) * 1000)
        http_logger.info(
            "http_request",
            extra={
                "event": "http_request",
                "trace_id": trace_id,
                "method": request.method,
                "path": request.url.path,
                "status_code": response.status_code,
                "duration_ms": duration_ms,
            },
        )
        # Echo the trace id so clients can correlate responses with logs.
        response.headers["X-Trace-Id"] = trace_id
        return response
    finally:
        # Always clear the ambient logging context so state never leaks
        # between requests handled by the same worker.
        clear_log_context()
# Central exception handling: validation errors, HTTP errors, and a
# catch-all for anything unhandled.
app.add_exception_handler(RequestValidationError, validation_exception_handler)
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(Exception, unhandled_exception_handler)

from fastapi.middleware.cors import CORSMiddleware

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# not honoured by browsers for credentialed requests per the CORS spec; pin
# concrete origins if cookie/credential auth is used — confirm.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(health_router, prefix="/api")
app.include_router(actions_router, prefix="/api")
app.include_router(capabilities_router, prefix="/api")
app.include_router(pipelines_router, prefix="/api")
app.include_router(executions_router, prefix="/api")

# Optional router groups are mounted only when their modules imported
# successfully (see the try/except import fallbacks above).
if auth_router is not None and login_router is not None:
    app.include_router(auth_router, prefix="/api")
    app.include_router(login_router, prefix="/api")

if all(
    router is not None
    for router in (
        get_me_router,
        list_users_router,
        update_me_router,
        update_user_router,
        update_password_router,
        delete_user_router,
    )
):
    app.include_router(get_me_router, prefix="/api/users")
    app.include_router(list_users_router, prefix="/api/users")
    app.include_router(update_me_router, prefix="/api/users")
    app.include_router(update_user_router, prefix="/api/users")
    app.include_router(update_password_router, prefix="/api/users")
    app.include_router(delete_user_router, prefix="/api/users")

# Prometheus metrics endpoint (exposed at /metrics).
Instrumentator().instrument(app).expose(app)
+35
View File
@@ -0,0 +1,35 @@
from app.models.base import Base
from app.models.user import User, UserRole
from app.models.action import Action, ActionIngestStatus, HttpMethod
from app.models.capability import Capability
from app.models.execution import (
ExecutionRun,
ExecutionRunStatus,
ExecutionStepRun,
ExecutionStepStatus,
)
from app.models.pipeline import Pipeline, PipelineStatus
from app.models.pipeline_dialog import (
DialogMessageRole,
PipelineDialog,
PipelineDialogMessage,
)
__all__ = [
"Base",
"User",
"UserRole",
"Action",
"ActionIngestStatus",
"HttpMethod",
"Capability",
"ExecutionRun",
"ExecutionRunStatus",
"ExecutionStepRun",
"ExecutionStepStatus",
"Pipeline",
"PipelineStatus",
"DialogMessageRole",
"PipelineDialog",
"PipelineDialogMessage",
]
+115
View File
@@ -0,0 +1,115 @@
from __future__ import annotations
import enum
import uuid
from typing import Any
from sqlalchemy import Boolean, Enum, ForeignKey, Index, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class HttpMethod(str, enum.Enum):
    """HTTP verbs supported for imported actions (stored as a DB enum)."""
    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    PATCH = "PATCH"
    DELETE = "DELETE"
    HEAD = "HEAD"
    OPTIONS = "OPTIONS"
class ActionIngestStatus(str, enum.Enum):
    """Outcome of importing a single action from an OpenAPI spec."""
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
class Action(TimestampMixin, Base):
    """A single HTTP operation imported from an OpenAPI specification.

    Stores the operation identity (method + path), descriptive metadata,
    the extracted parameter/request/response schemas, and the raw spec
    fragment it was built from.  Rows are soft-deleted via ``is_deleted``.
    """

    __tablename__ = "actions"
    __table_args__ = (
        # Lookup by operation identity (e.g. dedup during ingest).
        Index("ix_actions_method_path", "method", "path"),
    )

    # Client-generated UUID primary key.
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    # Rows are removed together with their owner (ON DELETE CASCADE).
    user_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
        index=True,
        comment="Owner of imported action",
    )
    # operationId from the OpenAPI document, when present.
    operation_id: Mapped[str | None] = mapped_column(
        String(255),
        nullable=True,
        index=True,
    )
    method: Mapped[HttpMethod] = mapped_column(
        Enum(HttpMethod, name="http_method"),
        nullable=False,
    )
    path: Mapped[str] = mapped_column(
        String(2048),
        nullable=False,
    )
    base_url: Mapped[str | None] = mapped_column(
        String(2048),
        nullable=True,
    )
    summary: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    tags: Mapped[list[str] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Extracted schema fragments (parameters / requestBody / responses).
    parameters_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    request_body_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    response_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Name of the uploaded spec file this action came from.
    source_filename: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    # Original spec fragment, kept verbatim for re-processing.
    raw_spec: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # native_enum=False stores the value as VARCHAR + CHECK rather than a
    # Postgres enum type (note: `method` above uses a native enum).
    ingest_status: Mapped[ActionIngestStatus] = mapped_column(
        Enum(ActionIngestStatus, name="action_ingest_status", native_enum=False),
        nullable=False,
        default=ActionIngestStatus.SUCCEEDED,
        server_default=ActionIngestStatus.SUCCEEDED.value,
        index=True,
    )
    ingest_error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    # Soft-delete flag; queries must filter on it explicitly.
    is_deleted: Mapped[bool] = mapped_column(
        Boolean,
        nullable=False,
        default=False,
        server_default="false",
        index=True,
    )

    owner = relationship("User", lazy="select")
+21
View File
@@ -0,0 +1,21 @@
from datetime import datetime
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy import DateTime, func
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in the application."""
    pass
class TimestampMixin:
    """Adds server-maintained created_at / updated_at timestamp columns.

    Both default to now() on insert; updated_at is refreshed by SQLAlchemy
    (client-side onupdate) whenever the row is updated through the ORM.
    """

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
+86
View File
@@ -0,0 +1,86 @@
from __future__ import annotations
import enum
import uuid
from typing import Any
from sqlalchemy import Enum, ForeignKey, Index, String, Text, UniqueConstraint
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class CapabilityType(str, enum.Enum):
    """ATOMIC wraps a single Action; COMPOSITE chains capabilities via a recipe."""
    ATOMIC = "ATOMIC"
    COMPOSITE = "COMPOSITE"
class Capability(TimestampMixin, Base):
    """A reusable unit of functionality exposed to pipeline synthesis.

    ATOMIC capabilities are derived from one Action; COMPOSITE ones have no
    action and carry a ``recipe`` describing the steps instead.  At most one
    capability may exist per (user, action) pair.
    """

    __tablename__ = "capabilities"
    __table_args__ = (
        Index("ix_capabilities_action_id", "action_id"),
        # One capability per action per owner.
        UniqueConstraint(
            "user_id",
            "action_id",
            name="uq_capabilities_user_action",
        ),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    user_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
        index=True,
        comment="Owner of capability",
    )
    # NULL for COMPOSITE capabilities.
    action_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("actions.id", ondelete="CASCADE"),
        nullable=True,
        comment="Action source for atomic capability",
    )
    type: Mapped[CapabilityType] = mapped_column(
        Enum(CapabilityType, name="capability_type", native_enum=False),
        nullable=False,
        default=CapabilityType.ATOMIC,
        server_default=CapabilityType.ATOMIC.value,
        index=True,
    )
    name: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        index=True,
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    # JSON Schemas describing the capability's inputs and outputs.
    input_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    output_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Step list for COMPOSITE capabilities; NULL for ATOMIC ones.
    recipe: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Wire-format summary of the underlying HTTP action(s).
    data_format: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Pre-computed context handed to the LLM during pipeline synthesis.
    llm_payload: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )

    action = relationship("Action", lazy="select")
    owner = relationship("User", lazy="select")
+159
View File
@@ -0,0 +1,159 @@
from __future__ import annotations
import enum
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import DateTime, Enum, ForeignKey, Integer, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class ExecutionRunStatus(str, enum.Enum):
    """Lifecycle of a whole pipeline run (PARTIAL_FAILED = some steps failed)."""
    QUEUED = "QUEUED"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    PARTIAL_FAILED = "PARTIAL_FAILED"
class ExecutionStepStatus(str, enum.Enum):
    """Lifecycle of a single step inside a run."""
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    SKIPPED = "SKIPPED"
class ExecutionRun(TimestampMixin, Base):
    """One execution of a pipeline: overall status, inputs, timing, result."""

    __tablename__ = "execution_runs"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    # Runs are deleted together with their pipeline.
    pipeline_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipelines.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Kept (SET NULL) when the initiating user account is deleted.
    initiated_by: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    status: Mapped[ExecutionRunStatus] = mapped_column(
        Enum(ExecutionRunStatus, name="execution_run_status"),
        nullable=False,
        default=ExecutionRunStatus.QUEUED,
        server_default=ExecutionRunStatus.QUEUED.value,
        index=True,
    )
    # External inputs the run was started with; defaults to an empty object.
    inputs: Mapped[dict[str, Any]] = mapped_column(
        JSON,
        nullable=False,
        default=dict,
        server_default="{}",
    )
    summary: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    started_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    finished_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )

    pipeline = relationship("Pipeline", lazy="select")
    # Step rows are loaded eagerly (selectin) and deleted with the run.
    step_runs = relationship(
        "ExecutionStepRun",
        back_populates="run",
        cascade="all, delete-orphan",
        lazy="selectin",
    )
class ExecutionStepRun(TimestampMixin, Base):
    """One step executed within an ExecutionRun, with request/response snapshots."""

    __tablename__ = "execution_step_runs"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    run_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("execution_runs.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # 1-based step number within the run's pipeline graph.
    step: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        index=True,
    )
    name: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    # Snapshot references only — deliberately NOT foreign keys, so step
    # history survives deletion of the capability/action it used.
    capability_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    action_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    status: Mapped[ExecutionStepStatus] = mapped_column(
        Enum(ExecutionStepStatus, name="execution_step_status"),
        nullable=False,
        default=ExecutionStepStatus.PENDING,
        server_default=ExecutionStepStatus.PENDING.value,
        index=True,
    )
    # Inputs after template/reference resolution, as actually used.
    resolved_inputs: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Captured outgoing request and incoming response for debugging/audit.
    request_snapshot: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    response_snapshot: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    started_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    finished_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    duration_ms: Mapped[int | None] = mapped_column(
        Integer,
        nullable=True,
    )

    run = relationship("ExecutionRun", back_populates="step_runs", lazy="select")
+85
View File
@@ -0,0 +1,85 @@
import enum
import uuid
from typing import Any
from sqlalchemy import Enum, ForeignKey, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class PipelineStatus(str, enum.Enum):
    """Pipeline lifecycle: DRAFT -> READY -> ARCHIVED."""
    DRAFT = "DRAFT"
    READY = "READY"
    ARCHIVED = "ARCHIVED"
class Pipeline(TimestampMixin, Base):
    """Scenario layer.

    A collection of nodes and the edges between them — the full structure of
    the graph generated by SynthesisService and rendered on the canvas
    (React Flow).
    """

    __tablename__ = "pipelines"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    name: Mapped[str] = mapped_column(
        String(512),
        nullable=False,
        comment="Человекочитаемое название пайплайна",
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
        comment="Подробное описание того, что делает этот сценарий",
    )
    # Original free-text request that produced this graph.
    user_prompt: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
        comment="Оригинальный текстовый запрос PM из чата, породивший этот граф",
    )
    # Graph nodes; each references a Capability and its per-node parameters.
    nodes: Mapped[list[dict[str, Any]]] = mapped_column(
        JSON,
        nullable=False,
        default=list,
        comment="Список нод графа. Каждая нода ссылается на Capability и хранит индивидуальные параметры",
    )
    # Graph edges; define the DAG execution order.
    edges: Mapped[list[dict[str, Any]]] = mapped_column(
        JSON,
        nullable=False,
        default=list,
        comment="Список рёбер графа. Определяет порядок выполнения нод (DAG)",
    )
    status: Mapped[PipelineStatus] = mapped_column(
        Enum(PipelineStatus, name="pipeline_status"),
        nullable=False,
        default=PipelineStatus.DRAFT,
        server_default=PipelineStatus.DRAFT.value,
        comment="Статус пайплайна: DRAFT → READY → ARCHIVED",
    )
    # Kept (SET NULL) when the creating user is deleted.
    created_by: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
        comment="UUID пользователя (PM), создавшего или запустившего генерацию",
    )

    creator = relationship("User", lazy="select")
    # Dialogs whose last generated pipeline is this one.
    dialogs = relationship(
        "PipelineDialog",
        back_populates="last_pipeline",
        passive_deletes=True,
        lazy="selectin",
    )
+119
View File
@@ -0,0 +1,119 @@
from __future__ import annotations
import enum
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import DateTime, Enum, ForeignKey, Index, String, Text, func
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class DialogMessageRole(str, enum.Enum):
    """Author of a dialog message: the human user or the assistant."""
    USER = "user"
    ASSISTANT = "assistant"
class PipelineDialog(TimestampMixin, Base):
    """A chat thread between a user and the pipeline-synthesis assistant.

    Carries denormalised ``last_*`` columns for cheap list rendering and
    owns its messages (they are deleted together with the dialog).
    """

    __tablename__ = "pipeline_dialogs"
    __table_args__ = (
        # Supports listing a user's dialogs ordered by recency.
        Index("ix_pipeline_dialogs_user_updated_at", "user_id", "updated_at"),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        # Fix: every other model in this package generates its PK
        # client-side; without this default an INSERT that does not set
        # `id` explicitly fails with a NOT NULL violation.
        default=uuid.uuid4,
    )
    user_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    title: Mapped[str | None] = mapped_column(
        String(256),
        nullable=True,
    )
    # Denormalised status of the most recent generation attempt.
    last_status: Mapped[str | None] = mapped_column(
        String(32),
        nullable=True,
    )
    # Most recently generated pipeline; kept (SET NULL) if it is deleted.
    last_pipeline_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipelines.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    last_message_preview: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )

    user = relationship(
        "User",
        back_populates="pipeline_dialogs",
        lazy="select",
    )
    last_pipeline = relationship(
        "Pipeline",
        back_populates="dialogs",
        lazy="select",
    )
    # Messages load eagerly (selectin) and are removed with the dialog.
    messages = relationship(
        "PipelineDialogMessage",
        back_populates="dialog",
        cascade="all, delete-orphan",
        passive_deletes=True,
        lazy="selectin",
    )
class PipelineDialogMessage(Base):
    """A single message within a PipelineDialog.

    Does not use TimestampMixin: messages are immutable, so only an indexed
    ``created_at`` is stored (no ``updated_at``).
    """

    __tablename__ = "pipeline_dialog_messages"
    __table_args__ = (
        # Supports fetching a dialog's messages in chronological order.
        Index(
            "ix_pipeline_dialog_messages_dialog_created_at",
            "dialog_id",
            "created_at",
        ),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    dialog_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipeline_dialogs.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    role: Mapped[DialogMessageRole] = mapped_column(
        Enum(DialogMessageRole, name="dialog_message_role"),
        nullable=False,
        index=True,
    )
    content: Mapped[str] = mapped_column(
        Text,
        nullable=False,
    )
    # Structured assistant output (graph, statuses, etc.); JSONB for querying.
    assistant_payload: Mapped[dict[str, Any] | None] = mapped_column(
        JSONB,
        nullable=True,
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
    )

    dialog = relationship(
        "PipelineDialog",
        back_populates="messages",
        lazy="select",
    )
+39
View File
@@ -0,0 +1,39 @@
import enum
import uuid
from sqlalchemy import Boolean, Enum, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class UserRole(str, enum.Enum):
    """Authorization role stored on each user."""
    USER = "USER"
    ADMIN = "ADMIN"
class User(TimestampMixin, Base):
    """Application account: credentials, role, and owned resources."""

    __tablename__ = "users"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # 320 chars covers the maximum syntactic length of an email address.
    email: Mapped[str] = mapped_column(String(320), unique=True, index=True, nullable=False)
    full_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    # Password hash only — plaintext is never stored.
    hashed_password: Mapped[str] = mapped_column(String(255), nullable=False)
    role: Mapped[UserRole] = mapped_column(
        Enum(UserRole, name="user_role"),
        nullable=False,
        default=UserRole.USER,
        server_default=UserRole.USER.value,
    )
    # Inactive accounts keep their data but cannot authenticate.
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")

    # Dialogs are owned: deleted together with the user.
    pipeline_dialogs = relationship(
        "PipelineDialog",
        back_populates="user",
        cascade="all, delete-orphan",
        passive_deletes=True,
        lazy="selectin",
    )
    # Deletion of these is delegated to the DB (ON DELETE CASCADE on the FK).
    actions = relationship("Action", passive_deletes=True, lazy="selectin")
    capabilities = relationship("Capability", passive_deletes=True, lazy="selectin")
+69
View File
@@ -0,0 +1,69 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, computed_field
from app.models import ActionIngestStatus, HttpMethod
class ActionListItemResponse(BaseModel):
    """Action as returned in list endpoints (metadata only, no schemas)."""

    id: UUID
    user_id: UUID | None = None
    operation_id: str | None = None
    method: HttpMethod
    path: str
    base_url: str | None = None
    summary: str | None = None
    description: str | None = None
    tags: list[str] | None = None
    source_filename: str | None = None
    ingest_status: ActionIngestStatus
    ingest_error: str | None = None
    created_at: datetime
    updated_at: datetime

    # Allows construction directly from the Action ORM object.
    model_config = ConfigDict(from_attributes=True)
class ActionIngestItemResponse(BaseModel):
    """Compact per-action result reported by the spec-ingest endpoint."""

    id: UUID
    user_id: UUID | None = None
    operation_id: str | None = None
    method: HttpMethod
    path: str
    summary: str | None = None
    source_filename: str | None = None
    ingest_status: ActionIngestStatus
    ingest_error: str | None = None

    model_config = ConfigDict(from_attributes=True)
class ActionDetailResponse(ActionListItemResponse):
    """Full action representation: list-item fields plus stored schemas."""

    parameters_schema: dict[str, Any] | None = None
    request_body_schema: dict[str, Any] | None = None
    response_schema: dict[str, Any] | None = None
    raw_spec: dict[str, Any] | None = None

    @computed_field(return_type=dict[str, Any] | None)
    @property
    def json_schema(self) -> dict[str, Any] | None:
        """Aggregate view over all stored schemas, or None when every
        component is empty/absent."""
        combined = {
            "parameters": self.parameters_schema,
            "request_body": self.request_body_schema,
            "response": self.response_schema,
            "raw_spec": self.raw_spec,
        }
        if any(combined.values()):
            return combined
        return None
class ActionIngestResponse(BaseModel):
    """Overall result of an OpenAPI ingest: counts plus both result lists."""

    succeeded_count: int
    failed_count: int
    succeeded_actions: list[ActionDetailResponse]
    failed_actions: list[ActionDetailResponse]
+19
View File
@@ -0,0 +1,19 @@
from pydantic import AliasChoices, BaseModel, ConfigDict, EmailStr, Field
class RegisterIn(BaseModel):
    """Registration payload; accepts both snake_case and camelCase name keys."""

    email: EmailStr = Field(max_length=254)
    # 72 is bcrypt's maximum effective password length in bytes.
    password: str = Field(min_length=1, max_length=72)
    full_name: str = Field(
        min_length=2,
        max_length=200,
        # Accept "full_name" or "fullName" on input; emit "fullName".
        validation_alias=AliasChoices("full_name", "fullName"),
        serialization_alias="fullName",
    )

    model_config = ConfigDict(populate_by_name=True)
class LoginIn(BaseModel):
    """Login payload: email + password."""

    email: EmailStr = Field(max_length=254)
    password: str = Field(min_length=1, max_length=72)
+73
View File
@@ -0,0 +1,73 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
from app.schemas.action_sch import ActionIngestItemResponse
class CapabilityDataFormat(BaseModel):
    """Wire-format summary of a capability's underlying HTTP action."""

    # Field(default_factory=list) matches the convention used by the other
    # schemas in this module; each instance gets its own fresh list.
    parameter_locations: list[str] = Field(default_factory=list)
    request_content_types: list[str] = Field(default_factory=list)
    request_schema_type: str | None = None
    response_content_types: list[str] = Field(default_factory=list)
    response_schema_types: list[str] = Field(default_factory=list)
class CapabilityResponse(BaseModel):
    """Full capability representation returned by the API."""

    id: UUID
    user_id: UUID | None = None
    action_id: UUID | None = None
    type: str = "ATOMIC"
    name: str
    description: str | None = None
    input_schema: dict[str, Any] | None = None
    output_schema: dict[str, Any] | None = None
    # Present only for COMPOSITE capabilities.
    recipe: dict[str, Any] | None = None
    data_format: CapabilityDataFormat | None = None
    created_at: datetime
    updated_at: datetime

    model_config = ConfigDict(from_attributes=True)
class CapabilityIngestItemResponse(BaseModel):
    """Compact capability summary used in ingest results."""

    id: UUID
    user_id: UUID | None = None
    action_id: UUID | None = None
    type: str = "ATOMIC"
    name: str
    description: str | None = None

    model_config = ConfigDict(from_attributes=True)
class ActionIngestWithCapabilitiesResponse(BaseModel):
    """Ingest result including the capabilities auto-created from actions."""

    succeeded_count: int
    failed_count: int
    created_capabilities_count: int
    succeeded_actions: list[ActionIngestItemResponse]
    failed_actions: list[ActionIngestItemResponse]
    capabilities: list[CapabilityIngestItemResponse]
class CompositeCapabilityRecipeStepCreate(BaseModel):
    """One step of a composite recipe: 1-based order, target capability,
    and a mapping of input names to value expressions."""

    step: int = Field(ge=1)
    capability_id: UUID
    inputs: dict[str, str] = Field(default_factory=dict)
class CompositeCapabilityRecipeCreate(BaseModel):
    """Versioned ordered list of steps for a composite capability."""

    version: int = 1
    steps: list[CompositeCapabilityRecipeStepCreate] = Field(default_factory=list)
class CreateCompositeCapabilityRequest(BaseModel):
    """Request body for creating a composite capability from a recipe."""

    name: str = Field(min_length=1, max_length=255)
    description: str | None = None
    input_schema: dict[str, Any] | None = None
    output_schema: dict[str, Any] | None = None
    recipe: CompositeCapabilityRecipeCreate
+67
View File
@@ -0,0 +1,67 @@
from __future__ import annotations
from datetime import datetime
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
class RunPipelineRequest(BaseModel):
    """External inputs supplied when launching a pipeline run."""

    inputs: dict[str, Any] = Field(default_factory=dict)
class RunPipelineResponse(BaseModel):
    """Acknowledgement of a launched run (only QUEUED/RUNNING are possible)."""

    run_id: UUID
    pipeline_id: UUID
    status: Literal["QUEUED", "RUNNING"]
class ExecutionRunListItemResponse(BaseModel):
    """Run summary for list endpoints (no inputs/steps payload)."""

    id: UUID
    pipeline_id: UUID
    # Mirrors ExecutionRunStatus on the ORM side.
    status: Literal["QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "PARTIAL_FAILED"]
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    created_at: datetime
    updated_at: datetime

    model_config = ConfigDict(from_attributes=True)
class ExecutionStepRunResponse(BaseModel):
    """Detailed view of a single executed step."""

    step: int
    name: str | None = None
    capability_id: UUID | None = None
    action_id: UUID | None = None
    # NOTE(review): method/status_code/accepted_payload/output_payload have
    # no column on ExecutionStepRun — presumably filled from the snapshots
    # by the serializing layer; confirm against the endpoint code.
    method: Literal["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"] | None = None
    status_code: int | None = None
    status: Literal["PENDING", "RUNNING", "SUCCEEDED", "FAILED", "SKIPPED"]
    resolved_inputs: dict[str, Any] | None = None
    accepted_payload: Any = None
    output_payload: Any = None
    request_snapshot: dict[str, Any] | None = None
    response_snapshot: dict[str, Any] | None = None
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    duration_ms: int | None = None
    created_at: datetime
    updated_at: datetime

    model_config = ConfigDict(from_attributes=True)
class ExecutionRunDetailResponse(BaseModel):
    """Full run representation including inputs, summary, and all steps."""

    id: UUID
    pipeline_id: UUID
    status: Literal["QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "PARTIAL_FAILED"]
    inputs: dict[str, Any] = Field(default_factory=dict)
    summary: dict[str, Any] | None = None
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    created_at: datetime
    updated_at: datetime
    steps: list[ExecutionStepRunResponse] = Field(default_factory=list)
+104
View File
@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
class PipelineInputTypeFromPrevious(BaseModel):
    """Type of the data a node receives from a given upstream step."""

    from_step: int
    type: str
class PipelineStepEndpoint(BaseModel):
    """Capability (and optionally its backing action) attached to a node."""

    name: str
    capability_id: UUID
    action_id: UUID | None = None
    type: str | None = None
    # Either a simple type name or a structured schema fragment.
    input_type: str | dict[str, Any] | None = None
    output_type: str | dict[str, Any] | None = None
class PipelineGraphNode(BaseModel):
    """One node of the pipeline graph, with its wiring metadata."""

    step: int
    name: str
    description: str | None = None
    # Step numbers of upstream/downstream neighbours.
    input_connected_from: list[int] = Field(default_factory=list)
    output_connected_to: list[int] = Field(default_factory=list)
    input_data_type_from_previous: list[PipelineInputTypeFromPrevious] = Field(default_factory=list)
    # Names of inputs that must be supplied by the caller at run time.
    external_inputs: list[str] = Field(default_factory=list)
    endpoints: list[PipelineStepEndpoint] = Field(default_factory=list)
class PipelineGraphEdge(BaseModel):
    """Directed edge of the pipeline graph (from_step -> to_step)."""

    from_step: int
    to_step: int
    type: str
class PipelineGenerateRequest(BaseModel):
    """User message in a dialog asking to (re)generate a pipeline."""

    dialog_id: UUID
    message: str = Field(min_length=1)
    # Optional restriction of the capabilities available to synthesis.
    capability_ids: list[UUID] | None = None
class PipelineGenerateResponse(BaseModel):
    """Synthesis outcome: a graph, a request for more info, or a refusal."""

    status: Literal["ready", "needs_input", "cannot_build"]
    # Russian-language texts shown to the user (message + chat reply).
    message_ru: str
    chat_reply_ru: str
    pipeline_id: UUID | None = None
    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)
    # Populated when status is "needs_input"/"cannot_build".
    missing_requirements: list[str] = Field(default_factory=list)
    context_summary: str | None = None
class PipelineGraphUpdateRequest(BaseModel):
    """Replacement nodes/edges for an existing pipeline's graph."""

    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)
class PipelineGraphUpdateResponse(BaseModel):
    """Graph as persisted after an update, with the new updated_at."""

    pipeline_id: UUID
    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)
    updated_at: datetime
class DialogResetRequest(BaseModel):
    """Request to reset a dialog's conversation state."""

    dialog_id: UUID
class DialogResetResponse(BaseModel):
    """Confirmation of a dialog reset with a user-facing Russian message."""

    status: Literal["ok"]
    message_ru: str
class PipelineDialogListItemResponse(BaseModel):
    """Dialog summary for list views (denormalised last_* fields)."""

    dialog_id: UUID
    title: str | None = None
    last_status: str | None = None
    last_pipeline_id: UUID | None = None
    last_message_preview: str | None = None
    created_at: datetime
    updated_at: datetime

    model_config = ConfigDict(from_attributes=True)
class PipelineDialogMessageResponse(BaseModel):
    """Single dialog message as returned in a history response."""

    id: UUID
    role: Literal["user", "assistant"]
    content: str
    # Structured assistant output stored alongside the text, if any.
    assistant_payload: dict[str, Any] | None = None
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
class PipelineDialogHistoryResponse(BaseModel):
    """Full message history of one dialog."""

    dialog_id: UUID
    title: str | None = None
    messages: list[PipelineDialogMessageResponse] = Field(default_factory=list)
+33
View File
@@ -0,0 +1,33 @@
from pydantic import BaseModel, EmailStr, ConfigDict
from uuid import UUID
from datetime import datetime
from app.models import UserRole
from typing import Optional
class UserBase(BaseModel):
    """Common user fields shared by request/response schemas."""

    email: EmailStr
    full_name: str
class UserUpdate(BaseModel):
    """Admin-side partial update; every field is optional."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = None
    role: Optional[UserRole] = None
    is_active: Optional[bool] = None
    min_approvals_required: Optional[int] = None
class UserResponse(UserBase):
    """User representation returned by the API."""

    id: UUID
    role: UserRole
    is_active: bool
    min_approvals_required: int
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
class UserUpdateMe(BaseModel):
    """Self-service partial update (no role/activation changes)."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = None
class PasswordUpdate(BaseModel):
    """Password change payload.

    NOTE(review): unlike the other user-schema module in this codebase,
    this version enforces no minimum length or complexity — confirm which
    schema the password endpoint actually uses.
    """

    old_password: str
    new_password: str
+45
View File
@@ -0,0 +1,45 @@
from typing import Annotated, Optional
import uuid
from datetime import datetime
from pydantic import BaseModel, EmailStr, Field, ConfigDict, field_validator
from app.models import UserRole
class UserBase(BaseModel):
    """Common user fields shared by request/response schemas."""

    email: EmailStr
    full_name: Annotated[str | None, Field(max_length=255)] = None
class UserResponse(UserBase):
    """User representation returned by the API."""

    id: uuid.UUID
    role: UserRole
    is_active: bool
    created_at: datetime
    updated_at: datetime | None = None

    model_config = ConfigDict(from_attributes=True)
class UserUpdate(BaseModel):
    """Admin-side partial update; every field is optional."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = Field(None, min_length=2, max_length=255)
    role: Optional[UserRole] = None
    is_active: Optional[bool] = None
class UserUpdateMe(BaseModel):
    """Self-service partial update (no role/activation changes)."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = Field(None, min_length=2, max_length=255)
class PasswordUpdate(BaseModel):
    """Password change payload: both passwords must be at least 8 chars,
    and the new one must mix letters and digits."""

    old_password: str = Field(min_length=8)
    new_password: str = Field(min_length=8)

    @field_validator("new_password")
    @classmethod
    def validate_password_complexity(cls, v: str) -> str:
        """Require at least one letter and one digit in the new password."""
        has_letter = any(ch.isalpha() for ch in v)
        has_digit = any(ch.isdigit() for ch in v)
        if has_letter and has_digit:
            return v
        raise ValueError("must contain at least one letter and one digit")
@@ -0,0 +1,80 @@
from __future__ import annotations
import asyncio
from sqlalchemy import select
from app.core.database.session import SessionLocal
from app.models import Action, Capability
from app.services.capability_service import CapabilityService
def _needs_backfill(capability: Capability) -> bool:
llm_payload = capability.llm_payload
if not isinstance(llm_payload, dict):
return True
if llm_payload.get("action_context_version") != "v2":
return True
if not isinstance(llm_payload.get("action_context"), dict):
return True
if not isinstance(llm_payload.get("action_context_brief"), dict):
return True
return False
async def main() -> None:
    """One-off maintenance script: backfill v2 LLM payloads on capabilities.

    Loads every action-backed capability, rebuilds the deterministic payload
    from its source Action where the stored one is missing or pre-v2, and
    commits once at the end.  Existing llm_payload keys are preserved and
    only the v2 context keys are (re)written.
    """
    async with SessionLocal() as session:
        result = await session.execute(
            select(Capability).where(Capability.action_id.is_not(None))
        )
        capabilities = list(result.scalars().all())
        if not capabilities:
            print("No capabilities found.")
            return
        # Bulk-load the source actions in one query.
        action_ids = [cap.action_id for cap in capabilities if cap.action_id is not None]
        actions_result = await session.execute(select(Action).where(Action.id.in_(action_ids)))
        actions_by_id = {action.id: action for action in actions_result.scalars().all()}
        updated = 0
        for capability in capabilities:
            if capability.action_id is None:
                continue
            if not _needs_backfill(capability):
                continue
            action = actions_by_id.get(capability.action_id)
            if action is None:
                # Dangling action_id: nothing to rebuild from.
                continue
            built = CapabilityService._build_capability_payload(action)
            built_llm = built.get("llm_payload") or {}
            existing = capability.llm_payload if isinstance(capability.llm_payload, dict) else {}
            # Merge: keep any pre-existing keys, overwrite the v2 context.
            capability.llm_payload = {
                **existing,
                "source": existing.get("source", built_llm.get("source", "deterministic")),
                "action_context_version": built_llm.get("action_context_version", "v2"),
                "action_context": built_llm.get("action_context"),
                "action_context_brief": built_llm.get("action_context_brief"),
                "openapi_hints": built_llm.get("openapi_hints"),
            }
            # Fill schema/data-format fields only where they were empty.
            if capability.input_schema is None:
                capability.input_schema = built.get("input_schema")
            if capability.output_schema is None:
                capability.output_schema = built.get("output_schema")
            if capability.data_format is None:
                capability.data_format = built.get("data_format")
            updated += 1
        if not updated:
            print("No capabilities required backfill.")
            return
        await session.commit()
        print(f"Backfilled {updated} capabilities.")


if __name__ == "__main__":
    asyncio.run(main())
+30
View File
@@ -0,0 +1,30 @@
import asyncio
import os
from sqlalchemy import text
from app.core.database.session import SessionLocal
async def migrate():
    """Idempotent, ad-hoc schema migration for the ``capabilities`` table.

    Adds the ``type`` and ``recipe`` columns (if missing) and drops the
    NOT NULL constraint on ``action_id`` so composite capabilities can
    exist without a source action.  All three statements commit together.
    """
    print("Starting migration: adding 'type' and 'recipe' to 'capabilities' table...")
    async with SessionLocal() as session:
        try:
            # 1. Add type column if it doesn't exist
            await session.execute(text(
                "ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS type VARCHAR(50) DEFAULT 'ATOMIC';"
            ))
            # 2. Add recipe column if it doesn't exist
            await session.execute(text(
                "ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS recipe JSONB;"
            ))
            # 3. Make action_id nullable
            await session.execute(text(
                "ALTER TABLE capabilities ALTER COLUMN action_id DROP NOT NULL;"
            ))
            await session.commit()
            print("Migration completed successfully!")
        except Exception as e:
            await session.rollback()
            print(f"Migration failed: {e}")
            # Fix: re-raise so the process exits non-zero — a failed
            # migration must not look like a success to calling scripts.
            raise


if __name__ == "__main__":
    asyncio.run(migrate())
+11
View File
@@ -0,0 +1,11 @@
from app.services.openapi_service import OpenAPIService
from app.services.capability_service import CapabilityService
from app.services.execution_service import ExecutionService
from app.services.pipeline_service import PipelineService
__all__ = [
"OpenAPIService",
"CapabilityService",
"ExecutionService",
"PipelineService",
]
+758
View File
@@ -0,0 +1,758 @@
from __future__ import annotations
import re
from typing import Any
from uuid import UUID
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, Capability
from app.models.capability import CapabilityType
class CompositeRecipeValidationError(ValueError):
    """Raised when a composite-capability recipe fails validation.

    Keeps the individual error messages in ``errors`` and renders them
    joined by "; " as the exception message.
    """

    def __init__(self, errors: list[str]) -> None:
        self.errors = errors
        message = "; ".join(errors)
        super().__init__(message)
class CapabilityService:
    def __init__(self, session: AsyncSession) -> None:
        """Store the async database session used by all service operations.

        The service flushes but never commits (see
        create_composite_capability); transaction control stays with the
        caller.
        """
        self.session = session
@staticmethod
def build_from_actions(
actions: list[Action],
*,
owner_user_id: UUID,
) -> list[Capability]:
capabilities: list[Capability] = []
for action in actions:
capability_payload = CapabilityService._build_capability_payload(action)
capabilities.append(
Capability(
user_id=owner_user_id,
action_id=action.id,
type=CapabilityType.ATOMIC,
name=capability_payload["name"],
description=capability_payload.get("description"),
input_schema=capability_payload.get("input_schema"),
output_schema=capability_payload.get("output_schema"),
data_format=capability_payload.get("data_format"),
llm_payload=capability_payload.get("llm_payload"),
)
)
return capabilities
    async def create_composite_capability(
        self,
        *,
        owner_user_id: UUID,
        name: str,
        description: str | None = None,
        input_schema: dict[str, Any] | None = None,
        output_schema: dict[str, Any] | None = None,
        recipe: dict[str, Any],
        llm_payload: dict[str, Any] | None = None,
        data_format: dict[str, Any] | None = None,
    ) -> Capability:
        """Persist a COMPOSITE capability as-is (no recipe validation here).

        Flushes and refreshes the row so server-generated fields are populated,
        but does not commit — the caller controls the transaction.
        """
        capability = Capability(
            user_id=owner_user_id,
            type=CapabilityType.COMPOSITE,
            name=name,
            description=description,
            input_schema=input_schema,
            output_schema=output_schema,
            recipe=recipe,
            llm_payload=llm_payload,
            data_format=data_format,
        )
        self.session.add(capability)
        await self.session.flush()
        await self.session.refresh(capability)
        return capability
    async def create_validated_composite_capability(
        self,
        *,
        owner_user_id: UUID,
        name: str,
        description: str | None = None,
        input_schema: dict[str, Any] | None = None,
        output_schema: dict[str, Any] | None = None,
        recipe: dict[str, Any],
        include_all: bool = False,
    ) -> Capability:
        """Validate ``recipe``, derive llm_payload/data_format, then persist a composite.

        Raises:
            CompositeRecipeValidationError: via ``validate_composite_recipe`` when
                the recipe is malformed or references inaccessible capabilities.
        """
        normalized_recipe, step_capabilities = await self.validate_composite_recipe(
            recipe=recipe,
            owner_user_id=owner_user_id,
            include_all=include_all,
        )
        llm_payload = self._build_composite_llm_payload(step_capabilities)
        # Derived summary of the composite's IO shape and its ordered steps.
        data_format = {
            "request_schema_type": input_schema.get("type")
            if isinstance(input_schema, dict)
            else None,
            "response_schema_types": [output_schema.get("type")]
            if isinstance(output_schema, dict)
            and isinstance(output_schema.get("type"), str)
            else [],
            "composite": {
                "version": normalized_recipe.get("version"),
                "steps_count": len(normalized_recipe.get("steps", [])),
                "step_capability_names": [
                    str(getattr(capability, "name", ""))
                    for capability in step_capabilities
                ],
            },
        }
        return await self.create_composite_capability(
            owner_user_id=owner_user_id,
            name=name,
            description=description,
            input_schema=input_schema,
            output_schema=output_schema,
            recipe=normalized_recipe,
            llm_payload=llm_payload,
            data_format=data_format,
        )
    async def validate_composite_recipe(
        self,
        *,
        recipe: dict[str, Any],
        owner_user_id: UUID,
        include_all: bool = False,
    ) -> tuple[dict[str, Any], list[Capability]]:
        """Validate and normalize a composite recipe.

        Checks, in order: structural shape (version 1, non-empty steps, unique
        positive step numbers, UUID capability ids, string input bindings of the
        form ``$run.*`` / ``$step.<n>.*``), then binding references (each step
        binding must point to an earlier, existing step), then capability
        access (every referenced capability must be visible to the owner,
        ATOMIC, and backed by an action).

        Errors are accumulated per phase and raised together.

        Returns:
            (normalized recipe with sorted steps, capabilities in step order).
        Raises:
            CompositeRecipeValidationError: with all messages for the failed phase.
        """
        errors: list[str] = []
        if not isinstance(recipe, dict):
            raise CompositeRecipeValidationError(["recipe must be an object"])
        version = recipe.get("version")
        if version != 1:
            errors.append("recipe.version must be 1")
        raw_steps = recipe.get("steps")
        if not isinstance(raw_steps, list) or not raw_steps:
            # Without a steps list there is nothing further to validate.
            errors.append("recipe.steps must be a non-empty list")
            raise CompositeRecipeValidationError(errors)
        normalized_steps: list[dict[str, Any]] = []
        seen_step_numbers: set[int] = set()
        # Phase 1: per-step structural validation and normalization.
        for index, raw_step in enumerate(raw_steps):
            if not isinstance(raw_step, dict):
                errors.append(f"recipe.steps[{index}] must be an object")
                continue
            step_number = raw_step.get("step")
            if not isinstance(step_number, int) or step_number < 1:
                errors.append(f"recipe.steps[{index}].step must be positive integer")
                continue
            if step_number in seen_step_numbers:
                errors.append(f"recipe.steps[{index}].step duplicates step {step_number}")
            seen_step_numbers.add(step_number)
            capability_uuid = self._to_uuid(raw_step.get("capability_id"))
            if capability_uuid is None:
                errors.append(f"recipe.steps[{index}].capability_id must be UUID")
                continue
            raw_inputs = raw_step.get("inputs", {})
            if raw_inputs is None:
                raw_inputs = {}
            if not isinstance(raw_inputs, dict):
                errors.append(f"recipe.steps[{index}].inputs must be an object")
                raw_inputs = {}
            normalized_inputs: dict[str, str] = {}
            for input_name, binding in raw_inputs.items():
                if not isinstance(input_name, str) or not input_name.strip():
                    errors.append(f"recipe.steps[{index}].inputs has invalid key")
                    continue
                if not isinstance(binding, str):
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} must be string binding"
                    )
                    continue
                normalized_binding = binding.strip()
                if not normalized_binding:
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} must be non-empty binding"
                    )
                    continue
                if not self._is_supported_binding_expression(normalized_binding):
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} has unsupported binding '{normalized_binding}'"
                    )
                    continue
                normalized_inputs[input_name] = normalized_binding
            normalized_steps.append(
                {
                    "step": step_number,
                    "capability_id": str(capability_uuid),
                    "inputs": normalized_inputs,
                }
            )
        if errors:
            raise CompositeRecipeValidationError(errors)
        # Phase 2: order steps and check cross-step bindings.
        normalized_steps.sort(key=lambda item: item["step"])
        for idx in range(1, len(normalized_steps)):
            if normalized_steps[idx]["step"] <= normalized_steps[idx - 1]["step"]:
                errors.append("recipe.steps must be strictly increasing by step")
                break
        known_steps = {item["step"] for item in normalized_steps}
        for item in normalized_steps:
            for binding in item["inputs"].values():
                if not binding.startswith("$step."):
                    continue
                source_step = self._extract_binding_source_step(binding)
                if source_step is None:
                    errors.append(
                        f"step {item['step']}: invalid step binding '{binding}'"
                    )
                    continue
                if source_step not in known_steps:
                    errors.append(
                        f"step {item['step']}: binding references missing step {source_step}"
                    )
                    continue
                if source_step >= item["step"]:
                    errors.append(
                        f"step {item['step']}: binding references non-previous step {source_step}"
                    )
        # Phase 3: resolve capabilities and check type/ownership constraints.
        capability_ids = [UUID(item["capability_id"]) for item in normalized_steps]
        capabilities = await self.get_capabilities(
            capability_ids=capability_ids,
            owner_user_id=owner_user_id,
            include_all=include_all,
        )
        capabilities_by_id = {str(item.id): item for item in capabilities}
        for item in normalized_steps:
            capability = capabilities_by_id.get(item["capability_id"])
            if capability is None:
                errors.append(
                    f"step {item['step']}: capability {item['capability_id']} not found or not accessible"
                )
                continue
            capability_type = self._capability_type_value(capability)
            if capability_type != CapabilityType.ATOMIC.value:
                # Nesting composites inside composites is explicitly disallowed.
                errors.append(
                    f"step {item['step']}: nested composite is not allowed ({item['capability_id']})"
                )
                continue
            if getattr(capability, "action_id", None) is None:
                errors.append(
                    f"step {item['step']}: atomic capability {item['capability_id']} has no action_id"
                )
        if errors:
            raise CompositeRecipeValidationError(errors)
        normalized_recipe = {
            "version": 1,
            "steps": normalized_steps,
        }
        ordered_caps = [
            capabilities_by_id[item["capability_id"]]
            for item in normalized_steps
            if item["capability_id"] in capabilities_by_id
        ]
        return normalized_recipe, ordered_caps
async def create_from_actions(
self,
actions: list[Action],
*,
owner_user_id: UUID,
refresh: bool = True,
) -> list[Capability]:
capabilities = self.build_from_actions(actions, owner_user_id=owner_user_id)
if not capabilities:
return []
self.session.add_all(capabilities)
await self.session.flush()
if refresh:
for capability in capabilities:
await self.session.refresh(capability)
return capabilities
    async def get_capabilities(
        self,
        *,
        capability_ids: list[UUID] | None = None,
        action_ids: list[UUID] | None = None,
        owner_user_id: UUID | None = None,
        include_all: bool = False,
        limit: int | None = None,
        offset: int = 0,
    ) -> list[Capability]:
        """List capabilities ordered by creation time (oldest first).

        Ownership filtering applies unless ``include_all`` is True: a row is
        visible when its ``user_id`` matches, or when its ``user_id`` is NULL
        but its linked Action belongs to the owner (legacy rows).
        Optional filters narrow by capability ids / action ids; offset/limit
        apply after filtering.
        """
        query = select(Capability).order_by(Capability.created_at.asc())
        if not include_all and owner_user_id is not None:
            # Legacy compatibility: some old rows may have user_id=NULL while action is user-owned.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        if capability_ids:
            query = query.where(Capability.id.in_(capability_ids))
        if action_ids:
            query = query.where(Capability.action_id.in_(action_ids))
        if offset:
            query = query.offset(offset)
        if limit is not None:
            query = query.limit(limit)
        result = await self.session.execute(query)
        return list(result.scalars().all())
    async def get_capability(
        self,
        capability_id: UUID,
        *,
        owner_user_id: UUID | None = None,
        include_all: bool = False,
    ) -> Capability | None:
        """Fetch a single capability by id, or None when absent/not accessible.

        Applies the same legacy-aware ownership filter as ``get_capabilities``.
        """
        query = select(Capability).where(Capability.id == capability_id)
        if not include_all and owner_user_id is not None:
            # Same legacy NULL-user_id fallback as in get_capabilities.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        result = await self.session.execute(query)
        return result.scalar_one_or_none()
@staticmethod
def _is_supported_binding_expression(value: str) -> bool:
if re.fullmatch(r"\$run\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value):
return True
if re.fullmatch(r"\$step\.\d+\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value):
return True
return False
@staticmethod
def _extract_binding_source_step(value: str) -> int | None:
match = re.fullmatch(r"\$step\.(\d+)\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value)
if not match:
return None
return int(match.group(1))
@staticmethod
def _to_uuid(value: Any) -> UUID | None:
try:
return UUID(str(value))
except (TypeError, ValueError):
return None
    @staticmethod
    def _capability_type_value(capability: Capability) -> str:
        """Normalize ``capability.type`` to a plain string value.

        Handles enum members, raw strings, and enum-like objects exposing
        ``.value``; defaults to ATOMIC when the attribute is missing or unknown.
        """
        cap_type = getattr(capability, "type", None)
        if isinstance(cap_type, CapabilityType):
            return cap_type.value
        if isinstance(cap_type, str):
            return cap_type
        # Duck-typed fallback for enum-like values coming from other layers.
        if hasattr(cap_type, "value"):
            return str(cap_type.value)
        return CapabilityType.ATOMIC.value
@staticmethod
def _build_composite_llm_payload(step_capabilities: list[Capability]) -> dict[str, Any]:
step_names = [
str(getattr(capability, "name", "") or "")
for capability in step_capabilities
if str(getattr(capability, "name", "") or "").strip()
]
return {
"source": "composite",
"recipe_summary": {
"steps_count": len(step_capabilities),
"step_names": step_names,
},
}
@staticmethod
def _build_capability_payload(action: Action) -> dict[str, Any]:
input_schema = CapabilityService._build_input_schema(action)
output_schema = getattr(action, "response_schema", None)
data_format = CapabilityService._build_data_format(action)
action_context = CapabilityService._build_action_context(
action=action,
input_schema=input_schema,
output_schema=output_schema,
data_format=data_format,
)
openapi_hints = CapabilityService._build_openapi_hints(
action=action,
input_schema=input_schema,
output_schema=output_schema,
)
return {
"name": CapabilityService._build_capability_name(action),
"description": CapabilityService._build_capability_description(action),
"input_schema": input_schema,
"output_schema": output_schema,
"data_format": data_format,
"llm_payload": {
"source": "deterministic",
"action_context_version": "v2",
"action_context": action_context,
"action_context_brief": CapabilityService._build_action_context_brief(
action_context=action_context,
openapi_hints=openapi_hints,
),
"openapi_hints": openapi_hints,
},
}
@staticmethod
def _build_action_context(
*,
action: Action,
input_schema: dict[str, Any] | None,
output_schema: dict[str, Any] | None,
data_format: dict[str, Any] | None,
) -> dict[str, Any]:
method = getattr(action, "method", None)
method_value = method.value if hasattr(method, "value") else str(method or "")
parameter_names = CapabilityService._extract_parameter_names_by_location(
getattr(action, "parameters_schema", None)
)
request_property_names = CapabilityService._extract_schema_property_names(
getattr(action, "request_body_schema", None)
)
response_property_names = CapabilityService._extract_schema_property_names(
getattr(action, "response_schema", None)
)
return {
"action_id": str(getattr(action, "id", "")),
"operation_id": getattr(action, "operation_id", None),
"method": method_value,
"path": getattr(action, "path", None),
"base_url": getattr(action, "base_url", None),
"summary": getattr(action, "summary", None),
"description": getattr(action, "description", None),
"tags": getattr(action, "tags", None) or [],
"source_filename": getattr(action, "source_filename", None),
"input_schema": input_schema,
"output_schema": output_schema,
"parameters_schema": getattr(action, "parameters_schema", None),
"request_body_schema": getattr(action, "request_body_schema", None),
"response_schema": getattr(action, "response_schema", None),
"raw_spec": getattr(action, "raw_spec", None),
"data_format": data_format,
"input_signals": {
"required_inputs": CapabilityService._extract_required_inputs(input_schema),
"parameter_names_by_location": parameter_names,
"request_property_names": request_property_names,
},
"output_signals": {
"response_property_names": response_property_names,
},
}
@staticmethod
def _build_openapi_hints(
*,
action: Action,
input_schema: dict[str, Any] | None,
output_schema: dict[str, Any] | None,
) -> dict[str, Any]:
raw_spec = getattr(action, "raw_spec", None)
if not isinstance(raw_spec, dict):
raw_spec = {}
request_content_types = CapabilityService._extract_content_types_from_request(raw_spec)
response_status_codes, response_content_types = (
CapabilityService._extract_response_hints(raw_spec)
)
security_requirements = (
raw_spec.get("security") if isinstance(raw_spec.get("security"), list) else []
)
parameter_names = CapabilityService._extract_parameter_names_by_location(
getattr(action, "parameters_schema", None)
)
vendor_extensions = {
key: value
for key, value in raw_spec.items()
if isinstance(key, str) and key.startswith("x-")
}
path_value = str(getattr(action, "path", "") or "")
path_segments = [
segment
for segment in path_value.strip("/").split("/")
if segment and not segment.startswith("{")
]
return {
"deprecated": bool(raw_spec.get("deprecated")),
"security_requirements": security_requirements,
"request_content_types": request_content_types,
"response_content_types": response_content_types,
"response_status_codes": response_status_codes,
"has_request_body": bool(getattr(action, "request_body_schema", None)),
"has_response_body": bool(output_schema),
"required_inputs": CapabilityService._extract_required_inputs(input_schema),
"parameter_names_by_location": parameter_names,
"path_segments": path_segments,
"tags": getattr(action, "tags", None) or [],
"vendor_extensions": vendor_extensions,
}
@staticmethod
def _build_action_context_brief(
*,
action_context: dict[str, Any],
openapi_hints: dict[str, Any],
) -> dict[str, Any]:
return {
"operation_id": action_context.get("operation_id"),
"method": action_context.get("method"),
"path": action_context.get("path"),
"base_url": action_context.get("base_url"),
"summary": action_context.get("summary"),
"description": action_context.get("description"),
"tags": action_context.get("tags") or [],
"required_inputs": (action_context.get("input_signals") or {}).get("required_inputs") or [],
"parameter_names_by_location": (action_context.get("input_signals") or {}).get(
"parameter_names_by_location"
)
or {},
"request_content_types": openapi_hints.get("request_content_types") or [],
"response_content_types": openapi_hints.get("response_content_types") or [],
"response_status_codes": openapi_hints.get("response_status_codes") or [],
"security_requirements": openapi_hints.get("security_requirements") or [],
}
@staticmethod
def _build_capability_name(action: Action) -> str:
operation_id = getattr(action, "operation_id", None)
if operation_id:
return str(operation_id)
method = getattr(action, "method", None)
method_value = method.value.lower() if method is not None else "call"
path = getattr(action, "path", "") or ""
normalized_path = re.sub(r"[{}]", "", path).strip("/")
normalized_path = re.sub(r"[^a-zA-Z0-9/]+", "_", normalized_path)
normalized_path = normalized_path.replace("/", "_") or "root"
return f"{method_value}_{normalized_path.lower()}"
@staticmethod
def _build_capability_description(action: Action) -> str:
summary = getattr(action, "summary", None)
description = getattr(action, "description", None)
operation_id = getattr(action, "operation_id", None)
return str(
summary
or description
or operation_id
or CapabilityService._build_capability_name(action)
)
@staticmethod
def _build_input_schema(action: Action) -> dict[str, Any] | None:
parameters_schema = getattr(action, "parameters_schema", None)
request_body_schema = getattr(action, "request_body_schema", None)
if parameters_schema and request_body_schema:
return {
"type": "object",
"properties": {
"parameters": parameters_schema,
"request_body": request_body_schema,
},
}
if parameters_schema:
return parameters_schema
if request_body_schema:
return request_body_schema
return None
@staticmethod
def _build_data_format(action: Action) -> dict[str, Any]:
parameters_schema = getattr(action, "parameters_schema", None) or {}
request_body_schema = getattr(action, "request_body_schema", None) or {}
response_schema = getattr(action, "response_schema", None) or {}
parameter_locations: list[str] = []
if isinstance(parameters_schema, dict):
properties = parameters_schema.get("properties", {})
if isinstance(properties, dict):
for property_schema in properties.values():
if not isinstance(property_schema, dict):
continue
location = property_schema.get("x-parameter-location")
if isinstance(location, str) and location not in parameter_locations:
parameter_locations.append(location)
request_content_type = (
request_body_schema.get("x-content-type")
if isinstance(request_body_schema, dict)
else None
)
response_content_type = (
response_schema.get("x-content-type")
if isinstance(response_schema, dict)
else None
)
return {
"parameter_locations": parameter_locations,
"request_content_types": [request_content_type]
if isinstance(request_content_type, str)
else [],
"request_schema_type": request_body_schema.get("type")
if isinstance(request_body_schema, dict)
else None,
"response_content_types": [response_content_type]
if isinstance(response_content_type, str)
else [],
"response_schema_types": [response_schema.get("type")]
if isinstance(response_schema, dict)
and isinstance(response_schema.get("type"), str)
else [],
}
@staticmethod
def _extract_required_inputs(input_schema: dict[str, Any] | None) -> list[str]:
if not isinstance(input_schema, dict):
return []
required = input_schema.get("required")
if isinstance(required, list):
return [str(item) for item in required if isinstance(item, str) and item]
# Nested schemas: {"properties":{"parameters":{"required":[...]}, "request_body":{"required":[...]}}}
nested_required: list[str] = []
properties = input_schema.get("properties")
if isinstance(properties, dict):
for nested_name in ("parameters", "request_body"):
nested_schema = properties.get(nested_name)
if not isinstance(nested_schema, dict):
continue
nested = nested_schema.get("required")
if isinstance(nested, list):
for value in nested:
if isinstance(value, str) and value and value not in nested_required:
nested_required.append(value)
return nested_required
@staticmethod
def _extract_parameter_names_by_location(
parameters_schema: dict[str, Any] | None,
) -> dict[str, list[str]]:
names_by_location: dict[str, list[str]] = {
"path": [],
"query": [],
"header": [],
"cookie": [],
}
if not isinstance(parameters_schema, dict):
return names_by_location
properties = parameters_schema.get("properties")
if not isinstance(properties, dict):
return names_by_location
for name, schema in properties.items():
if not isinstance(name, str):
continue
location = "query"
if isinstance(schema, dict):
location_raw = schema.get("x-parameter-location")
if isinstance(location_raw, str) and location_raw in names_by_location:
location = location_raw
if name not in names_by_location[location]:
names_by_location[location].append(name)
return names_by_location
@staticmethod
def _extract_schema_property_names(
schema: dict[str, Any] | None,
*,
limit: int = 64,
) -> list[str]:
if not isinstance(schema, dict):
return []
result: list[str] = []
queue: list[dict[str, Any]] = [schema]
seen: set[str] = set()
while queue and len(result) < limit:
current = queue.pop(0)
properties = current.get("properties")
if isinstance(properties, dict):
for key, value in properties.items():
if isinstance(key, str) and key not in seen:
seen.add(key)
result.append(key)
if len(result) >= limit:
break
if isinstance(value, dict):
queue.append(value)
items = current.get("items")
if isinstance(items, dict):
queue.append(items)
return result
@staticmethod
def _extract_content_types_from_request(raw_spec: dict[str, Any]) -> list[str]:
request_body = raw_spec.get("requestBody")
if not isinstance(request_body, dict):
return []
content = request_body.get("content")
if not isinstance(content, dict):
return []
return [str(content_type) for content_type in content.keys() if isinstance(content_type, str)]
@staticmethod
def _extract_response_hints(raw_spec: dict[str, Any]) -> tuple[list[str], list[str]]:
responses = raw_spec.get("responses")
if not isinstance(responses, dict):
return [], []
response_status_codes: list[str] = []
response_content_types: list[str] = []
for status_code, response_payload in responses.items():
status_value = str(status_code)
if status_value not in response_status_codes:
response_status_codes.append(status_value)
if not isinstance(response_payload, dict):
continue
content = response_payload.get("content")
if not isinstance(content, dict):
continue
for content_type in content.keys():
if isinstance(content_type, str) and content_type not in response_content_types:
response_content_types.append(content_type)
return response_status_codes, response_content_types
+88
View File
@@ -0,0 +1,88 @@
from __future__ import annotations
import json
import os
from typing import Any
try:
from redis import asyncio as aioredis
except ModuleNotFoundError:
aioredis = None
from app.utils.ollama_client import chat_json, summarize_dialog_text
class DialogMemoryService:
    """Stores per-dialog message history and rolling summaries in Redis.

    Every operation degrades gracefully: when the redis package is missing or
    the server is unreachable, reads return empty context and writes no-op.
    """

    def __init__(self) -> None:
        redis_host = os.getenv("REDIS_HOST", "localhost")
        redis_port = os.getenv("REDIS_PORT", "6379")
        self.redis_url = os.getenv("REDIS_URL", f"redis://{redis_host}:{redis_port}")
        # Messages and summary keys expire together after this many seconds.
        self.ttl_seconds = int(os.getenv("DIALOG_TTL_SECONDS", "86400"))
        # Cached client: fixes the original's per-call, never-closed connection pools.
        self._redis = None

    async def get_context(self, dialog_id: str) -> tuple[list[dict[str, Any]], str | None]:
        """Return (messages, summary) for a dialog; ([], None) when Redis is unavailable."""
        redis = await self._get_redis()
        if redis is None:
            return [], None
        messages_raw = await redis.get(self._messages_key(dialog_id))
        summary = await redis.get(self._summary_key(dialog_id))
        return self._decode_messages(messages_raw), summary

    async def append_and_summarize(self, dialog_id: str, role: str, content: str) -> str | None:
        """Append one message, refresh the stored summary, and return it (None if Redis is down)."""
        redis = await self._get_redis()
        if redis is None:
            return None
        messages_key = self._messages_key(dialog_id)
        summary_key = self._summary_key(dialog_id)
        messages = self._decode_messages(await redis.get(messages_key))
        messages.append({"role": role, "content": content})
        await redis.set(messages_key, json.dumps(messages, ensure_ascii=False), ex=self.ttl_seconds)
        # LLM summarization is best-effort; fall back to echoing recent messages.
        try:
            summary = await summarize_dialog_text(messages)
        except Exception:
            summary = None
        if summary is None:
            summary = self._fallback_summary(messages)
        await redis.set(summary_key, summary, ex=self.ttl_seconds)
        return summary

    async def reset(self, dialog_id: str) -> None:
        """Delete both stored keys for the dialog."""
        redis = await self._get_redis()
        if redis is None:
            return
        await redis.delete(self._messages_key(dialog_id), self._summary_key(dialog_id))

    async def _get_redis(self):
        """Return a live (pinged) cached client, or None when Redis cannot be reached.

        Fix over the original: the client is created once and reused instead of
        opening a fresh connection pool on every call and never closing it.
        """
        if aioredis is None:
            return None
        try:
            if self._redis is None:
                self._redis = aioredis.from_url(
                    self.redis_url, encoding="utf8", decode_responses=True
                )
            await self._redis.ping()
            return self._redis
        except Exception:
            # Drop the cached client so the next call retries with a fresh one.
            self._redis = None
            return None

    def _messages_key(self, dialog_id: str) -> str:
        return f"dialog:{dialog_id}:messages"

    def _summary_key(self, dialog_id: str) -> str:
        return f"dialog:{dialog_id}:summary"

    def _decode_messages(self, payload: str | None) -> list[dict[str, Any]]:
        """Parse the stored JSON list, silently dropping malformed payloads and items."""
        if not payload:
            return []
        try:
            parsed = json.loads(payload)
        except json.JSONDecodeError:
            return []
        if not isinstance(parsed, list):
            return []
        return [item for item in parsed if isinstance(item, dict)]

    def _fallback_summary(self, messages: list[dict[str, Any]]) -> str:
        """Cheap summary: the last few non-empty message contents joined by newlines."""
        chunks = [str(item.get("content", "")) for item in messages[-4:]]
        return "\n".join(chunk for chunk in chunks if chunk)
File diff suppressed because it is too large Load Diff
+371
View File
@@ -0,0 +1,371 @@
from __future__ import annotations
import re
from typing import Any
import yaml
from app.models import ActionIngestStatus, HttpMethod
class OpenAPIService:
    """Parses OpenAPI 3.x documents and extracts per-operation action payloads."""

    # Map of lowercase HTTP verb -> HttpMethod enum member; used to filter path items.
    SUPPORTED_METHODS = {method.value.lower(): method for method in HttpMethod}
    # JSON-ish content types preferred when picking a schema out of a content map.
    JSON_CONTENT_TYPES = ("application/json", "application/*+json")
@staticmethod
def load_document(raw_bytes: bytes) -> dict[str, Any]:
if not raw_bytes:
raise ValueError("OpenAPI file is empty")
try:
document = yaml.safe_load(raw_bytes.decode("utf-8"))
except UnicodeDecodeError as exc:
raise ValueError("OpenAPI file must be UTF-8 encoded") from exc
except yaml.YAMLError as exc:
raise ValueError("OpenAPI file is not valid YAML or JSON") from exc
if not isinstance(document, dict):
raise ValueError("OpenAPI root must be an object")
openapi_version = document.get("openapi")
if not isinstance(openapi_version, str) or not openapi_version.startswith("3."):
raise ValueError("Only OpenAPI 3.x documents are supported")
if not isinstance(document.get("paths"), dict) or not document["paths"]:
raise ValueError("OpenAPI file must contain a non-empty paths section")
base_url = OpenAPIService._extract_base_url(document)
if base_url is None:
raise ValueError(
"OpenAPI file must contain servers[0].url (base_url)"
)
return document
@classmethod
def extract_actions(
cls,
document: dict[str, Any],
*,
source_filename: str | None = None,
) -> list[dict[str, Any]]:
return cls.extract_actions_with_failures(document, source_filename=source_filename)["succeeded"]
    @classmethod
    def extract_actions_with_failures(
        cls,
        document: dict[str, Any],
        *,
        source_filename: str | None = None,
    ) -> dict[str, list[dict[str, Any]]]:
        """Walk every path/method pair and build action payloads.

        Operations that fail to parse are captured as FAILED payloads (with
        the error message) instead of aborting the whole document.

        Returns:
            {"succeeded": [...], "failed": [...]} lists of payload dicts.
        """
        base_url = cls._extract_base_url(document)
        succeeded_actions: list[dict[str, Any]] = []
        failed_actions: list[dict[str, Any]] = []
        for path, path_item in document.get("paths", {}).items():
            if not isinstance(path_item, dict):
                continue
            # Path-level parameters are shared by every operation under the path.
            shared_parameters = path_item.get("parameters", [])
            for method_name, operation in path_item.items():
                # Skip non-operation keys (parameters, summary, x-* extensions).
                if method_name not in cls.SUPPORTED_METHODS:
                    continue
                if not isinstance(operation, dict):
                    failed_actions.append(
                        cls._build_failed_action_payload(
                            method_name=method_name,
                            path=path,
                            base_url=base_url,
                            source_filename=source_filename,
                            raw_spec=operation,
                            error_message="Operation definition must be an object",
                        )
                    )
                    continue
                try:
                    succeeded_actions.append(
                        cls._build_succeeded_action_payload(
                            method_name=method_name,
                            path=path,
                            operation=operation,
                            shared_parameters=shared_parameters,
                            document=document,
                            base_url=base_url,
                            source_filename=source_filename,
                        )
                    )
                except ValueError as exc:
                    # e.g. unresolvable $ref — record the failure per operation.
                    failed_actions.append(
                        cls._build_failed_action_payload(
                            method_name=method_name,
                            path=path,
                            base_url=base_url,
                            source_filename=source_filename,
                            raw_spec=operation,
                            error_message=str(exc),
                        )
                    )
        return {
            "succeeded": succeeded_actions,
            "failed": failed_actions,
        }
    @classmethod
    def _build_succeeded_action_payload(
        cls,
        *,
        method_name: str,
        path: str,
        operation: dict[str, Any],
        shared_parameters: list[Any] | None,
        document: dict[str, Any],
        base_url: str | None,
        source_filename: str | None,
    ) -> dict[str, Any]:
        """Build a fully-parsed action payload for one operation.

        Raises:
            ValueError: propagated from $ref resolution when a reference is
                non-local or unresolvable.
        """
        # Resolve all $refs up front so downstream schema extraction sees plain dicts.
        normalized_operation = cls._dereference(operation, document)
        parameters = cls._merge_parameters(shared_parameters, normalized_operation.get("parameters", []), document)
        return {
            "operation_id": normalized_operation.get("operationId") or cls._build_operation_id(method_name, path),
            "method": cls.SUPPORTED_METHODS[method_name],
            "path": path,
            "base_url": base_url,
            "summary": normalized_operation.get("summary"),
            "description": normalized_operation.get("description"),
            "tags": normalized_operation.get("tags"),
            "parameters_schema": cls._build_parameters_schema(parameters, document),
            "request_body_schema": cls._extract_request_body_schema(normalized_operation, document),
            "response_schema": cls._extract_response_schema(normalized_operation, document),
            "source_filename": source_filename,
            "raw_spec": normalized_operation,
            "ingest_status": ActionIngestStatus.SUCCEEDED,
            "ingest_error": None,
        }
@classmethod
def _build_failed_action_payload(
cls,
*,
method_name: str,
path: str,
base_url: str | None,
source_filename: str | None,
raw_spec: Any,
error_message: str,
) -> dict[str, Any]:
operation = raw_spec if isinstance(raw_spec, dict) else {}
return {
"operation_id": operation.get("operationId") or cls._build_operation_id(method_name, path),
"method": cls.SUPPORTED_METHODS[method_name],
"path": path,
"base_url": base_url,
"summary": operation.get("summary"),
"description": operation.get("description"),
"tags": operation.get("tags"),
"parameters_schema": None,
"request_body_schema": None,
"response_schema": None,
"source_filename": source_filename,
"raw_spec": operation or None,
"ingest_status": ActionIngestStatus.FAILED,
"ingest_error": error_message,
}
@staticmethod
def _extract_base_url(document: dict[str, Any]) -> str | None:
servers = document.get("servers")
if isinstance(servers, list) and servers:
first_server = servers[0]
if isinstance(first_server, dict):
url = first_server.get("url")
if isinstance(url, str):
normalized_url = url.strip()
if normalized_url:
return normalized_url
return None
@classmethod
def _merge_parameters(
cls,
path_parameters: list[Any] | None,
operation_parameters: list[Any] | None,
document: dict[str, Any],
) -> list[dict[str, Any]]:
merged: dict[tuple[str | None, str | None], dict[str, Any]] = {}
for raw_parameter in (path_parameters or []) + (operation_parameters or []):
parameter = cls._dereference(raw_parameter, document)
if not isinstance(parameter, dict):
continue
key = (parameter.get("name"), parameter.get("in"))
merged[key] = parameter
return list(merged.values())
    @classmethod
    def _build_parameters_schema(
        cls,
        parameters: list[dict[str, Any]],
        document: dict[str, Any],
    ) -> dict[str, Any] | None:
        """Fold resolved parameter definitions into one object schema.

        Each parameter becomes a property annotated with its location via
        ``x-parameter-location``; required parameters populate "required".
        Returns None when no usable parameters exist.
        """
        if not parameters:
            return None
        properties: dict[str, Any] = {}
        required: list[str] = []
        for parameter in parameters:
            name = parameter.get("name")
            if not name:
                continue
            if parameter.get("in") not in {"query", "path", "header", "cookie"}:
                continue
            schema = parameter.get("schema")
            if schema is None:
                # OpenAPI allows "content" instead of "schema" for complex parameters.
                schema = cls._extract_schema_from_content(parameter.get("content"), document)
            else:
                schema = cls._dereference(schema, document)
            # Fall back to a plain string schema when none is declared.
            property_schema = schema if isinstance(schema, dict) else {"type": "string"}
            property_schema = {
                **property_schema,
                "x-parameter-location": parameter.get("in"),
            }
            if parameter.get("description"):
                property_schema["description"] = parameter["description"]
            properties[name] = property_schema
            if parameter.get("required"):
                required.append(name)
        if not properties:
            return None
        # NOTE: ``schema`` is intentionally reused here for the aggregate result.
        schema: dict[str, Any] = {
            "type": "object",
            "properties": properties,
        }
        if required:
            schema["required"] = required
        return schema
@classmethod
def _extract_request_body_schema(
cls,
operation: dict[str, Any],
document: dict[str, Any],
) -> dict[str, Any] | None:
request_body = operation.get("requestBody")
if not isinstance(request_body, dict):
return None
request_body = cls._dereference(request_body, document)
schema = cls._extract_schema_from_content(request_body.get("content"), document)
if not isinstance(schema, dict):
return None
if request_body.get("required"):
schema = {**schema, "x-required": True}
return schema
@classmethod
def _extract_response_schema(
cls,
operation: dict[str, Any],
document: dict[str, Any],
) -> dict[str, Any] | None:
responses = operation.get("responses")
if not isinstance(responses, dict):
return None
for status_code, response in responses.items():
if not str(status_code).startswith("2"):
continue
normalized_response = cls._dereference(response, document)
if not isinstance(normalized_response, dict):
continue
schema = cls._extract_schema_from_content(normalized_response.get("content"), document)
if isinstance(schema, dict):
return schema
if normalized_response.get("description"):
return {"description": normalized_response["description"]}
return None
    @classmethod
    def _extract_schema_from_content(cls, content: Any, document: dict[str, Any]) -> dict[str, Any] | None:
        """Pick the best schema from an OpenAPI content map.

        JSON-ish content types (JSON_CONTENT_TYPES) are tried first; remaining
        entries follow in declaration order. The chosen schema is resolved and
        tagged with its source ``x-content-type``. Returns None when no entry
        yields a dict schema.
        """
        if not isinstance(content, dict):
            return None
        preferred_content_type = next((content_type for content_type in cls.JSON_CONTENT_TYPES if content_type in content), None)
        # Order candidates: preferred JSON type first, then everything else.
        items = []
        if preferred_content_type:
            items.append((preferred_content_type, content[preferred_content_type]))
        items.extend((content_type, value) for content_type, value in content.items() if content_type != preferred_content_type)
        for content_type, value in items:
            if not isinstance(value, dict):
                continue
            schema = value.get("schema")
            if not isinstance(schema, dict):
                continue
            normalized_schema = cls._dereference(schema, document)
            if isinstance(normalized_schema, dict):
                return {
                    **normalized_schema,
                    "x-content-type": content_type,
                }
        return None
@classmethod
def _dereference(cls, value: Any, document: dict[str, Any], _active_refs: frozenset = frozenset()) -> Any:
    """Recursively expand local ``$ref`` pointers inside ``value``.

    Lists and dicts are walked depth-first; a dict carrying ``$ref`` is
    replaced by the referenced node with any sibling keys layered on top
    (siblings win over referenced fields).

    ``_active_refs`` tracks the chain of refs currently being expanded so
    circular references — common in real OpenAPI schemas — are left as the
    raw ``{"$ref": ...}`` dict instead of recursing until RecursionError.
    Callers never need to pass it; the default keeps the old signature.
    """
    if isinstance(value, list):
        return [cls._dereference(item, document, _active_refs) for item in value]
    if not isinstance(value, dict):
        return value
    if "$ref" in value:
        ref = value["$ref"]
        if isinstance(ref, str) and ref in _active_refs:
            # Cycle detected: keep the unexpanded pointer.
            return value
        next_refs = _active_refs | {ref} if isinstance(ref, str) else _active_refs
        resolved = cls._resolve_ref(ref, document)
        merged = cls._dereference(resolved, document, next_refs)
        if not isinstance(merged, dict):
            return merged
        sibling_fields = {
            key: cls._dereference(item, document, next_refs)
            for key, item in value.items()
            if key != "$ref"
        }
        return {**merged, **sibling_fields}
    return {key: cls._dereference(item, document, _active_refs) for key, item in value.items()}
@staticmethod
def _resolve_ref(ref: str, document: dict[str, Any]) -> Any:
if not ref.startswith("#/"):
raise ValueError(f"Only local $ref values are supported, got: {ref}")
current: Any = document
for part in ref[2:].split("/"):
token = part.replace("~1", "/").replace("~0", "~")
if not isinstance(current, dict) or token not in current:
raise ValueError(f"Could not resolve OpenAPI reference: {ref}")
current = current[token]
return current
@staticmethod
def _build_operation_id(method_name: str, path: str) -> str:
normalized_path = re.sub(r"[{}]", "", path).strip("/")
normalized_path = re.sub(r"[^a-zA-Z0-9/]+", "_", normalized_path)
normalized_path = normalized_path.replace("/", "_") or "root"
return f"{method_name.lower()}_{normalized_path.lower()}"
@@ -0,0 +1,176 @@
from __future__ import annotations
from typing import Any
from uuid import UUID
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import DialogMessageRole, PipelineDialog, PipelineDialogMessage
class DialogAccessError(Exception):
    """Raised when a dialog does not exist or belongs to another user."""
class PipelineDialogService:
    """Persistence layer for pipeline chat dialogs and their messages.

    Every public method enforces per-user ownership: a dialog is only
    readable or writable by the user that created it; violations raise
    ``DialogAccessError``. Write methods commit the session themselves.
    """

    def __init__(self, session: AsyncSession) -> None:
        # The caller-provided session owns the transaction lifecycle.
        self.session = session

    async def list_dialogs(
        self,
        *,
        user_id: UUID,
        limit: int,
        offset: int,
    ) -> list[PipelineDialog]:
        """Page through the user's dialogs, most recently updated first."""
        query = (
            select(PipelineDialog)
            .where(PipelineDialog.user_id == user_id)
            .order_by(PipelineDialog.updated_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def get_history(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        limit: int,
        offset: int,
    ) -> tuple[PipelineDialog, list[PipelineDialogMessage]]:
        """Return the dialog plus one page of its messages.

        Pagination walks backwards from the newest message (offset=0 is
        the latest page), but each returned page is ordered oldest-first.

        Raises:
            DialogAccessError: if the dialog is missing or not owned by ``user_id``.
        """
        dialog = await self._get_dialog_owned_by_user(dialog_id=dialog_id, user_id=user_id)
        query = (
            select(PipelineDialogMessage)
            .where(PipelineDialogMessage.dialog_id == dialog.id)
            .order_by(PipelineDialogMessage.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await self.session.execute(query)
        messages_desc = list(result.scalars().all())
        # Fetched newest-first for pagination; reversed for display order.
        return dialog, list(reversed(messages_desc))

    async def get_dialog(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
    ) -> PipelineDialog:
        """Fetch one dialog, enforcing ownership (DialogAccessError otherwise)."""
        return await self._get_dialog_owned_by_user(dialog_id=dialog_id, user_id=user_id)

    async def append_user_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        content: str,
    ) -> PipelineDialogMessage:
        """Store a user message; creates the dialog on first message."""
        return await self._append_message(
            dialog_id=dialog_id,
            user_id=user_id,
            role=DialogMessageRole.USER,
            content=content,
            assistant_payload=None,
            create_dialog_if_missing=True,
        )

    async def append_assistant_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        content: str,
        assistant_payload: dict[str, Any],
    ) -> PipelineDialogMessage:
        """Store an assistant reply; the dialog must already exist."""
        return await self._append_message(
            dialog_id=dialog_id,
            user_id=user_id,
            role=DialogMessageRole.ASSISTANT,
            content=content,
            assistant_payload=assistant_payload,
            create_dialog_if_missing=False,
        )

    async def _append_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        role: DialogMessageRole,
        content: str,
        assistant_payload: dict[str, Any] | None,
        create_dialog_if_missing: bool,
    ) -> PipelineDialogMessage:
        """Append one message, maintaining the dialog's denormalized fields.

        Creates the dialog (for user messages), backfills a missing title,
        refreshes the preview, and — for assistant replies — mirrors the
        payload's ``status`` / ``pipeline_id`` onto the dialog. Commits.

        Raises:
            DialogAccessError: missing dialog (when creation is disallowed)
                or a dialog owned by a different user.
        """
        dialog = await self.session.get(PipelineDialog, dialog_id)
        if dialog is None:
            if not create_dialog_if_missing:
                raise DialogAccessError("Dialog not found")
            dialog = PipelineDialog(
                id=dialog_id,
                user_id=user_id,
                title=self._build_title(content),
            )
            self.session.add(dialog)
            # Flush so the new dialog row exists before the message references it.
            await self.session.flush()
        elif dialog.user_id != user_id:
            raise DialogAccessError("Dialog access denied")
        # Backfill the title from the first user message of an untitled dialog.
        if role == DialogMessageRole.USER and not dialog.title:
            dialog.title = self._build_title(content)
        message = PipelineDialogMessage(
            dialog_id=dialog.id,
            role=role,
            content=content,
            assistant_payload=assistant_payload,
        )
        self.session.add(message)
        dialog.last_message_preview = self._build_preview(content)
        if role == DialogMessageRole.ASSISTANT and assistant_payload:
            status = assistant_payload.get("status")
            if isinstance(status, str):
                dialog.last_status = status
            pipeline_id = self._parse_uuid(assistant_payload.get("pipeline_id"))
            if pipeline_id is not None:
                # Preserve the last valid graph reference for non-ready statuses.
                dialog.last_pipeline_id = pipeline_id
        await self.session.commit()
        return message

    async def _get_dialog_owned_by_user(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
    ) -> PipelineDialog:
        """Load a dialog and verify ownership; DialogAccessError on any failure."""
        dialog = await self.session.get(PipelineDialog, dialog_id)
        if dialog is None:
            raise DialogAccessError("Dialog not found")
        if dialog.user_id != user_id:
            raise DialogAccessError("Dialog access denied")
        return dialog

    def _build_title(self, content: str) -> str:
        """First 120 chars of the flattened content, or a default title."""
        text = (content or "").strip().replace("\n", " ")
        return (text[:120] or "Pipeline dialog")

    def _build_preview(self, content: str) -> str:
        """First 280 chars of the flattened content for list views."""
        text = (content or "").strip().replace("\n", " ")
        return text[:280]

    def _parse_uuid(self, value: Any) -> UUID | None:
        """Coerce a UUID or UUID-string to UUID; anything else yields None."""
        if isinstance(value, UUID):
            return value
        if isinstance(value, str):
            try:
                return UUID(value)
            except ValueError:
                return None
        return None
File diff suppressed because it is too large Load Diff
+491
View File
@@ -0,0 +1,491 @@
from __future__ import annotations
import re
from typing import Any, NamedTuple
from uuid import UUID
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, Capability
from app.models.capability import CapabilityType
class SelectedCapability(NamedTuple):
    """One ranked capability candidate produced by SemanticSelectionService."""

    # The matched capability row.
    capability: Capability
    # Heuristic lexical-overlap score; only comparable within a single query.
    score: float
    # "high" / "medium" / "low" — assigned per result batch from the top score.
    confidence_tier: str = "high"
class SemanticSelectionService:
    """Lexical (token-overlap) ranking of capabilities against a user query.

    Despite the name there is no embedding model involved: matching is
    token-based, with Russian/English alias expansion, crude suffix
    stemming, a CRM-domain boost, and a penalty for overly generic
    capabilities. Scores are heuristic and only comparable within one query.
    """

    # Score thresholds used to label the winning match's confidence tier.
    HIGH_CONFIDENCE_THRESHOLD = 0.45
    MEDIUM_CONFIDENCE_THRESHOLD = 0.30
    # If the top two scores are closer than this, confidence drops to "low".
    LOW_MARGIN_THRESHOLD = 0.05

    # Domain vocabulary (EN + RU) that earns a CRM relevance boost.
    CRM_TOKENS = {
        "crm",
        "segment",
        "segments",
        "audience",
        "campaign",
        "campaigns",
        "mailing",
        "newsletter",
        "lead",
        "leads",
        "retention",
        "cohort",
        "churn",
        "conversion",
        "promo",
        "offer",
        "offers",
        "email",
        "emails",
        "push",
        "sale",
        "sales",
        "сегмент",
        "сегменты",
        "аудитория",
        "кампания",
        "кампании",
        "рассылка",
        "лид",
        "лиды",
        "ретеншн",
        "конверсия",
        "оффер",
        "офферы",
        "пуш",
        "продажи",
        "клиент",
        "клиенты",
    }

    # Generic API-ish words (EN + RU) that carry little signal; capabilities
    # dominated by them are penalized in _generic_capability_penalty.
    GENERIC_TOKENS = {
        "get",
        "list",
        "create",
        "update",
        "delete",
        "call",
        "data",
        "info",
        "items",
        "resource",
        "resources",
        "service",
        "api",
        "handle",
        "handler",
        "manage",
        "process",
        "method",
        "action",
        "fetch",
        "general",
        "common",
        "получить",
        "список",
        "создать",
        "обновить",
        "удалить",
        "данные",
        "инфо",
        "ресурс",
        "сервис",
        "метод",
        "действие",
        "общее",
    }

    # Tokens dropped at tokenization time: function words plus the
    # "build me a pipeline" boilerplate present in most queries.
    _STOPWORDS = {
        "and",
        "the",
        "for",
        "with",
        "from",
        "into",
        "that",
        "this",
        "что",
        "это",
        "как",
        "для",
        "или",
        "при",
        "про",
        "надо",
        "нужно",
        "хочу",
        "build",
        "pipeline",
        "workflow",
        "scenario",
        "automation",
        "пайплайн",
        "сценарий",
        "автоматизация",
        "построй",
        "собери",
    }

    # Cross-language alias map: a token equal to (or prefixed by) a key
    # expands to the whole alias set, bridging RU <-> EN synonyms.
    _ALIAS_EXPANSIONS = {
        "польз": {"user", "users", "client", "clients", "пользователь", "пользователи"},
        "клиент": {"client", "clients", "user", "users", "клиент", "клиенты"},
        "юзер": {"user", "users", "пользователь", "пользователи"},
        "получ": {"get", "fetch", "list", "retrieve", "получить", "список"},
        "спис": {"list", "get", "fetch", "список", "получить"},
        "созд": {"create", "add", "post", "создать"},
        "обнов": {"update", "patch", "put", "обновить"},
        "удал": {"delete", "remove", "del", "удалить"},
        "рассыл": {"mailing", "newsletter", "broadcast", "email", "рассылка"},
        "сегмент": {"segment", "segments", "сегмент", "сегменты"},
        "лид": {"lead", "leads", "лид", "лиды"},
        "отчет": {"report", "analytics", "отчет", "отчёт"},
        "отчёт": {"report", "analytics", "отчет", "отчёт"},
        "user": {"пользователь", "пользователи", "user", "users"},
        "users": {"пользователь", "пользователи", "user", "users"},
        "get": {"получить", "список", "get", "fetch", "list"},
        "fetch": {"получить", "список", "get", "fetch", "list"},
        "list": {"получить", "список", "get", "fetch", "list"},
    }

    async def select_capabilities(
        self,
        session: AsyncSession,
        user_query: str,
        owner_user_id: UUID | None = None,
        limit: int = 10,
    ) -> list[SelectedCapability]:
        """Rank executable capabilities against ``user_query``.

        Loads up to 200 capabilities (optionally scoped to the owner),
        filters out non-executable ones, scores the rest lexically and
        returns the top ``limit`` matches, each tagged with a shared
        confidence tier derived from the best score and its margin.
        When no capability scores above zero, the first ``limit``
        candidates are returned with a tiny score and "low" confidence.
        """
        query_tokens = self._tokenize(user_query)
        if not query_tokens:
            return []
        query = select(Capability).order_by(Capability.created_at.asc())
        if owner_user_id is not None:
            # User-scoped with legacy compatibility:
            # some old capabilities may have user_id=NULL while their source action has owner.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        # Hard cap keeps scoring cheap regardless of tenant size.
        query = query.limit(200)
        result = await session.execute(query)
        capabilities = list(result.scalars().all())
        executable_capabilities = [
            capability
            for capability in capabilities
            if self._is_executable_capability(capability)
        ]
        candidates = executable_capabilities
        if not candidates:
            return []
        query_tokens_expanded = self._expand_tokens(query_tokens)
        ranked: list[SelectedCapability] = []
        for capability in candidates:
            score = self._score_capability(query_tokens, query_tokens_expanded, capability)
            if score <= 0:
                continue
            ranked.append(SelectedCapability(capability=capability, score=score))
        ranked.sort(key=lambda item: item.score, reverse=True)
        if not ranked:
            if candidates:
                # Fallback: keep generation moving even when lexical matching is weak.
                return [
                    SelectedCapability(
                        capability=capability,
                        score=0.01,
                        confidence_tier="low",
                    )
                    for capability in candidates[:limit]
                ]
            return []
        top_score = ranked[0].score
        second_score = ranked[1].score if len(ranked) > 1 else 0.0
        margin = top_score - second_score
        confidence_tier = self._resolve_confidence_tier(top_score, margin)
        # The whole batch shares the tier computed from the winner.
        return [
            SelectedCapability(
                capability=item.capability,
                score=item.score,
                confidence_tier=confidence_tier,
            )
            for item in ranked[:limit]
        ]

    def _score_capability(
        self,
        query_tokens: set[str],
        query_tokens_expanded: set[str],
        capability: Capability,
    ) -> float:
        """Score one capability: base token-overlap ratio plus bonuses/penalty.

        Bonuses: full query coverage, LLM-context overlap, non-generic
        ("entity") overlap, and shared CRM vocabulary. A penalty applies
        when the capability's own tokens are mostly generic words.
        """
        name = str(getattr(capability, "name", "") or "")
        description = str(getattr(capability, "description", "") or "")
        name_tokens = self._tokenize(name)
        description_tokens = self._tokenize(description)
        context_tokens = self._extract_context_tokens(capability)
        recipe_tokens = self._extract_recipe_tokens(capability)
        combined_tokens = name_tokens | description_tokens | context_tokens | recipe_tokens
        if not combined_tokens:
            return 0.0
        combined_tokens_expanded = self._expand_tokens(combined_tokens)
        overlap = query_tokens_expanded & combined_tokens_expanded
        if not overlap:
            return 0.0
        overlap_ratio = len(overlap) / len(query_tokens_expanded)
        name_tokens_expanded = self._expand_tokens(name_tokens)
        # Name matches are weighted slightly above the overall overlap.
        name_ratio = len(query_tokens_expanded & name_tokens_expanded) / len(query_tokens_expanded)
        exact_bonus = 0.22 if query_tokens_expanded <= combined_tokens_expanded else 0.0
        context_ratio = 0.0
        context_bonus = 0.0
        if context_tokens:
            context_tokens_expanded = self._expand_tokens(context_tokens)
            context_overlap = query_tokens_expanded & context_tokens_expanded
            context_ratio = len(context_overlap) / len(query_tokens_expanded)
            context_bonus = min(0.16, len(context_overlap) * 0.03)
        generic_expanded = self._expand_tokens(self.GENERIC_TOKENS)
        entity_overlap = overlap - generic_expanded
        entity_bonus = min(0.18, len(entity_overlap) * 0.06) if entity_overlap else 0.0
        query_crm_tokens = query_tokens_expanded & self.CRM_TOKENS
        capability_crm_tokens = combined_tokens_expanded & self.CRM_TOKENS
        crm_bonus = 0.0
        if query_crm_tokens and capability_crm_tokens:
            crm_overlap = len(query_crm_tokens & capability_crm_tokens)
            crm_bonus = 0.12 + min(0.14, crm_overlap * 0.04)
        generic_penalty = self._generic_capability_penalty(combined_tokens)
        return (
            max(overlap_ratio, name_ratio * 1.12, context_ratio * 0.95)
            + exact_bonus
            + context_bonus
            + entity_bonus
            + crm_bonus
            - generic_penalty
        )

    def _extract_context_tokens(self, capability: Capability) -> set[str]:
        """Tokenize selected textual fields of the capability's LLM payload."""
        llm_payload = getattr(capability, "llm_payload", None)
        if not isinstance(llm_payload, dict):
            return set()
        chunks: list[str] = []
        for key in (
            "action_context_brief",
            "openapi_hints",
            "action_context",
            "recipe_summary",
            "composite_context",
        ):
            value = llm_payload.get(key)
            if value is None:
                continue
            self._collect_text_chunks(value=value, chunks=chunks, depth=0, max_depth=4)
        tokens: set[str] = set()
        # Cap the number of chunks so pathological payloads stay cheap.
        for chunk in chunks[:120]:
            tokens.update(self._tokenize(chunk))
        return tokens

    def _extract_recipe_tokens(self, capability: Capability) -> set[str]:
        """Tokenize step input keys/values from a composite capability's recipe."""
        recipe = getattr(capability, "recipe", None)
        if not isinstance(recipe, dict):
            return set()
        steps = recipe.get("steps")
        if not isinstance(steps, list):
            return set()
        chunks: list[str] = []
        for raw_step in steps[:30]:
            if not isinstance(raw_step, dict):
                continue
            inputs = raw_step.get("inputs")
            if not isinstance(inputs, dict):
                continue
            for key, value in inputs.items():
                if isinstance(key, str):
                    chunks.append(key)
                if isinstance(value, str):
                    chunks.append(value)
        tokens: set[str] = set()
        for chunk in chunks:
            tokens.update(self._tokenize(chunk))
        return tokens

    def _collect_text_chunks(
        self,
        *,
        value: object,
        chunks: list[str],
        depth: int,
        max_depth: int,
    ) -> None:
        """Depth-first harvest of strings from a nested payload into ``chunks``.

        Dicts are filtered to a whitelist of descriptive keys; lists are
        truncated to 30 items; recursion stops at ``max_depth`` or once
        120 chunks have been collected.
        """
        if depth > max_depth or len(chunks) >= 120:
            return
        if isinstance(value, str):
            stripped = value.strip()
            if stripped:
                chunks.append(stripped)
            return
        if isinstance(value, dict):
            preferred_keys = {
                "operation_id",
                "method",
                "path",
                "base_url",
                "summary",
                "description",
                "tags",
                "source_filename",
                "required_inputs",
                "request_content_types",
                "response_content_types",
                "response_status_codes",
                "security_requirements",
                "parameter_names_by_location",
                "path_segments",
                "input_signals",
                "output_signals",
            }
            for key, item in value.items():
                if not isinstance(key, str):
                    continue
                if key not in preferred_keys:
                    continue
                # The key itself is descriptive, so it contributes too.
                chunks.append(key)
                self._collect_text_chunks(
                    value=item,
                    chunks=chunks,
                    depth=depth + 1,
                    max_depth=max_depth,
                )
            return
        if isinstance(value, list):
            for item in value[:30]:
                self._collect_text_chunks(
                    value=item,
                    chunks=chunks,
                    depth=depth + 1,
                    max_depth=max_depth,
                )

    def _resolve_confidence_tier(self, top_score: float, margin: float) -> str:
        """Tier from the winner's score; a tight margin always means "low"."""
        if margin < self.LOW_MARGIN_THRESHOLD:
            return "low"
        if top_score >= self.HIGH_CONFIDENCE_THRESHOLD:
            return "high"
        if top_score >= self.MEDIUM_CONFIDENCE_THRESHOLD:
            return "medium"
        return "low"

    def _generic_capability_penalty(self, tokens: set[str]) -> float:
        """Stepped penalty growing with the share of generic tokens."""
        if not tokens:
            return 0.0
        generic_share = len(tokens & self.GENERIC_TOKENS) / len(tokens)
        if generic_share >= 0.65:
            return 0.14
        if generic_share >= 0.5:
            return 0.09
        if generic_share >= 0.35:
            return 0.04
        return 0.0

    def _tokenize(self, value: str) -> set[str]:
        """Lowercased Latin/Cyrillic/digit tokens, minus stopwords and short words."""
        tokens = set(re.findall(r"[a-zA-Zа-яА-Я0-9]+", value.lower()))
        return {
            token
            for token in tokens
            if len(token) >= 3 and token not in self._STOPWORDS
        }

    def _is_executable_capability(self, capability: Capability) -> bool:
        """True if the capability can actually run: atomic with an action, or a valid recipe."""
        cap_type = self._capability_type_value(capability)
        if cap_type == CapabilityType.ATOMIC.value:
            return getattr(capability, "action_id", None) is not None
        if cap_type == CapabilityType.COMPOSITE.value:
            return self._recipe_is_executable(getattr(capability, "recipe", None))
        return False

    def _recipe_is_executable(self, recipe: Any) -> bool:
        """A runnable recipe is a dict with version 1 and a non-empty steps list."""
        if not isinstance(recipe, dict):
            return False
        if recipe.get("version") != 1:
            return False
        steps = recipe.get("steps")
        return isinstance(steps, list) and bool(steps)

    def _capability_type_value(self, capability: Capability) -> str:
        """Normalize the capability type (enum, string, or enum-like) to its string value."""
        raw = getattr(capability, "type", None)
        if isinstance(raw, CapabilityType):
            return raw.value
        if isinstance(raw, str):
            return raw
        if hasattr(raw, "value"):
            return str(raw.value)
        # Unknown shapes default to atomic.
        return CapabilityType.ATOMIC.value

    def _expand_tokens(self, tokens: set[str]) -> set[str]:
        """Widen a token set with stemmed variants and cross-language aliases."""
        expanded: set[str] = set()
        for token in tokens:
            expanded.add(token)
            normalized_variants = self._normalized_variants(token)
            expanded.update(normalized_variants)
            for variant in normalized_variants | {token}:
                for key, aliases in self._ALIAS_EXPANSIONS.items():
                    if variant == key or variant.startswith(key):
                        expanded.update(aliases)
        return expanded

    def _normalized_variants(self, token: str) -> set[str]:
        """Crude stemming: strip common Russian inflection suffixes and English plurals."""
        variants = {token}
        if len(token) >= 5:
            for suffix in (
                "иями",
                "ями",
                "ами",
                "ов",
                "ев",
                "ей",
                "ам",
                "ям",
                "ах",
                "ях",
                "ые",
                "ий",
                "ый",
                "ая",
                "ое",
                "ой",
                "а",
                "я",
                "ы",
                "и",
                "у",
                "ю",
                "е",
                "о",
            ):
                # Only strip when a meaningful stem (>2 chars) remains.
                if token.endswith(suffix) and len(token) > len(suffix) + 2:
                    variants.add(token[: -len(suffix)])
        if token.endswith("ies") and len(token) > 4:
            variants.add(token[:-3] + "y")
        if token.endswith("s") and len(token) > 3:
            variants.add(token[:-1])
        return variants
+103
View File
@@ -0,0 +1,103 @@
from __future__ import annotations
import logging
import os
from typing import Any
from app.utils.log_context import get_log_context
# Dedicated logger for structured business/analytics events.
business_logger = logging.getLogger("app.business")

# Version stamp attached to every event for consumer-side schema evolution.
EVENT_SCHEMA_VERSION = "1.0"
# Service identity; override via APP_SERVICE_NAME in multi-service deployments.
SERVICE_NAME = os.getenv("APP_SERVICE_NAME", "backend-api")
def _derive_event_group(event: str) -> tuple[str, str | None]:
normalized = (event or "").strip().lower()
if normalized.startswith("auth_"):
return "auth", None
if normalized.startswith("action_") or normalized.startswith("actions_"):
return "actions", None
if (
normalized.startswith("capability_")
or normalized.startswith("capabilities_")
or normalized.startswith("composite_capability_")
):
return "capabilities", None
if normalized.startswith("pipeline_prompt_"):
return "pipelines", "prompt"
if normalized.startswith("pipeline_run_"):
return "pipelines", "run"
if normalized.startswith("pipeline_dialog_"):
return "pipelines", "dialog"
if normalized.startswith("pipeline_") or normalized.startswith("pipelines_"):
return "pipelines", None
if normalized.startswith("execution_run_"):
return "executions", "run"
if normalized.startswith("execution_step_"):
return "executions", "step"
if normalized.startswith("execution_") or normalized.startswith("executions_"):
return "executions", None
if normalized.startswith("user_") or normalized.startswith("users_"):
return "users", None
return "other", None
def _derive_event_outcome(event: str) -> str:
normalized = (event or "").strip().lower()
for suffix, outcome in (
("_succeeded", "success"),
("_created", "success"),
("_updated", "success"),
("_deleted", "success"),
("_processed", "success"),
("_finished", "success"),
("_failed", "failure"),
("_rejected", "failure"),
("_blocked", "failure"),
("_started", "progress"),
("_queued", "progress"),
("_received", "progress"),
("_listed", "read"),
("_fetched", "read"),
("_viewed", "read"),
):
if normalized.endswith(suffix):
return outcome
return "unknown"
def log_business_event(event: str, **fields: Any) -> None:
    """Emit one structured business event on the ``app.business`` logger.

    The record carries the event name, schema version, service name,
    derived group/subgroup/outcome (caller-supplied values win), the
    ambient request log-context, and the caller's fields. Non-primitive
    values are stringified so downstream formatters never fail.
    """
    safe_fields: dict[str, Any] = {
        "event": event,
        "event_schema_version": EVENT_SCHEMA_VERSION,
        "service_name": SERVICE_NAME,
    }
    event_group, event_subgroup = _derive_event_group(event)
    event_outcome = _derive_event_outcome(event)
    if "event_group" not in fields:
        safe_fields["event_group"] = event_group
    if event_subgroup is not None and "event_subgroup" not in fields:
        safe_fields["event_subgroup"] = event_subgroup
    if "event_outcome" not in fields:
        safe_fields["event_outcome"] = event_outcome
    for key, value in get_log_context().items():
        if key not in fields:
            safe_fields[key] = value
    for key, value in fields.items():
        if isinstance(value, (str, int, float, bool)) or value is None:
            safe_fields[key] = value
        else:
            safe_fields[key] = str(value)
    # LogRecord reserves attribute names such as "message" or "module";
    # passing them through ``extra`` makes Logger.makeRecord raise KeyError,
    # so conflicting keys are prefixed instead of crashing the caller.
    reserved = vars(logging.makeLogRecord({})).keys() | {"message", "asctime"}
    safe_fields = {
        (f"field_{key}" if key in reserved else key): value
        for key, value in safe_fields.items()
    }
    business_logger.info(event, extra=safe_fields)
+124
View File
@@ -0,0 +1,124 @@
from datetime import datetime, timezone
from typing import Any
import uuid
import logging
from fastapi import Request, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from starlette.exceptions import HTTPException
# Module-scoped logger for server-side error reporting.
logger = logging.getLogger(__name__)
def now_iso() -> str:
    """Current UTC time as an ISO-8601 string with a trailing ``Z``."""
    stamp = datetime.now(timezone.utc).isoformat()
    return stamp.replace("+00:00", "Z")
async def validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
    """Translate FastAPI/pydantic validation failures into the uniform error envelope.

    A malformed JSON body becomes a 400 with a generic hint; field-level
    validation problems become a 422 listing each offending field with its
    message and rejected value.
    """
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    # Body-level JSON syntax errors surface under these pydantic error types.
    is_json_error = any(e.get("type") in ("json_invalid", "json_decode", "value_error.jsondecode") for e in exc.errors())
    if is_json_error:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content={
                "code": "BAD_REQUEST",
                "message": "Невалидный JSON",
                "traceId": trace_id,
                "timestamp": now_iso(),
                "path": request.url.path,
                "details": {"hint": "Проверьте запятые/кавычки"},
            },
        )
    field_errors: list[dict[str, Any]] = []
    for err in exc.errors():
        # Drop "body" location segments so clients see paths like "user.email".
        loc = [str(x) for x in err.get("loc", []) if x != "body"]
        field_name = ".".join(loc) if loc else "unknown"
        msg = err.get("msg", "invalid")
        # Strip pydantic's "Value error, " prefix from custom validator messages.
        if msg.startswith("Value error, "):
            msg = msg.replace("Value error, ", "")
        field_errors.append({
            "field": field_name,
            "issue": msg,
            "rejectedValue": err.get("input", None),
        })
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content={
            "code": "VALIDATION_FAILED",
            "message": "Некоторые поля не прошли валидацию",
            "traceId": trace_id,
            "timestamp": now_iso(),
            "path": request.url.path,
            "fieldErrors": field_errors,
        },
    )
async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
    """Render an HTTPException as the project's uniform error envelope."""
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    # A dict detail may carry a human message plus structured extras.
    message = str(exc.detail)
    details = None
    if isinstance(exc.detail, dict):
        message = exc.detail.get("message", str(exc.detail))
        extras = {k: v for k, v in exc.detail.items() if k != "message"}
        if extras:
            details = extras
    status_to_code = {
        status.HTTP_400_BAD_REQUEST: "BAD_REQUEST",
        status.HTTP_401_UNAUTHORIZED: "UNAUTHORIZED",
        status.HTTP_403_FORBIDDEN: "FORBIDDEN",
        status.HTTP_404_NOT_FOUND: "NOT_FOUND",
        status.HTTP_422_UNPROCESSABLE_ENTITY: "VALIDATION_FAILED",
        status.HTTP_423_LOCKED: "USER_INACTIVE",
    }
    if exc.status_code == status.HTTP_409_CONFLICT:
        # Conflicts mentioning "email" get a dedicated client-facing code.
        code = "EMAIL_ALREADY_EXISTS" if "email" in message.lower() else "CONFLICT"
    else:
        code = status_to_code.get(exc.status_code, "HTTP_ERROR")
    # Localize the framework's bare default 404 message.
    if exc.status_code == status.HTTP_404_NOT_FOUND and message == "Not Found":
        message = "Ресурс не найден"
    content = {
        "code": code,
        "message": message,
        "traceId": trace_id,
        "timestamp": now_iso(),
        "path": request.url.path,
    }
    if details:
        content["details"] = details
    return JSONResponse(
        status_code=exc.status_code,
        content=content,
    )
async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
    """Catch-all handler: log the traceback, return an opaque 500 envelope."""
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    # Full traceback goes to the server log; the client sees only a generic message.
    logger.exception("Unhandled exception on %s", request.url.path, exc_info=exc)
    payload = {
        "code": "INTERNAL_ERROR",
        "message": "Внутренняя ошибка сервера",
        "traceId": trace_id,
        "timestamp": now_iso(),
        "path": request.url.path,
    }
    return JSONResponse(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        content=payload,
    )
+16
View File
@@ -0,0 +1,16 @@
import bcrypt
def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt using a freshly generated salt."""
    digest = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
    return digest.decode("utf-8")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash.

    Deliberately swallows every error (malformed hash, bad encoding) and
    reports it as a failed match rather than leaking an exception.
    """
    try:
        return bcrypt.checkpw(
            plain_password.encode("utf-8"),
            hashed_password.encode("utf-8"),
        )
    except Exception:
        return False
+49
View File
@@ -0,0 +1,49 @@
from __future__ import annotations
from contextvars import ContextVar
from typing import Any
# Per-request logging context, isolated per task/thread via contextvars.
_trace_id_ctx: ContextVar[str | None] = ContextVar("trace_id", default=None)
_path_ctx: ContextVar[str | None] = ContextVar("path", default=None)
_method_ctx: ContextVar[str | None] = ContextVar("method", default=None)
# Set separately once authentication resolves the user.
_user_id_ctx: ContextVar[str | None] = ContextVar("user_id", default=None)
def set_request_context(*, trace_id: str | None, path: str | None, method: str | None) -> None:
    """Bind request-scoped identifiers into the ambient logging context."""
    for ctx_var, ctx_value in (
        (_trace_id_ctx, trace_id),
        (_path_ctx, path),
        (_method_ctx, method),
    ):
        ctx_var.set(ctx_value)
def set_user_context(*, user_id: str | None) -> None:
    """Bind the authenticated user's id into the ambient logging context."""
    _user_id_ctx.set(user_id)
def clear_log_context() -> None:
    """Reset every context slot to None, e.g. at request teardown."""
    for ctx_var in (_trace_id_ctx, _path_ctx, _method_ctx, _user_id_ctx):
        ctx_var.set(None)
def get_log_context() -> dict[str, Any]:
    """Snapshot the non-empty context slots as a dict for log enrichment."""
    slots = (
        ("trace_id", _trace_id_ctx),
        ("path", _path_ctx),
        ("method", _method_ctx),
        ("user_id", _user_id_ctx),
    )
    # Falsy values (None / empty string) are omitted, matching the
    # one-key-per-truthy-slot behavior.
    return {name: value for name, var in slots if (value := var.get())}
+287
View File
@@ -0,0 +1,287 @@
from __future__ import annotations

import asyncio
import json
import os
import re
from typing import Any
def build_capability_from_action(action: Any) -> dict[str, Any]:
    """Turn one API action into a capability dict, via the LLM with a static fallback.

    When the Ollama call yields valid JSON the payload is normalized and the
    raw LLM response is preserved under ``llm_payload``; otherwise a purely
    mechanical fallback capability is built and flagged as such.
    """
    llm_result = _call_ollama_json(
        system_prompt=(
            "You convert one API action into one capability. "
            "Return only valid JSON with keys: "
            "name, description, input_schema, output_schema, data_format."
        ),
        user_prompt=_build_prompt(action),
    )
    if llm_result is None:
        fallback = _build_fallback_capability(action)
        fallback["llm_payload"] = {
            "source": "fallback",
            "reason": "ollama_unavailable_or_invalid_response",
        }
        return fallback
    capability = _normalize_capability_payload(llm_result, action)
    capability["llm_payload"] = llm_result
    return capability
def chat_json(system_prompt: str, user_prompt: str) -> dict[str, Any] | None:
    """Public wrapper over the Ollama JSON chat call; returns None on any failure."""
    return _call_ollama_json(system_prompt=system_prompt, user_prompt=user_prompt)
def reset_model_session() -> None:
    """Best-effort reset of the configured Ollama model's session state.

    Silently does nothing when the ``ollama`` client library is missing or
    the reset call fails — callers must not rely on the reset happening.
    """
    # NOTE(review): the default host is a hardcoded public IP — confirm this
    # is intentional outside the internal deployment.
    host = os.getenv("OLLAMA_HOST", "http://178.154.193.191:8067").strip()
    model = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:7b")
    headers = _load_headers()
    try:
        # Imported lazily so the app still works without the optional dependency.
        from ollama import Client
    except Exception:
        return None
    try:
        client = Client(host=host, headers=headers or None)
        # Delegates to a module-level helper defined elsewhere in this file.
        _reset_model_session(client=client, model=model)
    except Exception:
        return None
async def summarize_dialog_text(messages: list[dict[str, Any]]) -> str | None:
    """Compress a dialog history into a short Russian summary via the LLM.

    Returns the stripped ``summary`` text from the model's JSON reply, or
    ``None`` when the model is unavailable or produced nothing usable.
    """
    prompt = (
        "Кратко сожми историю диалога на русском. "
        "Сохрани цель пользователя, ограничения, недостающие данные и важные решения. "
        "Ответь только текстом без markdown.\n\n"
        f"История:\n{json.dumps(messages, ensure_ascii=False)}"
    )
    # The Ollama client is synchronous; run it in a worker thread so the
    # blocking network call does not stall the event loop.
    payload = await asyncio.to_thread(
        _call_ollama_json,
        system_prompt="Ты помощник, который сжимает диалоговый контекст для дальнейшего планирования.",
        user_prompt=prompt,
    )
    if isinstance(payload, dict):
        summary = payload.get("summary")
        if isinstance(summary, str) and summary.strip():
            return summary.strip()
    return None
def _call_ollama_json(system_prompt: str, user_prompt: str) -> dict[str, Any] | None:
    """Run one system+user chat turn against Ollama and parse the reply as JSON.

    Returns the parsed dict, or ``None`` for every failure mode: missing
    ``ollama`` package, connection/model errors, an empty reply, or a reply
    that is not (and does not contain) a JSON object. Temperature is pinned
    to 0 for repeatable output.
    """
    # NOTE(review): the default host is a hardcoded public IP — confirm this
    # is intentional outside the internal deployment.
    host = os.getenv("OLLAMA_HOST", "http://178.154.193.191:8067").strip()
    model = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:7b")
    headers = _load_headers()
    try:
        # Lazy import keeps the dependency optional.
        from ollama import Client
    except Exception:
        return None
    try:
        client = Client(host=host, headers=headers or None)
        response = client.chat(
            model=model,
            messages=[
                {
                    "role": "system",
                    "content": system_prompt,
                },
                {
                    "role": "user",
                    "content": user_prompt,
                },
            ],
            options={"temperature": 0},
        )
    except Exception:
        return None
    content = _extract_message_content(response)
    if not content:
        return None
    payload = _parse_json_payload(content)
    # Only dict payloads are accepted; arrays/scalars are treated as failures.
    if not isinstance(payload, dict):
        return None
    return payload
def _build_prompt(action: Any) -> str:
payload = {
"operation_id": getattr(action, "operation_id", None),
"method": getattr(action, "method", None).value if getattr(action, "method", None) else None,
"path": getattr(action, "path", None),
"base_url": getattr(action, "base_url", None),
"summary": getattr(action, "summary", None),
"description": getattr(action, "description", None),
"tags": getattr(action, "tags", None),
"parameters_schema": getattr(action, "parameters_schema", None),
"request_body_schema": getattr(action, "request_body_schema", None),
"response_schema": getattr(action, "response_schema", None),
}
return json.dumps(payload, ensure_ascii=True, indent=2)
def _extract_message_content(response: Any) -> str | None:
if isinstance(response, dict):
message = response.get("message")
if isinstance(message, dict):
content = message.get("content")
if isinstance(content, str):
return content
content = response.get("content")
if isinstance(content, str):
return content
return None
message = getattr(response, "message", None)
if message is not None:
content = getattr(message, "content", None)
if isinstance(content, str):
return content
content = getattr(response, "content", None)
if isinstance(content, str):
return content
return None
def _parse_json_payload(content: str) -> dict[str, Any] | None:
try:
return json.loads(content)
except json.JSONDecodeError:
match = re.search(r"\{.*\}", content, re.DOTALL)
if not match:
return None
try:
return json.loads(match.group(0))
except json.JSONDecodeError:
return None
def _normalize_capability_payload(payload: dict[str, Any], action: Any) -> dict[str, Any]:
    """Coerce raw LLM output into the capability shape, backfilling gaps from the action."""
    defaults = _build_fallback_capability(action)
    name = payload.get("name") or defaults["name"]
    description = payload.get("description") or defaults["description"]
    return {
        "name": str(name),
        "description": str(description),
        "input_schema": _normalize_schema(payload.get("input_schema")) or defaults["input_schema"],
        "output_schema": _normalize_schema(payload.get("output_schema")) or defaults["output_schema"],
        "data_format": _normalize_data_format(payload.get("data_format")) or defaults["data_format"],
    }
def _build_fallback_capability(action: Any) -> dict[str, Any]:
    """Mechanically derive a capability from the action when the LLM is unavailable."""
    return {
        "name": _build_capability_name(action),
        "description": _build_capability_description(action),
        "input_schema": _build_input_schema(action),
        "output_schema": getattr(action, "response_schema", None),
        "data_format": _build_data_format(action),
    }
def _build_capability_name(action: Any) -> str:
operation_id = getattr(action, "operation_id", None)
if operation_id:
return str(operation_id)
method = getattr(action, "method", None)
method_value = method.value.lower() if method is not None else "call"
path = getattr(action, "path", "") or ""
normalized_path = re.sub(r"[{}]", "", path).strip("/")
normalized_path = re.sub(r"[^a-zA-Z0-9/]+", "_", normalized_path)
normalized_path = normalized_path.replace("/", "_") or "root"
return f"{method_value}_{normalized_path.lower()}"
def _build_capability_description(action: Any) -> str:
summary = getattr(action, "summary", None)
description = getattr(action, "description", None)
operation_id = getattr(action, "operation_id", None)
return str(summary or description or operation_id or _build_capability_name(action))
def _build_input_schema(action: Any) -> dict[str, Any] | None:
parameters_schema = getattr(action, "parameters_schema", None)
request_body_schema = getattr(action, "request_body_schema", None)
if parameters_schema and request_body_schema:
return {
"type": "object",
"properties": {
"parameters": parameters_schema,
"request_body": request_body_schema,
},
}
if parameters_schema:
return parameters_schema
if request_body_schema:
return request_body_schema
return None
def _build_data_format(action: Any) -> dict[str, Any]:
parameters_schema = getattr(action, "parameters_schema", None) or {}
request_body_schema = getattr(action, "request_body_schema", None) or {}
response_schema = getattr(action, "response_schema", None) or {}
parameter_locations: list[str] = []
if isinstance(parameters_schema, dict):
properties = parameters_schema.get("properties", {})
if isinstance(properties, dict):
for property_schema in properties.values():
if not isinstance(property_schema, dict):
continue
location = property_schema.get("x-parameter-location")
if isinstance(location, str) and location not in parameter_locations:
parameter_locations.append(location)
request_content_type = request_body_schema.get("x-content-type") if isinstance(request_body_schema, dict) else None
response_content_type = response_schema.get("x-content-type") if isinstance(response_schema, dict) else None
return {
"parameter_locations": parameter_locations,
"request_content_types": [request_content_type] if isinstance(request_content_type, str) else [],
"request_schema_type": request_body_schema.get("type") if isinstance(request_body_schema, dict) else None,
"response_content_types": [response_content_type] if isinstance(response_content_type, str) else [],
"response_schema_types": [response_schema.get("type")] if isinstance(response_schema, dict) and isinstance(response_schema.get("type"), str) else [],
}
def _normalize_schema(value: Any) -> dict[str, Any] | None:
if isinstance(value, dict):
return value
return None
def _normalize_data_format(value: Any) -> dict[str, Any] | None:
    """Coerce a stored ``data_format`` payload into its canonical shape.

    Non-dict payloads are rejected; list-valued fields are normalized to
    lists of strings, and the scalar schema type is passed through as-is.
    """
    if not isinstance(value, dict):
        return None

    def _as_list(field: str) -> list[str]:
        return _normalize_string_list(value.get(field))

    return {
        "parameter_locations": _as_list("parameter_locations"),
        "request_content_types": _as_list("request_content_types"),
        "request_schema_type": value.get("request_schema_type"),
        "response_content_types": _as_list("response_content_types"),
        "response_schema_types": _as_list("response_schema_types"),
    }
def _normalize_string_list(value: Any) -> list[str]:
if value is None:
return []
if isinstance(value, list):
return [str(item) for item in value if item is not None]
return [str(value)]
def _load_headers() -> dict[str, str]:
headers_payload = os.getenv("OLLAMA_HEADERS_JSON")
if not headers_payload:
return {}
try:
parsed = json.loads(headers_payload)
except json.JSONDecodeError:
return {}
if not isinstance(parsed, dict):
return {}
return {str(key): str(value) for key, value in parsed.items()}
+99
View File
@@ -0,0 +1,99 @@
import os
from datetime import datetime, timedelta, timezone
from typing import List
from uuid import UUID
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.log_context import set_user_context
# ``python-jose`` is an optional import here: when it is missing, ``jwt`` is
# set to None and the endpoints raise a 500 at call time instead of the whole
# module failing to import.
try:
    from jose import JWTError, jwt
except ModuleNotFoundError:
    JWTError = Exception
    jwt = None

# WARNING(review): the fallback secret is for local development only — always
# set JWT_SECRET in the environment for any real deployment.
JWT_SECRET = os.environ.get("JWT_SECRET", "super_secret_key_123")
JWT_ALG = "HS256"

# auto_error=False: a missing Authorization header yields creds=None so we can
# return our own uniform 401 instead of FastAPI's default error.
security = HTTPBearer(auto_error=False)
def create_access_token(*, sub: str, role: str) -> tuple[str, int]:
    """Issue a signed HS256 access token for ``sub`` carrying ``role``.

    Returns:
        The encoded JWT together with its lifetime in seconds.

    Raises:
        HTTPException: 500 when python-jose is not installed.
    """
    ttl_seconds = 3600
    if jwt is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="JWT support is not installed",
        )
    claims = {
        "sub": str(sub),
        "role": role,
        "exp": datetime.now(timezone.utc) + timedelta(seconds=ttl_seconds),
    }
    return jwt.encode(claims, JWT_SECRET, algorithm=JWT_ALG), ttl_seconds
async def get_current_user(
    creds: HTTPAuthorizationCredentials | None = Depends(security),
    session: AsyncSession = Depends(get_session),
) -> User:
    """FastAPI dependency: resolve the Bearer token into an active ``User``.

    Raises:
        HTTPException 401: missing/invalid token, bad ``sub`` claim, or
            unknown user.
        HTTPException 423: the user exists but is deactivated.
        HTTPException 500: python-jose is not installed.
    """
    if creds is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
    if jwt is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="JWT support is not installed",
        )
    token = creds.credentials
    # One shared 401 for every validation failure so clients cannot probe
    # which part of the credential check failed.
    auth_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALG])
        user_id_str: str | None = payload.get("sub")
        if user_id_str is None:
            raise auth_exception
        # ValueError from a malformed UUID is caught below.
        user_id = UUID(user_id_str)
    except (JWTError, ValueError):
        raise auth_exception
    result = await session.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise auth_exception
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_423_LOCKED,
            detail="User account is deactivated",
        )
    # Attach the user id to the logging context for downstream log lines.
    set_user_context(user_id=str(user.id))
    return user
def check_permissions(allowed_roles: List[UserRole]):
    """Build a dependency that admits only users whose role is in ``allowed_roles``."""

    async def role_checker(current_user: User = Depends(get_current_user)):
        # 403 (not 401): the caller is authenticated but lacks privileges.
        if current_user.role in allowed_roles:
            return current_user
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Not enough permissions",
        )

    return role_checker
+55
View File
@@ -0,0 +1,55 @@
services:
  api:
    image: ${DOCKER_IMAGE:-solution-api}:${TAG:-latest}
    build:
      context: .
      dockerfile: Dockerfile
    restart: always
    ports:
      - "8000:8000"
    volumes:
      # Bind-mount the source tree so code changes are picked up during development.
      - ./:/app
    environment:
      # NOTE(review): dev-only credentials baked into the compose file;
      # use an .env file / secrets for anything non-local.
      - DATABASE_URL=postgresql+asyncpg://user:password@db:5432/dbname
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      # Wait for the dependencies' healthchecks, not just container start.
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - shop-network
      - default
  db:
    image: postgres:15-alpine
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
      - POSTGRES_DB=dbname
    ports:
      # Host port 5433 avoids clashing with a locally installed Postgres.
      - "5433:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U user -d dbname"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - default
  redis:
    image: redis:7-alpine
    ports:
      # Host port 6380 avoids clashing with a locally running Redis.
      - "6380:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
    networks:
      - default
networks:
  # Pre-existing network, created outside this compose project.
  shop-network:
    external: true
+10
View File
@@ -0,0 +1,10 @@
global:
  # How often Prometheus scrapes targets and evaluates rules.
  scrape_interval: 15s
  evaluation_interval: 15s
scrape_configs:
  - job_name: "backend-api"
    # Metrics endpoint exposed by the API service.
    metrics_path: /metrics
    static_configs:
      - targets:
          # Compose service name + container port of the API.
          - "api:8000"
+19
View File
@@ -0,0 +1,19 @@
# NOTE(review): most dependencies are unpinned; consider pinning exact
# versions (pip freeze / pip-tools) for reproducible builds.
fastapi
uvicorn[standard]
sqlalchemy>=2.0
asyncpg
redis
fastapi-cache2
pydantic
python-jose[cryptography]
# passlib and bcrypt are pinned together — presumably to avoid the known
# passlib 1.7.4 / bcrypt >= 4.1 incompatibility; verify before bumping.
passlib[bcrypt]==1.7.4
bcrypt==4.0.1
python-multipart
lark
pytest
pytest-asyncio
httpx
email-validator
PyYAML
ollama
prometheus-fastapi-instrumentator
View File
+77
View File
@@ -0,0 +1,77 @@
from __future__ import annotations
from types import SimpleNamespace
from uuid import uuid4
from app.services.capability_service import CapabilityService
def test_build_capability_payload_stores_rich_action_context():
    """The generated capability payload retains full OpenAPI context for the LLM.

    Verifies name/description selection, the preserved raw spec, merged
    required inputs from parameters + request body, and content-type hints.
    """
    # SimpleNamespace stands in for an Action ORM object: the service only
    # reads attributes from it.
    action = SimpleNamespace(
        id=uuid4(),
        operation_id="sendCampaignEmail",
        method=SimpleNamespace(value="POST"),
        path="/v1/campaigns/{campaign_id}/emails/send",
        base_url="https://api.example.com",
        summary="Send campaign email",
        description="Send email for selected users",
        tags=["campaign", "email"],
        source_filename="crm.yaml",
        parameters_schema={
            "type": "object",
            "required": ["campaign_id"],
            "properties": {
                "campaign_id": {"type": "string", "x-parameter-location": "path"},
                "segment_id": {"type": "string", "x-parameter-location": "query"},
            },
        },
        request_body_schema={
            "type": "object",
            "required": ["subject", "template_id"],
            "properties": {
                "subject": {"type": "string"},
                "template_id": {"type": "string"},
            },
            "x-content-type": "application/json",
        },
        response_schema={
            "type": "object",
            "properties": {"delivery_id": {"type": "string"}},
            "x-content-type": "application/json",
        },
        raw_spec={
            "deprecated": False,
            "security": [{"BearerAuth": []}],
            "requestBody": {
                "content": {
                    "application/json": {
                        "schema": {"type": "object"},
                    }
                }
            },
            "responses": {
                "200": {
                    "content": {
                        "application/json": {
                            "schema": {"type": "object"},
                        }
                    }
                }
            },
        },
    )
    payload = CapabilityService._build_capability_payload(action)
    llm_payload = payload["llm_payload"]
    action_context = llm_payload["action_context"]
    hints = llm_payload["openapi_hints"]
    assert payload["name"] == "sendCampaignEmail"
    assert payload["description"] == "Send campaign email"
    assert action_context["method"] == "POST"
    assert action_context["path"] == "/v1/campaigns/{campaign_id}/emails/send"
    assert action_context["raw_spec"]["responses"]["200"] is not None
    # Required inputs merge parameters_schema and request_body_schema "required" lists.
    assert action_context["input_signals"]["required_inputs"] == ["campaign_id", "subject", "template_id"]
    assert hints["request_content_types"] == ["application/json"]
    assert "200" in hints["response_status_codes"]
+693
View File
@@ -0,0 +1,693 @@
from __future__ import annotations
import copy
from typing import Any
from uuid import uuid4
import pytest
from app.models import Action, HttpMethod
from app.models.capability import Capability
from app.models.execution import (
ExecutionRun,
ExecutionRunStatus,
ExecutionStepRun,
ExecutionStepStatus,
)
from app.models.pipeline import Pipeline, PipelineStatus
from app.services.execution_service import ExecutionService, StepExecutionError
class FakeSession:
def __init__(self, initial: dict[tuple[type[Any], Any], Any] | None = None) -> None:
self._store = initial or {}
self.step_runs_by_step: dict[int, ExecutionStepRun] = {}
self.commit_calls = 0
async def get(self, model: type[Any], key: Any) -> Any:
return self._store.get((model, key))
def add(self, obj: Any) -> None:
if isinstance(obj, ExecutionStepRun):
self.step_runs_by_step[obj.step] = obj
def add_all(self, items: list[Any]) -> None:
for item in items:
self.add(item)
async def commit(self) -> None:
self.commit_calls += 1
async def refresh(self, _obj: Any) -> None:
return None
class FakeContextStore:
    """In-memory context store that records every snapshot passed to save."""

    def __init__(self, initial: Any = None) -> None:
        self._context = initial
        self.saved_contexts: list[dict[str, Any]] = []

    async def load_context(self, _run_id) -> dict[str, Any]:
        """Return a deep copy of the stored context; non-dict state yields {}."""
        if not isinstance(self._context, dict):
            return {}
        return copy.deepcopy(self._context)

    async def save_context(self, _run_id, context: dict[str, Any]) -> None:
        """Deep-copy the snapshot, store it, and append it to the history."""
        snapshot = copy.deepcopy(context)
        self._context = snapshot
        self.saved_contexts.append(snapshot)
def _build_action(action_id) -> Action:
    """Create a bare GET action pointing at the shared example API."""
    return Action(
        id=action_id,
        method=HttpMethod.GET,
        path="/resource",
        base_url="https://api.example.com",
    )
def _build_capability(capability_id, action_id) -> Capability:
    """Create a capability bound to ``action_id`` with a deterministic name."""
    return Capability(
        id=capability_id,
        action_id=action_id,
        # Name derived from the id so sibling capabilities never collide.
        name=f"cap_{capability_id.hex[:8]}",
    )
def _build_node(step: int, capability_id, action_id, *, external_inputs: list[str] | None = None) -> dict[str, Any]:
return {
"step": step,
"name": f"Step {step}",
"external_inputs": external_inputs or [],
"endpoints": [
{
"capability_id": str(capability_id),
"action_id": str(action_id),
}
],
}
def test_topological_sort_linear_graph():
    """A 1→2→3 chain must come back in dependency order."""
    edges = [
        {"from_step": 1, "to_step": 2, "type": "users"},
        {"from_step": 2, "to_step": 3, "type": "segments"},
    ]
    assert ExecutionService._topological_sort(steps=[1, 2, 3], edges=edges) == [1, 2, 3]
def test_extract_value_from_output_by_edge_type():
    """The edge type doubles as the lookup key into the producer's output."""
    payload = {"users": [{"id": 1}]}
    assert ExecutionService._extract_value_from_output(payload, "users") == [{"id": 1}]
def test_build_request_payload_uses_path_params_and_defaults():
    """Path params are substituted into the URL; schema defaults fill query params."""
    action = Action(
        method=HttpMethod.GET,
        path="/users/{user_id}",
        base_url="https://api.example.com",
        parameters_schema={
            "type": "object",
            "properties": {
                "user_id": {
                    "type": "string",
                    "x-parameter-location": "path",
                },
                "limit": {
                    "type": "integer",
                    "x-parameter-location": "query",
                    "default": 10,
                },
            },
            "required": ["user_id"],
        },
    )
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    payload = service._build_request_payload(
        action=action,
        resolved_inputs={"user_id": "abc"},
    )
    assert payload["url"] == "https://api.example.com/users/abc"
    # "limit" was not resolved, so its schema default is used.
    assert payload["query_params"] == {"limit": 10}
    assert payload["missing_required"] == []
@pytest.mark.asyncio
async def test_get_action_from_node_uses_capability_action_id():
    """The capability's action_id wins over a stale action_id stored on the node."""
    primary_action_id = uuid4()
    stale_action_id = uuid4()
    capability_id = uuid4()
    action = _build_action(primary_action_id)
    capability = _build_capability(capability_id, primary_action_id)
    session = FakeSession(
        {
            (Capability, capability_id): capability,
            (Action, primary_action_id): action,
        }
    )
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    # The node carries an outdated action_id on purpose.
    node = _build_node(step=1, capability_id=capability_id, action_id=stale_action_id)
    resolved_capability_id, resolved_action = await service._get_action_from_node(node)
    assert resolved_capability_id == capability_id
    assert resolved_action.id == primary_action_id
@pytest.mark.asyncio
async def test_get_action_from_node_raises_for_invalid_or_missing_bindings():
    """Each broken capability binding surfaces as a descriptive StepExecutionError."""
    service = ExecutionService(session=FakeSession())  # type: ignore[arg-type]
    # Non-UUID capability_id.
    with pytest.raises(StepExecutionError, match="valid capability_id"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": "invalid"}]}
        )
    # Capability missing from the session.
    missing_capability_id = uuid4()
    with pytest.raises(StepExecutionError, match=f"Capability not found: {missing_capability_id}"):
        await service._get_action_from_node(
            {
                "step": 1,
                "endpoints": [{"capability_id": str(missing_capability_id)}],
            }
        )
    # Capability exists but has no action binding.
    capability_id = uuid4()
    capability_without_action = _build_capability(capability_id, None)
    session = FakeSession({(Capability, capability_id): capability_without_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Capability does not have action_id: {capability_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )
    # Capability points at an action that does not exist.
    missing_action_id = uuid4()
    capability_with_missing_action = _build_capability(capability_id, missing_action_id)
    session = FakeSession({(Capability, capability_id): capability_with_missing_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Action not found for capability {capability_id}: {missing_action_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )
def test_resolve_node_inputs_prefers_edge_values_over_step_outputs():
    """A value cached under the edge key wins over the producing step's output."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 2, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        step_outputs={"1": {"users": [{"id": 1}]}},
        # Edge-value cache keyed as "from:to:type" supplies a different payload.
        edge_values={"1:2:users": [{"id": 42}]},
        run_inputs={},
    )
    assert resolved == {"users": [{"id": 42}]}
    assert missing == []
def test_resolve_node_inputs_normalizes_array_suffix_edge_types():
    """A 'type[]' edge exposes the value under both the raw and stripped keys."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 3, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 3, "type": "users[]"}],
        step_outputs={"1": {"users": [{"id": 1}]}},
        edge_values={},
        run_inputs={},
    )
    assert resolved["users[]"] == [{"id": 1}]
    assert resolved["users"] == [{"id": 1}]
    assert missing == []
def test_resolve_node_inputs_maps_user_hotel_pairs_to_segments():
    """A 'user_hotel_pairs' edge also surfaces under the producer's output key."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    segment_payload = [
        {"segment_id": "seg_1", "hotel_id": "hotel_001", "user_ids": ["usr_001"]},
    ]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 4, "external_inputs": []},
        incoming_edges=[{"from_step": 3, "to_step": 4, "type": "user_hotel_pairs"}],
        step_outputs={"3": {"segments": segment_payload}},
        edge_values={},
        run_inputs={},
    )
    # The same payload is reachable under both the edge type and "segments".
    assert resolved["user_hotel_pairs"] == segment_payload
    assert resolved["segments"] == segment_payload
    assert missing == []
def test_resolve_node_inputs_maps_empty_user_hotel_pairs_to_assignments():
    """An empty 'user_hotel_pairs' payload still maps onto the producer's key."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 5, "external_inputs": []},
        incoming_edges=[{"from_step": 4, "to_step": 5, "type": "user_hotel_pairs"}],
        step_outputs={"4": {"assignments": []}},
        edge_values={"4:5:user_hotel_pairs": []},
        run_inputs={},
    )
    # Empty lists are valid inputs, not treated as missing.
    assert resolved["user_hotel_pairs"] == []
    assert resolved["assignments"] == []
    assert missing == []
@pytest.mark.asyncio
async def test_execute_run_linear_pipeline_succeeds_and_persists_context():
    """Happy path: a two-step linear pipeline succeeds end to end.

    Checks the run summary, per-step statuses, and that step outputs plus
    edge values are persisted into the context store.
    """
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    pipeline = Pipeline(
        id=pipeline_id,
        name="Linear pipeline",
        nodes=[
            _build_node(1, capability_1_id, action_1_id, external_inputs=["seed"]),
            _build_node(2, capability_2_id, action_2_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={"seed": "abc"},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    # Intentionally malformed initial context; the assertions below check
    # that it ends up rebuilt as proper dicts.
    context_store = FakeContextStore(initial={"step_outputs": "bad", "edge_values": []})
    service = ExecutionService(session=session, context_store=context_store)  # type: ignore[arg-type]

    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        if action.id == action_1_id:
            # The external input from run.inputs must reach step 1.
            assert request_payload["resolved_inputs"]["seed"] == "abc"
            return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}
        return {"status_code": 200, "body": {"ok": True}}, {"ok": True}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["total_steps"] == 2
    assert run.summary["succeeded_steps"] == 2
    assert run.summary["failed_steps"] == 0
    assert run.summary["skipped_steps"] == 0
    assert run.summary["final_output_step"] == 2
    assert run.summary["final_output"] == {"ok": True}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SUCCEEDED
    assert context_store.saved_contexts[-1]["edge_values"]["1:2:users"] == [{"id": 1}]
    assert context_store.saved_contexts[-1]["step_outputs"]["1"] == {"users": [{"id": 1}]}
@pytest.mark.asyncio
async def test_execute_run_is_fail_fast_and_marks_remaining_as_skipped():
    """A mid-pipeline failure stops execution: the run ends PARTIAL_FAILED,
    the failed step is FAILED, and downstream steps are SKIPPED."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)
    pipeline = Pipeline(
        id=pipeline_id,
        name="Fail fast pipeline",
        nodes=[
            _build_node(1, capability_1_id, action_1_id),
            _build_node(2, capability_2_id, action_2_id),
            _build_node(3, capability_3_id, action_3_id),
        ],
        edges=[
            {"from_step": 1, "to_step": 2, "type": "users"},
            {"from_step": 2, "to_step": 3, "type": "segments"},
        ],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        # Step 2 blows up; step 1 succeeds; step 3 must never run.
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200}, {"users": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.PARTIAL_FAILED
    assert run.summary is not None
    assert run.summary["total_steps"] == 3
    assert run.summary["succeeded_steps"] == 1
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    # The last successful step's output becomes the final output.
    assert run.summary["final_output_step"] == 1
    assert run.summary["final_output"] == {"users": [1]}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[3].status == ExecutionStepStatus.SKIPPED
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_node_executes_sequential_chain():
    """A node with two endpoints runs them in order, piping the first endpoint's
    output into the second one's required inputs, and records a per-endpoint trace."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    action_2 = Action(
        id=action_2_id,
        method=HttpMethod.GET,
        path="/segments/build",
        base_url="https://api.example.com",
        parameters_schema={
            "type": "object",
            "required": ["usersList"],
            "properties": {
                "usersList": {
                    "type": "array",
                    "x-parameter-location": "query",
                }
            },
        },
    )
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    multi_endpoint_node = {
        "step": 1,
        "name": "Multi endpoint node",
        "external_inputs": [],
        "endpoints": [
            {
                "capability_id": str(capability_1_id),
                "action_id": str(action_1_id),
            },
            {
                "capability_id": str(capability_2_id),
                "action_id": str(action_2_id),
            },
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="Multi endpoint chain",
        nodes=[multi_endpoint_node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )
    call_order: list[Any] = []

    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        call_order.append(action.id)
        if action.id == action_1_id:
            return {"status_code": 200, "body": {"users_list": [{"id": 1}]}}, {"users_list": [{"id": 1}]}
        # Second endpoint must receive the first endpoint's output.
        assert request_payload["resolved_inputs"]["usersList"] == [{"id": 1}]
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    assert call_order == [action_1_id, action_2_id]
    # The step run is attributed to the first endpoint's capability/action.
    assert session.step_runs_by_step[1].capability_id == capability_1_id
    assert session.step_runs_by_step[1].action_id == action_1_id
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["status"] == "succeeded"
    assert trace[1]["status"] == "succeeded"
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_failure_stops_pipeline():
    """A failure inside a multi-endpoint node fails the whole step (and run),
    skips downstream steps, and keeps the per-endpoint trace."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)
    multi_endpoint_node = {
        "step": 1,
        "name": "Fail on second endpoint",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(capability_1_id), "action_id": str(action_1_id)},
            {"capability_id": str(capability_2_id), "action_id": str(action_2_id)},
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="Failing multi-endpoint pipeline",
        nodes=[
            multi_endpoint_node,
            _build_node(2, capability_3_id, action_3_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "segments"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        # The second endpoint of node 1 raises.
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    # No step succeeded at all, so the run is FAILED (not PARTIAL_FAILED).
    assert run.status == ExecutionRunStatus.FAILED
    assert run.summary is not None
    assert run.summary["succeeded_steps"] == 0
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SKIPPED
    failed_trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(failed_trace) == 2
    assert failed_trace[0]["status"] == "succeeded"
    assert failed_trace[1]["status"] == "failed"
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_chain_supports_composite_endpoint():
    """An endpoint chain may mix an atomic (action-backed) capability with a
    COMPOSITE one; the composite receives the atomic endpoint's output."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    atomic_capability_id = uuid4()
    composite_capability_id = uuid4()
    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    atomic_capability = _build_capability(atomic_capability_id, action_1_id)
    # Composite capabilities have no action_id; they carry a recipe instead.
    composite_capability = Capability(
        id=composite_capability_id,
        action_id=None,
        type="COMPOSITE",
        name="composite_cap",
        input_schema={
            "type": "object",
            "required": ["users"],
            "properties": {
                "users": {"type": "array"},
            },
        },
        recipe={"version": 1, "steps": [{"step": 1, "capability_id": str(atomic_capability_id), "inputs": {}}]},
    )
    node = {
        "step": 1,
        "name": "Atomic then composite",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(atomic_capability_id), "action_id": str(action_1_id)},
            {"capability_id": str(composite_capability_id), "action_id": None},
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="mixed chain pipeline",
        nodes=[node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, atomic_capability_id): atomic_capability,
            (Capability, composite_capability_id): composite_capability,
            (Action, action_1_id): action_1,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )

    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        assert action.id == action_1_id
        return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}

    async def fake_execute_composite_capability(
        *,
        capability: Capability,
        resolved_inputs: dict[str, Any],
        run_inputs: dict[str, Any],
    ):
        assert capability.id == composite_capability_id
        # The atomic endpoint's output feeds the composite's required input.
        assert resolved_inputs["users"] == [{"id": 1}]
        assert run_inputs == {}
        return {"capability_type": "COMPOSITE", "status_code": 200}, {"segments": [1]}

    service._call_action = fake_call_action  # type: ignore[method-assign]
    service._execute_composite_capability = fake_execute_composite_capability  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["capability_id"] == str(atomic_capability_id)
    assert trace[1]["capability_id"] == str(composite_capability_id)
    assert trace[1]["capability_type"] == "COMPOSITE"
@@ -0,0 +1,80 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import uuid4
from app.api.executions.get_execution import _build_step_run_response
from app.models.execution import ExecutionStepRun, ExecutionStepStatus
def _build_step_run(
    *,
    request_snapshot,
    response_snapshot,
) -> ExecutionStepRun:
    """Create a succeeded step run carrying the given request/response snapshots."""
    now = datetime.now(timezone.utc)
    step_run = ExecutionStepRun(
        run_id=uuid4(),
        step=1,
        status=ExecutionStepStatus.SUCCEEDED,
    )
    # Remaining fields are assigned after construction as plain attributes.
    step_run.name = "Step 1"
    step_run.request_snapshot = request_snapshot
    step_run.response_snapshot = response_snapshot
    step_run.created_at = now
    step_run.updated_at = now
    return step_run
def test_build_step_run_response_for_post_sets_accepted_and_output_payloads():
    """POST steps expose the request JSON body as accepted_payload and the
    response body as output_payload."""
    step_run = _build_step_run(
        request_snapshot={
            "method": "post",
            "json_body": {"subject": "Hi", "message": "Hello"},
        },
        response_snapshot={
            "status_code": 200,
            "body": {"sent": 1},
        },
    )
    response = _build_step_run_response(step_run)
    # The lowercase method from the snapshot is normalized to uppercase.
    assert response.method == "POST"
    assert response.status_code == 200
    assert response.accepted_payload == {"subject": "Hi", "message": "Hello"}
    assert response.output_payload == {"sent": 1}
def test_build_step_run_response_for_get_keeps_accepted_payload_none():
    """GET steps have no body, so accepted_payload stays None; a string
    status code is coerced to an int."""
    step_run = _build_step_run(
        request_snapshot={
            "method": "GET",
            "query_params": {"limit": 20},
        },
        response_snapshot={
            "status_code": "204",
            "body": "",
        },
    )
    response = _build_step_run_response(step_run)
    assert response.method == "GET"
    assert response.status_code == 204
    assert response.accepted_payload is None
    # An empty-string body is preserved as-is, not converted to None.
    assert response.output_payload == ""
def test_build_step_run_response_handles_missing_snapshots():
    """With no snapshots at all, every derived field degrades to None."""
    step_run = _build_step_run(
        request_snapshot=None,
        response_snapshot=None,
    )
    response = _build_step_run_response(step_run)
    assert response.method is None
    assert response.status_code is None
    assert response.accepted_payload is None
    assert response.output_payload is None
+11
View File
@@ -0,0 +1,11 @@
from httpx import AsyncClient, ASGITransport
import pytest
from app.main import app
@pytest.mark.asyncio
async def test_ping():
    """Smoke test: GET /api/ping responds 200 with {"status": "ok"}."""
    # Use ASGITransport to call the app in-process (required by modern httpx versions).
    async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
        response = await ac.get("/api/ping")
        assert response.status_code == 200
        assert response.json() == {"status": "ok"}
+123
View File
@@ -0,0 +1,123 @@
from __future__ import annotations
from uuid import uuid4
from app.models.capability import Capability, CapabilityType
from app.services.pipeline_service import PipelineService
from app.services.semantic_selection import SelectedCapability
def _build_capability(*, name: str, required_inputs: list[str] | None = None) -> Capability:
    """Create an ATOMIC capability with fresh ids.

    When ``required_inputs`` is given, each name becomes a required string
    property of the input schema; otherwise the schema is None.
    """
    cap_id = uuid4()
    action_id = uuid4()
    input_schema = None
    if required_inputs is not None:
        input_schema = {
            "type": "object",
            "required": required_inputs,
            "properties": {
                input_name: {"type": "string"}
                for input_name in required_inputs
            },
        }
    return Capability(
        id=cap_id,
        action_id=action_id,
        type=CapabilityType.ATOMIC,
        name=name,
        input_schema=input_schema,
        output_schema={"type": "object"},
    )
def _select(capability: Capability) -> SelectedCapability:
    """Wrap a capability as a high-confidence selection with a perfect score."""
    return SelectedCapability(capability=capability, score=1.0, confidence_tier="high")
def test_extract_required_inputs_from_node_merges_all_endpoints():
    """Required inputs from every endpoint are merged, de-duplicated, first-seen order."""
    service = PipelineService(session=None)  # type: ignore[arg-type]
    multi_endpoint_node = {
        "step": 1,
        "endpoints": [
            {"input_type": {"type": "object", "required": ["users", "campaignId"]}},
            {"input_type": {"type": "object", "required": ["segments", "users"]}},
        ],
    }
    merged = service._extract_required_inputs_from_node(multi_endpoint_node)
    # "users" appears twice but must only be kept once, at its first position.
    assert merged == ["users", "campaignId", "segments"]
def test_normalize_workflow_preserves_multi_endpoint_nodes():
    """A node referencing several valid capabilities keeps every endpoint intact."""
    cap_first = _build_capability(name="Get users", required_inputs=["users"])
    cap_second = _build_capability(name="Build segments", required_inputs=["users"])
    service = PipelineService(session=None)  # type: ignore[arg-type]
    graph = {
        "nodes": [
            {
                "step": 1,
                "name": "Composite-like node",
                "endpoints": [
                    {"capability_id": str(cap_first.id)},
                    {"capability_id": str(cap_second.id)},
                ],
            }
        ],
        "edges": [],
    }
    nodes, edges, issues = service._normalize_workflow(
        graph, [_select(cap_first), _select(cap_second)]
    )
    assert issues == []
    assert edges == []
    assert len(nodes) == 1
    kept = nodes[0]["endpoints"]
    assert len(kept) == 2
    # Normalization must keep both endpoints and enrich each with its action_id.
    assert [e["capability_id"] for e in kept] == [str(cap_first.id), str(cap_second.id)]
    assert kept[0]["action_id"] == str(cap_first.action_id)
    assert kept[1]["action_id"] == str(cap_second.action_id)
def test_normalize_workflow_flags_invalid_endpoint_capability_refs():
    """Endpoints pointing at unknown capabilities are dropped and flagged as an issue."""
    known_cap = _build_capability(name="Get users", required_inputs=["users"])
    service = PipelineService(session=None)  # type: ignore[arg-type]
    graph = {
        "nodes": [
            {
                "step": 1,
                "name": "Node with invalid endpoint",
                "endpoints": [
                    # First ref is a random UUID no selection knows about.
                    {"capability_id": str(uuid4())},
                    {"capability_id": str(known_cap.id)},
                ],
            }
        ],
        "edges": [],
    }
    nodes, _edges, issues = service._normalize_workflow(graph, [_select(known_cap)])
    assert "graph:invalid_capability_ref" in issues
    assert len(nodes) == 1
    surviving = nodes[0]["endpoints"]
    assert len(surviving) == 1
    assert surviving[0]["capability_id"] == str(known_cap.id)
+41
View File
@@ -0,0 +1,41 @@
from __future__ import annotations
from types import SimpleNamespace
from app.services.semantic_selection import SemanticSelectionService
def test_score_maps_ru_users_query_to_en_capability_tokens():
    """A Russian "get users" query must score well against English capability tokens."""
    service = SemanticSelectionService()
    tokens = service._tokenize("Хочу получить пользователей")
    expanded = service._expand_tokens(tokens)
    fake_capability = SimpleNamespace(name="get_users", description="Get users list")
    assert service._score_capability(tokens, expanded, fake_capability) >= 0.45
def test_score_uses_capability_action_context_tokens():
    """Tokens from llm_payload's action context (path/tags/summary) must add to the score."""
    service = SemanticSelectionService()
    tokens = service._tokenize("Отправь email по кампании")
    expanded = service._expand_tokens(tokens)
    # Name/description alone are generic; only the action context brief matches the query.
    fake_capability = SimpleNamespace(
        name="execute_action",
        description="General API action",
        llm_payload={
            "action_context_brief": {
                "method": "POST",
                "path": "/v1/campaigns/emails/send",
                "tags": ["campaign", "email"],
                "summary": "Send campaign emails",
            }
        },
    )
    assert service._score_capability(tokens, expanded, fake_capability) > 0.0
@@ -0,0 +1,248 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import UUID, uuid4
import pytest
from httpx import ASGITransport, AsyncClient, Response
from app.core.database.session import get_session
from app.main import app
from app.models import Pipeline, PipelineStatus, User, UserRole
from app.utils.token_manager import get_current_user
class FakeSession:
    """Minimal async-session stand-in exposing only what the graph handler touches."""

    def __init__(self, pipeline: Pipeline | None):
        # The single pipeline this fake "database" knows about (None = empty DB).
        self.pipeline = pipeline
        self.committed = False

    async def get(self, model, key: UUID):
        """Mimic AsyncSession.get: return the stored pipeline only on an exact match."""
        hit = model is Pipeline and self.pipeline and key == self.pipeline.id
        return self.pipeline if hit else None

    async def commit(self):
        """Record the commit and bump updated_at like a real flush would."""
        self.committed = True
        if self.pipeline is not None:
            self.pipeline.updated_at = datetime.now(timezone.utc)

    async def refresh(self, _obj):
        """No-op: there is no stale state to reload in the fake."""
        return None
@pytest.fixture(autouse=True)
def clear_dependency_overrides():
    """Reset FastAPI dependency overrides before and after every test in this module.

    Autouse so that overrides installed by one test can never leak into another.
    """
    app.dependency_overrides.clear()
    yield
    app.dependency_overrides.clear()
def _build_user(*, user_id: UUID, role: UserRole = UserRole.USER) -> User:
    """Create an active in-memory user whose email is derived from its id."""
    account = User(
        id=user_id,
        email=f"{user_id}@example.com",
        hashed_password="hashed",
        role=role,
        is_active=True,
    )
    # Timestamps are set after construction, mirroring DB-populated columns.
    account.created_at = datetime.now(timezone.utc)
    account.updated_at = datetime.now(timezone.utc)
    return account
def _build_pipeline(*, pipeline_id: UUID, owner_id: UUID) -> Pipeline:
    """Create a two-step draft pipeline.

    Step 1 deliberately carries stale connection refs (99 / 98) so the tests can
    verify that the PATCH handler rebuilds connections from the submitted edges.
    """

    def node(step: int, name: str, connected_from: list, connected_to: list) -> dict:
        # One workflow node with empty endpoint/type metadata.
        return {
            "step": step,
            "name": name,
            "description": None,
            "input_connected_from": connected_from,
            "output_connected_to": connected_to,
            "input_data_type_from_previous": [],
            "external_inputs": [],
            "endpoints": [],
        }

    built = Pipeline(
        id=pipeline_id,
        name="Travel pipeline",
        description=None,
        user_prompt=None,
        nodes=[
            node(1, "Get users", [99], [98]),
            node(2, "Segment users", [], []),
        ],
        edges=[],
        status=PipelineStatus.DRAFT,
        created_by=owner_id,
    )
    built.created_at = datetime.now(timezone.utc)
    built.updated_at = datetime.now(timezone.utc)
    return built
async def _patch_graph(pipeline_id: UUID, payload: dict) -> Response:
    """Issue the graph PATCH request through an in-process ASGI transport."""
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        return await client.patch(f"/api/v1/pipelines/{pipeline_id}/graph", json=payload)
@pytest.mark.asyncio
async def test_patch_graph_success_for_owner_normalizes_connections():
    """An owner's PATCH succeeds and node connections are rebuilt from the edges."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def fake_session_dep():
        yield session

    async def fake_user_dep():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = fake_session_dep
    app.dependency_overrides[get_current_user] = fake_user_dep

    result = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            "edges": [{"from_step": 1, "to_step": 2, "type": "users"}],
        },
    )

    assert result.status_code == 200
    body = result.json()
    assert body["pipeline_id"] == str(pipeline_id)
    assert body["edges"] == [{"from_step": 1, "to_step": 2, "type": "users"}]
    # The fixture's stale refs (99 / 98) must be replaced by the real neighbours.
    assert body["nodes"][0]["output_connected_to"] == [2]
    assert body["nodes"][1]["input_connected_from"] == [1]
    assert body["nodes"][1]["input_data_type_from_previous"] == [
        {"from_step": 1, "type": "users"}
    ]
    assert isinstance(body["updated_at"], str)
    assert session.committed is True
@pytest.mark.asyncio
async def test_patch_graph_returns_404_for_non_owner():
    """A PATCH from an authenticated non-owner is answered with 404."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def fake_session_dep():
        yield session

    async def fake_user_dep():
        # Authenticated, but not the pipeline's owner.
        return _build_user(user_id=uuid4())

    app.dependency_overrides[get_session] = fake_session_dep
    app.dependency_overrides[get_current_user] = fake_user_dep

    result = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            "edges": [{"from_step": 1, "to_step": 2, "type": "users"}],
        },
    )
    assert result.status_code == 404
@pytest.mark.asyncio
async def test_patch_graph_rejects_cycle():
    """Edges forming a 1 -> 2 -> 1 loop are rejected with a validation error."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def fake_session_dep():
        yield session

    async def fake_user_dep():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = fake_session_dep
    app.dependency_overrides[get_current_user] = fake_user_dep

    result = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            "edges": [
                {"from_step": 1, "to_step": 2, "type": "users"},
                {"from_step": 2, "to_step": 1, "type": "segments"},
            ],
        },
    )
    assert result.status_code == 422
    body = result.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: cycle" in body["details"]["errors"]
@pytest.mark.asyncio
async def test_patch_graph_rejects_edge_to_missing_node():
    """An edge pointing at a non-existent step id fails graph validation."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def fake_session_dep():
        yield session

    async def fake_user_dep():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = fake_session_dep
    app.dependency_overrides[get_current_user] = fake_user_dep

    result = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            # Step 999 does not exist in the two-node fixture.
            "edges": [{"from_step": 1, "to_step": 999, "type": "users"}],
        },
    )
    assert result.status_code == 422
    body = result.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: edge_to_missing_node:1->999" in body["details"]["errors"]
@pytest.mark.asyncio
async def test_patch_graph_rejects_duplicate_edge_triplets():
    """The same (from, to, type) edge listed twice fails graph validation."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    session = FakeSession(_build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id))

    async def fake_session_dep():
        yield session

    async def fake_user_dep():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = fake_session_dep
    app.dependency_overrides[get_current_user] = fake_user_dep

    result = await _patch_graph(
        pipeline_id,
        {
            "nodes": session.pipeline.nodes,
            "edges": [
                {"from_step": 1, "to_step": 2, "type": "users"},
                {"from_step": 1, "to_step": 2, "type": "users"},
            ],
        },
    )
    assert result.status_code == 422
    body = result.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: duplicate_edge:1->2:users" in body["details"]["errors"]
+9
View File
@@ -0,0 +1,9 @@
__pycache__/
*.py[cod]
*.log
.pytest_cache/
.mypy_cache/
.venv/
venv/
openapi/
README.md
+14
View File
@@ -0,0 +1,14 @@
# syntax=docker/dockerfile:1
FROM python:3.12-slim

WORKDIR /app

# Install dependencies first so source-only changes don't bust the pip layer.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY app ./app

ENV PYTHONUNBUFFERED=1

# Run as an unprivileged user; port 8010 is non-privileged, so root is unnecessary.
RUN useradd --system --uid 10001 --create-home appuser
USER appuser

EXPOSE 8010

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8010"]
+83
View File
@@ -0,0 +1,83 @@
# demo-backend
Отдельный демо backend для travel pipeline из `openapi/travel.yaml`.
## Запуск
```bash
cd demo-backend
python3 -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
uvicorn app.main:app --reload --port 8010
```
## Запуск в Docker
```bash
cd demo-backend
docker network create shop-network 2>/dev/null || true
docker compose up -d --build
```
Остановка:
```bash
docker compose down
```
## Что реализовано
Travel линейный сценарий:
- `GET /users/recent` (`operationId: getRecentUsers`)
- `GET /hotels/top` (`operationId: getTopHotels`)
- `POST /segments/hotel` (`operationId: segmentUsersByHotelPreferences`)
- `POST /assignments/hotels` (`operationId: assignUsersToHotels`)
- `POST /emails/send-offers` (`operationId: sendHotelOffersByEmail`)
CRM линейный сценарий:
- `GET /crm/leads/recent` (`operationId: getRecentLeads`)
- `POST /crm/leads/qualify` (`operationId: qualifyLeadsForOffer`)
- `POST /crm/offers/prepare` (`operationId: prepareOffersForLeads`)
- `POST /crm/offers/send` (`operationId: sendPreparedOffers`)
Swagger UI: `http://localhost:8010/docs`
OpenAPI JSON: `http://localhost:8010/openapi.json`
Для генерации/запуска pipeline в основном backend импортируй именно
`demo-backend/openapi/travel.yaml`:
- `servers[0].url` = `http://demo-api:8010` (работает для backend-контейнера в `shop-network`)
- `servers[1].url` = `http://localhost:8010` (локальный запуск без Docker)
- у `template_id` задан `default`, чтобы one-click execution не требовал ручного ввода
Для CRM-сценария используй `demo-backend/openapi/crm_linear_pipeline.yaml`.
Если хочешь загрузить сразу все демо-ручки одним файлом:
`demo-backend/openapi/all_linear_scenarios.yaml`.
## Быстрая проверка пайплайна
```bash
BASE=http://localhost:8010
curl -s "$BASE/users/recent?limit=3" > /tmp/users.json
curl -s "$BASE/hotels/top?limit=2" > /tmp/hotels.json
jq -n \
--argjson users "$(jq '.users' /tmp/users.json)" \
--argjson hotels "$(jq '.hotels' /tmp/hotels.json)" \
'{users:$users, hotels:$hotels}' \
| curl -s -X POST "$BASE/segments/hotel" \
-H 'content-type: application/json' -d @- > /tmp/segments.json
jq -n --argjson segments "$(jq '.segments' /tmp/segments.json)" '{segments:$segments}' \
| curl -s -X POST "$BASE/assignments/hotels" \
-H 'content-type: application/json' -d @- > /tmp/assignments.json
jq -n \
--arg template_id "offer_template_2026" \
--argjson assignments "$(jq '.assignments' /tmp/assignments.json)" \
'{template_id:$template_id, assignments:$assignments}' \
| curl -s -X POST "$BASE/emails/send-offers" \
-H 'content-type: application/json' -d @-
```
View File
+390
View File
@@ -0,0 +1,390 @@
from __future__ import annotations
from datetime import datetime, timedelta, timezone
from typing import Annotated
from fastapi import FastAPI, Query
from pydantic import BaseModel, Field
# --- Travel workflow wire models ---
class User(BaseModel):
    # Synthetic end user; last_active feeds the /users/recent recency filter.
    id: str
    email: str
    last_active: datetime
class Hotel(BaseModel):
    # Catalog entry served by /hotels/top.
    id: str
    name: str
    city: str
class Segment(BaseModel):
    # A group of users attached to a single hotel.
    segment_id: str
    hotel_id: str
    user_ids: list[str] = Field(default_factory=list)
class Assignment(BaseModel):
    # One user matched to one hotel (flattened from a Segment).
    user_id: str
    hotel_id: str
class RecentUsersResponse(BaseModel):
    users: list[User] = Field(default_factory=list)
class TopHotelsResponse(BaseModel):
    hotels: list[Hotel] = Field(default_factory=list)
class HotelSegmentsRequest(BaseModel):
    # Inputs for POST /segments/hotel: the users to distribute and the hotels to fill.
    users: list[User] = Field(default_factory=list)
    hotels: list[Hotel] = Field(default_factory=list)
class HotelSegmentsResponse(BaseModel):
    segments: list[Segment] = Field(default_factory=list)
class AssignmentsRequest(BaseModel):
    segments: list[Segment] = Field(default_factory=list)
class AssignmentsResponse(BaseModel):
    assignments: list[Assignment] = Field(default_factory=list)
class EmailOfferRequest(BaseModel):
    # Default template lets one-click execution run without manual input.
    template_id: str = "offer_template_2026"
    assignments: list[Assignment] = Field(default_factory=list)
class FailedDelivery(BaseModel):
    # One undeliverable user plus the reason reported by the sender.
    user_id: str
    reason: str
class EmailOfferResponse(BaseModel):
    # Delivery summary: counts plus the per-user failure details.
    sent_count: int
    failed_count: int
    failed: list[FailedDelivery] = Field(default_factory=list)
# --- CRM workflow wire models ---
class Lead(BaseModel):
    # Raw inbound lead with its acquisition source.
    lead_id: str
    email: str
    source: str
class QualifiedLead(BaseModel):
    # Lead enriched with a numeric score and a high/medium/low tier.
    lead_id: str
    email: str
    score: int
    tier: str
class PreparedOffer(BaseModel):
    # Offer ready for delivery over the chosen channel (email or push).
    offer_id: str
    lead_id: str
    channel: str
    message: str
class RecentLeadsResponse(BaseModel):
    leads: list[Lead] = Field(default_factory=list)
class QualifyLeadsRequest(BaseModel):
    leads: list[Lead] = Field(default_factory=list)
class QualifyLeadsResponse(BaseModel):
    qualified_leads: list[QualifiedLead] = Field(default_factory=list)
class PrepareOffersRequest(BaseModel):
    qualified_leads: list[QualifiedLead] = Field(default_factory=list)
class PrepareOffersResponse(BaseModel):
    offers: list[PreparedOffer] = Field(default_factory=list)
class SendOffersRequest(BaseModel):
    offers: list[PreparedOffer] = Field(default_factory=list)
class FailedLeadDelivery(BaseModel):
    # One undeliverable lead plus the reason reported by the sender.
    lead_id: str
    reason: str
class SendOffersResponse(BaseModel):
    # Delivery summary: counts plus the per-lead failure details.
    sent_count: int
    failed_count: int
    failed: list[FailedLeadDelivery] = Field(default_factory=list)
# Markdown description surfaced in the generated OpenAPI docs (/docs).
APP_DESCRIPTION = """
Synthetic API with multiple linear demo workflows.
Travel workflow:
1. `GET /users/recent`
2. `GET /hotels/top`
3. `POST /segments/hotel`
4. `POST /assignments/hotels`
5. `POST /emails/send-offers`
CRM workflow:
1. `GET /crm/leads/recent`
2. `POST /crm/leads/qualify`
3. `POST /crm/offers/prepare`
4. `POST /crm/offers/send`
""".strip()
app = FastAPI(
    title="Travel Product Manager API",
    version="1.0.0",
    description=APP_DESCRIPTION,
)
# Fixed reference time so the _build_users fixture is fully deterministic.
BASE_USERS_TS = datetime(2026, 3, 13, 10, 0, tzinfo=timezone.utc)
# Static hotel inventory served by /hotels/top.
HOTEL_CATALOG: list[Hotel] = [
    Hotel(id="hotel_001", name="Hotel Aurora", city="Berlin"),
    Hotel(id="hotel_002", name="Sea Breeze Resort", city="Lisbon"),
    Hotel(id="hotel_003", name="Mountain Vista", city="Zurich"),
    Hotel(id="hotel_004", name="City Loft", city="Amsterdam"),
    Hotel(id="hotel_005", name="River Palace", city="Prague"),
    Hotel(id="hotel_006", name="Nordic Harbor", city="Stockholm"),
    Hotel(id="hotel_007", name="Sunset Bay", city="Barcelona"),
    Hotel(id="hotel_008", name="Alpine Crown", city="Vienna"),
]
def _build_users() -> list[User]:
    """Build the deterministic 30-user fixture, activity spaced 5 minutes apart."""
    return [
        User(
            id=f"usr_{n:03d}",
            email=f"user{n:03d}@example.com",
            # Users are ordered newest-first relative to the fixed base timestamp.
            last_active=BASE_USERS_TS - timedelta(minutes=(n - 1) * 5),
        )
        for n in range(1, 31)
    ]
def _build_recent_leads() -> list[Lead]:
    """Build the deterministic 20-lead fixture, cycling over four acquisition sources."""
    source_cycle = ["landing", "webinar", "partner", "organic"]
    return [
        Lead(
            lead_id=f"lead_{n:03d}",
            email=f"lead{n:03d}@example.com",
            source=source_cycle[n % len(source_cycle)],
        )
        for n in range(1, 21)
    ]
@app.get(
    "/users/recent",
    response_model=RecentUsersResponse,
    operation_id="getRecentUsers",
    tags=["travel-offer-workflow"],
)
async def get_recent_users(
    last_active_after: Annotated[datetime | None, Query()] = None,
    limit: Annotated[int, Query(ge=1, le=100)] = 30,
) -> RecentUsersResponse:
    """Return up to `limit` synthetic users, optionally filtered by activity cutoff."""
    recent = _build_users()
    if last_active_after is not None:
        # Strictly newer than the cutoff, matching the parameter name.
        recent = [u for u in recent if u.last_active > last_active_after]
    return RecentUsersResponse(users=recent[:limit])
@app.get(
    "/hotels/top",
    response_model=TopHotelsResponse,
    operation_id="getTopHotels",
    tags=["travel-offer-workflow"],
)
async def get_top_hotels(
    limit: Annotated[int, Query(ge=1, le=20)] = 5,
    city: Annotated[str | None, Query()] = None,
) -> TopHotelsResponse:
    """Return up to `limit` catalog hotels, optionally restricted to one city."""
    matching = HOTEL_CATALOG
    if city:
        # Case-insensitive, whitespace-tolerant city comparison.
        wanted = city.strip().lower()
        matching = [hotel for hotel in matching if hotel.city.lower() == wanted]
    return TopHotelsResponse(hotels=matching[:limit])
@app.post(
    "/segments/hotel",
    response_model=HotelSegmentsResponse,
    operation_id="segmentUsersByHotelPreferences",
    tags=["travel-offer-workflow"],
)
async def segment_users_by_hotel_preferences(
    payload: HotelSegmentsRequest,
) -> HotelSegmentsResponse:
    """Round-robin users across the given hotels; hotels that get nobody are omitted."""
    if not payload.users or not payload.hotels:
        return HotelSegmentsResponse(segments=[])
    hotel_count = len(payload.hotels)
    buckets: dict[str, list[str]] = {hotel.id: [] for hotel in payload.hotels}
    for position, user in enumerate(payload.users):
        buckets[payload.hotels[position % hotel_count].id].append(user.id)
    # Segments come back in hotel order; empty buckets are dropped.
    result = [
        Segment(
            segment_id=f"seg_{hotel.id}",
            hotel_id=hotel.id,
            user_ids=buckets[hotel.id],
        )
        for hotel in payload.hotels
        if buckets[hotel.id]
    ]
    return HotelSegmentsResponse(segments=result)
@app.post(
    "/assignments/hotels",
    response_model=AssignmentsResponse,
    operation_id="assignUsersToHotels",
    tags=["travel-offer-workflow"],
)
async def assign_users_to_hotels(payload: AssignmentsRequest) -> AssignmentsResponse:
    """Flatten segments into individual (user, hotel) assignments."""
    flattened = [
        Assignment(user_id=uid, hotel_id=segment.hotel_id)
        for segment in payload.segments
        for uid in segment.user_ids
    ]
    return AssignmentsResponse(assignments=flattened)
@app.post(
    "/emails/send-offers",
    response_model=EmailOfferResponse,
    status_code=200,
    operation_id="sendHotelOffersByEmail",
    tags=["travel-offer-workflow"],
)
async def send_hotel_offers_by_email(payload: EmailOfferRequest) -> EmailOfferResponse:
    """Simulate delivery; user ids ending in "000" are treated as undeliverable."""
    # template_id is accepted for schema compatibility but has no effect here.
    undeliverable = [
        FailedDelivery(
            user_id=assignment.user_id,
            reason="Invalid user id for delivery",
        )
        for assignment in payload.assignments
        if assignment.user_id.endswith("000")
    ]
    return EmailOfferResponse(
        sent_count=len(payload.assignments) - len(undeliverable),
        failed_count=len(undeliverable),
        failed=undeliverable,
    )
@app.get(
    "/crm/leads/recent",
    response_model=RecentLeadsResponse,
    operation_id="getRecentLeads",
    tags=["crm-linear-workflow"],
)
async def get_recent_leads(
    limit: Annotated[int, Query(ge=1, le=50)] = 20,
    source: Annotated[str | None, Query()] = None,
) -> RecentLeadsResponse:
    """Return up to `limit` synthetic leads, optionally filtered by acquisition source."""
    matching = _build_recent_leads()
    if source:
        # Case-insensitive, whitespace-tolerant source comparison.
        wanted = source.strip().lower()
        matching = [lead for lead in matching if lead.source.lower() == wanted]
    return RecentLeadsResponse(leads=matching[:limit])
@app.post(
    "/crm/leads/qualify",
    response_model=QualifyLeadsResponse,
    operation_id="qualifyLeadsForOffer",
    tags=["crm-linear-workflow"],
)
async def qualify_leads_for_offer(payload: QualifyLeadsRequest) -> QualifyLeadsResponse:
    """Assign a deterministic pseudo-score (55-99) and tier to every incoming lead."""
    qualified: list[QualifiedLead] = []
    for position, lead in enumerate(payload.leads):
        # Position-based score keeps the demo deterministic across runs.
        points = 55 + ((position * 7) % 45)
        if points >= 80:
            band = "high"
        elif points >= 65:
            band = "medium"
        else:
            band = "low"
        qualified.append(
            QualifiedLead(
                lead_id=lead.lead_id,
                email=lead.email,
                score=points,
                tier=band,
            )
        )
    return QualifyLeadsResponse(qualified_leads=qualified)
@app.post(
    "/crm/offers/prepare",
    response_model=PrepareOffersResponse,
    operation_id="prepareOffersForLeads",
    tags=["crm-linear-workflow"],
)
async def prepare_offers_for_leads(payload: PrepareOffersRequest) -> PrepareOffersResponse:
    """Build one offer per qualified lead; low-tier leads go to push instead of email."""
    prepared = [
        PreparedOffer(
            offer_id=f"offer_{lead.lead_id}",
            lead_id=lead.lead_id,
            channel="email" if lead.tier in {"high", "medium"} else "push",
            message=f"Special travel offer for {lead.tier} intent lead",
        )
        for lead in payload.qualified_leads
    ]
    return PrepareOffersResponse(offers=prepared)
@app.post(
    "/crm/offers/send",
    response_model=SendOffersResponse,
    operation_id="sendPreparedOffers",
    tags=["crm-linear-workflow"],
)
async def send_prepared_offers(payload: SendOffersRequest) -> SendOffersResponse:
    """Simulate delivery; lead ids ending in "000" are treated as undeliverable."""
    undeliverable = [
        FailedLeadDelivery(
            lead_id=offer.lead_id,
            reason="Invalid lead for delivery",
        )
        for offer in payload.offers
        if offer.lead_id.endswith("000")
    ]
    return SendOffersResponse(
        sent_count=len(payload.offers) - len(undeliverable),
        failed_count=len(undeliverable),
        failed=undeliverable,
    )
@app.get("/health")
async def health() -> dict[str, str]:
    """Liveness probe used by the docker-compose healthcheck."""
    return {"status": "ok"}
+26
View File
@@ -0,0 +1,26 @@
services:
demo-api:
image: ${DOCKER_IMAGE:-demo-backend-api}:${TAG:-latest}
build:
context: .
dockerfile: Dockerfile
restart: always
ports:
- "8010:8010"
healthcheck:
test:
[
"CMD",
"python",
"-c",
"import urllib.request; urllib.request.urlopen('http://localhost:8010/health').read()",
]
interval: 10s
timeout: 5s
retries: 5
networks:
- shop-network
networks:
shop-network:
external: true
@@ -0,0 +1,658 @@
openapi: 3.1.0
info:
title: Travel Product Manager API
description: 'Synthetic API with multiple linear demo workflows.
Travel workflow:
1. `GET /users/recent`
2. `GET /hotels/top`
3. `POST /segments/hotel`
4. `POST /assignments/hotels`
5. `POST /emails/send-offers`
CRM workflow:
1. `GET /crm/leads/recent`
2. `POST /crm/leads/qualify`
3. `POST /crm/offers/prepare`
4. `POST /crm/offers/send`'
version: 1.0.0
servers:
- url: http://84.201.161.175
description: production
paths:
/users/recent:
get:
tags:
- travel-offer-workflow
summary: Get Recent Users
operationId: getRecentUsers
parameters:
- name: last_active_after
in: query
required: false
schema:
anyOf:
- type: string
format: date-time
- type: 'null'
title: Last Active After
- name: limit
in: query
required: false
schema:
type: integer
maximum: 100
minimum: 1
default: 30
title: Limit
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/RecentUsersResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/hotels/top:
get:
tags:
- travel-offer-workflow
summary: Get Top Hotels
operationId: getTopHotels
parameters:
- name: limit
in: query
required: false
schema:
type: integer
maximum: 20
minimum: 1
default: 5
title: Limit
- name: city
in: query
required: false
schema:
anyOf:
- type: string
- type: 'null'
title: City
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/TopHotelsResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/segments/hotel:
post:
tags:
- travel-offer-workflow
summary: Segment Users By Hotel Preferences
operationId: segmentUsersByHotelPreferences
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/HotelSegmentsRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/HotelSegmentsResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/assignments/hotels:
post:
tags:
- travel-offer-workflow
summary: Assign Users To Hotels
operationId: assignUsersToHotels
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/AssignmentsRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/AssignmentsResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/emails/send-offers:
post:
tags:
- travel-offer-workflow
summary: Send Hotel Offers By Email
operationId: sendHotelOffersByEmail
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/EmailOfferRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/EmailOfferResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/crm/leads/recent:
get:
tags:
- crm-linear-workflow
summary: Get Recent Leads
operationId: getRecentLeads
parameters:
- name: limit
in: query
required: false
schema:
type: integer
maximum: 50
minimum: 1
default: 20
title: Limit
- name: source
in: query
required: false
schema:
anyOf:
- type: string
- type: 'null'
title: Source
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/RecentLeadsResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/crm/leads/qualify:
post:
tags:
- crm-linear-workflow
summary: Qualify Leads For Offer
operationId: qualifyLeadsForOffer
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/QualifyLeadsRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/QualifyLeadsResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/crm/offers/prepare:
post:
tags:
- crm-linear-workflow
summary: Prepare Offers For Leads
operationId: prepareOffersForLeads
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/PrepareOffersRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/PrepareOffersResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/crm/offers/send:
post:
tags:
- crm-linear-workflow
summary: Send Prepared Offers
operationId: sendPreparedOffers
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/SendOffersRequest'
required: true
responses:
'200':
description: Successful Response
content:
application/json:
schema:
$ref: '#/components/schemas/SendOffersResponse'
'422':
description: Validation Error
content:
application/json:
schema:
$ref: '#/components/schemas/HTTPValidationError'
/health:
get:
summary: Health
operationId: health_health_get
responses:
'200':
description: Successful Response
content:
application/json:
schema:
additionalProperties:
type: string
type: object
title: Response Health Health Get
components:
schemas:
Assignment:
properties:
user_id:
type: string
title: User Id
hotel_id:
type: string
title: Hotel Id
type: object
required:
- user_id
- hotel_id
title: Assignment
AssignmentsRequest:
properties:
segments:
items:
$ref: '#/components/schemas/Segment'
type: array
title: Segments
type: object
title: AssignmentsRequest
AssignmentsResponse:
properties:
assignments:
items:
$ref: '#/components/schemas/Assignment'
type: array
title: Assignments
type: object
title: AssignmentsResponse
EmailOfferRequest:
properties:
template_id:
type: string
title: Template Id
assignments:
items:
$ref: '#/components/schemas/Assignment'
type: array
title: Assignments
type: object
required:
- template_id
title: EmailOfferRequest
EmailOfferResponse:
properties:
sent_count:
type: integer
title: Sent Count
failed_count:
type: integer
title: Failed Count
failed:
items:
$ref: '#/components/schemas/FailedDelivery'
type: array
title: Failed
type: object
required:
- sent_count
- failed_count
title: EmailOfferResponse
FailedDelivery:
properties:
user_id:
type: string
title: User Id
reason:
type: string
title: Reason
type: object
required:
- user_id
- reason
title: FailedDelivery
FailedLeadDelivery:
properties:
lead_id:
type: string
title: Lead Id
reason:
type: string
title: Reason
type: object
required:
- lead_id
- reason
title: FailedLeadDelivery
HTTPValidationError:
properties:
detail:
items:
$ref: '#/components/schemas/ValidationError'
type: array
title: Detail
type: object
title: HTTPValidationError
Hotel:
properties:
id:
type: string
title: Id
name:
type: string
title: Name
city:
type: string
title: City
type: object
required:
- id
- name
- city
title: Hotel
HotelSegmentsRequest:
properties:
users:
items:
$ref: '#/components/schemas/User'
type: array
title: Users
hotels:
items:
$ref: '#/components/schemas/Hotel'
type: array
title: Hotels
type: object
title: HotelSegmentsRequest
HotelSegmentsResponse:
properties:
segments:
items:
$ref: '#/components/schemas/Segment'
type: array
title: Segments
type: object
title: HotelSegmentsResponse
Lead:
properties:
lead_id:
type: string
title: Lead Id
email:
type: string
title: Email
source:
type: string
title: Source
type: object
required:
- lead_id
- email
- source
title: Lead
PrepareOffersRequest:
properties:
qualified_leads:
items:
$ref: '#/components/schemas/QualifiedLead'
type: array
title: Qualified Leads
type: object
title: PrepareOffersRequest
PrepareOffersResponse:
properties:
offers:
items:
$ref: '#/components/schemas/PreparedOffer'
type: array
title: Offers
type: object
title: PrepareOffersResponse
PreparedOffer:
properties:
offer_id:
type: string
title: Offer Id
lead_id:
type: string
title: Lead Id
channel:
type: string
title: Channel
message:
type: string
title: Message
type: object
required:
- offer_id
- lead_id
- channel
- message
title: PreparedOffer
QualifiedLead:
properties:
lead_id:
type: string
title: Lead Id
email:
type: string
title: Email
score:
type: integer
title: Score
tier:
type: string
title: Tier
type: object
required:
- lead_id
- email
- score
- tier
title: QualifiedLead
QualifyLeadsRequest:
properties:
leads:
items:
$ref: '#/components/schemas/Lead'
type: array
title: Leads
type: object
title: QualifyLeadsRequest
QualifyLeadsResponse:
properties:
qualified_leads:
items:
$ref: '#/components/schemas/QualifiedLead'
type: array
title: Qualified Leads
type: object
title: QualifyLeadsResponse
RecentLeadsResponse:
properties:
leads:
items:
$ref: '#/components/schemas/Lead'
type: array
title: Leads
type: object
title: RecentLeadsResponse
RecentUsersResponse:
properties:
users:
items:
$ref: '#/components/schemas/User'
type: array
title: Users
type: object
title: RecentUsersResponse
Segment:
properties:
segment_id:
type: string
title: Segment Id
hotel_id:
type: string
title: Hotel Id
user_ids:
items:
type: string
type: array
title: User Ids
type: object
required:
- segment_id
- hotel_id
title: Segment
SendOffersRequest:
properties:
offers:
items:
$ref: '#/components/schemas/PreparedOffer'
type: array
title: Offers
type: object
title: SendOffersRequest
SendOffersResponse:
properties:
sent_count:
type: integer
title: Sent Count
failed_count:
type: integer
title: Failed Count
failed:
items:
$ref: '#/components/schemas/FailedLeadDelivery'
type: array
title: Failed
type: object
required:
- sent_count
- failed_count
title: SendOffersResponse
TopHotelsResponse:
properties:
hotels:
items:
$ref: '#/components/schemas/Hotel'
type: array
title: Hotels
type: object
title: TopHotelsResponse
User:
properties:
id:
type: string
title: Id
email:
type: string
title: Email
last_active:
type: string
format: date-time
title: Last Active
type: object
required:
- id
- email
- last_active
title: User
ValidationError:
properties:
loc:
items:
anyOf:
- type: string
- type: integer
type: array
title: Location
msg:
type: string
title: Message
type:
type: string
title: Error Type
type: object
required:
- loc
- msg
- type
title: ValidationError
servers:
- url: http://demo-api:8010
- url: http://localhost:8010
@@ -0,0 +1,214 @@
openapi: 3.0.3
info:
title: CRM Linear Demo API
version: 1.0.0
description: |
Demo OpenAPI for a strict linear CRM scenario:
1) get recent leads,
2) qualify leads,
3) prepare offers,
4) send offers.
servers:
- url: http://demo-api:8010
- url: http://localhost:8010
paths:
/crm/leads/recent:
get:
operationId: getRecentLeads
tags: [crm-linear-workflow]
summary: Get recent leads
parameters:
- in: query
name: limit
required: false
schema:
type: integer
minimum: 1
maximum: 50
default: 20
- in: query
name: source
required: false
schema:
type: string
responses:
"200":
description: Leads list
content:
application/json:
schema:
$ref: "#/components/schemas/RecentLeadsResponse"
/crm/leads/qualify:
post:
operationId: qualifyLeadsForOffer
tags: [crm-linear-workflow]
summary: Qualify leads
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/QualifyLeadsRequest"
responses:
"200":
description: Qualified leads
content:
application/json:
schema:
$ref: "#/components/schemas/QualifyLeadsResponse"
/crm/offers/prepare:
post:
operationId: prepareOffersForLeads
tags: [crm-linear-workflow]
summary: Prepare offers from qualified leads
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/PrepareOffersRequest"
responses:
"200":
description: Prepared offers
content:
application/json:
schema:
$ref: "#/components/schemas/PrepareOffersResponse"
/crm/offers/send:
post:
operationId: sendPreparedOffers
tags: [crm-linear-workflow]
summary: Send prepared offers
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/SendOffersRequest"
responses:
"200":
description: Send summary
content:
application/json:
schema:
$ref: "#/components/schemas/SendOffersResponse"
components:
schemas:
Lead:
type: object
required: [lead_id, email, source]
properties:
lead_id:
type: string
email:
type: string
format: email
source:
type: string
QualifiedLead:
type: object
required: [lead_id, email, score, tier]
properties:
lead_id:
type: string
email:
type: string
format: email
score:
type: integer
tier:
type: string
PreparedOffer:
type: object
required: [offer_id, lead_id, channel, message]
properties:
offer_id:
type: string
lead_id:
type: string
channel:
type: string
message:
type: string
RecentLeadsResponse:
type: object
required: [leads]
properties:
leads:
type: array
items:
$ref: "#/components/schemas/Lead"
QualifyLeadsRequest:
type: object
required: [leads]
properties:
leads:
type: array
items:
$ref: "#/components/schemas/Lead"
QualifyLeadsResponse:
type: object
required: [qualified_leads]
properties:
qualified_leads:
type: array
items:
$ref: "#/components/schemas/QualifiedLead"
PrepareOffersRequest:
type: object
required: [qualified_leads]
properties:
qualified_leads:
type: array
items:
$ref: "#/components/schemas/QualifiedLead"
PrepareOffersResponse:
type: object
required: [offers]
properties:
offers:
type: array
items:
$ref: "#/components/schemas/PreparedOffer"
SendOffersRequest:
type: object
required: [offers]
properties:
offers:
type: array
items:
$ref: "#/components/schemas/PreparedOffer"
FailedLeadDelivery:
type: object
required: [lead_id, reason]
properties:
lead_id:
type: string
reason:
type: string
SendOffersResponse:
type: object
required: [sent_count, failed_count, failed]
properties:
sent_count:
type: integer
failed_count:
type: integer
failed:
type: array
items:
$ref: "#/components/schemas/FailedLeadDelivery"
+144
View File
@@ -0,0 +1,144 @@
openapi: 3.1.0
info:
title: Travel & CRM Pipeline API
description: |
Это API предназначено для автоматизации маркетинговых и операционных процессов в сфере туризма.
Оно поддерживает два основных сценария автоматизации (пайплайна):
### 1. Сценарий: Рассылка спецпредложений по отелям
Используется для реактивации пользователей, которые были активны недавно.
**Цепочка:** Получение юзеров → Подбор топ-отелей → Сегментация (матчинг) → Назначение конкретных пар Юзер-Отель → Отправка Email.
### 2. Сценарий: Обработка лидов в CRM
Предназначен для отдела продаж.
**Цепочка:** Сбор новых лидов → Квалификация (оценка качества) → Подготовка оффера → Финальная отправка.
version: 1.1.0
servers:
- url: http://84.201.161.175
paths:
/users/recent:
get:
tags:
- travel-offer-workflow
summary: Получить список недавно активных пользователей
description: |
Возвращает список клиентов, которые заходили в приложение за последнее время.
Используйте этот метод как входную точку для начала маркетинговой кампании.
operationId: getRecentUsers
parameters:
- name: last_active_after
in: query
description: Фильтр по дате и времени. Будут возвращены только те, кто был активен ПОСЛЕ указанного момента.
required: false
schema:
anyOf:
- type: string
format: date-time
- type: "null"
- name: limit
in: query
description: Ограничение выборки. По умолчанию возвращается 30 пользователей для оптимальной нагрузки на почтовый сервер.
required: false
schema:
type: integer
maximum: 100
minimum: 1
default: 30
responses:
"200":
description: Список пользователей успешно сформирован.
/hotels/top:
get:
tags:
- travel-offer-workflow
summary: Получить список популярных отелей
description: |
Выгружает наиболее востребованные отели. Можно фильтровать по конкретному городу, чтобы сделать предложение более точным.
operationId: getTopHotels
parameters:
        - name: limit
          in: query
          description: Максимальное количество отелей в выдаче (не более 20).
          required: false
          schema:
            type: integer
            minimum: 1
            maximum: 20
            default: 5
        - name: city
          in: query
          description: Название города (например, 'Moscow', 'Dubai'). Если не указано, вернутся топ-отели по всем направлениям.
          required: false
          schema:
            type: string
responses:
"200":
description: Список отелей получен.
/segments/hotel:
post:
tags:
- travel-offer-workflow
summary: Сгруппировать пользователей по интересам к отелям
description: |
Принимает списки пользователей и отелей, анализирует их и создает группы (сегменты).
Это "умный" этап, который определяет, кому какой тип отдыха подходит больше.
operationId: segmentUsersByHotelPreferences
requestBody:
description: Данные для анализа (массивы объектов User и Hotel).
content:
application/json:
schema:
$ref: "#/components/schemas/HotelSegmentsRequest"
responses:
"200":
description: Сегментация успешно завершена.
/assignments/hotels:
post:
tags:
- travel-offer-workflow
summary: Назначить конкретные отели пользователям
description: |
Финальное закрепление. На основе сегментов метод создает пары "ID пользователя — ID отеля".
Результат этого метода передается напрямую в сервис рассылки.
operationId: assignUsersToHotels
responses:
"200":
description: Пары для рассылки сформированы.
/emails/send-offers:
post:
tags:
- travel-offer-workflow
summary: Разослать персонализированные предложения
description: |
Запускает процесс отправки писем. Требует ID шаблона письма и список назначений, сформированный на предыдущем шаге.
operationId: sendHotelOffersByEmail
requestBody:
description: Шаблон письма и список получателей с назначенными им отелями.
responses:
"200":
description: Рассылка запущена. В ответе придет статистика (сколько отправлено, сколько сбоев).
/crm/leads/qualify:
post:
tags:
- crm-linear-workflow
summary: Оценить качество лидов (Lead Scoring)
description: |
Метод проверяет входящие заявки и присваивает им рейтинг (score) и уровень (tier).
Это позволяет продакту сфокусироваться на самых "горячих" клиентах.
operationId: qualifyLeadsForOffer
responses:
"200":
description: Лиды успешно квалифицированы.
components:
schemas:
User:
type: object
description: Информация о клиенте сервиса.
properties:
id:
type: string
description: Уникальный идентификатор пользователя (UUID).
email:
type: string
description: Адрес электронной почты для связи.
last_active:
type: string
format: date-time
description: Таймстамп последнего действия в системе
+556
View File
@@ -0,0 +1,556 @@
openapi: 3.0.3
info:
title: Travel Product Manager API
version: 1.0.0
description: |
Synthetic API for a single travel offer workflow.
Intended order of operations:
1. get recent users,
2. get top hotels,
3. build hotel preference segments from users and hotels,
4. build user-to-hotel assignments from segments,
5. send hotel offers by email from assignments.
Each endpoint has one specific responsibility.
The workflow should be interpreted as a strict data pipeline where the output
array of one step becomes the input field of the next step.
servers:
- url: http://demo-api:8010
- url: http://localhost:8010
- url: https://api.travel.example.com
paths:
/users/recent:
get:
operationId: getRecentUsers
tags:
- travel-offer-workflow
summary: Get recent users for travel campaigns
description: |
Returns a list of recent users active in the last 7 days.
By default this endpoint returns up to 30 users because the limit parameter
defaults to 30.
Output of this endpoint is the users array that should be passed as the users
field to /segments/hotel.
This endpoint does not retrieve hotels, create segments, create assignments,
or send emails.
parameters:
- in: query
name: last_active_after
schema:
type: string
format: date-time
required: false
description: |
Optional lower bound for user activity time.
Only users active after this timestamp should be returned.
If omitted, the endpoint behaves like "last 7 days".
- in: query
name: limit
schema:
type: integer
minimum: 1
maximum: 100
default: 30
required: false
description: |
Maximum number of users to return.
If omitted, the endpoint returns up to 30 users.
responses:
"200":
description: |
Successful response containing the users array for the first workflow step.
This users array should be passed forward to /segments/hotel.
content:
application/json:
schema:
$ref: "#/components/schemas/RecentUsersResponse"
examples:
sample:
value:
users:
- id: usr_001
email: user001@example.com
last_active: "2026-03-13T10:00:00Z"
- id: usr_002
email: user002@example.com
last_active: "2026-03-13T09:55:00Z"
- id: usr_003
email: user003@example.com
last_active: "2026-03-13T09:50:00Z"
- id: usr_004
email: user004@example.com
last_active: "2026-03-13T09:45:00Z"
- id: usr_005
email: user005@example.com
last_active: "2026-03-13T09:40:00Z"
- id: usr_006
email: user006@example.com
last_active: "2026-03-13T09:35:00Z"
- id: usr_007
email: user007@example.com
last_active: "2026-03-13T09:30:00Z"
- id: usr_008
email: user008@example.com
last_active: "2026-03-13T09:25:00Z"
- id: usr_009
email: user009@example.com
last_active: "2026-03-13T09:20:00Z"
- id: usr_010
email: user010@example.com
last_active: "2026-03-13T09:15:00Z"
- id: usr_011
email: user011@example.com
last_active: "2026-03-13T09:10:00Z"
- id: usr_012
email: user012@example.com
last_active: "2026-03-13T09:05:00Z"
- id: usr_013
email: user013@example.com
last_active: "2026-03-13T09:00:00Z"
- id: usr_014
email: user014@example.com
last_active: "2026-03-13T08:55:00Z"
- id: usr_015
email: user015@example.com
last_active: "2026-03-13T08:50:00Z"
- id: usr_016
email: user016@example.com
last_active: "2026-03-13T08:45:00Z"
- id: usr_017
email: user017@example.com
last_active: "2026-03-13T08:40:00Z"
- id: usr_018
email: user018@example.com
last_active: "2026-03-13T08:35:00Z"
- id: usr_019
email: user019@example.com
last_active: "2026-03-13T08:30:00Z"
- id: usr_020
email: user020@example.com
last_active: "2026-03-13T08:25:00Z"
- id: usr_021
email: user021@example.com
last_active: "2026-03-13T08:20:00Z"
- id: usr_022
email: user022@example.com
last_active: "2026-03-13T08:15:00Z"
- id: usr_023
email: user023@example.com
last_active: "2026-03-13T08:10:00Z"
- id: usr_024
email: user024@example.com
last_active: "2026-03-13T08:05:00Z"
- id: usr_025
email: user025@example.com
last_active: "2026-03-13T08:00:00Z"
- id: usr_026
email: user026@example.com
last_active: "2026-03-13T07:55:00Z"
- id: usr_027
email: user027@example.com
last_active: "2026-03-13T07:50:00Z"
- id: usr_028
email: user028@example.com
last_active: "2026-03-13T07:45:00Z"
- id: usr_029
email: user029@example.com
last_active: "2026-03-13T07:40:00Z"
- id: usr_030
email: user030@example.com
last_active: "2026-03-13T07:35:00Z"
/hotels/top:
get:
operationId: getTopHotels
tags:
- travel-offer-workflow
summary: Get top hotels for offers
description: |
Returns a list of candidate hotels for the offer workflow.
By default this endpoint returns up to 5 hotels because the limit parameter
defaults to 5.
Output of this endpoint is the hotels array that should be passed as the hotels
field to /segments/hotel.
This endpoint does not retrieve users, create segments, create assignments,
or send emails.
parameters:
- in: query
name: limit
schema:
type: integer
minimum: 1
maximum: 20
default: 5
required: false
description: |
Maximum number of hotels to return.
If omitted, the endpoint returns up to 5 hotels.
- in: query
name: city
schema:
type: string
required: false
description: |
Optional city filter.
If provided, only hotels from this city should be returned.
responses:
"200":
description: |
Successful response containing the hotels array for the second workflow step.
This hotels array should be passed forward to /segments/hotel.
content:
application/json:
schema:
$ref: "#/components/schemas/TopHotelsResponse"
examples:
sample:
value:
hotels:
- id: hotel_001
name: Hotel Aurora
city: Berlin
- id: hotel_002
name: Sea Breeze Resort
city: Lisbon
- id: hotel_003
name: Mountain Vista
city: Zurich
- id: hotel_004
name: City Loft
city: Amsterdam
- id: hotel_005
name: River Palace
city: Prague
/segments/hotel:
post:
operationId: segmentUsersByHotelPreferences
tags:
- travel-offer-workflow
summary: Segment recent users by hotel preferences
description: |
Creates hotel-based user segments from two required inputs in one request:
users and hotels.
The users field must contain the users array returned by /users/recent.
The hotels field must contain the hotels array returned by /hotels/top.
A common workflow is: get up to 30 recent users, get top hotels, then send
both arrays to this endpoint to distribute users across hotels by preference.
Output of this endpoint is the segments array used as the segments field in
/assignments/hotels.
This endpoint does not send emails.
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/HotelSegmentsRequest"
examples:
sample:
value:
users:
- id: usr_001
email: user001@example.com
last_active: "2026-03-13T10:00:00Z"
hotels:
- id: hotel_001
name: Hotel Aurora
city: Berlin
responses:
"200":
description: |
Successful response containing the segments array.
This segments array should be passed forward to /assignments/hotels.
content:
application/json:
schema:
$ref: "#/components/schemas/HotelSegmentsResponse"
examples:
sample:
value:
segments:
- segment_id: seg_berlin
hotel_id: hotel_001
user_ids: ["usr_001", "usr_002"]
/assignments/hotels:
post:
operationId: assignUsersToHotels
tags:
- travel-offer-workflow
summary: Assign users to hotels based on segments
description: |
Builds final user-to-hotel assignments from segments.
The segments field must contain the segments array returned by /segments/hotel.
Output of this endpoint is the assignments array used as the assignments field
in /emails/send-offers.
This endpoint does not send emails and does not fetch users or hotels.
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/AssignmentsRequest"
examples:
sample:
value:
segments:
- segment_id: seg_berlin
hotel_id: hotel_001
user_ids: ["usr_001", "usr_002"]
responses:
"200":
description: |
Successful response containing the assignments array.
This assignments array should be passed forward to /emails/send-offers.
content:
application/json:
schema:
$ref: "#/components/schemas/AssignmentsResponse"
examples:
sample:
value:
assignments:
- user_id: usr_001
hotel_id: hotel_001
- user_id: usr_002
hotel_id: hotel_001
/emails/send-offers:
post:
operationId: sendHotelOffersByEmail
tags:
- travel-offer-workflow
summary: Send hotel offers by email
description: |
Sends hotel offer emails to users based on final assignments.
The assignments field must contain the assignments array returned by
/assignments/hotels.
This endpoint is the final delivery step of the workflow.
It does not build new segments or assignments.
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/EmailOfferRequest"
examples:
sample:
value:
template_id: offer_template_2026
assignments:
- user_id: usr_001
hotel_id: hotel_001
- user_id: usr_002
hotel_id: hotel_001
responses:
"200":
description: |
Successful response containing the result of the email delivery step.
This is the final output of the workflow.
content:
application/json:
schema:
$ref: "#/components/schemas/EmailOfferResponse"
examples:
sample:
value:
sent_count: 2
failed_count: 0
failed: []
components:
schemas:
User:
description: |
A recent user eligible to receive a hotel offer email.
User objects are produced by /users/recent and then reused in
/segments/hotel.
type: object
required: [id, email, last_active]
properties:
id:
type: string
description: Stable unique user identifier.
email:
type: string
format: email
description: Email address used in the final offer delivery step.
last_active:
type: string
format: date-time
description: Most recent activity timestamp used to identify recent users.
Hotel:
description: |
A hotel candidate that may be recommended to users.
Hotel objects are produced by /hotels/top and then reused in
/segments/hotel.
type: object
required: [id, name, city]
properties:
id:
type: string
description: Stable unique hotel identifier.
name:
type: string
description: Human-readable hotel name shown in offers.
city:
type: string
description: City where the hotel is located.
Segment:
description: |
A hotel preference segment that groups users for one hotel.
Segment objects are produced by /segments/hotel and then reused in
/assignments/hotels.
type: object
required: [segment_id, hotel_id, user_ids]
properties:
segment_id:
type: string
description: Stable unique segment identifier.
hotel_id:
type: string
description: Hotel identifier associated with this segment.
user_ids:
type: array
description: User identifiers that belong to this hotel preference segment.
items:
type: string
Assignment:
description: |
A final mapping between one user and one hotel offer.
Assignment objects are produced by /assignments/hotels and then reused in
/emails/send-offers.
type: object
required: [user_id, hotel_id]
properties:
user_id:
type: string
description: Identifier of the user who should receive the offer.
hotel_id:
type: string
description: Identifier of the hotel assigned to the user.
RecentUsersResponse:
description: Response containing the users array produced by /users/recent.
type: object
required: [users]
properties:
users:
type: array
description: |
Recent users that should be copied into the users field of
/segments/hotel. With the default limit this array usually contains
up to 30 users.
items:
$ref: "#/components/schemas/User"
TopHotelsResponse:
description: Response containing the hotels array produced by /hotels/top.
type: object
required: [hotels]
properties:
hotels:
type: array
description: |
Candidate hotels that should be copied into the hotels field of
/segments/hotel. With the default limit this array usually contains
up to 5 hotels.
items:
$ref: "#/components/schemas/Hotel"
HotelSegmentsRequest:
description: |
Request body for building segments from users and hotels.
This request combines the users array from /users/recent and the hotels array
from /hotels/top.
type: object
required: [users, hotels]
properties:
users:
type: array
description: |
Users from /users/recent. This is typically the same array of up to 30
recent users returned by the first step. These users are being
distributed across candidate hotels by preference.
items:
$ref: "#/components/schemas/User"
hotels:
type: array
description: |
Hotels from /hotels/top that should be used as candidate destinations
for user distribution.
items:
$ref: "#/components/schemas/Hotel"
HotelSegmentsResponse:
description: Response containing the segments array produced by /segments/hotel.
type: object
required: [segments]
properties:
segments:
type: array
description: |
Segments that should be copied into the segments field of
/assignments/hotels.
items:
$ref: "#/components/schemas/Segment"
AssignmentsRequest:
description: |
Request body for building assignments from the segments array returned by
/segments/hotel.
type: object
required: [segments]
properties:
segments:
type: array
description: |
Segments from /segments/hotel that should be converted into final
user-to-hotel assignments.
items:
$ref: "#/components/schemas/Segment"
AssignmentsResponse:
description: Response containing the assignments array produced by /assignments/hotels.
type: object
required: [assignments]
properties:
assignments:
type: array
description: |
Assignments that should be copied into the assignments field of
/emails/send-offers.
items:
$ref: "#/components/schemas/Assignment"
EmailOfferRequest:
description: |
Request body for sending offer emails from the assignments array returned
by /assignments/hotels.
type: object
required: [template_id, assignments]
properties:
template_id:
type: string
default: offer_template_2026
description: Identifier of the email template to use for every assignment in this request.
assignments:
type: array
description: |
Assignments from /assignments/hotels that should be emailed in the
final step.
items:
$ref: "#/components/schemas/Assignment"
EmailOfferResponse:
description: |
Result of the final email delivery step.
This response does not contain new users, hotels, segments, or assignments.
type: object
required: [sent_count, failed_count, failed]
properties:
sent_count:
type: integer
description: Number of assignments for which an email was sent successfully.
failed_count:
type: integer
description: Number of assignments for which email delivery failed.
failed:
type: array
description: Failed deliveries with reasons for each affected user.
items:
type: object
required: [user_id, reason]
properties:
user_id:
type: string
description: Identifier of the user whose email delivery failed.
reason:
type: string
description: Human-readable explanation of why the email could not be sent.
+2
View File
@@ -0,0 +1,2 @@
fastapi>=0.115.0
uvicorn[standard]>=0.30.0
@@ -0,0 +1,52 @@
from fastapi.testclient import TestClient
from app.main import app
client = TestClient(app)
def test_travel_linear_workflow() -> None:
    """End-to-end check of the travel offer pipeline.

    Steps: recent users -> top hotels -> segments -> assignments -> email send.
    Each intermediate call is asserted to return HTTP 200 so that an upstream
    failure produces a clear assertion error instead of an opaque KeyError on
    the missing response field.
    """
    users_resp = client.get("/users/recent", params={"limit": 4})
    assert users_resp.status_code == 200
    users = users_resp.json()["users"]

    hotels_resp = client.get("/hotels/top", params={"limit": 2})
    assert hotels_resp.status_code == 200
    hotels = hotels_resp.json()["hotels"]

    segments_resp = client.post(
        "/segments/hotel",
        json={"users": users, "hotels": hotels},
    )
    assert segments_resp.status_code == 200
    segments = segments_resp.json()["segments"]

    assignments_resp = client.post(
        "/assignments/hotels",
        json={"segments": segments},
    )
    assert assignments_resp.status_code == 200
    assignments = assignments_resp.json()["assignments"]

    response = client.post(
        "/emails/send-offers",
        json={"template_id": "offer_template_2026", "assignments": assignments},
    )
    assert response.status_code == 200
    body = response.json()
    # Every assignment should be delivered; the summary must account for all of them.
    assert body["failed_count"] == 0
    assert body["sent_count"] == len(assignments)
def test_crm_linear_workflow() -> None:
    """End-to-end check of the CRM lead pipeline.

    Steps: recent leads -> qualify -> prepare offers -> send offers.
    Each intermediate call is asserted to return HTTP 200 so that an upstream
    failure produces a clear assertion error instead of an opaque KeyError on
    the missing response field.
    """
    leads_resp = client.get("/crm/leads/recent", params={"limit": 5})
    assert leads_resp.status_code == 200
    leads = leads_resp.json()["leads"]

    qualify_resp = client.post("/crm/leads/qualify", json={"leads": leads})
    assert qualify_resp.status_code == 200
    qualified = qualify_resp.json()["qualified_leads"]

    prepare_resp = client.post(
        "/crm/offers/prepare",
        json={"qualified_leads": qualified},
    )
    assert prepare_resp.status_code == 200
    offers = prepare_resp.json()["offers"]

    response = client.post("/crm/offers/send", json={"offers": offers})
    assert response.status_code == 200
    body = response.json()
    # Every prepared offer should be sent; the summary must account for all of them.
    assert body["failed_count"] == 0
    assert body["sent_count"] == len(offers)
+409
View File
@@ -0,0 +1,409 @@
# ML Core Swagger (Сдача)
Актуальный сдаваемый Swagger для ML core backend лежит в файле:
- `docs/ml_core_backend_openapi.yaml`
Что входит в документ:
- `POST /api/v1/pipelines/generate`
- `GET /api/v1/pipelines/dialogs`
- `GET /api/v1/pipelines/dialogs/{dialog_id}/history`
- `POST /api/v1/pipelines/dialog/reset`
- `POST /api/v1/pipelines/{pipeline_id}/run`
- `GET /api/v1/executions`
- `GET /api/v1/executions/{run_id}`
Что не входит в этот документ:
- auth, actions, capabilities ручки
- demo `/ml/*` ручки из `demo-backend/openapi/*`
Быстрая проверка локально:
```bash
cd backend
pytest -s --capture=no tests/test_ml_openapi_contract.py
```
Дополнительная валидация OpenAPI (если установлен валидатор):
```bash
python3 -m openapi_spec_validator docs/ml_core_backend_openapi.yaml
```
---
# AI Copilot
## Core Concept
Главная задача проекта: Трансформация статической документации API в автономную интеллектуальную систему. Пользователь предоставляет набор OpenAPI файлов и бизнес-логику, которую он хочет получить, чтобы протестировать, а система незамедлительно проектирует и исполняет готовые Pipelines для решения задачи.
## The Value Chain
### Action → Capability → Pipeline
Система работает по принципу восходящей абстракции:
- **Actions (Технический слой)**: Набор разрозненных эндпоинтов из OpenAPI.
- **Capability (Логический слой)**: Группа из одного или нескольких Actions, объединенных общей бизнес-целью. Это абстракция для AI, который знает, как выполнить конкретную функцию.
- **Pipeline (Сценарный слой)**: Последовательность из Capabilities, решающая задачу пользователя. Итоговый план действий.
## ЦА — продакт менеджеры
>**Проблема**: у продакт менеджеров есть гипотеза (например: «Если отправлять пуш тем, кто бросил корзину через 15 минут, конверсия вырастет»), но чтобы её проверить, нужно ставить задачу в спринт, ждать 2 недели и отвлекать бэкенд.
>**Решение**: Наш AI-copilot позволяет продакт-менеджеру предоставить системе API маркетинговой платформы и CRM, а затем автоматически создать и запустить Pipeline, решающий бизнес-задачу.
## User stories
Роль|Я хочу (Действие)|Чтобы (Ценность)|Definition of Done (Критерии приемки)|Модуль
|---|---|---|---|---|
PM|Загрузить файл OpenAPI (Swagger)|Система получила сырую базу технических методов (Actions).|Файл парсится, эндпоинты сохранены в БД.|Ingestion
PM|Объединить несколько Actions в одну Capability|Скрыть техническую сложность и создать «навык» (напр. «Обновить профиль»).|В базе создана сущность Capability, связанная с 1+ Actions.|Capability
PM|Получить авто-описание для Capability от AI|Не тратить время на ручное заполнение названий и смыслов для каждого навыка.|Каждой Capability присвоено человекочитаемое имя и описание.|Semantic
PM|Описать бизнес-задачу в чате (напр. «Найди новичков и поздоровайся»)|AI-копилот сам подобрал нужные Capabilities и построил из них Pipeline.|AI выдает валидный JSON-граф с ID навыков и логическими связями.|Synthesis
PM|Видеть сгенерированный сценарий в виде графа|Визуально подтвердить, что данные (напр. user_id) передаются верно.|На канвасе отрисованы ноды и стрелки (React Flow).|Synthesis
PM|Вручную отредактировать параметры внутри ноды|Финальный сценарий на 100% соответствовал конкретной задаче.|Форма редактирования корректно сохраняет данные в объект ноды.|Execution
PM|Подтвердить запуск Pipeline нажатием кнопки|Контролировать процесс и избежать случайных ошибок в реальных системах.|Запуск происходит только после клика; первая нода переходит в статус active.|Execution
PM|Наблюдать за выполнением шагов в реальном времени|Видеть прогресс и понимать, на каком этапе сейчас находится выполнение.|Активная нода подсвечивается; статусы (Success/Fail) обновляются в UI.|Execution
PM|Получить лог ответов от всех API по завершении|Убедиться, что гипотеза проверена и данные ушли/пришли корректно.|По завершении выводится панель с результатами (JSON-логи).|Execution
## Domain layer
Все сущности, покрывающие user-stories:
Сущность|Что это|Зачем нужна
|---|---|---|
Action|Инструмент в ящике.|Хранит технические детали одного эндпоинта (URL, метод, JSON-схема).
#### Что в модели Action:
Поле|Тип|Назначение
|---|---|---|
id|UUID PK|Первичный ключ
operation_id|String|operationId из OpenAPI
method|HttpMethod enum|GET / POST / PUT / PATCH / DELETE …
path|String|URL-путь, напр. /users/{id}
base_url|String|Базовый URL из servers[] спецификации
summary|String|Краткое описание из OpenAPI
description|Text|Подробное описание из OpenAPI
tags|JSON|Теги для группировки
parameters_schema|JSON|JSON Schema query/path/header параметров
request_body_schema|JSON|JSON Schema тела запроса
response_schema|JSON|JSON Schema успешного ответа (2xx)
source_filename|String|Имя загруженного Swagger-файла
raw_spec|JSON|Оригинальный фрагмент операции из спецификации
created_at / updated_at|DateTime|Через TimestampMixin
Capability|Навык мастера.|Описывает бизнес-логику (связку 1+ Actions) и семантику для AI.
Node|Шаг в инструкции.|Конкретный блок в графе. Ссылается на Capability, но хранит индивидуальные настройки (например, текст письма именно для этого шага).
Pipeline|Инструкция (чертеж).|Коллекция нод и связей между ними. Хранит общую структуру графа.
#### Что в модели Pipeline:
Поле|Тип|Назначение
|---|---|---|
id|UUID PK|Первичный ключ
name|String|Название пайплайна
description|Text|Описание сценария
user_prompt|Text|Оригинальный промпт PM из чата
nodes|JSON|Список нод графа с параметрами и позициями
edges|JSON|Список рёбер графа и порядка выполнения
status|PipelineStatus enum|Статус пайплайна: DRAFT / READY / ARCHIVED
created_by|UUID FK|Ссылка на пользователя-автора
created_at / updated_at|DateTime|Через TimestampMixin
Execution (Run)|Процесс сборки.|Хранит статус конкретного запуска (ID, время старта, текущий статус всего процесса).
Context|Рабочая память.|Временный объект внутри Execution, где лежат результаты выполненных нод для подстановки в следующие.
## Infrastructure layer
Компонент|Технология|Роль и описание
|---|---|---
Database|PostgreSQL|Хранение структурированных данных (Actions, Capabilities, Pipelines).
LLM Inference|vLLM / TGI (Text Generation Inference)|Хостинг твоей модели (Llama-3, Mistral и др.). Предоставляет высокопроизводительный OpenAI-совместимый API.
ORM|SQLAlchemy + Alembic|Асинхронный маппинг доменных моделей на таблицы БД и управление миграциями без даунтайма.
Async Runtime|FastAPI + Uvicorn|Ядро монолита. Обработка запросов, управление жизненным циклом приложения и фоновыми задачами.
HTTP Client|HTTPX (Async)|Неблокирующие вызовы к твоей локальной модели и внешним API сервисов (Slack, CRM и т.д.) внутри ExecutionCore.
Cache / State|Redis|Хранение промежуточного состояния (Context) активных пайплайнов и кэширование результатов инференса.
Containerization|Docker & Compose|Изоляция сервисов (App, DB, Model Server) и их оркестрация одной командой.
## Service layer
Каждый сервис отвечает за свой этап жизненного цикла — от загрузки API до получения результата.
1. `IngestionService`
Задача: **Обработка Swagger/OpenAPI.**
Инструменты: Библиотеки prance или openapi-spec-validator.
Логика: Принимает файл через UploadFile, парсит его и сохраняет в БД (PostgreSQL) список Actions.
Метод: `async def ingest_openapi(file: UploadFile) -> List[ActionDomain]: ...`
2. `CapabilityService`
Задача: **Группировка и описание навыков.**
Инструменты: LangChain или просто прямой вызов OpenAI SDK.
Логика: Получает ID Actions, делает запрос к LLM для генерации описания Capability, сохраняет результат.
Метод: `async def create_capability(action_ids: list[UUID], name: str) -> CapabilityDomain: ...`
3. `SynthesisService`
Задача: **Сборка Pipeline через LLM.**
Логика:
- Получает промпт от PM.
- Выбирает подходящие Capability из библиотеки в БД.
- Формирует промпт для QWEN-2.5, чтобы она вернула JSON-структуру графа.
Метод: `async def synthesize_pipeline(user_query: str) -> PipelineDomain: ...`
4. `ExecutionService`
Задача: **Асинхронное выполнение графа.**
Инструменты: httpx (асинхронный клиент для запросов).
Логика:
- Инициализирует Context (Pydantic модель).
- Проходит циклом по нодам Pipeline.
- Заменяет переменные (Variable Injection).
- Делает await client.request(...).
Метод: `async def run_execution(pipeline_id: UUID) -> ExecutionResult: ...`
## Api layer
### Actions
Загрузка и валидация Swagger/OpenAPI.
Метод|Путь|Описание
|---|---|---|
POST|`/api/v1/actions/ingest`|Загрузка файла OpenAPI (Multipart form-data).
GET|`/api/v1/actions`|Получение списка всех импортированных методов с фильтрацией.
GET|`/api/v1/actions/{id}`|Детальная схема конкретного экшена.
DELETE|`/api/v1/actions/{id}`|Удаление экшена (если API обновилось или метод больше не нужен).
#### Пример запроса ingest
```bash
curl -X POST "http://localhost:8000/api/v1/actions/ingest" \
-H "Accept: application/json" \
-F "file=@/app/examples/travel.yaml;type=application/yaml"
```
#### Пример ответа ingest
```json
{
"created_actions_count": 5,
"created_capabilities_count": 5,
"capabilities": [
{
"id": "7c1d5c9b-2c9d-4f1c-9d2e-4a8f3e1b7a11",
"action_id": "e4b0bcb6-6a8c-4b0a-8e18-3f44b5f7d1c2",
"name": "get_recent_users",
"description": "Get recent users for travel campaigns",
"input_schema": null,
"output_schema": {
"type": "object"
},
"data_format": {
"parameter_locations": ["query"],
"request_content_types": [],
"request_schema_type": null,
"response_content_types": ["application/json"],
"response_schema_types": ["object"]
},
"created_at": "2026-03-14T12:00:00Z",
"updated_at": "2026-03-14T12:00:00Z"
}
]
}
```
### Capabilities
Превращаем API в способности
Метод|Путь|Описание
|---|---|---|
POST|`/api/v1/capabilities/suggest`|AI-powered: Система анализирует Actions и предлагает логические связки.
POST|`/api/v1/capabilities`|Создание навыка: связка 1+ Action ID, маппинг данных и описание.
GET|`/api/v1/capabilities`|Список всех навыков для отображения в библиотеке на фронте.
GET|`/api/v1/capabilities/{id}`|Посмотреть, из каких Actions состоит навык и как внутри ходят данные.
DELETE|`/api/v1/capabilities/{id}`|Удаление навыка.
### Pipelines
Основная точка входа для чата и канваса (React Flow).
Метод|Путь|Описание
|---|---|---|
POST|`/api/v1/pipelines/generate`|AI-powered: Промпт из чата -> AI подбирает Capabilities -> Возвращает JSON-граф.
GET|`/api/v1/pipelines`|Список всех сохраненных или сгенерированных сценариев.
GET|`/api/v1/pipelines/{id}`|Загрузка конкретного графа на канвас.
PUT|`/api/v1/pipelines/{id}`|Сохранение ручных правок: если PM подвигал ноды или изменил параметры.
DELETE|`/api/v1/pipelines/{id}`|Удаление сценария.
#### Вызов чата: `POST /api/v1/pipelines/generate`
Используйте один и тот же `dialog_id` для одной цепочки сообщений, чтобы сохранялся контекст.
```bash
curl -X POST "http://localhost:8000/api/v1/pipelines/generate" \
-H "Content-Type: application/json" \
-d '{
"dialog_id": "11111111-1111-1111-1111-111111111111",
"message": "Нужно взять 30 последних пользователей, распределить по 5 отелям и отправить email-офферы",
"user_id": null,
"capability_ids": null
}'
```
#### Пример ответа чата
```json
{
"status": "ready",
"message_ru": "Пайплайн собран. Можно запускать.",
"chat_reply_ru": "Пайплайн собран. Можно запускать. План шагов: get_users_recent -> get_hotels_top -> post_segments_hotel.",
"pipeline_id": "7b17ac70-3f39-4e70-8f8a-4a2f1fd4ff7e",
"nodes": [
{
"step": 1,
"name": "get_users_recent",
"description": "Отбирает недавних пользователей для travel campaign.",
"input_connected_from": [],
"output_connected_to": [3],
"input_data_type_from_previous": [],
"external_inputs": [],
"endpoints": [
{
"name": "get_users_recent",
"capability_id": "c4be1e66-2e04-4c6f-8d8f-6f39f1f46087",
"action_id": "e4b0bcb6-6a8c-4b0a-8e18-3f44b5f7d1c2",
"output_type": "users[]"
}
]
},
{
"step": 2,
"name": "get_hotels_top",
"description": "Получает список топовых отелей для офферов.",
"input_connected_from": [],
"output_connected_to": [3],
"input_data_type_from_previous": [],
"external_inputs": [],
"endpoints": [
{
"name": "get_hotels_top",
"capability_id": "470ae37e-029e-4c67-a293-acb848675d0b",
"action_id": "96f0bc8f-e294-46a9-9a0e-48e1b8f2a941",
"output_type": "hotels[]"
}
]
}
],
"edges": [
{
"from_step": 1,
"to_step": 3,
"type": "users"
},
{
"from_step": 2,
"to_step": 3,
"type": "hotels"
}
],
"missing_requirements": [],
"context_summary": "Пользователь хочет собрать travel-рассылку из доступных capability."
}
```
`status` может быть:
- `ready` — граф построен, `pipeline_id/nodes/edges` заполнены.
- `needs_input` — нужно уточнение или добавить Swagger/OpenAPI.
- `cannot_build` — с текущими данными сценарий не собирается.
#### Сброс диалога: `POST /api/v1/pipelines/dialog/reset`
```bash
curl -X POST "http://localhost:8000/api/v1/pipelines/dialog/reset" \
-H "Content-Type: application/json" \
-d '{
"dialog_id": "11111111-1111-1111-1111-111111111111"
}'
```
### Execution
Запуск пайплайна.
Метод|Путь|Описание
|---|---|---|
POST|`/api/v1/pipelines/{id}/run`|Запуск пайплайна. Создает объект Execution и запускает цикл выполнения.
GET|`/api/v1/executions`|История всех запусков (кто, когда и с каким результатом запускал).
GET|`/api/v1/executions/{run_id}`|Статус в реальном времени: Поллинг для фронта (какая нода сейчас горит зеленым).
POST|`/api/v1/executions/{run_id}/approve`|Подтверждение «опасного» шага (если нода требует Approval).
## CapabilityService
Задача: Инкапсулировать один или несколько технических Actions (API-эндпоинтов) в единый, понятный для LLM и пользователя бизнес-навык (Capability).
### Этап 1: Формирование связки (Action Binding)
На вход сервис получает массив ID Actions и их порядковые номера. Сервис валидирует, что эти эндпоинты существуют в БД.
**Внутренний маппинг:** Сервис принимает правила передачи данных между экшенами. Например, ответ от GET /users (поле user.id) должен лечь в тело запроса POST /emails (в поле recipient_id).
**Абстракция входа/выхода:** Сервис вычисляет публичную схему навыка (Input/Output Schema). Он берет все обязательные параметры всех внутренних экшенов, вычитает из них те, которые закрыты внутренним маппингом, и формирует итоговый JSON Schema. Для внешнего мира этот навык теперь выглядит как одна функция.
### Этап 2: Семантическое обогащение (LLM Summarization)
Чтобы ИИ в будущем понимал, зачем нужен этот инструмент, сервис обращается к локальной LLM (vLLM/TGI).
Промпт к LLM: Сервис отправляет системный промпт, содержащий URL-адреса, методы и JSON-схемы объединенных экшенов.
Задача LLM: Сгенерировать:
- Короткое название (например, create_refund_ticket).
- Подробное описание (например, «Используется для создания заявки на возврат средств в Zendesk и проверки статуса транзакции в Stripe»).
### Этап 3: Сохранение навыка в библиотеку
Текстовое описание и название навыка сохраняются в PostgreSQL вместе с `input_schema` и `output_schema`.
Этот шаг делает Capability доступной для последующей сборки пайплайнов без дополнительного индексационного слоя.
## SynthesisService
Задача: Принять текстовый запрос пользователя, найти подходящие инструменты (Capabilities) и собрать из них валидный направленный ациклический граф (DAG), готовый к исполнению.
Этот сервис вызывается каждый раз, когда пользователь пишет промпт в чат. Время его работы напрямую влияет на UX, поэтому он должен быть оптимизирован под максимальную скорость.
### Этап 1: Отбор доступных навыков (Capability Selection)
Запрос пользователя (например, "Найди последние 5 оплаченных заказов и отправь их в канал #sales") сопоставляется с доступной библиотекой Capability.
Сервис собирает релевантный список навыков из PostgreSQL и готовит их для контекста LLM.
Результат: Вместо сотен API-методов, в prompt передаются только подходящие "строительные блоки" (например, search_orders и send_slack_message).
### Этап 2: Сборка контекста для LLM (Prompt Engineering)
Сервис формирует динамический промпт для генерации графа. В него вшиваются:
Инструкция (System Message): Жесткие правила работы («Ты парсер. Возвращай только JSON. Не выдумывай ID навыков»).
Библиотека инструментов: JSON-схемы найденных на предыдущем шаге Capabilities (их id, name, description и input_schema).
Промпт пользователя: Оригинальный текст запроса.
### Этап 3: Генерация графа (LLM Inference)
Локальная LLM обрабатывает контекст и возвращает структуру пайплайна в строгом формате.
LLM определяет узлы (Nodes) — какие навыки использовать.
LLM определяет ребра (Edges) — в каком порядке их вызывать.
LLM прописывает переменные (Variable Injection) — как данные перетекают между узлами, используя синтаксис шаблонизатора (например, {{node_1.output.orders_list}}).
### Этап 4: Строгая валидация (Sanitization & DAG Check)
LLM склонны к галлюцинациям, поэтому перед сохранением в базу SynthesisService проводит жесткую проверку полученного JSON:
Schema Validation (Pydantic): Проверка, что структура ответа строго соответствует модели Pipeline.
Capability Existence: Проверка, что все capability_id в узлах реально существуют в базе (LLM не выдумала несуществующий навык).
DAG Validation (Топологическая сортировка): Граф проверяется на отсутствие циклов (A -> B -> C -> A), чтобы предотвратить бесконечное выполнение.
Parameter Validation: Проверка, что все обязательные поля из input_schema каждого навыка либо заполнены статичными значениями, либо имеют ссылку-шаблон на предыдущий узел.
Если граф проходит валидацию, он сохраняется в таблицу pipelines и отдается на фронтенд для визуализации на канвасе. Если валидация провалена — сервис просит LLM исправить ошибку (Retry Logic, максимум 2 попытки), передавая ей текст ошибки валидации.
+944
View File
@@ -0,0 +1,944 @@
openapi: 3.0.3
info:
title: AI Copilot ML Core Backend API
version: 1.0.0
description: |
Сдаваемая спецификация ML core части реального backend.
Источник истины: текущие роуты FastAPI `/api/v1/pipelines*` и `/api/v1/executions*`.
Документ не включает auth/actions/capabilities endpoint'ы, кроме использования JWT bearer security.
servers:
- url: http://localhost:8000
tags:
- name: Pipelines
description: Генерация и управление pipeline-диалогом, запуск выполнения.
- name: Executions
description: История и детали запусков pipeline.
security:
- bearerAuth: []
paths:
/api/v1/pipelines/generate:
post:
tags: [Pipelines]
operationId: generatePipeline
summary: Сгенерировать pipeline по сообщению пользователя
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/PipelineGenerateRequest"
examples:
travel_offer_case:
summary: Travel-offer бизнес-задача
value:
dialog_id: "11111111-1111-1111-1111-111111111111"
message: "Сформируй и запусти рассылку тревел-офферов для активных пользователей за последние 7 дней, подбери топ-5 отелей в Берлине, сегментируй пользователей и отправь персональные email по шаблону offer_template_2026."
capability_ids: null
responses:
"200":
description: Pipeline обработан (готов / требует ввод / не может быть собран)
content:
application/json:
schema:
$ref: "#/components/schemas/PipelineGenerateResponse"
examples:
ready:
summary: Линейный pipeline 1 -> 2 -> 3 -> 4 -> 5
value:
status: "ready"
message_ru: "Пайплайн собран. Можно запускать."
chat_reply_ru: "Пайплайн собран: получаем пользователей и отели, сегментируем, назначаем офферы, затем отправляем email."
pipeline_id: "7b17ac70-3f39-4e70-8f8a-4a2f1fd4ff7e"
nodes:
- step: 1
name: "get_recent_users"
description: "Получить активных пользователей за последние 7 дней."
input_connected_from: []
output_connected_to: [3]
input_data_type_from_previous: []
external_inputs: ["days_back"]
endpoints:
- name: "get_recent_users"
capability_id: "c4be1e66-2e04-4c6f-8d8f-6f39f1f46087"
action_id: "e4b0bcb6-6a8c-4b0a-8e18-3f44b5f7d1c2"
output_type: "users[]"
- step: 2
name: "get_top_hotels"
description: "Получить топ-5 отелей в Берлине."
input_connected_from: []
output_connected_to: [3]
input_data_type_from_previous: []
external_inputs: ["city", "hotels_limit"]
endpoints:
- name: "get_top_hotels"
capability_id: "470ae37e-029e-4c67-a293-acb848675d0b"
action_id: "96f0bc8f-e294-46a9-9a0e-48e1b8f2a941"
output_type: "hotels[]"
- step: 3
name: "segment_users_by_hotel_preferences"
description: "Сегментация пользователей по предпочтениям."
input_connected_from: [1, 2]
output_connected_to: [4]
input_data_type_from_previous:
- from_step: 1
type: "users[]"
- from_step: 2
type: "hotels[]"
external_inputs: []
endpoints:
- name: "segment_users_by_hotel_preferences"
capability_id: "518ea04f-f891-4529-8c74-06e8cd9d2f43"
action_id: "92dfce86-34dd-4c1d-9ee7-4214a16cbd99"
output_type: "segments[]"
- step: 4
name: "assign_users_to_hotels"
description: "Назначить пользователю релевантный отель."
input_connected_from: [3]
output_connected_to: [5]
input_data_type_from_previous:
- from_step: 3
type: "segments[]"
external_inputs: []
endpoints:
- name: "assign_users_to_hotels"
capability_id: "70c67642-c8d2-4eb2-a57b-d5e42dc04342"
action_id: "c57f4c88-7307-4e2c-9d36-6756be8e6ab0"
output_type: "assignments[]"
- step: 5
name: "send_hotel_offers_by_email"
description: "Отправить персональные email-офферы."
input_connected_from: [4]
output_connected_to: []
input_data_type_from_previous:
- from_step: 4
type: "assignments[]"
external_inputs: ["template_id"]
endpoints:
- name: "send_hotel_offers_by_email"
capability_id: "3b90595e-3f6f-4b4a-a89a-63daecf8ed03"
action_id: "3c97cc71-5cf6-45fd-8f75-2cf1ccfc7c45"
output_type: "delivery_report"
edges:
- from_step: 1
to_step: 3
type: "data_dependency"
- from_step: 2
to_step: 3
type: "data_dependency"
- from_step: 3
to_step: 4
type: "data_dependency"
- from_step: 4
to_step: 5
type: "data_dependency"
missing_requirements: []
context_summary: "Используются 5 capability для travel-рассылки."
needs_input:
summary: Требуется один уточняющий параметр
value:
status: "needs_input"
message_ru: "Уточните template_id для email-рассылки."
chat_reply_ru: "Нужен template_id для шага отправки email. Например: offer_template_2026."
pipeline_id: null
nodes: []
edges: []
missing_requirements: ["template_id"]
context_summary: null
cannot_build:
summary: Модель недоступна
value:
status: "cannot_build"
message_ru: "Не удалось обратиться к локальной модели Ollama. Проверьте OLLAMA_HOST/OLLAMA_MODEL и повторите запрос."
chat_reply_ru: "Не удалось обратиться к локальной модели Ollama. Проверьте OLLAMA_HOST/OLLAMA_MODEL и повторите запрос."
pipeline_id: null
nodes: []
edges: []
missing_requirements: ["ollama_unavailable"]
context_summary: null
"401":
$ref: "#/components/responses/UnauthorizedError"
"403":
$ref: "#/components/responses/ForbiddenError"
"404":
$ref: "#/components/responses/NotFoundError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/pipelines/dialogs:
get:
tags: [Pipelines]
operationId: listPipelineDialogs
summary: Получить список диалогов pipeline
parameters:
- in: query
name: limit
required: false
schema:
type: integer
minimum: 1
maximum: 200
default: 20
- in: query
name: offset
required: false
schema:
type: integer
minimum: 0
default: 0
responses:
"200":
description: Список диалогов пользователя
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/PipelineDialogListItemResponse"
"401":
$ref: "#/components/responses/UnauthorizedError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/pipelines/dialogs/{dialog_id}/history:
get:
tags: [Pipelines]
operationId: getPipelineDialogHistory
summary: Получить историю сообщений диалога
parameters:
- in: path
name: dialog_id
required: true
schema:
type: string
format: uuid
- in: query
name: limit
required: false
schema:
type: integer
minimum: 1
maximum: 200
default: 30
- in: query
name: offset
required: false
schema:
type: integer
minimum: 0
default: 0
responses:
"200":
description: История диалога
content:
application/json:
schema:
$ref: "#/components/schemas/PipelineDialogHistoryResponse"
"401":
$ref: "#/components/responses/UnauthorizedError"
"403":
$ref: "#/components/responses/ForbiddenError"
"404":
$ref: "#/components/responses/NotFoundError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/pipelines/dialog/reset:
post:
tags: [Pipelines]
operationId: resetPipelineDialog
summary: Сбросить контекст диалога pipeline
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/DialogResetRequest"
responses:
"200":
description: Диалог успешно сброшен
content:
application/json:
schema:
$ref: "#/components/schemas/DialogResetResponse"
"401":
$ref: "#/components/responses/UnauthorizedError"
"403":
$ref: "#/components/responses/ForbiddenError"
"404":
$ref: "#/components/responses/NotFoundError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/pipelines/{pipeline_id}/run:
post:
tags: [Pipelines]
operationId: runPipeline
summary: Запустить pipeline на выполнение
parameters:
- in: path
name: pipeline_id
required: true
schema:
type: string
format: uuid
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/RunPipelineRequest"
examples:
travel_inputs:
value:
inputs:
days_back: 7
city: "Berlin"
hotels_limit: 5
template_id: "offer_template_2026"
responses:
"202":
description: Запуск поставлен в очередь/начат
content:
application/json:
schema:
$ref: "#/components/schemas/RunPipelineResponse"
examples:
queued:
value:
run_id: "2fb1f647-b769-4f56-8a44-6a63a364b478"
pipeline_id: "7b17ac70-3f39-4e70-8f8a-4a2f1fd4ff7e"
status: "QUEUED"
"400":
$ref: "#/components/responses/BadRequestError"
"401":
$ref: "#/components/responses/UnauthorizedError"
"404":
$ref: "#/components/responses/NotFoundError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/executions:
get:
tags: [Executions]
operationId: listExecutions
summary: Получить список запусков execution
parameters:
- in: query
name: limit
required: false
schema:
type: integer
minimum: 1
maximum: 200
default: 50
- in: query
name: offset
required: false
schema:
type: integer
minimum: 0
default: 0
responses:
"200":
description: Список запусков
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/ExecutionRunListItemResponse"
"401":
$ref: "#/components/responses/UnauthorizedError"
"422":
$ref: "#/components/responses/ValidationError"
/api/v1/executions/{run_id}:
get:
tags: [Executions]
operationId: getExecution
summary: Получить детальный отчёт execution run
parameters:
- in: path
name: run_id
required: true
schema:
type: string
format: uuid
responses:
"200":
description: Детали запуска с шагами
content:
application/json:
schema:
$ref: "#/components/schemas/ExecutionRunDetailResponse"
examples:
partial_failed:
summary: Пример со статусами SUCCEEDED/FAILED/SKIPPED
value:
id: "2fb1f647-b769-4f56-8a44-6a63a364b478"
pipeline_id: "7b17ac70-3f39-4e70-8f8a-4a2f1fd4ff7e"
status: "PARTIAL_FAILED"
inputs:
days_back: 7
city: "Berlin"
hotels_limit: 5
template_id: "offer_template_2026"
summary:
total_steps: 5
succeeded: 3
failed: 1
skipped: 1
error: "Step 4 failed: upstream service timeout"
started_at: "2026-03-16T12:10:15Z"
finished_at: "2026-03-16T12:10:31Z"
created_at: "2026-03-16T12:10:15Z"
updated_at: "2026-03-16T12:10:31Z"
steps:
- step: 1
name: "get_recent_users"
capability_id: "c4be1e66-2e04-4c6f-8d8f-6f39f1f46087"
action_id: "e4b0bcb6-6a8c-4b0a-8e18-3f44b5f7d1c2"
method: "GET"
status_code: 200
status: "SUCCEEDED"
resolved_inputs:
days_back: 7
accepted_payload: null
output_payload:
users: []
request_snapshot:
method: "GET"
path: "/users/recent"
response_snapshot:
status_code: 200
body:
users: []
error: null
started_at: "2026-03-16T12:10:15Z"
finished_at: "2026-03-16T12:10:17Z"
duration_ms: 2100
created_at: "2026-03-16T12:10:15Z"
updated_at: "2026-03-16T12:10:17Z"
- step: 4
name: "assign_users_to_hotels"
capability_id: "70c67642-c8d2-4eb2-a57b-d5e42dc04342"
action_id: "c57f4c88-7307-4e2c-9d36-6756be8e6ab0"
method: "POST"
status_code: 504
status: "FAILED"
resolved_inputs:
segments: []
accepted_payload:
segments: []
output_payload:
detail: "Gateway Timeout"
request_snapshot:
method: "POST"
path: "/assignments/hotels"
json_body:
segments: []
response_snapshot:
status_code: 504
body:
detail: "Gateway Timeout"
error: "Upstream timeout"
started_at: "2026-03-16T12:10:23Z"
finished_at: "2026-03-16T12:10:29Z"
duration_ms: 6010
created_at: "2026-03-16T12:10:23Z"
updated_at: "2026-03-16T12:10:29Z"
- step: 5
name: "send_hotel_offers_by_email"
capability_id: "3b90595e-3f6f-4b4a-a89a-63daecf8ed03"
action_id: "3c97cc71-5cf6-45fd-8f75-2cf1ccfc7c45"
method: null
status_code: null
status: "SKIPPED"
resolved_inputs: null
accepted_payload: null
output_payload: null
request_snapshot: null
response_snapshot: null
error: "Skipped: run stopped after failure at step 4"
started_at: null
finished_at: null
duration_ms: null
created_at: "2026-03-16T12:10:29Z"
updated_at: "2026-03-16T12:10:29Z"
"401":
$ref: "#/components/responses/UnauthorizedError"
"404":
$ref: "#/components/responses/NotFoundError"
"422":
$ref: "#/components/responses/ValidationError"
components:
securitySchemes:
bearerAuth:
type: http
scheme: bearer
bearerFormat: JWT
responses:
BadRequestError:
description: Некорректный запрос
content:
application/json:
schema:
$ref: "#/components/schemas/ErrorResponse"
examples:
not_ready:
value:
detail: "Pipeline is not ready for execution"
UnauthorizedError:
description: Не передан или некорректен JWT токен
content:
application/json:
schema:
$ref: "#/components/schemas/ErrorResponse"
examples:
invalid_credentials:
value:
detail: "Could not validate credentials"
ForbiddenError:
description: Нет доступа к ресурсу диалога
content:
application/json:
schema:
$ref: "#/components/schemas/ErrorResponse"
examples:
dialog_denied:
value:
detail: "Access denied for dialog"
NotFoundError:
description: Сущность не найдена
content:
application/json:
schema:
$ref: "#/components/schemas/ErrorResponse"
examples:
not_found:
value:
detail: "Pipeline not found"
ValidationError:
description: Ошибка валидации входных данных
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
schemas:
ErrorResponse:
type: object
properties:
detail:
oneOf:
- type: string
- type: object
additionalProperties: true
- type: array
items:
type: object
additionalProperties: true
ValidationError:
type: object
required: [loc, msg, type]
properties:
loc:
type: array
items:
oneOf:
- type: string
- type: integer
msg:
type: string
type:
type: string
HTTPValidationError:
type: object
properties:
detail:
type: array
items:
$ref: "#/components/schemas/ValidationError"
PipelineInputTypeFromPrevious:
type: object
required: [from_step, type]
properties:
from_step:
type: integer
type:
type: string
PipelineStepEndpoint:
type: object
required: [name, capability_id]
properties:
name:
type: string
capability_id:
type: string
format: uuid
action_id:
type: string
format: uuid
nullable: true
type:
type: string
nullable: true
input_type:
oneOf:
- type: string
- type: object
additionalProperties: true
nullable: true
output_type:
oneOf:
- type: string
- type: object
additionalProperties: true
nullable: true
PipelineGraphNode:
type: object
required: [step, name]
properties:
step:
type: integer
name:
type: string
description:
type: string
nullable: true
input_connected_from:
type: array
items:
type: integer
default: []
output_connected_to:
type: array
items:
type: integer
default: []
input_data_type_from_previous:
type: array
items:
$ref: "#/components/schemas/PipelineInputTypeFromPrevious"
default: []
external_inputs:
type: array
items:
type: string
default: []
endpoints:
type: array
items:
$ref: "#/components/schemas/PipelineStepEndpoint"
default: []
PipelineGraphEdge:
type: object
required: [from_step, to_step, type]
properties:
from_step:
type: integer
to_step:
type: integer
type:
type: string
    # Request to generate a pipeline from a user message within a dialog.
    # `capability_ids` optionally restricts which capabilities may be used.
    PipelineGenerateRequest:
      type: object
      required: [dialog_id, message]
      properties:
        dialog_id:
          type: string
          format: uuid
        message:
          type: string
          minLength: 1
        capability_ids:
          type: array
          items:
            type: string
            format: uuid
          nullable: true
    # Generation outcome. `status` distinguishes a ready graph, a request for
    # more user input, or an impossible build; `*_ru` fields are Russian-language
    # user-facing texts. `pipeline_id` is only set when a pipeline was persisted.
    PipelineGenerateResponse:
      type: object
      required: [status, message_ru, chat_reply_ru, nodes, edges, missing_requirements]
      properties:
        status:
          type: string
          enum: [ready, needs_input, cannot_build]
        message_ru:
          type: string
        chat_reply_ru:
          type: string
        pipeline_id:
          type: string
          format: uuid
          nullable: true
        nodes:
          type: array
          items:
            $ref: "#/components/schemas/PipelineGraphNode"
          default: []
        edges:
          type: array
          items:
            $ref: "#/components/schemas/PipelineGraphEdge"
          default: []
        missing_requirements:
          type: array
          items:
            type: string
          default: []
        context_summary:
          type: string
          nullable: true
    # Request to reset a dialog's accumulated state.
    DialogResetRequest:
      type: object
      required: [dialog_id]
      properties:
        dialog_id:
          type: string
          format: uuid
    # Acknowledgement for a dialog reset; `status` is always "ok".
    DialogResetResponse:
      type: object
      required: [status, message_ru]
      properties:
        status:
          type: string
          enum: [ok]
        message_ru:
          type: string
    # Summary row for the dialog list view: latest status/pipeline and a
    # preview of the most recent message.
    PipelineDialogListItemResponse:
      type: object
      required: [dialog_id, created_at, updated_at]
      properties:
        dialog_id:
          type: string
          format: uuid
        title:
          type: string
          nullable: true
        last_status:
          type: string
          nullable: true
        last_pipeline_id:
          type: string
          format: uuid
          nullable: true
        last_message_preview:
          type: string
          nullable: true
        created_at:
          type: string
          format: date-time
        updated_at:
          type: string
          format: date-time
    # A single dialog message. `assistant_payload` carries the structured
    # generation result for assistant turns and is absent/null for user turns.
    PipelineDialogMessageResponse:
      type: object
      required: [id, role, content, created_at]
      properties:
        id:
          type: string
          format: uuid
        role:
          type: string
          enum: [user, assistant]
        content:
          type: string
        assistant_payload:
          type: object
          additionalProperties: true
          nullable: true
        created_at:
          type: string
          format: date-time
    # Full message history of one dialog, oldest-first (ordering assumed —
    # confirm against the endpoint implementation).
    PipelineDialogHistoryResponse:
      type: object
      required: [dialog_id, messages]
      properties:
        dialog_id:
          type: string
          format: uuid
        title:
          type: string
          nullable: true
        messages:
          type: array
          items:
            $ref: "#/components/schemas/PipelineDialogMessageResponse"
          default: []
    # Request body to start a pipeline run; `inputs` is a free-form map of
    # external input values, defaulting to empty.
    RunPipelineRequest:
      type: object
      properties:
        inputs:
          type: object
          additionalProperties: true
          default: {}
    # Immediate response to a run request — the run is enqueued or already
    # running; terminal states are observed via the run detail/list endpoints.
    RunPipelineResponse:
      type: object
      required: [run_id, pipeline_id, status]
      properties:
        run_id:
          type: string
          format: uuid
        pipeline_id:
          type: string
          format: uuid
        status:
          type: string
          enum: [QUEUED, RUNNING]
    # Summary row for the run list: overall status plus coarse timing fields
    # (started/finished are null until the corresponding transition happens).
    ExecutionRunListItemResponse:
      type: object
      required: [id, pipeline_id, status, created_at, updated_at]
      properties:
        id:
          type: string
          format: uuid
        pipeline_id:
          type: string
          format: uuid
        status:
          type: string
          enum: [QUEUED, RUNNING, SUCCEEDED, FAILED, PARTIAL_FAILED]
        error:
          type: string
          nullable: true
        started_at:
          type: string
          format: date-time
          nullable: true
        finished_at:
          type: string
          format: date-time
          nullable: true
        created_at:
          type: string
          format: date-time
        updated_at:
          type: string
          format: date-time
    # Per-step execution record: what was called (capability/action, HTTP
    # method, status code), what went in and out, and timing.
    ExecutionStepRunResponse:
      type: object
      required: [step, status, accepted_payload, output_payload, created_at, updated_at]
      properties:
        step:
          type: integer
        name:
          type: string
          nullable: true
        capability_id:
          type: string
          format: uuid
          nullable: true
        action_id:
          type: string
          format: uuid
          nullable: true
        method:
          type: string
          enum: [GET, POST, PUT, PATCH, DELETE, HEAD, OPTIONS]
          nullable: true
        status_code:
          type: integer
          nullable: true
        status:
          type: string
          enum: [PENDING, RUNNING, SUCCEEDED, FAILED, SKIPPED]
        resolved_inputs:
          type: object
          additionalProperties: true
          nullable: true
        accepted_payload:
          # NOTE(review): no `type` given, so any JSON value is allowed. In
          # OpenAPI 3.0 `nullable` is only defined alongside a `type` —
          # confirm downstream tooling accepts this.
          nullable: true
        output_payload:
          # NOTE(review): same as accepted_payload — untyped nullable value.
          nullable: true
        request_snapshot:
          type: object
          additionalProperties: true
          nullable: true
        response_snapshot:
          type: object
          additionalProperties: true
          nullable: true
        error:
          type: string
          nullable: true
        started_at:
          type: string
          format: date-time
          nullable: true
        finished_at:
          type: string
          format: date-time
          nullable: true
        duration_ms:
          type: integer
          nullable: true
        created_at:
          type: string
          format: date-time
        updated_at:
          type: string
          format: date-time
    # Full run detail: the list-item fields plus the resolved `inputs`, an
    # optional free-form `summary`, and the ordered per-step records.
    ExecutionRunDetailResponse:
      type: object
      required: [id, pipeline_id, status, inputs, created_at, updated_at, steps]
      properties:
        id:
          type: string
          format: uuid
        pipeline_id:
          type: string
          format: uuid
        status:
          type: string
          enum: [QUEUED, RUNNING, SUCCEEDED, FAILED, PARTIAL_FAILED]
        inputs:
          type: object
          additionalProperties: true
          default: {}
        summary:
          type: object
          additionalProperties: true
          nullable: true
        error:
          type: string
          nullable: true
        started_at:
          type: string
          format: date-time
          nullable: true
        finished_at:
          type: string
          format: date-time
          nullable: true
        created_at:
          type: string
          format: date-time
        updated_at:
          type: string
          format: date-time
        steps:
          type: array
          items:
            $ref: "#/components/schemas/ExecutionStepRunResponse"
          default: []
+24
View File
@@ -0,0 +1,24 @@
# Site block for the contest frontend; Caddy provisions TLS automatically
# for the named host.
team-29-main2-8f6819.pages.prodcontest.ru {
	encode gzip

	# Strip the /grafana prefix and proxy to the external Grafana instance.
	# NOTE(review): plain HTTP to a hard-coded IP — confirm this is reachable
	# only over a trusted network.
	handle_path /grafana* {
		reverse_proxy 84.252.140.215:8052
	}

	# Backend API plus its OpenAPI/docs endpoints go to the `api` container.
	@api path /api/* /docs* /redoc* /openapi.json*
	reverse_proxy @api api:8000

	# Append a trailing slash to collection endpoints so the backend does not
	# answer with a 307 redirect. Caddy sorts directives by its internal
	# directive order, so these rewrites run before reverse_proxy even though
	# they appear after it in this file.
	@actions_no_slash path /api/v1/actions
	rewrite @actions_no_slash /api/v1/actions/
	@capabilities_no_slash path /api/v1/capabilities
	rewrite @capabilities_no_slash /api/v1/capabilities/
	@executions_no_slash path /api/v1/executions
	rewrite @executions_no_slash /api/v1/executions/
	@users_no_slash path /api/users
	rewrite @users_no_slash /api/users/

	# Everything not matched above falls through to the static frontend.
	reverse_proxy web:80
}
+29
View File
@@ -0,0 +1,29 @@
# syntax=docker/dockerfile:1
# Pinning the Dockerfile frontend enables BuildKit features (cache mounts below).

# ---- Stage 1: build the frontend bundle ----
FROM node:20-slim AS build

WORKDIR /app

# Copy only the manifests first so the dependency layer stays cached until
# package*.json changes, not on every source edit.
COPY package*.json ./

# `npm ci` for a reproducible install from the lockfile. The cache mount keeps
# the npm download cache on the build host: rebuilds are faster and nothing
# from the cache is baked into the image layer. Dev dependencies are kept on
# purpose — the build tooling needs them, and this stage is discarded anyway.
RUN --mount=type=cache,target=/root/.npm npm ci

# Copy the rest of the source and produce the production bundle in /app/dist.
COPY . .
RUN npm run build

# ---- Stage 2: serve the static bundle with nginx ----
FROM nginx:stable-alpine

# Only the built assets and the nginx config reach the final image; the node
# toolchain and node_modules stay behind in the build stage.
COPY --from=build /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf

# Documentation only — the port is published at `docker run` / compose level.
EXPOSE 80

# Exec form keeps nginx as PID 1 so it receives SIGTERM from `docker stop`;
# `daemon off` keeps it in the foreground as the container's main process.
CMD ["nginx", "-g", "daemon off;"]

Some files were not shown because too many files have changed in this diff Show More