This commit is contained in:
2026-03-17 18:32:44 +03:00
commit efcd4a8dfd
209 changed files with 33355 additions and 0 deletions
+26
View File
@@ -0,0 +1,26 @@
FROM python:3.12-slim
WORKDIR /app
# Install system build dependencies needed to compile wheels (libpq for Postgres drivers)
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*
# Copy requirements first so dependency layers are cached independently of code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . .
# Environment variables
ENV PYTHONPATH=/app
ENV PYTHONUNBUFFERED=1
# Expose port
EXPOSE 8000
# Start command. Fix: dropped --reload — file-watching auto-reload is a
# development feature; in a container image it wastes CPU and can mask crashes.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
View File
View File
+3
View File
@@ -0,0 +1,3 @@
"""Actions API package: re-exports the aggregated actions router."""
from app.api.actions.router import router
__all__ = ["router"]
+49
View File
@@ -0,0 +1,49 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.actions.dependencies import get_active_action_or_404
from app.core.database.session import get_session
from app.models import User
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])


@router.delete("/{action_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_action(
    action_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Soft-delete an action owned by the current user (admins may delete any).

    Returns 204 on success; 404 when the action is missing, already deleted,
    not successfully ingested, or not visible to this user.
    """
    trace_id = getattr(request.state, "traceId", None)
    try:
        # Raises HTTPException(404) for missing/deleted/unfinished/foreign actions.
        action = await get_active_action_or_404(session, action_id, current_user)
    except HTTPException:
        log_business_event(
            "action_delete_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            action_id=str(action_id),
            reason="action_not_found_or_forbidden",
        )
        raise
    # Soft delete: flag the row instead of removing it, preserving history.
    action.is_deleted = True
    # Fix: capture the id before commit (the instance may be expired afterwards)
    # instead of issuing a refresh whose result was discarded — the endpoint
    # returns an empty 204 body, so the extra SELECT was pure overhead.
    action_pk = action.id
    await session.commit()
    log_business_event(
        "action_deleted",
        trace_id=trace_id,
        user_id=str(current_user.id),
        action_id=str(action_pk),
    )
    return Response(status_code=status.HTTP_204_NO_CONTENT)
+21
View File
@@ -0,0 +1,21 @@
from __future__ import annotations
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, ActionIngestStatus, User, UserRole
async def get_active_action_or_404(
    session: AsyncSession,
    action_id: UUID,
    current_user: User,
) -> Action:
    """Load an action by id, enforcing visibility rules.

    Raises HTTPException(404) when the action is absent, soft-deleted, not
    successfully ingested, or owned by another user (unless caller is admin).
    """
    record = await session.get(Action, action_id)
    is_live = (
        record is not None
        and not record.is_deleted
        and record.ingest_status == ActionIngestStatus.SUCCEEDED
    )
    if not is_live:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Action not found")
    # Admins see every live action; other users only their own.
    is_visible = current_user.role == UserRole.ADMIN or record.user_id == current_user.id
    if not is_visible:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Action not found")
    return record
+47
View File
@@ -0,0 +1,47 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.actions.dependencies import get_active_action_or_404
from app.core.database.session import get_session
from app.models import User
from app.schemas.action_sch import ActionDetailResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])


@router.get("/{action_id}", response_model=ActionDetailResponse)
async def get_action(
    action_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Return one action's details, or 404 when missing/invisible to the caller."""
    trace = getattr(request.state, "traceId", None)
    try:
        action = await get_active_action_or_404(session, action_id, current_user)
    except HTTPException:
        # Record the rejected attempt before propagating the 404.
        log_business_event(
            "action_fetch_rejected",
            trace_id=trace,
            user_id=str(current_user.id),
            action_id=str(action_id),
            reason="action_not_found_or_forbidden",
        )
        raise
    method_value = None if action.method is None else action.method.value
    log_business_event(
        "action_fetched",
        trace_id=trace,
        user_id=str(current_user.id),
        action_id=str(action.id),
        action_method=method_value,
        action_path=action.path,
    )
    return action
+92
View File
@@ -0,0 +1,92 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, File, HTTPException, Request, UploadFile, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Action, ActionIngestStatus, User
from app.schemas.capability_sch import ActionIngestWithCapabilitiesResponse
from app.services.capability_service import CapabilityService
from app.services.openapi_service import OpenAPIService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])
@router.post("/ingest", response_model=ActionIngestWithCapabilitiesResponse, status_code=status.HTTP_201_CREATED)
async def ingest_actions(
    request: Request,
    file: UploadFile = File(...),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Ingest an uploaded OpenAPI document into Action rows and capabilities.

    Both successfully and unsuccessfully extracted operations are persisted
    (with their ingest status) so clients can inspect failures; capabilities
    are derived only from the successful ones.

    Raises:
        HTTPException 400: the document is invalid, or it contains no
            supported HTTP operations.
    """
    trace_id = getattr(request.state, "traceId", None)
    # NOTE(review): the whole upload is buffered in memory — assumes specs
    # are small; confirm an upstream request-size limit exists.
    payload = await file.read()
    try:
        document = OpenAPIService.load_document(payload)
        ingestion_result = OpenAPIService.extract_actions_with_failures(document, source_filename=file.filename)
    except ValueError as exc:
        log_business_event(
            "actions_ingest_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            source_filename=file.filename,
            file_size_bytes=len(payload),
            reason="invalid_openapi_document",
            details=str(exc),
        )
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    # Failed extractions are stored alongside successes.
    action_payloads = ingestion_result["succeeded"] + ingestion_result["failed"]
    if not action_payloads:
        log_business_event(
            "actions_ingest_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            source_filename=file.filename,
            file_size_bytes=len(payload),
            reason="no_supported_operations",
        )
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No supported HTTP operations found in OpenAPI file")
    actions = [Action(user_id=current_user.id, **action_payload) for action_payload in action_payloads]
    session.add_all(actions)
    # Flush (not commit) so the new rows get primary keys before capability creation.
    await session.flush()
    succeeded_actions = [action for action in actions if action.ingest_status == ActionIngestStatus.SUCCEEDED]
    failed_actions = [action for action in actions if action.ingest_status == ActionIngestStatus.FAILED]
    capability_service = CapabilityService(session)
    capabilities = await capability_service.create_from_actions(
        succeeded_actions,
        owner_user_id=current_user.id,
        refresh=False,
    )
    await session.commit()
    # Re-load committed state before serialization. NOTE(review): one
    # round-trip per row — fine for typical spec sizes, revisit for huge specs.
    for action in actions:
        await session.refresh(action)
    for capability in capabilities:
        await session.refresh(capability)
    log_business_event(
        "actions_ingested",
        trace_id=trace_id,
        user_id=str(current_user.id),
        source_filename=file.filename,
        file_size_bytes=len(payload),
        succeeded_count=len(succeeded_actions),
        failed_count=len(failed_actions),
        created_capabilities_count=len(capabilities),
    )
    return ActionIngestWithCapabilitiesResponse(
        succeeded_count=len(succeeded_actions),
        failed_count=len(failed_actions),
        created_capabilities_count=len(capabilities),
        succeeded_actions=succeeded_actions,
        failed_actions=failed_actions,
        capabilities=capabilities,
    )
+79
View File
@@ -0,0 +1,79 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy import or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Action, ActionIngestStatus, HttpMethod, User, UserRole
from app.schemas.action_sch import ActionListItemResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Actions"])


@router.get("/", response_model=list[ActionListItemResponse], include_in_schema=False)
async def list_actions(
    request: Request,
    method: HttpMethod | None = Query(default=None),
    owner_id: UUID | None = Query(default=None),
    source_filename: str | None = Query(default=None),
    search: str | None = Query(default=None, min_length=1),
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List visible, successfully ingested actions with optional filters."""
    trace_id = getattr(request.state, "traceId", None)
    # Base visibility: never show soft-deleted or partially ingested rows.
    conditions = [
        Action.is_deleted.is_(False),
        Action.ingest_status == ActionIngestStatus.SUCCEEDED,
    ]
    # Admins see everything by default but may narrow to one owner;
    # regular users are always scoped to their own actions.
    if current_user.role == UserRole.ADMIN:
        if owner_id is not None:
            conditions.append(Action.user_id == owner_id)
    else:
        conditions.append(Action.user_id == current_user.id)
    if method is not None:
        conditions.append(Action.method == method)
    if source_filename:
        conditions.append(Action.source_filename == source_filename)
    if search:
        pattern = f"%{search}%"
        conditions.append(
            or_(
                Action.operation_id.ilike(pattern),
                Action.path.ilike(pattern),
                Action.summary.ilike(pattern),
            )
        )
    query = (
        select(Action)
        .where(*conditions)
        .order_by(Action.created_at.desc())
        .limit(limit)
        .offset(offset)
    )
    result = await session.execute(query)
    actions = list(result.scalars().all())
    log_business_event(
        "actions_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        method=method.value if method is not None else None,
        owner_id=str(owner_id) if owner_id is not None else None,
        source_filename=source_filename,
        search=search,
        limit=limit,
        offset=offset,
        result_count=len(actions),
    )
    return actions
+13
View File
@@ -0,0 +1,13 @@
from fastapi import APIRouter
from app.api.actions.delete_action import router as delete_action_router
from app.api.actions.get_action import router as get_action_router
from app.api.actions.ingest_actions import router as ingest_actions_router
from app.api.actions.list_actions import router as list_actions_router
# Aggregate the per-endpoint action routers under a single /v1/actions prefix.
# The static paths ("/ingest", "/") are registered before the dynamic
# "/{action_id}" routes.
router = APIRouter(prefix="/v1/actions", tags=["Actions"])
router.include_router(ingest_actions_router)
router.include_router(list_actions_router)
router.include_router(get_action_router)
router.include_router(delete_action_router)
+84
View File
@@ -0,0 +1,84 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.auth_sch import LoginIn
from app.utils.business_logger import log_business_event
from app.utils.hashing import verify_password
from app.utils.token_manager import create_access_token
router = APIRouter(prefix="/v1/auth", tags=["Auth"])


@router.post("/login", status_code=status.HTTP_200_OK)
async def login(
    data: LoginIn,
    request: Request,
    session: AsyncSession = Depends(get_session),
):
    """Authenticate by email/password and return a bearer token plus user info."""
    email = data.email.strip().lower()
    trace_id = getattr(request.state, "traceId", None)
    # Case-insensitive email lookup.
    lookup = await session.execute(select(User).where(func.lower(User.email) == email))
    user = lookup.scalar_one_or_none()

    def _invalid_credentials() -> HTTPException:
        # Same message for "no such user" and "wrong password" so responses
        # do not reveal which emails are registered.
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail={"message": "Invalid email or password"},
        )

    if user is None:
        log_business_event(
            "auth_login_failed",
            trace_id=trace_id,
            email=email,
            reason="user_not_found",
        )
        raise _invalid_credentials()
    if not verify_password(data.password, user.hashed_password):
        log_business_event(
            "auth_login_failed",
            trace_id=trace_id,
            email=email,
            reason="invalid_password",
        )
        raise _invalid_credentials()
    if not user.is_active:
        log_business_event(
            "auth_login_blocked",
            trace_id=trace_id,
            user_id=str(user.id),
            email=user.email,
            reason="user_inactive",
        )
        raise HTTPException(
            status_code=status.HTTP_423_LOCKED,
            detail={"message": "User account is deactivated"},
        )
    token, expires_in = create_access_token(sub=str(user.id), role=user.role.value)
    log_business_event(
        "auth_login_succeeded",
        trace_id=trace_id,
        user_id=str(user.id),
        email=user.email,
        role=user.role.value,
    )
    return {
        "accessToken": token,
        "expiresIn": expires_in,
        "user": {
            "id": str(user.id),
            "email": user.email,
            "fullName": user.full_name,
            "role": user.role.value,
            "isActive": user.is_active,
            "createdAt": user.created_at.isoformat(),
        },
    }
+72
View File
@@ -0,0 +1,72 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.auth_sch import RegisterIn
from app.utils.business_logger import log_business_event
from app.utils.hashing import hash_password
from app.utils.token_manager import create_access_token
router = APIRouter(prefix="/v1/auth", tags=["Auth"])


@router.post("/register", status_code=status.HTTP_201_CREATED)
async def register(
    data: RegisterIn,
    request: Request,
    session: AsyncSession = Depends(get_session),
):
    """Create a new user account and return a bearer token plus user info.

    Responds 409 when the email is already registered.
    """
    from sqlalchemy.exc import IntegrityError  # local import: only used here

    email = data.email.strip().lower()
    trace_id = getattr(request.state, "traceId", None)
    result = await session.execute(select(User).where(func.lower(User.email) == email))
    existing_user = result.scalar_one_or_none()
    if existing_user is not None:
        log_business_event(
            "auth_register_failed",
            trace_id=trace_id,
            email=email,
            reason="email_already_exists",
        )
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail={"message": "Email already exists. Please login."},
        )
    user = User(
        email=email,
        full_name=data.full_name,
        hashed_password=hash_password(data.password),
        role=UserRole.USER,
        is_active=True,
    )
    session.add(user)
    try:
        await session.commit()
    except IntegrityError as exc:
        # Fix: the select-then-insert check above is racy — a concurrent
        # request can insert the same email between the check and the commit.
        # Map the database uniqueness violation to the same 409 response.
        # (Assumes a unique index on users.email — TODO confirm in the model.)
        await session.rollback()
        log_business_event(
            "auth_register_failed",
            trace_id=trace_id,
            email=email,
            reason="email_already_exists",
        )
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail={"message": "Email already exists. Please login."},
        ) from exc
    await session.refresh(user)
    token, expires_in = create_access_token(sub=str(user.id), role=user.role.value)
    log_business_event(
        "auth_register_succeeded",
        trace_id=trace_id,
        user_id=str(user.id),
        email=user.email,
        role=user.role.value,
    )
    return {
        "accessToken": token,
        "expiresIn": expires_in,
        "user": {
            "id": str(user.id),
            "email": user.email,
            "fullName": user.full_name,
            "role": user.role.value,
            "isActive": user.is_active,
            "createdAt": user.created_at.isoformat(),
        },
    }
+1
View File
@@ -0,0 +1 @@
@@ -0,0 +1,72 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.capability_sch import CapabilityResponse, CreateCompositeCapabilityRequest
from app.services.capability_service import (
CapabilityService,
CompositeRecipeValidationError,
)
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])
@router.post(
    "/composite",
    response_model=CapabilityResponse,
    status_code=status.HTTP_201_CREATED,
)
async def create_composite_capability(
    payload: CreateCompositeCapabilityRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Create a composite capability from a validated multi-step recipe.

    Raises:
        HTTPException 422: the recipe failed service-side validation; the
            response detail carries the individual validation errors.
    """
    trace_id = getattr(request.state, "traceId", None)
    capability_service = CapabilityService(session)
    try:
        # Admins may reference any capability in the recipe (include_all);
        # other users only their own.
        capability = await capability_service.create_validated_composite_capability(
            owner_user_id=current_user.id,
            name=payload.name,
            description=payload.description,
            input_schema=payload.input_schema,
            output_schema=payload.output_schema,
            recipe=payload.recipe.model_dump(mode="python"),
            include_all=current_user.role == UserRole.ADMIN,
        )
        await session.commit()
        await session.refresh(capability)
        # NOTE(review): only CompositeRecipeValidationError is handled below,
        # even though commit/refresh/logging also run inside this try block.
        recipe_dump = payload.recipe.model_dump(mode="python")
        recipe_steps = recipe_dump.get("steps") if isinstance(recipe_dump, dict) else None
        log_business_event(
            "composite_capability_created",
            trace_id=trace_id,
            user_id=str(current_user.id),
            capability_id=str(capability.id),
            capability_name=capability.name,
            recipe_steps_count=len(recipe_steps) if isinstance(recipe_steps, list) else None,
        )
        return capability
    except CompositeRecipeValidationError as exc:
        # Roll back the failed transaction before reporting the errors.
        await session.rollback()
        log_business_event(
            "composite_capability_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            capability_name=payload.name,
            reason="validation_failed",
            errors_count=len(exc.errors),
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Composite recipe validation failed",
                "errors": exc.errors,
            },
        ) from exc
@@ -0,0 +1,25 @@
from __future__ import annotations
from uuid import UUID
from fastapi import HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Capability, User, UserRole
from app.services.capability_service import CapabilityService
async def get_capability_or_404(
    session: AsyncSession,
    capability_id: UUID,
    current_user: User,
) -> Capability:
    """Fetch a capability visible to the user; raise 404 when absent/forbidden."""
    service = CapabilityService(session)
    # Admins may see every capability; other users only their own.
    found = await service.get_capability(
        capability_id,
        owner_user_id=current_user.id,
        include_all=current_user.role == UserRole.ADMIN,
    )
    if found is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Capability not found")
    return found
@@ -0,0 +1,46 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.capabilities.dependencies import get_capability_or_404
from app.core.database.session import get_session
from app.models import User
from app.schemas.capability_sch import CapabilityResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])


@router.get("/{capability_id}", response_model=CapabilityResponse)
async def get_capability(
    capability_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Return one capability, or 404 when missing or not visible to the caller."""
    trace = getattr(request.state, "traceId", None)
    try:
        capability = await get_capability_or_404(session, capability_id, current_user)
    except HTTPException:
        # Record the rejected attempt before propagating the 404.
        log_business_event(
            "capability_fetch_rejected",
            trace_id=trace,
            user_id=str(current_user.id),
            capability_id=str(capability_id),
            reason="capability_not_found_or_forbidden",
        )
        raise
    # Capability type may be an enum or a plain string; normalize for the log.
    type_label = (
        capability.type.value if hasattr(capability.type, "value") else str(capability.type)
    )
    log_business_event(
        "capability_fetched",
        trace_id=trace,
        user_id=str(current_user.id),
        capability_id=str(capability.id),
        capability_type=type_label,
    )
    return capability
@@ -0,0 +1,55 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.capability_sch import CapabilityResponse
from app.services.capability_service import CapabilityService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Capabilities"])
@router.get("/", response_model=list[CapabilityResponse])
async def list_capabilities(
    request: Request,
    action_id: UUID | None = Query(default=None),
    owner_id: UUID | None = Query(default=None),
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List capabilities visible to the caller, optionally filtered by action.

    Admins see all capabilities by default; supplying owner_id narrows the
    listing to that owner. Non-admins are always scoped to themselves and
    their owner_id parameter has no effect.
    """
    trace_id = getattr(request.state, "traceId", None)
    capability_service = CapabilityService(session)
    action_ids = [action_id] if action_id is not None else None
    include_all = current_user.role == UserRole.ADMIN
    # Admin + explicit owner_id => filter by that owner; otherwise fall back
    # to the caller's own id (presumably ignored by the service when
    # include_all is passed — confirm against CapabilityService).
    owner_user_id = owner_id if include_all and owner_id is not None else current_user.id
    capabilities = await capability_service.get_capabilities(
        action_ids=action_ids,
        owner_user_id=owner_user_id,
        include_all=include_all and owner_id is None,
        limit=limit,
        offset=offset,
    )
    log_business_event(
        "capabilities_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        owner_id=str(owner_user_id) if owner_user_id is not None else None,
        action_id=str(action_id) if action_id is not None else None,
        include_all=include_all and owner_id is None,
        limit=limit,
        offset=offset,
        result_count=len(capabilities),
    )
    return capabilities
+13
View File
@@ -0,0 +1,13 @@
from fastapi import APIRouter
from app.api.capabilities.create_composite_capability import (
router as create_composite_capability_router,
)
from app.api.capabilities.get_capability import router as get_capability_router
from app.api.capabilities.list_capabilities import router as list_capabilities_router
# Aggregate the per-endpoint capability routers under /v1/capabilities.
router = APIRouter(prefix="/v1/capabilities", tags=["Capabilities"])
router.include_router(list_capabilities_router)
router.include_router(create_composite_capability_router)
router.include_router(get_capability_router)
+3
View File
@@ -0,0 +1,3 @@
"""Executions API package: re-exports the aggregated executions router."""
from app.api.executions.router import router
__all__ = ["router"]
+168
View File
@@ -0,0 +1,168 @@
from __future__ import annotations
from typing import Any
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import ExecutionRun, ExecutionStepRun, Pipeline, User, UserRole
from app.schemas.execution_sch import ExecutionRunDetailResponse, ExecutionStepRunResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Executions"])
KNOWN_HTTP_METHODS = {"GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"}
REQUEST_BODY_METHODS = {"POST", "PUT", "PATCH"}
def _extract_method(request_snapshot: dict[str, Any] | None) -> str | None:
if not isinstance(request_snapshot, dict):
return None
method_raw = request_snapshot.get("method")
if not isinstance(method_raw, str):
return None
method = method_raw.upper()
if method in KNOWN_HTTP_METHODS:
return method
return None
def _extract_status_code(response_snapshot: dict[str, Any] | None) -> int | None:
if not isinstance(response_snapshot, dict):
return None
status_code_raw = response_snapshot.get("status_code")
if isinstance(status_code_raw, int):
return status_code_raw
if isinstance(status_code_raw, str) and status_code_raw.isdigit():
return int(status_code_raw)
return None
def _extract_accepted_payload(
    *,
    method: str | None,
    request_snapshot: dict[str, Any] | None,
) -> Any:
    """Return the request JSON body for body-carrying methods, else None."""
    carries_body = method in REQUEST_BODY_METHODS and isinstance(request_snapshot, dict)
    return request_snapshot.get("json_body") if carries_body else None
def _extract_output_payload(response_snapshot: dict[str, Any] | None) -> Any:
if not isinstance(response_snapshot, dict):
return None
return response_snapshot.get("body")
def _build_step_run_response(step_run: ExecutionStepRun) -> ExecutionStepRunResponse:
    """Map a persisted step run onto its response model, deriving the HTTP
    method, status code and payloads from the stored request/response snapshots.
    """
    # Status may be an enum or a plain string; normalize to the string value.
    status_value = step_run.status.value if hasattr(step_run.status, "value") else step_run.status
    base = ExecutionStepRunResponse(
        step=step_run.step,
        name=step_run.name,
        capability_id=step_run.capability_id,
        action_id=step_run.action_id,
        status=status_value,
        resolved_inputs=step_run.resolved_inputs,
        request_snapshot=step_run.request_snapshot,
        response_snapshot=step_run.response_snapshot,
        error=step_run.error,
        started_at=step_run.started_at,
        finished_at=step_run.finished_at,
        duration_ms=step_run.duration_ms,
        created_at=step_run.created_at,
        updated_at=step_run.updated_at,
    )
    # Snapshots are free-form; derive extra fields only from dict-shaped ones.
    request_snapshot = base.request_snapshot if isinstance(base.request_snapshot, dict) else None
    response_snapshot = base.response_snapshot if isinstance(base.response_snapshot, dict) else None
    method = _extract_method(request_snapshot)
    status_code = _extract_status_code(response_snapshot)
    accepted_payload = _extract_accepted_payload(method=method, request_snapshot=request_snapshot)
    output_payload = _extract_output_payload(response_snapshot)
    # Overlay the derived fields onto a copy of the validated base model.
    return base.model_copy(
        update={
            "method": method,
            "status_code": status_code,
            "accepted_payload": accepted_payload,
            "output_payload": output_payload,
        }
    )
@router.get("/{run_id}", response_model=ExecutionRunDetailResponse)
async def get_execution(
    run_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Return one execution run with its step runs in execution order.

    Visibility: admins see every run; other callers see runs they initiated
    or, when a run has no initiator, runs of pipelines they created. Both
    "missing" and "forbidden" are reported as 404 so run ids cannot be probed.
    """
    trace_id = getattr(request.state, "traceId", None)
    run = await session.get(ExecutionRun, run_id)
    if run is None:
        log_business_event(
            "execution_fetch_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            run_id=str(run_id),
            reason="run_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Execution run not found")
    if current_user.role != UserRole.ADMIN:
        is_owner = run.initiated_by == current_user.id
        # Initiator-less runs fall back to pipeline-creator ownership.
        if not is_owner and run.initiated_by is None:
            pipeline = await session.get(Pipeline, run.pipeline_id)
            is_owner = pipeline is not None and pipeline.created_by == current_user.id
        if not is_owner:
            log_business_event(
                "execution_fetch_rejected",
                trace_id=trace_id,
                user_id=str(current_user.id),
                run_id=str(run.id),
                pipeline_id=str(run.pipeline_id),
                reason="run_not_found_or_forbidden",
            )
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Execution run not found")
    # Order by step index, then creation time as a stable tiebreaker.
    step_query = (
        select(ExecutionStepRun)
        .where(ExecutionStepRun.run_id == run.id)
        .order_by(ExecutionStepRun.step.asc(), ExecutionStepRun.created_at.asc())
    )
    step_result = await session.execute(step_query)
    step_runs = list(step_result.scalars().all())
    log_business_event(
        "execution_fetched",
        trace_id=trace_id,
        user_id=str(current_user.id),
        run_id=str(run.id),
        pipeline_id=str(run.pipeline_id),
        result_status=run.status.value,
        step_count=len(step_runs),
    )
    return ExecutionRunDetailResponse(
        id=run.id,
        pipeline_id=run.pipeline_id,
        status=run.status.value,
        inputs=run.inputs or {},
        summary=run.summary,
        error=run.error,
        started_at=run.started_at,
        finished_at=run.finished_at,
        created_at=run.created_at,
        updated_at=run.updated_at,
        steps=[
            _build_step_run_response(step_run)
            for step_run in step_runs
        ],
    )
@@ -0,0 +1,50 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import ExecutionRun, Pipeline, User, UserRole
from app.schemas.execution_sch import ExecutionRunListItemResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Executions"])


@router.get("/", response_model=list[ExecutionRunListItemResponse])
async def list_executions(
    request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List execution runs visible to the caller, newest first."""
    trace_id = getattr(request.state, "traceId", None)
    stmt = select(ExecutionRun).order_by(ExecutionRun.created_at.desc())
    if current_user.role != UserRole.ADMIN:
        # Non-admins see runs they started, plus initiator-less runs on
        # pipelines they created.
        ownership = or_(
            ExecutionRun.initiated_by == current_user.id,
            and_(
                ExecutionRun.initiated_by.is_(None),
                Pipeline.created_by == current_user.id,
            ),
        )
        stmt = stmt.join(Pipeline, Pipeline.id == ExecutionRun.pipeline_id).where(ownership)
    stmt = stmt.limit(limit).offset(offset)
    rows = await session.execute(stmt)
    runs = list(rows.scalars().all())
    log_business_event(
        "executions_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        limit=limit,
        offset=offset,
        result_count=len(runs),
    )
    return runs
+9
View File
@@ -0,0 +1,9 @@
from fastapi import APIRouter
from app.api.executions.get_execution import router as get_execution_router
from app.api.executions.list_executions import router as list_executions_router
# Aggregate the per-endpoint execution routers under /v1/executions.
router = APIRouter(prefix="/v1/executions", tags=["Executions"])
router.include_router(list_executions_router)
router.include_router(get_execution_router)
View File
+7
View File
@@ -0,0 +1,7 @@
from fastapi import APIRouter
# Health-check router (no version prefix).
router = APIRouter()
@router.get("/ping")
async def ping():
    """Liveness probe: returns a static OK payload."""
    return {"status": "ok"}
+3
View File
@@ -0,0 +1,3 @@
"""Pipelines API package: re-exports the aggregated pipelines router."""
from app.api.pipelines.router import router
__all__ = ["router"]
+114
View File
@@ -0,0 +1,114 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import PipelineGenerateRequest, PipelineGenerateResponse
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.services.pipeline_service import PipelineService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/generate", response_model=PipelineGenerateResponse)
async def generate_pipeline(
    payload: PipelineGenerateRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Handle one chat turn: store the user message, generate a pipeline
    proposal, store the assistant reply, and return the generation result.

    Raises:
        HTTPException 403/404: the dialog is inaccessible (mapped from
            DialogAccessError by substring-matching its message).
    """
    trace_id = getattr(request.state, "traceId", None)
    log_business_event(
        "pipeline_prompt_received",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        message_len=len(payload.message),
        capability_ids_count=len(payload.capability_ids or []),
    )
    service = PipelineService(session)
    dialog_service = PipelineDialogService(session)
    try:
        # Persist the user's message first so it is kept even if generation fails.
        await dialog_service.append_user_message(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
            content=payload.message,
        )
        dialog = await dialog_service.get_dialog(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_prompt_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=detail,
        )
        # NOTE(review): distinguishing 403 vs 404 by searching the error text
        # for "denied" is fragile; a typed attribute on the error would be safer.
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    try:
        result = await service.generate(
            dialog_id=payload.dialog_id,
            message=payload.message,
            user_id=current_user.id,
            capability_ids=payload.capability_ids,
            previous_pipeline_id=dialog.last_pipeline_id,
        )
    except Exception as exc:
        # NOTE(review): broad except + substring match turns any error whose
        # text mentions "ollama" into a graceful degraded response; every
        # other exception propagates unchanged.
        if "ollama" in str(exc).lower():
            message_ru = "Не удалось обратиться к локальной модели Ollama. Проверьте OLLAMA_HOST/OLLAMA_MODEL и повторите запрос."
            result = {
                "status": "cannot_build",
                "message_ru": message_ru,
                "chat_reply_ru": message_ru,
                "pipeline_id": None,
                "nodes": [],
                "edges": [],
                "missing_requirements": ["ollama_unavailable"],
                "context_summary": None,
            }
        else:
            raise
    response_payload = PipelineGenerateResponse(**result)
    try:
        # Store the assistant's reply (falling back to message_ru) together
        # with the full structured payload for later history rendering.
        await dialog_service.append_assistant_message(
            dialog_id=payload.dialog_id,
            user_id=current_user.id,
            content=response_payload.chat_reply_ru or response_payload.message_ru,
            assistant_payload=response_payload.model_dump(mode="json", exclude_none=True),
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_prompt_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=detail,
        )
        if "denied" in detail:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=detail) from exc
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=detail) from exc
    log_business_event(
        "pipeline_prompt_processed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        result_status=response_payload.status,
        pipeline_id=str(response_payload.pipeline_id) if response_payload.pipeline_id else None,
    )
    return response_payload
@@ -0,0 +1,78 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import (
PipelineDialogHistoryResponse,
PipelineDialogMessageResponse,
)
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])


@router.get("/dialogs/{dialog_id}/history", response_model=PipelineDialogHistoryResponse)
async def get_pipeline_dialog_history(
    dialog_id: UUID,
    request: Request,
    limit: int = Query(default=30, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Return a page of messages from one of the caller's pipeline dialogs."""
    trace_id = getattr(request.state, "traceId", None)
    dialog_service = PipelineDialogService(session)
    try:
        dialog, messages = await dialog_service.get_history(
            dialog_id=dialog_id,
            user_id=current_user.id,
            limit=limit,
            offset=offset,
        )
    except DialogAccessError as exc:
        detail = str(exc)
        log_business_event(
            "pipeline_dialog_history_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(dialog_id),
            reason=detail,
        )
        # Access-denied errors map to 403, everything else to 404.
        http_status = (
            status.HTTP_403_FORBIDDEN if "denied" in detail else status.HTTP_404_NOT_FOUND
        )
        raise HTTPException(status_code=http_status, detail=detail) from exc
    message_items = [
        PipelineDialogMessageResponse(
            id=entry.id,
            role=entry.role.value,
            content=entry.content,
            assistant_payload=entry.assistant_payload,
            created_at=entry.created_at,
        )
        for entry in messages
    ]
    response = PipelineDialogHistoryResponse(
        dialog_id=dialog.id,
        title=dialog.title,
        messages=message_items,
    )
    log_business_event(
        "pipeline_dialog_history_viewed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(dialog.id),
        limit=limit,
        offset=offset,
        message_count=len(response.messages),
    )
    return response
+52
View File
@@ -0,0 +1,52 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import PipelineDialogListItemResponse
from app.services.pipeline_dialog_service import PipelineDialogService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.get("/dialogs", response_model=list[PipelineDialogListItemResponse])
async def list_pipeline_dialogs(
    request: Request,
    limit: int = Query(default=20, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """List pipeline dialogs owned by the current user (paginated)."""
    trace_id = getattr(request.state, "traceId", None)
    service = PipelineDialogService(session)
    dialogs = await service.list_dialogs(
        user_id=current_user.id,
        limit=limit,
        offset=offset,
    )
    items: list[PipelineDialogListItemResponse] = []
    for dialog in dialogs:
        items.append(
            PipelineDialogListItemResponse(
                dialog_id=dialog.id,
                title=dialog.title,
                last_status=dialog.last_status,
                last_pipeline_id=dialog.last_pipeline_id,
                last_message_preview=dialog.last_message_preview,
                created_at=dialog.created_at,
                updated_at=dialog.updated_at,
            )
        )
    log_business_event(
        "pipeline_dialogs_listed",
        trace_id=trace_id,
        user_id=str(current_user.id),
        limit=limit,
        offset=offset,
        result_count=len(items),
    )
    return items
+54
View File
@@ -0,0 +1,54 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.pipeline_chat_sch import DialogResetRequest, DialogResetResponse
from app.services.pipeline_dialog_service import DialogAccessError, PipelineDialogService
from app.services.pipeline_service import PipelineService
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/dialog/reset", response_model=DialogResetResponse)
async def reset_pipeline_dialog(
    payload: DialogResetRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Reset a pipeline dialog after verifying the current user may access it."""
    trace_id = getattr(request.state, "traceId", None)
    dialogs = PipelineDialogService(session)
    try:
        # Access check only; the dialog object itself is not needed here.
        await dialogs.get_dialog(dialog_id=payload.dialog_id, user_id=current_user.id)
    except DialogAccessError as exc:
        reason = str(exc)
        log_business_event(
            "pipeline_dialog_reset_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            dialog_id=str(payload.dialog_id),
            reason=reason,
        )
        # "denied" in the message means a forbidden dialog; otherwise it is missing.
        code = status.HTTP_403_FORBIDDEN if "denied" in reason else status.HTTP_404_NOT_FOUND
        raise HTTPException(status_code=code, detail=reason) from exc
    pipelines = PipelineService(session)
    result = await pipelines.reset_dialog(payload.dialog_id)
    result_status = result.get("status") if isinstance(result, dict) else None
    log_business_event(
        "pipeline_dialog_reset",
        trace_id=trace_id,
        user_id=str(current_user.id),
        dialog_id=str(payload.dialog_id),
        result_status=result_status,
    )
    return DialogResetResponse(**result)
+17
View File
@@ -0,0 +1,17 @@
from fastapi import APIRouter
from app.api.pipelines.generate import router as generate_router
from app.api.pipelines.get_dialog_history import router as get_dialog_history_router
from app.api.pipelines.list_dialogs import router as list_dialogs_router
from app.api.pipelines.reset_dialog import router as reset_dialog_router
from app.api.pipelines.run import router as run_router
from app.api.pipelines.update_graph import router as update_graph_router
# Aggregator for all pipeline endpoints; each sub-router defines its own paths
# relative to the shared /v1/pipelines prefix.
router = APIRouter(prefix="/v1/pipelines", tags=["Pipelines"])
router.include_router(generate_router)
router.include_router(list_dialogs_router)
router.include_router(get_dialog_history_router)
router.include_router(reset_dialog_router)
router.include_router(run_router)
router.include_router(update_graph_router)
+83
View File
@@ -0,0 +1,83 @@
from __future__ import annotations
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Pipeline, User, UserRole
from app.schemas.execution_sch import RunPipelineRequest, RunPipelineResponse
from app.services.execution_service import ExecutionService, ExecutionServiceError
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
@router.post("/{pipeline_id}/run", response_model=RunPipelineResponse, status_code=status.HTTP_202_ACCEPTED)
async def run_pipeline(
    pipeline_id: UUID,
    payload: RunPipelineRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Create an execution run for a pipeline and start it in the background.

    Returns 202 with the new run's identifiers; 404 if the pipeline does not
    exist or is not visible to the caller; 400/404 on service-level errors.
    """
    trace_id = getattr(request.state, "traceId", None)

    def _log_rejection(reason: str) -> None:
        # Single place to record why a run request was refused.
        log_business_event(
            "pipeline_run_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason=reason,
        )

    pipeline = await session.get(Pipeline, pipeline_id)
    if pipeline is None:
        _log_rejection("pipeline_not_found")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    is_owner = pipeline.created_by == current_user.id
    if current_user.role != UserRole.ADMIN and not is_owner:
        # A 404 (rather than 403) is returned to non-owners, which avoids
        # revealing that the pipeline exists.
        _log_rejection("pipeline_not_owned")
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    service = ExecutionService(session)
    try:
        run = await service.create_run(
            pipeline_id=pipeline_id,
            inputs=payload.inputs,
            initiated_by=current_user.id,
        )
    except ExecutionServiceError as exc:
        message = str(exc)
        _log_rejection(message)
        if "not found" in message.lower():
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=message) from exc
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) from exc
    ExecutionService.start_background_execution(run.id)
    log_business_event(
        "pipeline_run_started",
        trace_id=trace_id,
        user_id=str(current_user.id),
        pipeline_id=str(run.pipeline_id),
        run_id=str(run.id),
        inputs_count=len(payload.inputs or {}),
    )
    return RunPipelineResponse(
        run_id=run.id,
        pipeline_id=run.pipeline_id,
        status=run.status.value,
    )
+205
View File
@@ -0,0 +1,205 @@
from __future__ import annotations
from collections import defaultdict
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import Pipeline, User, UserRole
from app.schemas.pipeline_chat_sch import (
PipelineGraphUpdateRequest,
PipelineGraphUpdateResponse,
)
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Pipelines"])
def _graph_has_cycle(steps: set[int], edges: list[dict[str, int | str]]) -> bool:
adjacency: dict[int, set[int]] = {step: set() for step in steps}
for edge in edges:
src = edge["from_step"]
dst = edge["to_step"]
if isinstance(src, int) and isinstance(dst, int):
adjacency.setdefault(src, set()).add(dst)
visiting: set[int] = set()
visited: set[int] = set()
def dfs(step: int) -> bool:
if step in visiting:
return True
if step in visited:
return False
visiting.add(step)
for neighbor in adjacency.get(step, set()):
if dfs(neighbor):
return True
visiting.remove(step)
visited.add(step)
return False
return any(dfs(step) for step in adjacency)
def _sync_node_connections(
nodes: list[dict[str, object]],
edges: list[dict[str, int | str]],
) -> None:
incoming_by_step: dict[int, set[int]] = defaultdict(set)
outgoing_by_step: dict[int, set[int]] = defaultdict(set)
incoming_types_by_step: dict[int, set[tuple[int, str]]] = defaultdict(set)
for edge in edges:
src = edge.get("from_step")
dst = edge.get("to_step")
edge_type = edge.get("type")
if not isinstance(src, int) or not isinstance(dst, int) or not isinstance(edge_type, str):
continue
outgoing_by_step[src].add(dst)
incoming_by_step[dst].add(src)
incoming_types_by_step[dst].add((src, edge_type))
for node in nodes:
step = node.get("step")
if not isinstance(step, int):
node["input_connected_from"] = []
node["output_connected_to"] = []
node["input_data_type_from_previous"] = []
continue
node["input_connected_from"] = sorted(incoming_by_step.get(step, set()))
node["output_connected_to"] = sorted(outgoing_by_step.get(step, set()))
node["input_data_type_from_previous"] = [
{"from_step": src, "type": edge_type}
for src, edge_type in sorted(incoming_types_by_step.get(step, set()))
]
@router.patch("/{pipeline_id}/graph", response_model=PipelineGraphUpdateResponse)
async def update_pipeline_graph(
    pipeline_id: UUID,
    payload: PipelineGraphUpdateRequest,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Validate and persist a replacement node/edge graph for a pipeline.

    Validation collects ALL problems (duplicate steps, dangling or duplicate
    edges, self-loops, missing edge types, cycles) and reports them together
    in a 422 response rather than failing on the first one. Non-owners who
    are not admins receive 404, the same as for a missing pipeline.
    """
    trace_id = getattr(request.state, "traceId", None)
    pipeline = await session.get(Pipeline, pipeline_id)
    if pipeline is None:
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    if current_user.role != UserRole.ADMIN and pipeline.created_by != current_user.id:
        # 404 rather than 403: does not reveal the pipeline's existence to non-owners.
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="pipeline_not_owned",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Pipeline not found")
    # Work on plain JSON-serializable dicts from here on.
    nodes = [node.model_dump(mode="json") for node in payload.nodes]
    edges = [edge.model_dump(mode="json") for edge in payload.edges]
    validation_errors: list[str] = []
    # Pass 1: collect the set of valid, unique node steps.
    steps: set[int] = set()
    for node in nodes:
        step = node.get("step")
        if not isinstance(step, int):
            validation_errors.append("graph: invalid_step")
            continue
        if step in steps:
            validation_errors.append(f"graph: duplicate_node_step:{step}")
            continue
        steps.add(step)
    # Pass 2: normalize edges to {from_step, to_step, type}, rejecting edges
    # with bad endpoints, references to unknown steps, self-loops, empty
    # types, or duplicates of an already-seen (src, dst, type) triple.
    normalized_edges: list[dict[str, int | str]] = []
    seen_edges: set[tuple[int, int, str]] = set()
    for edge in edges:
        src = edge.get("from_step")
        dst = edge.get("to_step")
        edge_type = str(edge.get("type") or "").strip()
        if not isinstance(src, int) or not isinstance(dst, int):
            validation_errors.append("graph: invalid_edge_reference")
            continue
        if src not in steps or dst not in steps:
            validation_errors.append(f"graph: edge_to_missing_node:{src}->{dst}")
            continue
        if src == dst:
            validation_errors.append(f"graph: self_loop:{src}")
            continue
        if not edge_type:
            validation_errors.append("graph: invalid_edge_type")
            continue
        edge_key = (src, dst, edge_type)
        if edge_key in seen_edges:
            validation_errors.append(
                f"graph: duplicate_edge:{src}->{dst}:{edge_type}"
            )
            continue
        seen_edges.add(edge_key)
        normalized_edges.append({"from_step": src, "to_step": dst, "type": edge_type})
    # Pass 3: cycle check over the cleaned edges only.
    if normalized_edges and _graph_has_cycle(steps, normalized_edges):
        validation_errors.append("graph: cycle")
    if validation_errors:
        log_business_event(
            "pipeline_graph_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            pipeline_id=str(pipeline_id),
            reason="invalid_graph",
            errors=sorted(set(validation_errors)),
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail={
                "message": "Invalid pipeline graph",
                "errors": sorted(set(validation_errors)),
            },
        )
    # Recompute per-node connection metadata before persisting.
    _sync_node_connections(nodes, normalized_edges)
    pipeline.nodes = nodes
    pipeline.edges = normalized_edges
    await session.commit()
    await session.refresh(pipeline)
    log_business_event(
        "pipeline_graph_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        pipeline_id=str(pipeline.id),
        nodes_count=len(nodes),
        edges_count=len(normalized_edges),
    )
    return PipelineGraphUpdateResponse(
        pipeline_id=pipeline.id,
        nodes=pipeline.nodes,
        edges=pipeline.edges,
        updated_at=pipeline.updated_at,
    )
+54
View File
@@ -0,0 +1,54 @@
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.delete("/{user_id}", status_code=status.HTTP_200_OK)
async def delete_user(
    user_id: UUID,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Deactivate a user account (soft delete via ``is_active = False``).

    Admins may target any account; other users may only deactivate themselves.
    """
    trace_id = getattr(request.state, "traceId", None)
    is_self = current_user.id == user_id
    if current_user.role != UserRole.ADMIN and not is_self:
        log_business_event(
            "user_deactivation_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            target_user_id=str(user_id),
            reason="forbidden",
        )
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Нет доступа")
    target = await session.get(User, user_id)
    if target is None:
        log_business_event(
            "user_deactivation_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            target_user_id=str(user_id),
            reason="target_user_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    target.is_active = False
    await session.commit()
    log_business_event(
        "user_deactivated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        target_user_id=str(target.id),
    )
    return {"message": "Пользователь успешно деактивирован"}
+21
View File
@@ -0,0 +1,21 @@
from fastapi import APIRouter, Depends, Request
from app.models import User
from app.schemas.users_sch import UserResponse
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.get("/me", response_model=UserResponse)
async def get_me(
    request: Request,
    current_user: User = Depends(get_current_user),
):
    """Return the authenticated user's own profile."""
    log_business_event(
        "user_profile_viewed",
        trace_id=getattr(request.state, "traceId", None),
        user_id=str(current_user.id),
    )
    return current_user
+27
View File
@@ -0,0 +1,27 @@
from fastapi import APIRouter, Depends, Request
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.business_logger import log_business_event
from app.utils.token_manager import check_permissions
from app.schemas.users_sch import UserResponse
router = APIRouter(tags=["Users"])
@router.get("/", response_model=list[UserResponse])
async def list_users(
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(check_permissions([UserRole.ADMIN])),
):
    """Return all user accounts; access restricted to admins by the dependency."""
    users = (await session.execute(select(User))).scalars().all()
    log_business_event(
        "users_listed",
        trace_id=getattr(request.state, "traceId", None),
        user_id=str(current_user.id),
        result_count=len(users),
    )
    return users
+51
View File
@@ -0,0 +1,51 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.users_sch import UserResponse, UserUpdateMe
from app.utils.business_logger import log_business_event
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.patch("/me", response_model=UserResponse)
async def update_me(
    data: UserUpdateMe,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Apply partial profile changes to the authenticated user.

    Rejects with 409 if the requested email is already taken by another account.
    """
    trace_id = getattr(request.state, "traceId", None)
    wants_new_email = bool(data.email) and data.email != current_user.email
    if wants_new_email:
        duplicate = (
            await session.execute(select(User).where(User.email == data.email))
        ).scalar_one_or_none()
        if duplicate:
            log_business_event(
                "user_profile_update_rejected",
                trace_id=trace_id,
                user_id=str(current_user.id),
                reason="email_already_exists",
                requested_email=data.email,
            )
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Пользователь с таким email уже существует",
            )
    # Only fields explicitly provided by the client are applied.
    changes = data.model_dump(exclude_unset=True)
    for field_name, field_value in changes.items():
        setattr(current_user, field_name, field_value)
    await session.commit()
    await session.refresh(current_user)
    log_business_event(
        "user_profile_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        updated_fields=sorted(changes.keys()),
    )
    return current_user
+44
View File
@@ -0,0 +1,44 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User
from app.schemas.users_sch import PasswordUpdate
from app.utils.business_logger import log_business_event
from app.utils.hashing import hash_password, verify_password
from app.utils.token_manager import get_current_user
router = APIRouter(tags=["Users"])
@router.patch("/me/password", status_code=status.HTTP_200_OK)
async def update_password(
    data: PasswordUpdate,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(get_current_user),
):
    """Change the authenticated user's password after verifying the current one."""
    trace_id = getattr(request.state, "traceId", None)
    old_password_ok = verify_password(data.old_password, current_user.hashed_password)
    if not old_password_ok:
        log_business_event(
            "user_password_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            reason="invalid_current_password",
        )
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Неверный текущий пароль",
        )
    current_user.hashed_password = hash_password(data.new_password)
    await session.commit()
    log_business_event(
        "user_password_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
    )
    return {"message": "Пароль успешно обновлен"}
+51
View File
@@ -0,0 +1,51 @@
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.schemas.users_sch import UserResponse, UserUpdate
from app.utils.business_logger import log_business_event
from app.utils.token_manager import check_permissions
router = APIRouter(tags=["Users"])
@router.patch("/{user_id}", response_model=UserResponse)
async def update_user(
    user_id: UUID,
    data: UserUpdate,
    request: Request,
    session: AsyncSession = Depends(get_session),
    current_user: User = Depends(check_permissions([UserRole.ADMIN])),
):
    """Admin-only partial update of an arbitrary user account.

    NOTE(review): unlike the self-service ``/me`` update, this endpoint does
    not pre-check email uniqueness, so a duplicate email would surface as a
    database error. Confirm whether ``UserUpdate`` permits email changes.
    """
    trace_id = getattr(request.state, "traceId", None)
    target = await session.get(User, user_id)
    if target is None:
        log_business_event(
            "user_update_rejected",
            trace_id=trace_id,
            user_id=str(current_user.id),
            target_user_id=str(user_id),
            reason="target_user_not_found",
        )
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
    # Only fields explicitly provided by the client are applied.
    changes = data.model_dump(exclude_unset=True)
    for field_name, field_value in changes.items():
        setattr(target, field_name, field_value)
    await session.commit()
    await session.refresh(target)
    log_business_event(
        "user_updated",
        trace_id=trace_id,
        user_id=str(current_user.id),
        target_user_id=str(target.id),
        updated_fields=sorted(changes.keys()),
    )
    return target
+135
View File
@@ -0,0 +1,135 @@
import asyncio
import os
from sqlalchemy import select, text
# Important: import all ORM models before create_all() so SQLAlchemy metadata is complete.
from app.models import (
Action,
Base,
Capability,
DialogMessageRole,
ExecutionRun,
ExecutionStepRun,
Pipeline,
PipelineDialog,
PipelineDialogMessage,
User,
UserRole,
)
from app.core.database.session import SessionLocal, engine
from app.utils.hashing import hash_password
async def init_db():
    """Create ORM tables, apply ad-hoc schema fixes, and seed the admin user.

    Runs ``Base.metadata.create_all`` and then a raw PostgreSQL DO block to
    patch older databases in place, so no Alembic migration history is
    required. Finally seeds an ADMIN user from ADMIN_EMAIL/ADMIN_PASSWORD
    env vars if one does not already exist.
    """
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
        # Best-effort schema drift handling without requiring Alembic.
        # Use DO blocks so missing tables don't abort the whole transaction (and roll back create_all()).
        await conn.execute(
            text(
                """
                DO $$
                DECLARE
                    cap_constraint_name TEXT;
                    admin_user_id UUID;
                BEGIN
                    IF to_regclass('public.actions') IS NOT NULL THEN
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS is_deleted BOOLEAN NOT NULL DEFAULT FALSE;
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS ingest_status VARCHAR(32) NOT NULL DEFAULT 'SUCCEEDED';
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS ingest_error TEXT;
                        ALTER TABLE actions ADD COLUMN IF NOT EXISTS user_id UUID REFERENCES users(id) ON DELETE CASCADE;
                        CREATE INDEX IF NOT EXISTS ix_actions_method_path ON actions (method, path);
                        CREATE INDEX IF NOT EXISTS ix_actions_is_deleted ON actions (is_deleted);
                        CREATE INDEX IF NOT EXISTS ix_actions_ingest_status ON actions (ingest_status);
                        CREATE INDEX IF NOT EXISTS ix_actions_user_id ON actions (user_id);
                    END IF;
                    IF to_regclass('public.capabilities') IS NOT NULL THEN
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS type VARCHAR(50) DEFAULT 'ATOMIC';
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS recipe JSONB;
                        ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS user_id UUID REFERENCES users(id) ON DELETE CASCADE;
                        ALTER TABLE capabilities ALTER COLUMN action_id DROP NOT NULL;
                        CREATE INDEX IF NOT EXISTS ix_capabilities_type ON capabilities (type);
                        CREATE INDEX IF NOT EXISTS ix_capabilities_user_id ON capabilities (user_id);
                        FOR cap_constraint_name IN
                            SELECT c.conname
                            FROM pg_constraint c
                            JOIN pg_class t ON t.oid = c.conrelid
                            JOIN pg_namespace ns ON ns.oid = t.relnamespace
                            WHERE ns.nspname = 'public'
                              AND t.relname = 'capabilities'
                              AND c.contype = 'u'
                              AND array_length(c.conkey, 1) = 1
                              AND c.conkey[1] = (
                                  SELECT a.attnum
                                  FROM pg_attribute a
                                  WHERE a.attrelid = t.oid
                                    AND a.attname = 'action_id'
                                    AND a.attnum > 0
                                    AND NOT a.attisdropped
                                  LIMIT 1
                              )
                        LOOP
                            EXECUTE format('ALTER TABLE capabilities DROP CONSTRAINT IF EXISTS %I', cap_constraint_name);
                        END LOOP;
                        CREATE UNIQUE INDEX IF NOT EXISTS uq_capabilities_user_action
                            ON capabilities (user_id, action_id)
                            WHERE action_id IS NOT NULL;
                    END IF;
                    IF to_regclass('public.users') IS NOT NULL THEN
                        SELECT id
                        INTO admin_user_id
                        FROM users
                        WHERE role::text = 'ADMIN'
                        ORDER BY created_at ASC
                        LIMIT 1;
                        IF admin_user_id IS NOT NULL THEN
                            IF to_regclass('public.actions') IS NOT NULL THEN
                                UPDATE actions SET user_id = admin_user_id WHERE user_id IS NULL;
                            END IF;
                            IF to_regclass('public.capabilities') IS NOT NULL THEN
                                UPDATE capabilities SET user_id = admin_user_id WHERE user_id IS NULL;
                            END IF;
                        END IF;
                    END IF;
                    IF to_regclass('public.pipeline_dialogs') IS NOT NULL THEN
                        CREATE INDEX IF NOT EXISTS ix_pipeline_dialogs_user_updated_at_desc
                            ON pipeline_dialogs (user_id, updated_at DESC);
                    END IF;
                    IF to_regclass('public.pipeline_dialog_messages') IS NOT NULL THEN
                        CREATE INDEX IF NOT EXISTS ix_pipeline_dialog_messages_dialog_created_at_asc
                            ON pipeline_dialog_messages (dialog_id, created_at ASC);
                    END IF;
                END $$;
                """
            )
        )
    # Seed the initial admin account only when both credentials are configured
    # and no user with that email exists yet (idempotent across restarts).
    async with SessionLocal() as session:
        admin_email = os.getenv("ADMIN_EMAIL")
        admin_password = os.getenv("ADMIN_PASSWORD")
        admin_fullname = os.getenv("ADMIN_FULLNAME", "System Admin")
        if admin_email and admin_password:
            result = await session.execute(
                select(User).where(User.email == admin_email)
            )
            existing_admin = result.scalar_one_or_none()
            if not existing_admin:
                new_admin = User(
                    email=admin_email,
                    hashed_password=hash_password(admin_password),
                    full_name=admin_fullname,
                    role=UserRole.ADMIN,
                    is_active=True
                )
                session.add(new_admin)
                await session.commit()
# Allow running the bootstrap as a standalone script.
if __name__ == "__main__":
    asyncio.run(init_db())
+22
View File
@@ -0,0 +1,22 @@
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
import os
# Prefer an explicit DATABASE_URL; otherwise assemble an asyncpg URL from the
# discrete DB_* environment variables (with local-dev defaults).
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    DB_HOST = os.getenv("DB_HOST", "localhost")
    DB_PORT = os.getenv("DB_PORT", "5432")
    DB_NAME = os.getenv("DB_NAME", "postgres")
    DB_USER = os.getenv("DB_USER", "postgres")
    DB_PASSWORD = os.getenv("DB_PASSWORD", "postgres")
    DATABASE_URL = f"postgresql+asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
# pool_pre_ping revalidates pooled connections before use so stale connections
# are replaced instead of raising mid-request.
engine = create_async_engine(DATABASE_URL, pool_pre_ping=True)
# expire_on_commit=False keeps ORM objects usable after commit (needed because
# responses are built from committed objects).
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: yield one AsyncSession per request, closed afterwards."""
    async with SessionLocal() as session:
        yield session
+100
View File
@@ -0,0 +1,100 @@
from __future__ import annotations
import json
import logging
import os
from datetime import datetime, timezone
from typing import Any
from app.utils.log_context import get_log_context
# Service identity stamped on every log line; override via APP_SERVICE_NAME.
SERVICE_NAME = os.getenv("APP_SERVICE_NAME", "backend-api")
# Attribute names present on every stock LogRecord — anything NOT in this set
# is caller-supplied "extra" data. "message" and "asctime" are added later by
# Formatter.format(), so they are treated as reserved too.
LOG_RECORD_RESERVED_FIELDS = set(
    logging.LogRecord(
        name="",
        level=0,
        pathname="",
        lineno=0,
        msg="",
        args=(),
        exc_info=None,
    ).__dict__.keys()
) | {"message", "asctime"}
def _normalize_extra_value(value: Any) -> Any:
if isinstance(value, (str, int, float, bool)) or value is None:
return value
if isinstance(value, (list, tuple)):
return [_normalize_extra_value(item) for item in value]
if isinstance(value, dict):
normalized: dict[str, Any] = {}
for key, nested_value in value.items():
normalized[str(key)] = _normalize_extra_value(nested_value)
return normalized
return str(value)
class JsonFormatter(logging.Formatter):
    """Format log records as single-line JSON documents.

    Emits a fixed envelope (timestamp/level/logger/message/service_name),
    promotes a known set of business fields when present on the record, then
    appends any remaining non-reserved extras in a JSON-safe form.
    """

    def format(self, record: logging.LogRecord) -> str:
        document: dict[str, Any] = {
            "timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "service_name": SERVICE_NAME,
        }
        known_fields = (
            "event",
            "trace_id",
            "path",
            "method",
            "status_code",
            "duration_ms",
            "user_id",
            "email",
            "role",
            "dialog_id",
            "pipeline_id",
            "run_id",
            "result_status",
            "message_len",
            "capability_ids_count",
            "reason",
        )
        for field in known_fields:
            field_value = getattr(record, field, None)
            if field_value is not None:
                document[field] = field_value
        # Any other caller-supplied extras are included after normalization;
        # fields already emitted above are not overwritten.
        for field, field_value in record.__dict__.items():
            if field not in LOG_RECORD_RESERVED_FIELDS and field not in document:
                document[field] = _normalize_extra_value(field_value)
        if record.exc_info:
            document["exception"] = self.formatException(record.exc_info)
        return json.dumps(document, ensure_ascii=True)
class RequestContextFilter(logging.Filter):
    """Copy request-scoped log context onto records that do not already carry it."""

    def filter(self, record: logging.LogRecord) -> bool:
        context = get_log_context()
        for key, value in context.items():
            # Never overwrite a field the caller set explicitly on the record.
            if getattr(record, key, None) is None:
                setattr(record, key, value)
        return True
def configure_logging() -> None:
    """Install a single JSON stdout handler on the root logger.

    The level comes from LOG_LEVEL (default INFO); existing handlers are
    removed so re-invocation does not duplicate output.
    """
    root = logging.getLogger()
    root.handlers.clear()
    root.setLevel(os.getenv("LOG_LEVEL", "INFO").upper())
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(JsonFormatter())
    stream_handler.addFilter(RequestContextFilter())
    root.addHandler(stream_handler)
+188
View File
@@ -0,0 +1,188 @@
import sys
import asyncio
import os
import uuid
import logging
from time import perf_counter
from contextlib import asynccontextmanager
from prometheus_fastapi_instrumentator import Instrumentator
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from app.api.ping.router import router as health_router
from app.api.actions.router import router as actions_router
from app.api.capabilities.router import router as capabilities_router
from app.api.executions.router import router as executions_router
from app.api.pipelines.router import router as pipelines_router
from app.utils.error_handlers import (
validation_exception_handler,
http_exception_handler,
unhandled_exception_handler,
)
from app.utils.log_context import clear_log_context, set_request_context
from app.core.logging import configure_logging
from app.core.database.init import init_db
try:
from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend
from redis import asyncio as aioredis
except ModuleNotFoundError:
FastAPICache = None
RedisBackend = None
aioredis = None
try:
from app.api.auth.register import router as auth_router
from app.api.auth.login import router as login_router
except ModuleNotFoundError as exc:
auth_router = None
login_router = None
print(f"Auth routes are disabled: {exc}")
try:
from app.api.users.get_me import router as get_me_router
from app.api.users.list_users import router as list_users_router
from app.api.users.update_me import router as update_me_router
from app.api.users.update_user import router as update_user_router
from app.api.users.update_password import router as update_password_router
from app.api.users.delete_user import router as delete_user_router
except ModuleNotFoundError as exc:
get_me_router = None
list_users_router = None
update_me_router = None
update_user_router = None
update_password_router = None
delete_user_router = None
print(f"User routes are disabled: {exc}")
if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
configure_logging()
http_logger = logging.getLogger("app.http")
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: bootstrap the database, then best-effort Redis cache."""
    try:
        await init_db()
    except Exception as e:
        # Startup continues even if DB bootstrap fails.
        print(f"Database initialization error: {e}")
    host = os.getenv("REDIS_HOST", "localhost")
    port = os.getenv("REDIS_PORT", "6379")
    redis_url = os.getenv("REDIS_URL", f"redis://{host}:{port}")
    redis = None
    cache_available = FastAPICache and RedisBackend and aioredis
    if not cache_available:
        print("fastapi-cache2 is not installed; Redis cache is disabled.")
    else:
        try:
            redis = aioredis.from_url(redis_url, encoding="utf8", decode_responses=True)
            FastAPICache.init(RedisBackend(redis), prefix="fastapi-cache")
            print(f"Redis initialized successfully at {redis_url}!")
        except Exception as e:
            print(f"Redis initialization error: {e}")
    yield
    # Shutdown: release the Redis connection if one was opened.
    if redis:
        await redis.close()
app = FastAPI(lifespan=lifespan, redirect_slashes=False)
@app.middleware("http")
async def add_trace_id(request, call_next):
    """Attach a trace id to each request, log outcome+timing, echo the id back.

    Reuses an incoming X-Trace-Id header when present; the log context is
    always cleared in ``finally`` so it cannot leak between requests.
    """
    trace_id = request.headers.get("X-Trace-Id") or str(uuid.uuid4())
    request.state.traceId = trace_id
    set_request_context(
        trace_id=trace_id,
        path=request.url.path,
        method=request.method,
    )
    started_at = perf_counter()
    try:
        response = await call_next(request)
    except Exception:
        http_logger.exception(
            "http_request_failed",
            extra={
                "event": "http_request_failed",
                "trace_id": trace_id,
                "method": request.method,
                "path": request.url.path,
                "duration_ms": int((perf_counter() - started_at) * 1000),
            },
        )
        raise
    else:
        http_logger.info(
            "http_request",
            extra={
                "event": "http_request",
                "trace_id": trace_id,
                "method": request.method,
                "path": request.url.path,
                "status_code": response.status_code,
                "duration_ms": int((perf_counter() - started_at) * 1000),
            },
        )
        response.headers["X-Trace-Id"] = trace_id
        return response
    finally:
        clear_log_context()
# Central exception handling: validation errors, HTTP errors, and a catch-all.
app.add_exception_handler(RequestValidationError, validation_exception_handler)
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(Exception, unhandled_exception_handler)
from fastapi.middleware.cors import CORSMiddleware
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# rejected by browsers for credentialed requests per the Fetch/CORS spec —
# confirm whether an explicit origin list is intended for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Core routers are always mounted under /api.
app.include_router(health_router, prefix="/api")
app.include_router(actions_router, prefix="/api")
app.include_router(capabilities_router, prefix="/api")
app.include_router(pipelines_router, prefix="/api")
app.include_router(executions_router, prefix="/api")
# Auth routers are optional: they were imported with a ModuleNotFoundError
# fallback above and are skipped when unavailable.
if auth_router is not None and login_router is not None:
    app.include_router(auth_router, prefix="/api")
    app.include_router(login_router, prefix="/api")
# User routers are mounted only when the whole group imported successfully.
if all(
    router is not None
    for router in (
        get_me_router,
        list_users_router,
        update_me_router,
        update_user_router,
        update_password_router,
        delete_user_router,
    )
):
    app.include_router(get_me_router, prefix="/api/users")
    app.include_router(list_users_router, prefix="/api/users")
    app.include_router(update_me_router, prefix="/api/users")
    app.include_router(update_user_router, prefix="/api/users")
    app.include_router(update_password_router, prefix="/api/users")
    app.include_router(delete_user_router, prefix="/api/users")
# Prometheus metrics: instrument all routes and expose /metrics.
Instrumentator().instrument(app).expose(app)
+35
View File
@@ -0,0 +1,35 @@
from app.models.base import Base
from app.models.user import User, UserRole
from app.models.action import Action, ActionIngestStatus, HttpMethod
from app.models.capability import Capability
from app.models.execution import (
ExecutionRun,
ExecutionRunStatus,
ExecutionStepRun,
ExecutionStepStatus,
)
from app.models.pipeline import Pipeline, PipelineStatus
from app.models.pipeline_dialog import (
DialogMessageRole,
PipelineDialog,
PipelineDialogMessage,
)
__all__ = [
"Base",
"User",
"UserRole",
"Action",
"ActionIngestStatus",
"HttpMethod",
"Capability",
"ExecutionRun",
"ExecutionRunStatus",
"ExecutionStepRun",
"ExecutionStepStatus",
"Pipeline",
"PipelineStatus",
"DialogMessageRole",
"PipelineDialog",
"PipelineDialogMessage",
]
+115
View File
@@ -0,0 +1,115 @@
from __future__ import annotations
import enum
import uuid
from typing import Any
from sqlalchemy import Boolean, Enum, ForeignKey, Index, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class HttpMethod(str, enum.Enum):
    """HTTP verbs an imported action may use."""

    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    PATCH = "PATCH"
    DELETE = "DELETE"
    HEAD = "HEAD"
    OPTIONS = "OPTIONS"


class ActionIngestStatus(str, enum.Enum):
    """Per-operation outcome of an OpenAPI ingest."""

    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
class Action(TimestampMixin, Base):
    """One imported HTTP operation (a single OpenAPI path + method).

    Rows are produced by the OpenAPI ingest flow: `ingest_status` /
    `ingest_error` record the per-operation outcome, `raw_spec` keeps the
    original operation object, and `is_deleted` implements soft delete.
    """

    __tablename__ = "actions"
    __table_args__ = (
        # Composite index for lookups by (method, path).
        Index("ix_actions_method_path", "method", "path"),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    user_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
        index=True,
        comment="Owner of imported action",
    )
    # operationId from the source spec, when present.
    operation_id: Mapped[str | None] = mapped_column(
        String(255),
        nullable=True,
        index=True,
    )
    method: Mapped[HttpMethod] = mapped_column(
        Enum(HttpMethod, name="http_method"),
        nullable=False,
    )
    path: Mapped[str] = mapped_column(
        String(2048),
        nullable=False,
    )
    base_url: Mapped[str | None] = mapped_column(
        String(2048),
        nullable=True,
    )
    summary: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    tags: Mapped[list[str] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # JSON Schemas extracted from the spec for parameters / request / response.
    parameters_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    request_body_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    response_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    source_filename: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    # Original (per-operation) spec payload, kept verbatim.
    raw_spec: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # NOTE(review): stored as VARCHAR (native_enum=False) while `method` uses
    # a native PG enum — confirm the asymmetry is intentional.
    ingest_status: Mapped[ActionIngestStatus] = mapped_column(
        Enum(ActionIngestStatus, name="action_ingest_status", native_enum=False),
        nullable=False,
        default=ActionIngestStatus.SUCCEEDED,
        server_default=ActionIngestStatus.SUCCEEDED.value,
        index=True,
    )
    ingest_error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    # Soft-delete flag; queries are expected to filter on it.
    is_deleted: Mapped[bool] = mapped_column(
        Boolean,
        nullable=False,
        default=False,
        server_default="false",
        index=True,
    )

    owner = relationship("User", lazy="select")
+21
View File
@@ -0,0 +1,21 @@
from datetime import datetime
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy import DateTime, func
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""

    pass


class TimestampMixin:
    """Adds DB-managed created_at / updated_at timestamps (timezone-aware)."""

    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # onupdate refreshes the value on every ORM UPDATE of the row.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
+86
View File
@@ -0,0 +1,86 @@
from __future__ import annotations
import enum
import uuid
from typing import Any
from sqlalchemy import Enum, ForeignKey, Index, String, Text, UniqueConstraint
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class CapabilityType(str, enum.Enum):
    """ATOMIC wraps a single Action; COMPOSITE chains capabilities via a recipe."""

    ATOMIC = "ATOMIC"
    COMPOSITE = "COMPOSITE"


class Capability(TimestampMixin, Base):
    """A reusable building block for pipelines.

    Atomic capabilities are derived 1:1 from an Action; composite ones have a
    `recipe` describing an ordered chain of other capabilities. `llm_payload`
    carries precomputed context for LLM-driven pipeline synthesis.
    """

    __tablename__ = "capabilities"
    __table_args__ = (
        Index("ix_capabilities_action_id", "action_id"),
        # At most one capability per (owner, source action).
        UniqueConstraint(
            "user_id",
            "action_id",
            name="uq_capabilities_user_action",
        ),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    user_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=True,
        index=True,
        comment="Owner of capability",
    )
    # NULL for COMPOSITE capabilities (they have no single source action).
    action_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("actions.id", ondelete="CASCADE"),
        nullable=True,
        comment="Action source for atomic capability",
    )
    type: Mapped[CapabilityType] = mapped_column(
        Enum(CapabilityType, name="capability_type", native_enum=False),
        nullable=False,
        default=CapabilityType.ATOMIC,
        server_default=CapabilityType.ATOMIC.value,
        index=True,
    )
    name: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        index=True,
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    input_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    output_schema: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Ordered step definitions for COMPOSITE capabilities.
    recipe: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    data_format: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    # Precomputed action context for LLM prompting (see backfill script).
    llm_payload: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )

    action = relationship("Action", lazy="select")
    owner = relationship("User", lazy="select")
+159
View File
@@ -0,0 +1,159 @@
from __future__ import annotations
import enum
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import DateTime, Enum, ForeignKey, Integer, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class ExecutionRunStatus(str, enum.Enum):
    """Lifecycle of a whole pipeline run; PARTIAL_FAILED = some steps failed."""

    QUEUED = "QUEUED"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    PARTIAL_FAILED = "PARTIAL_FAILED"


class ExecutionStepStatus(str, enum.Enum):
    """Lifecycle of an individual step within a run."""

    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"
    SKIPPED = "SKIPPED"


class ExecutionRun(TimestampMixin, Base):
    """One execution of a pipeline, with its inputs, outcome and step runs."""

    __tablename__ = "execution_runs"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    pipeline_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipelines.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # SET NULL keeps the run record if the initiating user is deleted.
    initiated_by: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    status: Mapped[ExecutionRunStatus] = mapped_column(
        Enum(ExecutionRunStatus, name="execution_run_status"),
        nullable=False,
        default=ExecutionRunStatus.QUEUED,
        server_default=ExecutionRunStatus.QUEUED.value,
        index=True,
    )
    # External inputs the run was started with (defaults to an empty object).
    inputs: Mapped[dict[str, Any]] = mapped_column(
        JSON,
        nullable=False,
        default=dict,
        server_default="{}",
    )
    summary: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    started_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    finished_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )

    pipeline = relationship("Pipeline", lazy="select")
    # Step runs are loaded eagerly (selectin) and die with the run.
    step_runs = relationship(
        "ExecutionStepRun",
        back_populates="run",
        cascade="all, delete-orphan",
        lazy="selectin",
    )
class ExecutionStepRun(TimestampMixin, Base):
    """One step of an execution run: resolved inputs, request/response
    snapshots, timing and outcome.

    `capability_id` / `action_id` are plain UUID columns (no FK) so the
    history survives deletion of the referenced capability or action.
    """

    __tablename__ = "execution_step_runs"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    run_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("execution_runs.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # 1-based step ordinal within the run.
    step: Mapped[int] = mapped_column(
        Integer,
        nullable=False,
        index=True,
    )
    name: Mapped[str | None] = mapped_column(
        String(512),
        nullable=True,
    )
    capability_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    action_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    status: Mapped[ExecutionStepStatus] = mapped_column(
        Enum(ExecutionStepStatus, name="execution_step_status"),
        nullable=False,
        default=ExecutionStepStatus.PENDING,
        server_default=ExecutionStepStatus.PENDING.value,
        index=True,
    )
    # Inputs after template/reference resolution against prior step outputs.
    resolved_inputs: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    request_snapshot: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    response_snapshot: Mapped[dict[str, Any] | None] = mapped_column(
        JSON,
        nullable=True,
    )
    error: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    started_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    finished_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True),
        nullable=True,
    )
    duration_ms: Mapped[int | None] = mapped_column(
        Integer,
        nullable=True,
    )

    run = relationship("ExecutionRun", back_populates="step_runs", lazy="select")
+85
View File
@@ -0,0 +1,85 @@
import enum
import uuid
from typing import Any
from sqlalchemy import Enum, ForeignKey, String, Text
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class PipelineStatus(str, enum.Enum):
    """Pipeline lifecycle: DRAFT -> READY -> ARCHIVED."""

    DRAFT = "DRAFT"
    READY = "READY"
    ARCHIVED = "ARCHIVED"


class Pipeline(TimestampMixin, Base):
    """Scenario layer.

    A collection of nodes and the links between them — the full structure of
    the graph generated by SynthesisService and rendered on the canvas
    (React Flow).
    """

    __tablename__ = "pipelines"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    name: Mapped[str] = mapped_column(
        String(512),
        nullable=False,
        comment="Человекочитаемое название пайплайна",
    )
    description: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
        comment="Подробное описание того, что делает этот сценарий",
    )
    # Original chat prompt that produced this graph.
    user_prompt: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
        comment="Оригинальный текстовый запрос PM из чата, породивший этот граф",
    )
    nodes: Mapped[list[dict[str, Any]]] = mapped_column(
        JSON,
        nullable=False,
        default=list,
        comment="Список нод графа. Каждая нода ссылается на Capability и хранит индивидуальные параметры",
    )
    edges: Mapped[list[dict[str, Any]]] = mapped_column(
        JSON,
        nullable=False,
        default=list,
        comment="Список рёбер графа. Определяет порядок выполнения нод (DAG)",
    )
    status: Mapped[PipelineStatus] = mapped_column(
        Enum(PipelineStatus, name="pipeline_status"),
        nullable=False,
        default=PipelineStatus.DRAFT,
        server_default=PipelineStatus.DRAFT.value,
        comment="Статус пайплайна: DRAFT → READY → ARCHIVED",
    )
    created_by: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
        comment="UUID пользователя (PM), создавшего или запустившего генерацию",
    )

    creator = relationship("User", lazy="select")
    # Dialogs whose last_pipeline points here; FK is SET NULL on delete.
    dialogs = relationship(
        "PipelineDialog",
        back_populates="last_pipeline",
        passive_deletes=True,
        lazy="selectin",
    )
+119
View File
@@ -0,0 +1,119 @@
from __future__ import annotations
import enum
import uuid
from datetime import datetime
from typing import Any
from sqlalchemy import DateTime, Enum, ForeignKey, Index, String, Text, func
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class DialogMessageRole(str, enum.Enum):
    """Author of a dialog message (chat convention: user / assistant)."""

    USER = "user"
    ASSISTANT = "assistant"


class PipelineDialog(TimestampMixin, Base):
    """A pipeline-synthesis chat thread with denormalized "last ..." fields
    for cheap list rendering."""

    __tablename__ = "pipeline_dialogs"
    __table_args__ = (
        # Supports "my dialogs, most recently updated first" listings.
        Index("ix_pipeline_dialogs_user_updated_at", "user_id", "updated_at"),
    )

    # NOTE(review): no default= here, unlike the other models — presumably the
    # dialog id is supplied by the client (requests carry dialog_id); confirm.
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
    )
    user_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    title: Mapped[str | None] = mapped_column(
        String(256),
        nullable=True,
    )
    # Denormalized copies of the latest synthesis outcome for list views.
    last_status: Mapped[str | None] = mapped_column(
        String(32),
        nullable=True,
    )
    last_pipeline_id: Mapped[uuid.UUID | None] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipelines.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
    last_message_preview: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )

    user = relationship(
        "User",
        back_populates="pipeline_dialogs",
        lazy="select",
    )
    last_pipeline = relationship(
        "Pipeline",
        back_populates="dialogs",
        lazy="select",
    )
    # Messages die with the dialog; DB-level cascade is relied upon (passive_deletes).
    messages = relationship(
        "PipelineDialogMessage",
        back_populates="dialog",
        cascade="all, delete-orphan",
        passive_deletes=True,
        lazy="selectin",
    )
class PipelineDialogMessage(Base):
    """A single chat message; assistant messages may carry a structured
    synthesis payload (JSONB). Messages are append-only, so only created_at
    is tracked (no TimestampMixin).
    """

    __tablename__ = "pipeline_dialog_messages"
    __table_args__ = (
        # Supports chronological history reads per dialog.
        Index(
            "ix_pipeline_dialog_messages_dialog_created_at",
            "dialog_id",
            "created_at",
        ),
    )

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
    dialog_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        ForeignKey("pipeline_dialogs.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    role: Mapped[DialogMessageRole] = mapped_column(
        Enum(DialogMessageRole, name="dialog_message_role"),
        nullable=False,
        index=True,
    )
    content: Mapped[str] = mapped_column(
        Text,
        nullable=False,
    )
    # Full structured synthesis result attached to assistant replies.
    assistant_payload: Mapped[dict[str, Any] | None] = mapped_column(
        JSONB,
        nullable=True,
    )
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
    )

    dialog = relationship(
        "PipelineDialog",
        back_populates="messages",
        lazy="select",
    )
+39
View File
@@ -0,0 +1,39 @@
import enum
import uuid
from sqlalchemy import Boolean, Enum, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin
class UserRole(str, enum.Enum):
    """Authorization role of an account."""

    USER = "USER"
    ADMIN = "ADMIN"


class User(TimestampMixin, Base):
    """Application account; owns dialogs, imported actions and capabilities."""

    __tablename__ = "users"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # 320 = max RFC-style email length.
    email: Mapped[str] = mapped_column(String(320), unique=True, index=True, nullable=False)
    full_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    hashed_password: Mapped[str] = mapped_column(String(255), nullable=False)
    role: Mapped[UserRole] = mapped_column(
        Enum(UserRole, name="user_role"),
        nullable=False,
        default=UserRole.USER,
        server_default=UserRole.USER.value,
    )
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, server_default="true")

    # Dialogs are removed with the user (DB cascade via passive_deletes).
    pipeline_dialogs = relationship(
        "PipelineDialog",
        back_populates="user",
        cascade="all, delete-orphan",
        passive_deletes=True,
        lazy="selectin",
    )
    actions = relationship("Action", passive_deletes=True, lazy="selectin")
    capabilities = relationship("Capability", passive_deletes=True, lazy="selectin")
+69
View File
@@ -0,0 +1,69 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, computed_field
from app.models import ActionIngestStatus, HttpMethod
class ActionListItemResponse(BaseModel):
    """Action row for list views (schemas/raw spec omitted for size)."""

    id: UUID
    user_id: UUID | None = None
    operation_id: str | None = None
    method: HttpMethod
    path: str
    base_url: str | None = None
    summary: str | None = None
    description: str | None = None
    tags: list[str] | None = None
    source_filename: str | None = None
    ingest_status: ActionIngestStatus
    ingest_error: str | None = None
    created_at: datetime
    updated_at: datetime
    model_config = ConfigDict(from_attributes=True)


class ActionIngestItemResponse(BaseModel):
    """Minimal per-operation result reported by the ingest endpoint."""

    id: UUID
    user_id: UUID | None = None
    operation_id: str | None = None
    method: HttpMethod
    path: str
    summary: str | None = None
    source_filename: str | None = None
    ingest_status: ActionIngestStatus
    ingest_error: str | None = None
    model_config = ConfigDict(from_attributes=True)


class ActionDetailResponse(ActionListItemResponse):
    """Full action detail; adds the extracted schemas and the raw spec."""

    parameters_schema: dict[str, Any] | None = None
    request_body_schema: dict[str, Any] | None = None
    response_schema: dict[str, Any] | None = None
    raw_spec: dict[str, Any] | None = None

    @computed_field(return_type=dict[str, Any] | None)
    @property
    def json_schema(self) -> dict[str, Any] | None:
        """Aggregate the four schema fields, or None when all are empty/absent."""
        if not any((self.parameters_schema, self.request_body_schema, self.response_schema, self.raw_spec)):
            return None
        return {
            "parameters": self.parameters_schema,
            "request_body": self.request_body_schema,
            "response": self.response_schema,
            "raw_spec": self.raw_spec,
        }


class ActionIngestResponse(BaseModel):
    """Aggregate ingest result: counts plus the succeeded/failed actions."""

    succeeded_count: int
    failed_count: int
    succeeded_actions: list[ActionDetailResponse]
    failed_actions: list[ActionDetailResponse]
+19
View File
@@ -0,0 +1,19 @@
from pydantic import AliasChoices, BaseModel, ConfigDict, EmailStr, Field
class RegisterIn(BaseModel):
    """Registration payload; accepts both full_name and fullName keys."""

    email: EmailStr = Field(max_length=254)
    # NOTE(review): 72 looks like the bcrypt input limit — confirm hasher.
    password: str = Field(min_length=1, max_length=72)
    full_name: str = Field(
        min_length=2,
        max_length=200,
        validation_alias=AliasChoices("full_name", "fullName"),
        serialization_alias="fullName",
    )
    model_config = ConfigDict(populate_by_name=True)


class LoginIn(BaseModel):
    """Login payload (email + password)."""

    email: EmailStr = Field(max_length=254)
    password: str = Field(min_length=1, max_length=72)
+73
View File
@@ -0,0 +1,73 @@
from __future__ import annotations
from datetime import datetime
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
from app.schemas.action_sch import ActionIngestItemResponse
class CapabilityDataFormat(BaseModel):
    """Summary of a capability's wire format (parameter locations,
    content types, schema types). Pydantic deep-copies these mutable
    defaults per instance, so the [] defaults are safe here.
    """

    parameter_locations: list[str] = []
    request_content_types: list[str] = []
    request_schema_type: str | None = None
    response_content_types: list[str] = []
    response_schema_types: list[str] = []


class CapabilityResponse(BaseModel):
    """Full capability representation returned by the API."""

    id: UUID
    user_id: UUID | None = None
    action_id: UUID | None = None
    type: str = "ATOMIC"
    name: str
    description: str | None = None
    input_schema: dict[str, Any] | None = None
    output_schema: dict[str, Any] | None = None
    recipe: dict[str, Any] | None = None
    data_format: CapabilityDataFormat | None = None
    created_at: datetime
    updated_at: datetime
    model_config = ConfigDict(from_attributes=True)


class CapabilityIngestItemResponse(BaseModel):
    """Minimal capability info reported during action ingest."""

    id: UUID
    user_id: UUID | None = None
    action_id: UUID | None = None
    type: str = "ATOMIC"
    name: str
    description: str | None = None
    model_config = ConfigDict(from_attributes=True)


class ActionIngestWithCapabilitiesResponse(BaseModel):
    """Ingest result including the capabilities auto-created from actions."""

    succeeded_count: int
    failed_count: int
    created_capabilities_count: int
    succeeded_actions: list[ActionIngestItemResponse]
    failed_actions: list[ActionIngestItemResponse]
    capabilities: list[CapabilityIngestItemResponse]


class CompositeCapabilityRecipeStepCreate(BaseModel):
    """One recipe step: which capability to run (1-based order) and how its
    inputs map from prior outputs/external inputs."""

    step: int = Field(ge=1)
    capability_id: UUID
    inputs: dict[str, str] = Field(default_factory=dict)


class CompositeCapabilityRecipeCreate(BaseModel):
    """Versioned ordered list of recipe steps."""

    version: int = 1
    steps: list[CompositeCapabilityRecipeStepCreate] = Field(default_factory=list)


class CreateCompositeCapabilityRequest(BaseModel):
    """Payload for creating a COMPOSITE capability."""

    name: str = Field(min_length=1, max_length=255)
    description: str | None = None
    input_schema: dict[str, Any] | None = None
    output_schema: dict[str, Any] | None = None
    recipe: CompositeCapabilityRecipeCreate
+67
View File
@@ -0,0 +1,67 @@
from __future__ import annotations
from datetime import datetime
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
class RunPipelineRequest(BaseModel):
    """External inputs supplied when launching a pipeline run."""

    inputs: dict[str, Any] = Field(default_factory=dict)


class RunPipelineResponse(BaseModel):
    """Acknowledgement of a launched run (initial status only)."""

    run_id: UUID
    pipeline_id: UUID
    status: Literal["QUEUED", "RUNNING"]


class ExecutionRunListItemResponse(BaseModel):
    """Run row for list views; Literal values mirror ExecutionRunStatus."""

    id: UUID
    pipeline_id: UUID
    status: Literal["QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "PARTIAL_FAILED"]
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    created_at: datetime
    updated_at: datetime
    model_config = ConfigDict(from_attributes=True)


class ExecutionStepRunResponse(BaseModel):
    """Detailed view of one step run, including request/response snapshots."""

    step: int
    name: str | None = None
    capability_id: UUID | None = None
    action_id: UUID | None = None
    method: Literal["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"] | None = None
    status_code: int | None = None
    status: Literal["PENDING", "RUNNING", "SUCCEEDED", "FAILED", "SKIPPED"]
    resolved_inputs: dict[str, Any] | None = None
    accepted_payload: Any = None
    output_payload: Any = None
    request_snapshot: dict[str, Any] | None = None
    response_snapshot: dict[str, Any] | None = None
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    duration_ms: int | None = None
    created_at: datetime
    updated_at: datetime
    model_config = ConfigDict(from_attributes=True)


class ExecutionRunDetailResponse(BaseModel):
    """Full run detail with the per-step results."""

    id: UUID
    pipeline_id: UUID
    status: Literal["QUEUED", "RUNNING", "SUCCEEDED", "FAILED", "PARTIAL_FAILED"]
    inputs: dict[str, Any] = Field(default_factory=dict)
    summary: dict[str, Any] | None = None
    error: str | None = None
    started_at: datetime | None = None
    finished_at: datetime | None = None
    created_at: datetime
    updated_at: datetime
    steps: list[ExecutionStepRunResponse] = Field(default_factory=list)
+104
View File
@@ -0,0 +1,104 @@
from __future__ import annotations
from datetime import datetime
from typing import Any, Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
class PipelineInputTypeFromPrevious(BaseModel):
    """Declares which upstream step feeds a node and the value type it emits."""

    from_step: int
    type: str


class PipelineStepEndpoint(BaseModel):
    """A capability/action bound to a graph node, with I/O type hints."""

    name: str
    capability_id: UUID
    action_id: UUID | None = None
    type: str | None = None
    input_type: str | dict[str, Any] | None = None
    output_type: str | dict[str, Any] | None = None


class PipelineGraphNode(BaseModel):
    """One node of the synthesized DAG (1-based `step` ordinal)."""

    step: int
    name: str
    description: str | None = None
    input_connected_from: list[int] = Field(default_factory=list)
    output_connected_to: list[int] = Field(default_factory=list)
    input_data_type_from_previous: list[PipelineInputTypeFromPrevious] = Field(default_factory=list)
    external_inputs: list[str] = Field(default_factory=list)
    endpoints: list[PipelineStepEndpoint] = Field(default_factory=list)


class PipelineGraphEdge(BaseModel):
    """Directed edge between two steps of the DAG."""

    from_step: int
    to_step: int
    type: str


class PipelineGenerateRequest(BaseModel):
    """Chat turn for pipeline synthesis; capability_ids optionally narrows
    the candidate set."""

    dialog_id: UUID
    message: str = Field(min_length=1)
    capability_ids: list[UUID] | None = None


class PipelineGenerateResponse(BaseModel):
    """Synthesis outcome: ready graph, a request for more input, or a refusal.
    *_ru fields carry the user-facing Russian text."""

    status: Literal["ready", "needs_input", "cannot_build"]
    message_ru: str
    chat_reply_ru: str
    pipeline_id: UUID | None = None
    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)
    missing_requirements: list[str] = Field(default_factory=list)
    context_summary: str | None = None


class PipelineGraphUpdateRequest(BaseModel):
    """Manual (canvas-side) replacement of a pipeline's graph."""

    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)


class PipelineGraphUpdateResponse(BaseModel):
    """Persisted graph echoed back with the new updated_at."""

    pipeline_id: UUID
    nodes: list[PipelineGraphNode] = Field(default_factory=list)
    edges: list[PipelineGraphEdge] = Field(default_factory=list)
    updated_at: datetime


class DialogResetRequest(BaseModel):
    """Request to reset a synthesis dialog's context."""

    dialog_id: UUID


class DialogResetResponse(BaseModel):
    """Reset acknowledgement with a user-facing (Russian) message."""

    status: Literal["ok"]
    message_ru: str


class PipelineDialogListItemResponse(BaseModel):
    """Dialog row for list views (denormalized last-* fields)."""

    dialog_id: UUID
    title: str | None = None
    last_status: str | None = None
    last_pipeline_id: UUID | None = None
    last_message_preview: str | None = None
    created_at: datetime
    updated_at: datetime
    model_config = ConfigDict(from_attributes=True)


class PipelineDialogMessageResponse(BaseModel):
    """One chat message; assistant messages may carry a structured payload."""

    id: UUID
    role: Literal["user", "assistant"]
    content: str
    assistant_payload: dict[str, Any] | None = None
    created_at: datetime
    model_config = ConfigDict(from_attributes=True)


class PipelineDialogHistoryResponse(BaseModel):
    """Chronological message history of one dialog."""

    dialog_id: UUID
    title: str | None = None
    messages: list[PipelineDialogMessageResponse] = Field(default_factory=list)
+33
View File
@@ -0,0 +1,33 @@
from pydantic import BaseModel, EmailStr, ConfigDict
from uuid import UUID
from datetime import datetime
from app.models import UserRole
from typing import Optional
# NOTE(review): a second user-schema module with overlapping class names exists
# in this commit; this variant references min_approvals_required, which is NOT
# a column on the visible User model — confirm which module is current.
class UserBase(BaseModel):
    """Shared user fields."""

    email: EmailStr
    full_name: str


class UserUpdate(BaseModel):
    """Admin partial update; all fields optional."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = None
    role: Optional[UserRole] = None
    is_active: Optional[bool] = None
    min_approvals_required: Optional[int] = None


class UserResponse(UserBase):
    """User representation returned by the API."""

    id: UUID
    role: UserRole
    is_active: bool
    min_approvals_required: int
    created_at: datetime
    model_config = ConfigDict(from_attributes=True)


class UserUpdateMe(BaseModel):
    """Self-service partial update (no role/active changes)."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = None


class PasswordUpdate(BaseModel):
    """Password change payload (old password required for verification)."""

    old_password: str
    new_password: str
+45
View File
@@ -0,0 +1,45 @@
from typing import Annotated, Optional
import uuid
from datetime import datetime
from pydantic import BaseModel, EmailStr, Field, ConfigDict, field_validator
from app.models import UserRole
# NOTE(review): duplicates class names from a sibling user-schema module in
# this commit — confirm which one is actually imported by the routers.
class UserBase(BaseModel):
    """Shared user fields."""

    email: EmailStr
    full_name: Annotated[str | None, Field(max_length=255)] = None


class UserResponse(UserBase):
    """User representation returned by the API."""

    id: uuid.UUID
    role: UserRole
    is_active: bool
    created_at: datetime
    updated_at: datetime | None = None
    model_config = ConfigDict(from_attributes=True)


class UserUpdate(BaseModel):
    """Admin partial update; all fields optional."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = Field(None, min_length=2, max_length=255)
    role: Optional[UserRole] = None
    is_active: Optional[bool] = None


class UserUpdateMe(BaseModel):
    """Self-service partial update (no role/active changes)."""

    email: Optional[EmailStr] = None
    full_name: Optional[str] = Field(None, min_length=2, max_length=255)


class PasswordUpdate(BaseModel):
    """Password change payload with a minimal complexity rule."""

    old_password: str = Field(min_length=8)
    new_password: str = Field(min_length=8)

    @field_validator("new_password")
    @classmethod
    def validate_password_complexity(cls, v: str) -> str:
        """Require at least one letter and one digit in the new password."""
        if not any(c.isalpha() for c in v) or not any(c.isdigit() for c in v):
            raise ValueError("must contain at least one letter and one digit")
        return v
@@ -0,0 +1,80 @@
from __future__ import annotations
import asyncio
from sqlalchemy import select
from app.core.database.session import SessionLocal
from app.models import Action, Capability
from app.services.capability_service import CapabilityService
def _needs_backfill(capability: Capability) -> bool:
llm_payload = capability.llm_payload
if not isinstance(llm_payload, dict):
return True
if llm_payload.get("action_context_version") != "v2":
return True
if not isinstance(llm_payload.get("action_context"), dict):
return True
if not isinstance(llm_payload.get("action_context_brief"), dict):
return True
return False
async def main() -> None:
    """Backfill v2 LLM action-context payloads onto existing capabilities.

    For every capability derived from an action whose llm_payload lacks the
    v2 fields, rebuild the payload from the source Action and merge it over
    the existing one. Missing input/output/data-format schemas are filled in
    only when currently None. Commits once at the end; prints a summary.
    """
    async with SessionLocal() as session:
        result = await session.execute(
            select(Capability).where(Capability.action_id.is_not(None))
        )
        capabilities = list(result.scalars().all())
        if not capabilities:
            print("No capabilities found.")
            return
        # Bulk-load all source actions in one query, keyed by id.
        action_ids = [cap.action_id for cap in capabilities if cap.action_id is not None]
        actions_result = await session.execute(select(Action).where(Action.id.in_(action_ids)))
        actions_by_id = {action.id: action for action in actions_result.scalars().all()}
        updated = 0
        for capability in capabilities:
            if capability.action_id is None:
                continue
            if not _needs_backfill(capability):
                continue
            action = actions_by_id.get(capability.action_id)
            if action is None:
                # Dangling action_id; nothing to rebuild from.
                continue
            # NOTE: uses the service's private payload builder deliberately
            # (one-off maintenance script).
            built = CapabilityService._build_capability_payload(action)
            built_llm = built.get("llm_payload") or {}
            existing = capability.llm_payload if isinstance(capability.llm_payload, dict) else {}
            # Reassign a fresh dict (rather than mutate) so SQLAlchemy detects
            # the JSON column change.
            capability.llm_payload = {
                **existing,
                "source": existing.get("source", built_llm.get("source", "deterministic")),
                "action_context_version": built_llm.get("action_context_version", "v2"),
                "action_context": built_llm.get("action_context"),
                "action_context_brief": built_llm.get("action_context_brief"),
                "openapi_hints": built_llm.get("openapi_hints"),
            }
            # Only fill schemas that are currently absent; never overwrite.
            if capability.input_schema is None:
                capability.input_schema = built.get("input_schema")
            if capability.output_schema is None:
                capability.output_schema = built.get("output_schema")
            if capability.data_format is None:
                capability.data_format = built.get("data_format")
            updated += 1
        if not updated:
            print("No capabilities required backfill.")
            return
        await session.commit()
        print(f"Backfilled {updated} capabilities.")


if __name__ == "__main__":
    asyncio.run(main())
+30
View File
@@ -0,0 +1,30 @@
import asyncio
import os
from sqlalchemy import text
from app.core.database.session import SessionLocal
async def migrate():
    """Ad-hoc migration: add 'type' and 'recipe' columns to 'capabilities'
    and make action_id nullable (needed for COMPOSITE capabilities).

    Idempotent (IF NOT EXISTS / DROP NOT NULL), so it is safe to re-run.
    Re-raises on failure so the process exits non-zero instead of silently
    reporting success via exit code 0.
    """
    print("Starting migration: adding 'type' and 'recipe' to 'capabilities' table...")
    async with SessionLocal() as session:
        try:
            # 1. Add type column if it doesn't exist
            await session.execute(text(
                "ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS type VARCHAR(50) DEFAULT 'ATOMIC';"
            ))
            # 2. Add recipe column if it doesn't exist
            await session.execute(text(
                "ALTER TABLE capabilities ADD COLUMN IF NOT EXISTS recipe JSONB;"
            ))
            # 3. Make action_id nullable
            await session.execute(text(
                "ALTER TABLE capabilities ALTER COLUMN action_id DROP NOT NULL;"
            ))
            await session.commit()
            print("Migration completed successfully!")
        except Exception as e:
            await session.rollback()
            print(f"Migration failed: {e}")
            # Fix: the error was previously swallowed, so a failed migration
            # exited 0 and went unnoticed. Re-raise to surface the failure.
            raise


if __name__ == "__main__":
    asyncio.run(migrate())
+11
View File
@@ -0,0 +1,11 @@
from app.services.openapi_service import OpenAPIService
from app.services.capability_service import CapabilityService
from app.services.execution_service import ExecutionService
from app.services.pipeline_service import PipelineService
__all__ = [
"OpenAPIService",
"CapabilityService",
"ExecutionService",
"PipelineService",
]
+758
View File
@@ -0,0 +1,758 @@
from __future__ import annotations
import re
from typing import Any
from uuid import UUID
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, Capability
from app.models.capability import CapabilityType
class CompositeRecipeValidationError(ValueError):
    """Raised when a composite-capability recipe fails validation.

    Keeps the individual messages in ``errors`` and joins them with "; "
    to form the exception text.
    """

    def __init__(self, errors: list[str]) -> None:
        joined = "; ".join(errors)
        super().__init__(joined)
        self.errors = errors
class CapabilityService:
    def __init__(self, session: AsyncSession) -> None:
        """Bind the service to an async SQLAlchemy session (not owned; the
        caller controls commit/rollback)."""
        self.session = session
    @staticmethod
    def build_from_actions(
        actions: list[Action],
        *,
        owner_user_id: UUID,
    ) -> list[Capability]:
        """Build (but do not persist) one ATOMIC Capability per Action.

        Payloads (name, description, schemas, data_format, llm_payload) come
        from `_build_capability_payload`. The caller is responsible for adding
        the returned objects to a session.
        """
        capabilities: list[Capability] = []
        for action in actions:
            capability_payload = CapabilityService._build_capability_payload(action)
            capabilities.append(
                Capability(
                    user_id=owner_user_id,
                    action_id=action.id,
                    type=CapabilityType.ATOMIC,
                    name=capability_payload["name"],
                    description=capability_payload.get("description"),
                    input_schema=capability_payload.get("input_schema"),
                    output_schema=capability_payload.get("output_schema"),
                    data_format=capability_payload.get("data_format"),
                    llm_payload=capability_payload.get("llm_payload"),
                )
            )
        return capabilities
    async def create_composite_capability(
        self,
        *,
        owner_user_id: UUID,
        name: str,
        description: str | None = None,
        input_schema: dict[str, Any] | None = None,
        output_schema: dict[str, Any] | None = None,
        recipe: dict[str, Any],
        llm_payload: dict[str, Any] | None = None,
        data_format: dict[str, Any] | None = None,
    ) -> Capability:
        """Persist a COMPOSITE capability row without validating its recipe.

        The recipe is stored as-is; use create_validated_composite_capability
        for the validated path. Only flushes (no commit), so the caller
        controls the transaction boundary.
        """
        capability = Capability(
            user_id=owner_user_id,
            type=CapabilityType.COMPOSITE,
            name=name,
            description=description,
            input_schema=input_schema,
            output_schema=output_schema,
            recipe=recipe,
            llm_payload=llm_payload,
            data_format=data_format,
        )
        self.session.add(capability)
        # Flush to assign DB-generated fields, then refresh to load them
        # back onto the instance before returning it.
        await self.session.flush()
        await self.session.refresh(capability)
        return capability
    async def create_validated_composite_capability(
        self,
        *,
        owner_user_id: UUID,
        name: str,
        description: str | None = None,
        input_schema: dict[str, Any] | None = None,
        output_schema: dict[str, Any] | None = None,
        recipe: dict[str, Any],
        include_all: bool = False,
    ) -> Capability:
        """Validate the recipe, derive LLM/data-format metadata, and persist.

        Raises:
            CompositeRecipeValidationError: when the recipe is invalid or
                references inaccessible/non-atomic capabilities.
        """
        normalized_recipe, step_capabilities = await self.validate_composite_recipe(
            recipe=recipe,
            owner_user_id=owner_user_id,
            include_all=include_all,
        )
        llm_payload = self._build_composite_llm_payload(step_capabilities)
        # Mirrors the shape _build_data_format produces for atomic
        # capabilities, plus a composite-specific summary section.
        data_format = {
            "request_schema_type": input_schema.get("type")
            if isinstance(input_schema, dict)
            else None,
            "response_schema_types": [output_schema.get("type")]
            if isinstance(output_schema, dict)
            and isinstance(output_schema.get("type"), str)
            else [],
            "composite": {
                "version": normalized_recipe.get("version"),
                "steps_count": len(normalized_recipe.get("steps", [])),
                "step_capability_names": [
                    str(getattr(capability, "name", ""))
                    for capability in step_capabilities
                ],
            },
        }
        return await self.create_composite_capability(
            owner_user_id=owner_user_id,
            name=name,
            description=description,
            input_schema=input_schema,
            output_schema=output_schema,
            recipe=normalized_recipe,
            llm_payload=llm_payload,
            data_format=data_format,
        )
    async def validate_composite_recipe(
        self,
        *,
        recipe: dict[str, Any],
        owner_user_id: UUID,
        include_all: bool = False,
    ) -> tuple[dict[str, Any], list[Capability]]:
        """Validate and normalize a composite recipe.

        Phases: (1) structural checks per step (step number, capability_id,
        input bindings), (2) cross-step binding checks (bindings may only
        reference strictly earlier steps), (3) capability lookups (must be
        accessible, ATOMIC, and carry an action_id). Errors are accumulated
        per phase and raised together at phase boundaries.

        Returns:
            (normalized_recipe, capabilities ordered by step number).

        Raises:
            CompositeRecipeValidationError: with all accumulated messages.
        """
        errors: list[str] = []
        if not isinstance(recipe, dict):
            raise CompositeRecipeValidationError(["recipe must be an object"])
        version = recipe.get("version")
        if version != 1:
            errors.append("recipe.version must be 1")
        raw_steps = recipe.get("steps")
        if not isinstance(raw_steps, list) or not raw_steps:
            # No steps to inspect: report what we have and stop early.
            errors.append("recipe.steps must be a non-empty list")
            raise CompositeRecipeValidationError(errors)
        # Phase 1: per-step structural validation and normalization.
        normalized_steps: list[dict[str, Any]] = []
        seen_step_numbers: set[int] = set()
        for index, raw_step in enumerate(raw_steps):
            if not isinstance(raw_step, dict):
                errors.append(f"recipe.steps[{index}] must be an object")
                continue
            step_number = raw_step.get("step")
            if not isinstance(step_number, int) or step_number < 1:
                errors.append(f"recipe.steps[{index}].step must be positive integer")
                continue
            if step_number in seen_step_numbers:
                errors.append(f"recipe.steps[{index}].step duplicates step {step_number}")
            seen_step_numbers.add(step_number)
            capability_uuid = self._to_uuid(raw_step.get("capability_id"))
            if capability_uuid is None:
                errors.append(f"recipe.steps[{index}].capability_id must be UUID")
                continue
            raw_inputs = raw_step.get("inputs", {})
            if raw_inputs is None:
                raw_inputs = {}
            if not isinstance(raw_inputs, dict):
                errors.append(f"recipe.steps[{index}].inputs must be an object")
                raw_inputs = {}
            normalized_inputs: dict[str, str] = {}
            for input_name, binding in raw_inputs.items():
                if not isinstance(input_name, str) or not input_name.strip():
                    errors.append(f"recipe.steps[{index}].inputs has invalid key")
                    continue
                if not isinstance(binding, str):
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} must be string binding"
                    )
                    continue
                normalized_binding = binding.strip()
                if not normalized_binding:
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} must be non-empty binding"
                    )
                    continue
                if not self._is_supported_binding_expression(normalized_binding):
                    errors.append(
                        f"recipe.steps[{index}].inputs.{input_name} has unsupported binding '{normalized_binding}'"
                    )
                    continue
                normalized_inputs[input_name] = normalized_binding
            normalized_steps.append(
                {
                    "step": step_number,
                    "capability_id": str(capability_uuid),
                    "inputs": normalized_inputs,
                }
            )
        if errors:
            raise CompositeRecipeValidationError(errors)
        # Phase 2: ordering and cross-step binding validation.
        normalized_steps.sort(key=lambda item: item["step"])
        for idx in range(1, len(normalized_steps)):
            if normalized_steps[idx]["step"] <= normalized_steps[idx - 1]["step"]:
                errors.append("recipe.steps must be strictly increasing by step")
                break
        known_steps = {item["step"] for item in normalized_steps}
        for item in normalized_steps:
            for binding in item["inputs"].values():
                if not binding.startswith("$step."):
                    continue
                source_step = self._extract_binding_source_step(binding)
                if source_step is None:
                    errors.append(
                        f"step {item['step']}: invalid step binding '{binding}'"
                    )
                    continue
                if source_step not in known_steps:
                    errors.append(
                        f"step {item['step']}: binding references missing step {source_step}"
                    )
                    continue
                if source_step >= item["step"]:
                    errors.append(
                        f"step {item['step']}: binding references non-previous step {source_step}"
                    )
        # Phase 3: referenced capabilities must exist, be accessible,
        # be ATOMIC, and have an action to execute.
        capability_ids = [UUID(item["capability_id"]) for item in normalized_steps]
        capabilities = await self.get_capabilities(
            capability_ids=capability_ids,
            owner_user_id=owner_user_id,
            include_all=include_all,
        )
        capabilities_by_id = {str(item.id): item for item in capabilities}
        for item in normalized_steps:
            capability = capabilities_by_id.get(item["capability_id"])
            if capability is None:
                errors.append(
                    f"step {item['step']}: capability {item['capability_id']} not found or not accessible"
                )
                continue
            capability_type = self._capability_type_value(capability)
            if capability_type != CapabilityType.ATOMIC.value:
                errors.append(
                    f"step {item['step']}: nested composite is not allowed ({item['capability_id']})"
                )
                continue
            if getattr(capability, "action_id", None) is None:
                errors.append(
                    f"step {item['step']}: atomic capability {item['capability_id']} has no action_id"
                )
        if errors:
            raise CompositeRecipeValidationError(errors)
        normalized_recipe = {
            "version": 1,
            "steps": normalized_steps,
        }
        ordered_caps = [
            capabilities_by_id[item["capability_id"]]
            for item in normalized_steps
            if item["capability_id"] in capabilities_by_id
        ]
        return normalized_recipe, ordered_caps
    async def create_from_actions(
        self,
        actions: list[Action],
        *,
        owner_user_id: UUID,
        refresh: bool = True,
    ) -> list[Capability]:
        """Build and persist ATOMIC capabilities for the given actions.

        Only flushes (no commit). Set refresh=False to skip reloading
        DB-generated fields onto each new row.
        """
        capabilities = self.build_from_actions(actions, owner_user_id=owner_user_id)
        if not capabilities:
            return []
        self.session.add_all(capabilities)
        await self.session.flush()
        if refresh:
            for capability in capabilities:
                await self.session.refresh(capability)
        return capabilities
    async def get_capabilities(
        self,
        *,
        capability_ids: list[UUID] | None = None,
        action_ids: list[UUID] | None = None,
        owner_user_id: UUID | None = None,
        include_all: bool = False,
        limit: int | None = None,
        offset: int = 0,
    ) -> list[Capability]:
        """List capabilities (oldest first) with optional id/owner filters.

        The ownership filter applies only when include_all is False AND
        owner_user_id is given; otherwise all rows match the id filters.
        """
        query = select(Capability).order_by(Capability.created_at.asc())
        if not include_all and owner_user_id is not None:
            # Legacy compatibility: some old rows may have user_id=NULL while action is user-owned.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        if capability_ids:
            query = query.where(Capability.id.in_(capability_ids))
        if action_ids:
            query = query.where(Capability.action_id.in_(action_ids))
        if offset:
            query = query.offset(offset)
        if limit is not None:
            query = query.limit(limit)
        result = await self.session.execute(query)
        return list(result.scalars().all())
    async def get_capability(
        self,
        capability_id: UUID,
        *,
        owner_user_id: UUID | None = None,
        include_all: bool = False,
    ) -> Capability | None:
        """Fetch one capability by id, honoring the same ownership rules as get_capabilities."""
        query = select(Capability).where(Capability.id == capability_id)
        if not include_all and owner_user_id is not None:
            # Same legacy NULL-user_id fallback as get_capabilities.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        result = await self.session.execute(query)
        return result.scalar_one_or_none()
@staticmethod
def _is_supported_binding_expression(value: str) -> bool:
if re.fullmatch(r"\$run\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value):
return True
if re.fullmatch(r"\$step\.\d+\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value):
return True
return False
@staticmethod
def _extract_binding_source_step(value: str) -> int | None:
match = re.fullmatch(r"\$step\.(\d+)\.[A-Za-z0-9_][A-Za-z0-9_\.]*", value)
if not match:
return None
return int(match.group(1))
@staticmethod
def _to_uuid(value: Any) -> UUID | None:
try:
return UUID(str(value))
except (TypeError, ValueError):
return None
@staticmethod
def _capability_type_value(capability: Capability) -> str:
cap_type = getattr(capability, "type", None)
if isinstance(cap_type, CapabilityType):
return cap_type.value
if isinstance(cap_type, str):
return cap_type
if hasattr(cap_type, "value"):
return str(cap_type.value)
return CapabilityType.ATOMIC.value
@staticmethod
def _build_composite_llm_payload(step_capabilities: list[Capability]) -> dict[str, Any]:
step_names = [
str(getattr(capability, "name", "") or "")
for capability in step_capabilities
if str(getattr(capability, "name", "") or "").strip()
]
return {
"source": "composite",
"recipe_summary": {
"steps_count": len(step_capabilities),
"step_names": step_names,
},
}
    @staticmethod
    def _build_capability_payload(action: Action) -> dict[str, Any]:
        """Derive the full atomic-capability payload (schemas + LLM context) from an action."""
        input_schema = CapabilityService._build_input_schema(action)
        output_schema = getattr(action, "response_schema", None)
        data_format = CapabilityService._build_data_format(action)
        action_context = CapabilityService._build_action_context(
            action=action,
            input_schema=input_schema,
            output_schema=output_schema,
            data_format=data_format,
        )
        openapi_hints = CapabilityService._build_openapi_hints(
            action=action,
            input_schema=input_schema,
            output_schema=output_schema,
        )
        return {
            "name": CapabilityService._build_capability_name(action),
            "description": CapabilityService._build_capability_description(action),
            "input_schema": input_schema,
            "output_schema": output_schema,
            "data_format": data_format,
            # llm_payload carries both the full context and a trimmed brief
            # so prompt builders can pick the size they need.
            "llm_payload": {
                "source": "deterministic",
                "action_context_version": "v2",
                "action_context": action_context,
                "action_context_brief": CapabilityService._build_action_context_brief(
                    action_context=action_context,
                    openapi_hints=openapi_hints,
                ),
                "openapi_hints": openapi_hints,
            },
        }
    @staticmethod
    def _build_action_context(
        *,
        action: Action,
        input_schema: dict[str, Any] | None,
        output_schema: dict[str, Any] | None,
        data_format: dict[str, Any] | None,
    ) -> dict[str, Any]:
        """Assemble the full ('v2') action context dict for LLM consumption."""
        method = getattr(action, "method", None)
        # Tolerate both enum-like methods (with .value) and plain strings.
        method_value = method.value if hasattr(method, "value") else str(method or "")
        parameter_names = CapabilityService._extract_parameter_names_by_location(
            getattr(action, "parameters_schema", None)
        )
        request_property_names = CapabilityService._extract_schema_property_names(
            getattr(action, "request_body_schema", None)
        )
        response_property_names = CapabilityService._extract_schema_property_names(
            getattr(action, "response_schema", None)
        )
        return {
            "action_id": str(getattr(action, "id", "")),
            "operation_id": getattr(action, "operation_id", None),
            "method": method_value,
            "path": getattr(action, "path", None),
            "base_url": getattr(action, "base_url", None),
            "summary": getattr(action, "summary", None),
            "description": getattr(action, "description", None),
            "tags": getattr(action, "tags", None) or [],
            "source_filename": getattr(action, "source_filename", None),
            "input_schema": input_schema,
            "output_schema": output_schema,
            "parameters_schema": getattr(action, "parameters_schema", None),
            "request_body_schema": getattr(action, "request_body_schema", None),
            "response_schema": getattr(action, "response_schema", None),
            "raw_spec": getattr(action, "raw_spec", None),
            "data_format": data_format,
            # Pre-computed name lists so prompts don't have to walk schemas.
            "input_signals": {
                "required_inputs": CapabilityService._extract_required_inputs(input_schema),
                "parameter_names_by_location": parameter_names,
                "request_property_names": request_property_names,
            },
            "output_signals": {
                "response_property_names": response_property_names,
            },
        }
    @staticmethod
    def _build_openapi_hints(
        *,
        action: Action,
        input_schema: dict[str, Any] | None,
        output_schema: dict[str, Any] | None,
    ) -> dict[str, Any]:
        """Extract routing/content hints from the action's raw OpenAPI operation spec."""
        raw_spec = getattr(action, "raw_spec", None)
        if not isinstance(raw_spec, dict):
            raw_spec = {}
        request_content_types = CapabilityService._extract_content_types_from_request(raw_spec)
        response_status_codes, response_content_types = (
            CapabilityService._extract_response_hints(raw_spec)
        )
        security_requirements = (
            raw_spec.get("security") if isinstance(raw_spec.get("security"), list) else []
        )
        parameter_names = CapabilityService._extract_parameter_names_by_location(
            getattr(action, "parameters_schema", None)
        )
        # Operation-level x-* extensions are passed through verbatim.
        vendor_extensions = {
            key: value
            for key, value in raw_spec.items()
            if isinstance(key, str) and key.startswith("x-")
        }
        path_value = str(getattr(action, "path", "") or "")
        # Literal path segments only; '{param}' placeholders are dropped.
        path_segments = [
            segment
            for segment in path_value.strip("/").split("/")
            if segment and not segment.startswith("{")
        ]
        return {
            "deprecated": bool(raw_spec.get("deprecated")),
            "security_requirements": security_requirements,
            "request_content_types": request_content_types,
            "response_content_types": response_content_types,
            "response_status_codes": response_status_codes,
            "has_request_body": bool(getattr(action, "request_body_schema", None)),
            "has_response_body": bool(output_schema),
            "required_inputs": CapabilityService._extract_required_inputs(input_schema),
            "parameter_names_by_location": parameter_names,
            "path_segments": path_segments,
            "tags": getattr(action, "tags", None) or [],
            "vendor_extensions": vendor_extensions,
        }
@staticmethod
def _build_action_context_brief(
*,
action_context: dict[str, Any],
openapi_hints: dict[str, Any],
) -> dict[str, Any]:
return {
"operation_id": action_context.get("operation_id"),
"method": action_context.get("method"),
"path": action_context.get("path"),
"base_url": action_context.get("base_url"),
"summary": action_context.get("summary"),
"description": action_context.get("description"),
"tags": action_context.get("tags") or [],
"required_inputs": (action_context.get("input_signals") or {}).get("required_inputs") or [],
"parameter_names_by_location": (action_context.get("input_signals") or {}).get(
"parameter_names_by_location"
)
or {},
"request_content_types": openapi_hints.get("request_content_types") or [],
"response_content_types": openapi_hints.get("response_content_types") or [],
"response_status_codes": openapi_hints.get("response_status_codes") or [],
"security_requirements": openapi_hints.get("security_requirements") or [],
}
@staticmethod
def _build_capability_name(action: Action) -> str:
operation_id = getattr(action, "operation_id", None)
if operation_id:
return str(operation_id)
method = getattr(action, "method", None)
method_value = method.value.lower() if method is not None else "call"
path = getattr(action, "path", "") or ""
normalized_path = re.sub(r"[{}]", "", path).strip("/")
normalized_path = re.sub(r"[^a-zA-Z0-9/]+", "_", normalized_path)
normalized_path = normalized_path.replace("/", "_") or "root"
return f"{method_value}_{normalized_path.lower()}"
@staticmethod
def _build_capability_description(action: Action) -> str:
summary = getattr(action, "summary", None)
description = getattr(action, "description", None)
operation_id = getattr(action, "operation_id", None)
return str(
summary
or description
or operation_id
or CapabilityService._build_capability_name(action)
)
@staticmethod
def _build_input_schema(action: Action) -> dict[str, Any] | None:
parameters_schema = getattr(action, "parameters_schema", None)
request_body_schema = getattr(action, "request_body_schema", None)
if parameters_schema and request_body_schema:
return {
"type": "object",
"properties": {
"parameters": parameters_schema,
"request_body": request_body_schema,
},
}
if parameters_schema:
return parameters_schema
if request_body_schema:
return request_body_schema
return None
    @staticmethod
    def _build_data_format(action: Action) -> dict[str, Any]:
        """Summarize parameter locations, content types, and schema types for an action."""
        parameters_schema = getattr(action, "parameters_schema", None) or {}
        request_body_schema = getattr(action, "request_body_schema", None) or {}
        response_schema = getattr(action, "response_schema", None) or {}
        # Collect distinct OpenAPI parameter locations tagged onto properties
        # by the ingest step (x-parameter-location), preserving first-seen order.
        parameter_locations: list[str] = []
        if isinstance(parameters_schema, dict):
            properties = parameters_schema.get("properties", {})
            if isinstance(properties, dict):
                for property_schema in properties.values():
                    if not isinstance(property_schema, dict):
                        continue
                    location = property_schema.get("x-parameter-location")
                    if isinstance(location, str) and location not in parameter_locations:
                        parameter_locations.append(location)
        # x-content-type is stamped onto schemas during OpenAPI extraction.
        request_content_type = (
            request_body_schema.get("x-content-type")
            if isinstance(request_body_schema, dict)
            else None
        )
        response_content_type = (
            response_schema.get("x-content-type")
            if isinstance(response_schema, dict)
            else None
        )
        return {
            "parameter_locations": parameter_locations,
            "request_content_types": [request_content_type]
            if isinstance(request_content_type, str)
            else [],
            "request_schema_type": request_body_schema.get("type")
            if isinstance(request_body_schema, dict)
            else None,
            "response_content_types": [response_content_type]
            if isinstance(response_content_type, str)
            else [],
            "response_schema_types": [response_schema.get("type")]
            if isinstance(response_schema, dict)
            and isinstance(response_schema.get("type"), str)
            else [],
        }
@staticmethod
def _extract_required_inputs(input_schema: dict[str, Any] | None) -> list[str]:
if not isinstance(input_schema, dict):
return []
required = input_schema.get("required")
if isinstance(required, list):
return [str(item) for item in required if isinstance(item, str) and item]
# Nested schemas: {"properties":{"parameters":{"required":[...]}, "request_body":{"required":[...]}}}
nested_required: list[str] = []
properties = input_schema.get("properties")
if isinstance(properties, dict):
for nested_name in ("parameters", "request_body"):
nested_schema = properties.get(nested_name)
if not isinstance(nested_schema, dict):
continue
nested = nested_schema.get("required")
if isinstance(nested, list):
for value in nested:
if isinstance(value, str) and value and value not in nested_required:
nested_required.append(value)
return nested_required
@staticmethod
def _extract_parameter_names_by_location(
parameters_schema: dict[str, Any] | None,
) -> dict[str, list[str]]:
names_by_location: dict[str, list[str]] = {
"path": [],
"query": [],
"header": [],
"cookie": [],
}
if not isinstance(parameters_schema, dict):
return names_by_location
properties = parameters_schema.get("properties")
if not isinstance(properties, dict):
return names_by_location
for name, schema in properties.items():
if not isinstance(name, str):
continue
location = "query"
if isinstance(schema, dict):
location_raw = schema.get("x-parameter-location")
if isinstance(location_raw, str) and location_raw in names_by_location:
location = location_raw
if name not in names_by_location[location]:
names_by_location[location].append(name)
return names_by_location
@staticmethod
def _extract_schema_property_names(
schema: dict[str, Any] | None,
*,
limit: int = 64,
) -> list[str]:
if not isinstance(schema, dict):
return []
result: list[str] = []
queue: list[dict[str, Any]] = [schema]
seen: set[str] = set()
while queue and len(result) < limit:
current = queue.pop(0)
properties = current.get("properties")
if isinstance(properties, dict):
for key, value in properties.items():
if isinstance(key, str) and key not in seen:
seen.add(key)
result.append(key)
if len(result) >= limit:
break
if isinstance(value, dict):
queue.append(value)
items = current.get("items")
if isinstance(items, dict):
queue.append(items)
return result
@staticmethod
def _extract_content_types_from_request(raw_spec: dict[str, Any]) -> list[str]:
request_body = raw_spec.get("requestBody")
if not isinstance(request_body, dict):
return []
content = request_body.get("content")
if not isinstance(content, dict):
return []
return [str(content_type) for content_type in content.keys() if isinstance(content_type, str)]
@staticmethod
def _extract_response_hints(raw_spec: dict[str, Any]) -> tuple[list[str], list[str]]:
responses = raw_spec.get("responses")
if not isinstance(responses, dict):
return [], []
response_status_codes: list[str] = []
response_content_types: list[str] = []
for status_code, response_payload in responses.items():
status_value = str(status_code)
if status_value not in response_status_codes:
response_status_codes.append(status_value)
if not isinstance(response_payload, dict):
continue
content = response_payload.get("content")
if not isinstance(content, dict):
continue
for content_type in content.keys():
if isinstance(content_type, str) and content_type not in response_content_types:
response_content_types.append(content_type)
return response_status_codes, response_content_types
+88
View File
@@ -0,0 +1,88 @@
from __future__ import annotations
import json
import os
from typing import Any
try:
from redis import asyncio as aioredis
except ModuleNotFoundError:
aioredis = None
from app.utils.ollama_client import chat_json, summarize_dialog_text
class DialogMemoryService:
    """Redis-backed short-term dialog memory with LLM-generated summaries.

    Best-effort by design: every public method degrades to a no-op / empty
    result when the redis package is missing or the server is unreachable.
    """
    def __init__(self) -> None:
        """Read connection settings and TTL from environment variables."""
        # REDIS_URL wins; otherwise the URL is assembled from REDIS_HOST/REDIS_PORT.
        redis_host = os.getenv("REDIS_HOST", "localhost")
        redis_port = os.getenv("REDIS_PORT", "6379")
        self.redis_url = os.getenv("REDIS_URL", f"redis://{redis_host}:{redis_port}")
        self.ttl_seconds = int(os.getenv("DIALOG_TTL_SECONDS", "86400"))
    async def get_context(self, dialog_id: str) -> tuple[list[dict[str, Any]], str | None]:
        """Return (messages, summary) for the dialog; ([], None) when Redis is unavailable."""
        redis = await self._get_redis()
        if redis is None:
            return [], None
        messages_raw = await redis.get(self._messages_key(dialog_id))
        summary = await redis.get(self._summary_key(dialog_id))
        messages = self._decode_messages(messages_raw)
        return messages, summary
    async def append_and_summarize(self, dialog_id: str, role: str, content: str) -> str | None:
        """Append a message, refresh the stored summary, and return it.

        Falls back to a tail-of-dialog summary when the LLM call fails,
        so message persistence is never blocked by summarization.
        """
        redis = await self._get_redis()
        if redis is None:
            return None
        messages_key = self._messages_key(dialog_id)
        summary_key = self._summary_key(dialog_id)
        current_messages = self._decode_messages(await redis.get(messages_key))
        current_messages.append({"role": role, "content": content})
        # TTL is refreshed on every write so active dialogs stay alive.
        await redis.set(messages_key, json.dumps(current_messages, ensure_ascii=False), ex=self.ttl_seconds)
        try:
            summary = await summarize_dialog_text(current_messages)
        except Exception:
            # Best-effort: any summarizer failure falls through to the fallback.
            summary = None
        if summary is None:
            summary = self._fallback_summary(current_messages)
        await redis.set(summary_key, summary, ex=self.ttl_seconds)
        return summary
    async def reset(self, dialog_id: str) -> None:
        """Drop both the message list and the summary for the dialog."""
        redis = await self._get_redis()
        if redis is None:
            return
        await redis.delete(self._messages_key(dialog_id), self._summary_key(dialog_id))
    async def _get_redis(self):
        """Connect and ping; returns None when redis is missing or unreachable.

        NOTE(review): a fresh connection is created per call and never closed —
        presumably acceptable at current load, but consider a shared client; confirm.
        """
        if aioredis is None:
            return None
        try:
            redis = aioredis.from_url(self.redis_url, encoding="utf8", decode_responses=True)
            await redis.ping()
            return redis
        except Exception:
            return None
    def _messages_key(self, dialog_id: str) -> str:
        """Redis key holding the JSON-encoded message list."""
        return f"dialog:{dialog_id}:messages"
    def _summary_key(self, dialog_id: str) -> str:
        """Redis key holding the latest summary string."""
        return f"dialog:{dialog_id}:summary"
    def _decode_messages(self, payload: str | None) -> list[dict[str, Any]]:
        """Parse the stored JSON message list; tolerate missing/corrupt payloads."""
        if not payload:
            return []
        try:
            parsed = json.loads(payload)
        except json.JSONDecodeError:
            return []
        if not isinstance(parsed, list):
            return []
        # Drop any non-dict entries that may have been stored by older code.
        return [item for item in parsed if isinstance(item, dict)]
    def _fallback_summary(self, messages: list[dict[str, Any]]) -> str:
        """Cheap summary: last four non-empty message bodies joined by newlines."""
        chunks = [str(item.get("content", "")) for item in messages[-4:]]
        return "\n".join(chunk for chunk in chunks if chunk)
File diff suppressed because it is too large Load Diff
+371
View File
@@ -0,0 +1,371 @@
from __future__ import annotations
import re
from typing import Any
import yaml
from app.models import ActionIngestStatus, HttpMethod
class OpenAPIService:
SUPPORTED_METHODS = {method.value.lower(): method for method in HttpMethod}
JSON_CONTENT_TYPES = ("application/json", "application/*+json")
    @staticmethod
    def load_document(raw_bytes: bytes) -> dict[str, Any]:
        """Parse and minimally validate an OpenAPI 3.x document.

        Accepts YAML or JSON bytes (UTF-8). Requires a non-empty `paths`
        section and a usable servers[0].url.

        Raises:
            ValueError: on empty input, bad encoding, invalid YAML/JSON,
                unsupported version, or missing paths / base URL.
        """
        if not raw_bytes:
            raise ValueError("OpenAPI file is empty")
        try:
            # safe_load parses both YAML and JSON (JSON is a YAML subset).
            document = yaml.safe_load(raw_bytes.decode("utf-8"))
        except UnicodeDecodeError as exc:
            raise ValueError("OpenAPI file must be UTF-8 encoded") from exc
        except yaml.YAMLError as exc:
            raise ValueError("OpenAPI file is not valid YAML or JSON") from exc
        if not isinstance(document, dict):
            raise ValueError("OpenAPI root must be an object")
        openapi_version = document.get("openapi")
        if not isinstance(openapi_version, str) or not openapi_version.startswith("3."):
            raise ValueError("Only OpenAPI 3.x documents are supported")
        if not isinstance(document.get("paths"), dict) or not document["paths"]:
            raise ValueError("OpenAPI file must contain a non-empty paths section")
        base_url = OpenAPIService._extract_base_url(document)
        if base_url is None:
            raise ValueError(
                "OpenAPI file must contain servers[0].url (base_url)"
            )
        return document
@classmethod
def extract_actions(
cls,
document: dict[str, Any],
*,
source_filename: str | None = None,
) -> list[dict[str, Any]]:
return cls.extract_actions_with_failures(document, source_filename=source_filename)["succeeded"]
    @classmethod
    def extract_actions_with_failures(
        cls,
        document: dict[str, Any],
        *,
        source_filename: str | None = None,
    ) -> dict[str, list[dict[str, Any]]]:
        """Walk paths/operations, splitting results into 'succeeded'/'failed' payloads.

        Per-operation parse errors are captured as FAILED payloads instead of
        aborting the whole document.
        """
        base_url = cls._extract_base_url(document)
        succeeded_actions: list[dict[str, Any]] = []
        failed_actions: list[dict[str, Any]] = []
        for path, path_item in document.get("paths", {}).items():
            if not isinstance(path_item, dict):
                continue
            # Path-level parameters are shared by every operation under the path.
            shared_parameters = path_item.get("parameters", [])
            for method_name, operation in path_item.items():
                # Skips non-method keys like 'parameters'/'summary' as well.
                if method_name not in cls.SUPPORTED_METHODS:
                    continue
                if not isinstance(operation, dict):
                    failed_actions.append(
                        cls._build_failed_action_payload(
                            method_name=method_name,
                            path=path,
                            base_url=base_url,
                            source_filename=source_filename,
                            raw_spec=operation,
                            error_message="Operation definition must be an object",
                        )
                    )
                    continue
                try:
                    succeeded_actions.append(
                        cls._build_succeeded_action_payload(
                            method_name=method_name,
                            path=path,
                            operation=operation,
                            shared_parameters=shared_parameters,
                            document=document,
                            base_url=base_url,
                            source_filename=source_filename,
                        )
                    )
                except ValueError as exc:
                    # e.g. an unresolvable $ref — record it and keep going.
                    failed_actions.append(
                        cls._build_failed_action_payload(
                            method_name=method_name,
                            path=path,
                            base_url=base_url,
                            source_filename=source_filename,
                            raw_spec=operation,
                            error_message=str(exc),
                        )
                    )
        return {
            "succeeded": succeeded_actions,
            "failed": failed_actions,
        }
    @classmethod
    def _build_succeeded_action_payload(
        cls,
        *,
        method_name: str,
        path: str,
        operation: dict[str, Any],
        shared_parameters: list[Any] | None,
        document: dict[str, Any],
        base_url: str | None,
        source_filename: str | None,
    ) -> dict[str, Any]:
        """Build a SUCCEEDED action payload from one dereferenced operation.

        Raises:
            ValueError: propagated from $ref resolution.
        """
        normalized_operation = cls._dereference(operation, document)
        parameters = cls._merge_parameters(shared_parameters, normalized_operation.get("parameters", []), document)
        return {
            # Fall back to a synthetic id when operationId is absent.
            "operation_id": normalized_operation.get("operationId") or cls._build_operation_id(method_name, path),
            "method": cls.SUPPORTED_METHODS[method_name],
            "path": path,
            "base_url": base_url,
            "summary": normalized_operation.get("summary"),
            "description": normalized_operation.get("description"),
            "tags": normalized_operation.get("tags"),
            "parameters_schema": cls._build_parameters_schema(parameters, document),
            "request_body_schema": cls._extract_request_body_schema(normalized_operation, document),
            "response_schema": cls._extract_response_schema(normalized_operation, document),
            "source_filename": source_filename,
            "raw_spec": normalized_operation,
            "ingest_status": ActionIngestStatus.SUCCEEDED,
            "ingest_error": None,
        }
    @classmethod
    def _build_failed_action_payload(
        cls,
        *,
        method_name: str,
        path: str,
        base_url: str | None,
        source_filename: str | None,
        raw_spec: Any,
        error_message: str,
    ) -> dict[str, Any]:
        """Build a FAILED action payload preserving whatever metadata is salvageable."""
        # raw_spec may be anything when the operation was malformed.
        operation = raw_spec if isinstance(raw_spec, dict) else {}
        return {
            "operation_id": operation.get("operationId") or cls._build_operation_id(method_name, path),
            "method": cls.SUPPORTED_METHODS[method_name],
            "path": path,
            "base_url": base_url,
            "summary": operation.get("summary"),
            "description": operation.get("description"),
            "tags": operation.get("tags"),
            "parameters_schema": None,
            "request_body_schema": None,
            "response_schema": None,
            "source_filename": source_filename,
            "raw_spec": operation or None,
            "ingest_status": ActionIngestStatus.FAILED,
            "ingest_error": error_message,
        }
@staticmethod
def _extract_base_url(document: dict[str, Any]) -> str | None:
servers = document.get("servers")
if isinstance(servers, list) and servers:
first_server = servers[0]
if isinstance(first_server, dict):
url = first_server.get("url")
if isinstance(url, str):
normalized_url = url.strip()
if normalized_url:
return normalized_url
return None
@classmethod
def _merge_parameters(
cls,
path_parameters: list[Any] | None,
operation_parameters: list[Any] | None,
document: dict[str, Any],
) -> list[dict[str, Any]]:
merged: dict[tuple[str | None, str | None], dict[str, Any]] = {}
for raw_parameter in (path_parameters or []) + (operation_parameters or []):
parameter = cls._dereference(raw_parameter, document)
if not isinstance(parameter, dict):
continue
key = (parameter.get("name"), parameter.get("in"))
merged[key] = parameter
return list(merged.values())
    @classmethod
    def _build_parameters_schema(
        cls,
        parameters: list[dict[str, Any]],
        document: dict[str, Any],
    ) -> dict[str, Any] | None:
        """Fold OpenAPI parameters into one JSON-schema object.

        Each property is tagged with 'x-parameter-location' so downstream
        code can route values to query/path/header/cookie. Returns None
        when there are no usable parameters.
        """
        if not parameters:
            return None
        properties: dict[str, Any] = {}
        required: list[str] = []
        for parameter in parameters:
            name = parameter.get("name")
            if not name:
                continue
            if parameter.get("in") not in {"query", "path", "header", "cookie"}:
                continue
            schema = parameter.get("schema")
            if schema is None:
                # OpenAPI allows parameters defined via `content` instead of `schema`.
                schema = cls._extract_schema_from_content(parameter.get("content"), document)
            else:
                schema = cls._dereference(schema, document)
            # Default to a bare string schema when none was given.
            property_schema = schema if isinstance(schema, dict) else {"type": "string"}
            property_schema = {
                **property_schema,
                "x-parameter-location": parameter.get("in"),
            }
            if parameter.get("description"):
                property_schema["description"] = parameter["description"]
            properties[name] = property_schema
            if parameter.get("required"):
                required.append(name)
        if not properties:
            return None
        schema: dict[str, Any] = {
            "type": "object",
            "properties": properties,
        }
        if required:
            schema["required"] = required
        return schema
@classmethod
def _extract_request_body_schema(
cls,
operation: dict[str, Any],
document: dict[str, Any],
) -> dict[str, Any] | None:
request_body = operation.get("requestBody")
if not isinstance(request_body, dict):
return None
request_body = cls._dereference(request_body, document)
schema = cls._extract_schema_from_content(request_body.get("content"), document)
if not isinstance(schema, dict):
return None
if request_body.get("required"):
schema = {**schema, "x-required": True}
return schema
    @classmethod
    def _extract_response_schema(
        cls,
        operation: dict[str, Any],
        document: dict[str, Any],
    ) -> dict[str, Any] | None:
        """Return the schema of the first 2xx response (spec order).

        Falls back to a {'description': ...} stub when a 2xx response has
        no usable content schema; None when no 2xx response matches.
        """
        responses = operation.get("responses")
        if not isinstance(responses, dict):
            return None
        for status_code, response in responses.items():
            # Only success responses are considered; keys may be ints or strings.
            if not str(status_code).startswith("2"):
                continue
            normalized_response = cls._dereference(response, document)
            if not isinstance(normalized_response, dict):
                continue
            schema = cls._extract_schema_from_content(normalized_response.get("content"), document)
            if isinstance(schema, dict):
                return schema
            if normalized_response.get("description"):
                return {"description": normalized_response["description"]}
        return None
    @classmethod
    def _extract_schema_from_content(cls, content: Any, document: dict[str, Any]) -> dict[str, Any] | None:
        """Pick the best schema from an OpenAPI `content` map, preferring JSON.

        The winning schema is dereferenced and tagged with 'x-content-type'.
        """
        if not isinstance(content, dict):
            return None
        # Try JSON-ish content types first, then everything else in spec order.
        preferred_content_type = next((content_type for content_type in cls.JSON_CONTENT_TYPES if content_type in content), None)
        items = []
        if preferred_content_type:
            items.append((preferred_content_type, content[preferred_content_type]))
        items.extend((content_type, value) for content_type, value in content.items() if content_type != preferred_content_type)
        for content_type, value in items:
            if not isinstance(value, dict):
                continue
            schema = value.get("schema")
            if not isinstance(schema, dict):
                continue
            normalized_schema = cls._dereference(schema, document)
            if isinstance(normalized_schema, dict):
                return {
                    **normalized_schema,
                    "x-content-type": content_type,
                }
        return None
@classmethod
def _dereference(cls, value: Any, document: dict[str, Any]) -> Any:
    """Recursively inline local ``$ref`` pointers inside *value*.

    Sibling keys placed next to a ``$ref`` override keys of the resolved
    target, matching common OpenAPI tooling behavior.
    """
    if isinstance(value, list):
        return [cls._dereference(entry, document) for entry in value]
    if not isinstance(value, dict):
        return value
    if "$ref" not in value:
        return {key: cls._dereference(entry, document) for key, entry in value.items()}
    target = cls._dereference(cls._resolve_ref(value["$ref"], document), document)
    if not isinstance(target, dict):
        return target
    siblings = {
        key: cls._dereference(entry, document)
        for key, entry in value.items()
        if key != "$ref"
    }
    return {**target, **siblings}
@staticmethod
def _resolve_ref(ref: str, document: dict[str, Any]) -> Any:
if not ref.startswith("#/"):
raise ValueError(f"Only local $ref values are supported, got: {ref}")
current: Any = document
for part in ref[2:].split("/"):
token = part.replace("~1", "/").replace("~0", "~")
if not isinstance(current, dict) or token not in current:
raise ValueError(f"Could not resolve OpenAPI reference: {ref}")
current = current[token]
return current
@staticmethod
def _build_operation_id(method_name: str, path: str) -> str:
normalized_path = re.sub(r"[{}]", "", path).strip("/")
normalized_path = re.sub(r"[^a-zA-Z0-9/]+", "_", normalized_path)
normalized_path = normalized_path.replace("/", "_") or "root"
return f"{method_name.lower()}_{normalized_path.lower()}"
@@ -0,0 +1,176 @@
from __future__ import annotations
from typing import Any
from uuid import UUID
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import DialogMessageRole, PipelineDialog, PipelineDialogMessage
class DialogAccessError(Exception):
    """Raised when a dialog does not exist or belongs to another user."""
    pass
class PipelineDialogService:
    """Data-access service for pipeline dialogs and their messages.

    Every read/write path enforces that the dialog belongs to the
    requesting user; violations raise :class:`DialogAccessError`.
    """

    def __init__(self, session: AsyncSession) -> None:
        # The caller owns the session lifecycle; this service only uses it.
        self.session = session

    async def list_dialogs(
        self,
        *,
        user_id: UUID,
        limit: int,
        offset: int,
    ) -> list[PipelineDialog]:
        """Return a page of the user's dialogs, most recently updated first."""
        query = (
            select(PipelineDialog)
            .where(PipelineDialog.user_id == user_id)
            .order_by(PipelineDialog.updated_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def get_history(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        limit: int,
        offset: int,
    ) -> tuple[PipelineDialog, list[PipelineDialogMessage]]:
        """Return the dialog plus one page of its messages, oldest-first.

        Pagination walks backwards from the newest message (DESC query);
        the fetched page is then reversed so callers get chronological order.

        Raises:
            DialogAccessError: if the dialog is missing or owned by another user.
        """
        dialog = await self._get_dialog_owned_by_user(dialog_id=dialog_id, user_id=user_id)
        query = (
            select(PipelineDialogMessage)
            .where(PipelineDialogMessage.dialog_id == dialog.id)
            .order_by(PipelineDialogMessage.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
        result = await self.session.execute(query)
        messages_desc = list(result.scalars().all())
        return dialog, list(reversed(messages_desc))

    async def get_dialog(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
    ) -> PipelineDialog:
        """Fetch a single dialog, enforcing ownership."""
        return await self._get_dialog_owned_by_user(dialog_id=dialog_id, user_id=user_id)

    async def append_user_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        content: str,
    ) -> PipelineDialogMessage:
        """Append a user message; creates the dialog on the first message."""
        return await self._append_message(
            dialog_id=dialog_id,
            user_id=user_id,
            role=DialogMessageRole.USER,
            content=content,
            assistant_payload=None,
            create_dialog_if_missing=True,
        )

    async def append_assistant_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        content: str,
        assistant_payload: dict[str, Any],
    ) -> PipelineDialogMessage:
        """Append an assistant message to an existing dialog."""
        return await self._append_message(
            dialog_id=dialog_id,
            user_id=user_id,
            role=DialogMessageRole.ASSISTANT,
            content=content,
            assistant_payload=assistant_payload,
            create_dialog_if_missing=False,
        )

    async def _append_message(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
        role: DialogMessageRole,
        content: str,
        assistant_payload: dict[str, Any] | None,
        create_dialog_if_missing: bool,
    ) -> PipelineDialogMessage:
        """Persist one message and refresh the dialog's denormalized fields.

        Commits the session before returning the freshly added message.

        Raises:
            DialogAccessError: when the dialog is missing (and creation is
                disallowed) or belongs to a different user.
        """
        dialog = await self.session.get(PipelineDialog, dialog_id)
        if dialog is None:
            if not create_dialog_if_missing:
                raise DialogAccessError("Dialog not found")
            dialog = PipelineDialog(
                id=dialog_id,
                user_id=user_id,
                title=self._build_title(content),
            )
            self.session.add(dialog)
            # Flush so the dialog row exists before inserting the FK-bearing message.
            await self.session.flush()
        elif dialog.user_id != user_id:
            raise DialogAccessError("Dialog access denied")
        # Backfill a title from the first user message of an untitled dialog.
        if role == DialogMessageRole.USER and not dialog.title:
            dialog.title = self._build_title(content)
        message = PipelineDialogMessage(
            dialog_id=dialog.id,
            role=role,
            content=content,
            assistant_payload=assistant_payload,
        )
        self.session.add(message)
        dialog.last_message_preview = self._build_preview(content)
        if role == DialogMessageRole.ASSISTANT and assistant_payload:
            status = assistant_payload.get("status")
            if isinstance(status, str):
                dialog.last_status = status
            pipeline_id = self._parse_uuid(assistant_payload.get("pipeline_id"))
            if pipeline_id is not None:
                # Preserve the last valid graph reference for non-ready statuses.
                dialog.last_pipeline_id = pipeline_id
        await self.session.commit()
        return message

    async def _get_dialog_owned_by_user(
        self,
        *,
        dialog_id: UUID,
        user_id: UUID,
    ) -> PipelineDialog:
        """Load a dialog and verify it belongs to *user_id*."""
        dialog = await self.session.get(PipelineDialog, dialog_id)
        if dialog is None:
            raise DialogAccessError("Dialog not found")
        if dialog.user_id != user_id:
            raise DialogAccessError("Dialog access denied")
        return dialog

    def _build_title(self, content: str) -> str:
        """Single-line title from message content, capped at 120 chars."""
        text = (content or "").strip().replace("\n", " ")
        return (text[:120] or "Pipeline dialog")

    def _build_preview(self, content: str) -> str:
        """Single-line preview from message content, capped at 280 chars."""
        text = (content or "").strip().replace("\n", " ")
        return text[:280]

    def _parse_uuid(self, value: Any) -> UUID | None:
        """Best-effort UUID coercion; malformed strings become None."""
        if isinstance(value, UUID):
            return value
        if isinstance(value, str):
            try:
                return UUID(value)
            except ValueError:
                return None
        return None
File diff suppressed because it is too large Load Diff
+491
View File
@@ -0,0 +1,491 @@
from __future__ import annotations
import re
from typing import Any, NamedTuple
from uuid import UUID
from sqlalchemy import and_, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Action, Capability
from app.models.capability import CapabilityType
class SelectedCapability(NamedTuple):
    """A capability candidate with its lexical relevance score."""

    capability: Capability
    score: float
    # "high" / "medium" / "low"; the tier of the top match is shared by
    # every item the selector returns.
    confidence_tier: str = "high"
class SemanticSelectionService:
    """Lexical (token-overlap) capability selector for pipeline generation.

    Scores each executable capability against the user's query using
    token overlap with RU/EN stemming-like normalization and alias
    expansion, plus domain (CRM) and specificity bonuses.
    """

    # Score thresholds for mapping the top score to a confidence tier.
    HIGH_CONFIDENCE_THRESHOLD = 0.45
    MEDIUM_CONFIDENCE_THRESHOLD = 0.30
    # If the top two scores are closer than this, confidence is "low".
    LOW_MARGIN_THRESHOLD = 0.05

    # Domain-signal vocabulary (EN + RU): overlap here earns a CRM bonus.
    CRM_TOKENS = {
        "crm",
        "segment",
        "segments",
        "audience",
        "campaign",
        "campaigns",
        "mailing",
        "newsletter",
        "lead",
        "leads",
        "retention",
        "cohort",
        "churn",
        "conversion",
        "promo",
        "offer",
        "offers",
        "email",
        "emails",
        "push",
        "sale",
        "sales",
        "сегмент",
        "сегменты",
        "аудитория",
        "кампания",
        "кампании",
        "рассылка",
        "лид",
        "лиды",
        "ретеншн",
        "конверсия",
        "оффер",
        "офферы",
        "пуш",
        "продажи",
        "клиент",
        "клиенты",
    }

    # Low-information CRUD/API vocabulary; heavy use of these is penalized.
    GENERIC_TOKENS = {
        "get",
        "list",
        "create",
        "update",
        "delete",
        "call",
        "data",
        "info",
        "items",
        "resource",
        "resources",
        "service",
        "api",
        "handle",
        "handler",
        "manage",
        "process",
        "method",
        "action",
        "fetch",
        "general",
        "common",
        "получить",
        "список",
        "создать",
        "обновить",
        "удалить",
        "данные",
        "инфо",
        "ресурс",
        "сервис",
        "метод",
        "действие",
        "общее",
    }

    # Dropped during tokenization (function words + pipeline boilerplate).
    _STOPWORDS = {
        "and",
        "the",
        "for",
        "with",
        "from",
        "into",
        "that",
        "this",
        "что",
        "это",
        "как",
        "для",
        "или",
        "при",
        "про",
        "надо",
        "нужно",
        "хочу",
        "build",
        "pipeline",
        "workflow",
        "scenario",
        "automation",
        "пайплайн",
        "сценарий",
        "автоматизация",
        "построй",
        "собери",
    }

    # Cross-language synonym bridges keyed by token prefix.
    _ALIAS_EXPANSIONS = {
        "польз": {"user", "users", "client", "clients", "пользователь", "пользователи"},
        "клиент": {"client", "clients", "user", "users", "клиент", "клиенты"},
        "юзер": {"user", "users", "пользователь", "пользователи"},
        "получ": {"get", "fetch", "list", "retrieve", "получить", "список"},
        "спис": {"list", "get", "fetch", "список", "получить"},
        "созд": {"create", "add", "post", "создать"},
        "обнов": {"update", "patch", "put", "обновить"},
        "удал": {"delete", "remove", "del", "удалить"},
        "рассыл": {"mailing", "newsletter", "broadcast", "email", "рассылка"},
        "сегмент": {"segment", "segments", "сегмент", "сегменты"},
        "лид": {"lead", "leads", "лид", "лиды"},
        "отчет": {"report", "analytics", "отчет", "отчёт"},
        "отчёт": {"report", "analytics", "отчет", "отчёт"},
        "user": {"пользователь", "пользователи", "user", "users"},
        "users": {"пользователь", "пользователи", "user", "users"},
        "get": {"получить", "список", "get", "fetch", "list"},
        "fetch": {"получить", "список", "get", "fetch", "list"},
        "list": {"получить", "список", "get", "fetch", "list"},
    }

    async def select_capabilities(
        self,
        session: AsyncSession,
        user_query: str,
        owner_user_id: UUID | None = None,
        limit: int = 10,
    ) -> list[SelectedCapability]:
        """Rank up to *limit* executable capabilities relevant to *user_query*.

        Returns an empty list when the query has no usable tokens or no
        executable capabilities exist; otherwise every returned item carries
        the confidence tier computed from the top match.
        """
        query_tokens = self._tokenize(user_query)
        if not query_tokens:
            return []
        query = select(Capability).order_by(Capability.created_at.asc())
        if owner_user_id is not None:
            # User-scoped with legacy compatibility:
            # some old capabilities may have user_id=NULL while their source action has owner.
            query = query.outerjoin(Action, Capability.action_id == Action.id).where(
                or_(
                    Capability.user_id == owner_user_id,
                    and_(
                        Capability.user_id.is_(None),
                        Action.user_id == owner_user_id,
                    ),
                )
            )
        # Hard cap to keep in-memory scoring bounded.
        query = query.limit(200)
        result = await session.execute(query)
        capabilities = list(result.scalars().all())
        executable_capabilities = [
            capability
            for capability in capabilities
            if self._is_executable_capability(capability)
        ]
        candidates = executable_capabilities
        if not candidates:
            return []
        query_tokens_expanded = self._expand_tokens(query_tokens)
        ranked: list[SelectedCapability] = []
        for capability in candidates:
            score = self._score_capability(query_tokens, query_tokens_expanded, capability)
            if score <= 0:
                continue
            ranked.append(SelectedCapability(capability=capability, score=score))
        ranked.sort(key=lambda item: item.score, reverse=True)
        if not ranked:
            if candidates:
                # Fallback: keep generation moving even when lexical matching is weak.
                return [
                    SelectedCapability(
                        capability=capability,
                        score=0.01,
                        confidence_tier="low",
                    )
                    for capability in candidates[:limit]
                ]
            return []
        top_score = ranked[0].score
        second_score = ranked[1].score if len(ranked) > 1 else 0.0
        margin = top_score - second_score
        confidence_tier = self._resolve_confidence_tier(top_score, margin)
        return [
            SelectedCapability(
                capability=item.capability,
                score=item.score,
                confidence_tier=confidence_tier,
            )
            for item in ranked[:limit]
        ]

    def _score_capability(
        self,
        query_tokens: set[str],
        query_tokens_expanded: set[str],
        capability: Capability,
    ) -> float:
        """Score one capability against the query; <= 0 means no match."""
        name = str(getattr(capability, "name", "") or "")
        description = str(getattr(capability, "description", "") or "")
        name_tokens = self._tokenize(name)
        description_tokens = self._tokenize(description)
        context_tokens = self._extract_context_tokens(capability)
        recipe_tokens = self._extract_recipe_tokens(capability)
        combined_tokens = name_tokens | description_tokens | context_tokens | recipe_tokens
        if not combined_tokens:
            return 0.0
        combined_tokens_expanded = self._expand_tokens(combined_tokens)
        overlap = query_tokens_expanded & combined_tokens_expanded
        if not overlap:
            return 0.0
        overlap_ratio = len(overlap) / len(query_tokens_expanded)
        name_tokens_expanded = self._expand_tokens(name_tokens)
        # Matches in the capability name are weighted above general overlap.
        name_ratio = len(query_tokens_expanded & name_tokens_expanded) / len(query_tokens_expanded)
        # Bonus when the query is fully covered by the capability's vocabulary.
        exact_bonus = 0.22 if query_tokens_expanded <= combined_tokens_expanded else 0.0
        context_ratio = 0.0
        context_bonus = 0.0
        if context_tokens:
            context_tokens_expanded = self._expand_tokens(context_tokens)
            context_overlap = query_tokens_expanded & context_tokens_expanded
            context_ratio = len(context_overlap) / len(query_tokens_expanded)
            context_bonus = min(0.16, len(context_overlap) * 0.03)
        generic_expanded = self._expand_tokens(self.GENERIC_TOKENS)
        # Entity bonus: reward overlap on non-generic (domain-specific) tokens.
        entity_overlap = overlap - generic_expanded
        entity_bonus = min(0.18, len(entity_overlap) * 0.06) if entity_overlap else 0.0
        query_crm_tokens = query_tokens_expanded & self.CRM_TOKENS
        capability_crm_tokens = combined_tokens_expanded & self.CRM_TOKENS
        crm_bonus = 0.0
        if query_crm_tokens and capability_crm_tokens:
            crm_overlap = len(query_crm_tokens & capability_crm_tokens)
            crm_bonus = 0.12 + min(0.14, crm_overlap * 0.04)
        generic_penalty = self._generic_capability_penalty(combined_tokens)
        return (
            max(overlap_ratio, name_ratio * 1.12, context_ratio * 0.95)
            + exact_bonus
            + context_bonus
            + entity_bonus
            + crm_bonus
            - generic_penalty
        )

    def _extract_context_tokens(self, capability: Capability) -> set[str]:
        """Tokenize selected text fields from the capability's LLM payload."""
        llm_payload = getattr(capability, "llm_payload", None)
        if not isinstance(llm_payload, dict):
            return set()
        chunks: list[str] = []
        for key in (
            "action_context_brief",
            "openapi_hints",
            "action_context",
            "recipe_summary",
            "composite_context",
        ):
            value = llm_payload.get(key)
            if value is None:
                continue
            self._collect_text_chunks(value=value, chunks=chunks, depth=0, max_depth=4)
        tokens: set[str] = set()
        # Cap the chunk count so pathological payloads stay cheap to score.
        for chunk in chunks[:120]:
            tokens.update(self._tokenize(chunk))
        return tokens

    def _extract_recipe_tokens(self, capability: Capability) -> set[str]:
        """Tokenize input keys/values from a composite capability's recipe steps."""
        recipe = getattr(capability, "recipe", None)
        if not isinstance(recipe, dict):
            return set()
        steps = recipe.get("steps")
        if not isinstance(steps, list):
            return set()
        chunks: list[str] = []
        for raw_step in steps[:30]:
            if not isinstance(raw_step, dict):
                continue
            inputs = raw_step.get("inputs")
            if not isinstance(inputs, dict):
                continue
            for key, value in inputs.items():
                if isinstance(key, str):
                    chunks.append(key)
                if isinstance(value, str):
                    chunks.append(value)
        tokens: set[str] = set()
        for chunk in chunks:
            tokens.update(self._tokenize(chunk))
        return tokens

    def _collect_text_chunks(
        self,
        *,
        value: object,
        chunks: list[str],
        depth: int,
        max_depth: int,
    ) -> None:
        """Depth-limited walk that appends string fragments into *chunks*.

        For dicts, only a fixed allow-list of keys is descended into, and the
        key names themselves are collected as signal.
        """
        if depth > max_depth or len(chunks) >= 120:
            return
        if isinstance(value, str):
            stripped = value.strip()
            if stripped:
                chunks.append(stripped)
            return
        if isinstance(value, dict):
            preferred_keys = {
                "operation_id",
                "method",
                "path",
                "base_url",
                "summary",
                "description",
                "tags",
                "source_filename",
                "required_inputs",
                "request_content_types",
                "response_content_types",
                "response_status_codes",
                "security_requirements",
                "parameter_names_by_location",
                "path_segments",
                "input_signals",
                "output_signals",
            }
            for key, item in value.items():
                if not isinstance(key, str):
                    continue
                if key not in preferred_keys:
                    continue
                chunks.append(key)
                self._collect_text_chunks(
                    value=item,
                    chunks=chunks,
                    depth=depth + 1,
                    max_depth=max_depth,
                )
            return
        if isinstance(value, list):
            for item in value[:30]:
                self._collect_text_chunks(
                    value=item,
                    chunks=chunks,
                    depth=depth + 1,
                    max_depth=max_depth,
                )

    def _resolve_confidence_tier(self, top_score: float, margin: float) -> str:
        """Map (top score, margin over runner-up) to high/medium/low."""
        # A tight race between the top candidates means the pick is uncertain.
        if margin < self.LOW_MARGIN_THRESHOLD:
            return "low"
        if top_score >= self.HIGH_CONFIDENCE_THRESHOLD:
            return "high"
        if top_score >= self.MEDIUM_CONFIDENCE_THRESHOLD:
            return "medium"
        return "low"

    def _generic_capability_penalty(self, tokens: set[str]) -> float:
        """Penalty growing with the share of generic (CRUD-ish) tokens."""
        if not tokens:
            return 0.0
        generic_share = len(tokens & self.GENERIC_TOKENS) / len(tokens)
        if generic_share >= 0.65:
            return 0.14
        if generic_share >= 0.5:
            return 0.09
        if generic_share >= 0.35:
            return 0.04
        return 0.0

    def _tokenize(self, value: str) -> set[str]:
        """Lowercased RU/EN alphanumeric tokens, minus stopwords and short tokens."""
        tokens = set(re.findall(r"[a-zA-Zа-яА-Я0-9]+", value.lower()))
        return {
            token
            for token in tokens
            if len(token) >= 3 and token not in self._STOPWORDS
        }

    def _is_executable_capability(self, capability: Capability) -> bool:
        """True if the capability can actually be run by the executor."""
        cap_type = self._capability_type_value(capability)
        if cap_type == CapabilityType.ATOMIC.value:
            # Atomic capabilities need a backing action to call.
            return getattr(capability, "action_id", None) is not None
        if cap_type == CapabilityType.COMPOSITE.value:
            return self._recipe_is_executable(getattr(capability, "recipe", None))
        return False

    def _recipe_is_executable(self, recipe: Any) -> bool:
        """True for a version-1 recipe dict with a non-empty steps list."""
        if not isinstance(recipe, dict):
            return False
        if recipe.get("version") != 1:
            return False
        steps = recipe.get("steps")
        return isinstance(steps, list) and bool(steps)

    def _capability_type_value(self, capability: Capability) -> str:
        """Normalize capability.type (enum, str, or enum-like) to its string value."""
        raw = getattr(capability, "type", None)
        if isinstance(raw, CapabilityType):
            return raw.value
        if isinstance(raw, str):
            return raw
        if hasattr(raw, "value"):
            return str(raw.value)
        # Missing/unknown type defaults to atomic (the stricter check).
        return CapabilityType.ATOMIC.value

    def _expand_tokens(self, tokens: set[str]) -> set[str]:
        """Grow a token set with suffix-stripped variants and alias bridges."""
        expanded: set[str] = set()
        for token in tokens:
            expanded.add(token)
            normalized_variants = self._normalized_variants(token)
            expanded.update(normalized_variants)
            for variant in normalized_variants | {token}:
                for key, aliases in self._ALIAS_EXPANSIONS.items():
                    if variant == key or variant.startswith(key):
                        expanded.update(aliases)
        return expanded

    def _normalized_variants(self, token: str) -> set[str]:
        """Cheap stemming: strip common RU case endings and EN plural suffixes."""
        variants = {token}
        if len(token) >= 5:
            for suffix in (
                "иями",
                "ями",
                "ами",
                "ов",
                "ев",
                "ей",
                "ам",
                "ям",
                "ах",
                "ях",
                "ые",
                "ий",
                "ый",
                "ая",
                "ое",
                "ой",
                "а",
                "я",
                "ы",
                "и",
                "у",
                "ю",
                "е",
                "о",
            ):
                if token.endswith(suffix) and len(token) > len(suffix) + 2:
                    variants.add(token[: -len(suffix)])
        if token.endswith("ies") and len(token) > 4:
            variants.add(token[:-3] + "y")
        if token.endswith("s") and len(token) > 3:
            variants.add(token[:-1])
        return variants
+103
View File
@@ -0,0 +1,103 @@
from __future__ import annotations
import logging
import os
from typing import Any
from app.utils.log_context import get_log_context
# Dedicated logger for structured business events, separate from app logs.
business_logger = logging.getLogger("app.business")

# Bump when the event payload contract changes.
EVENT_SCHEMA_VERSION = "1.0"
# Emitting service identifier; overridable per deployment.
SERVICE_NAME = os.getenv("APP_SERVICE_NAME", "backend-api")
def _derive_event_group(event: str) -> tuple[str, str | None]:
normalized = (event or "").strip().lower()
if normalized.startswith("auth_"):
return "auth", None
if normalized.startswith("action_") or normalized.startswith("actions_"):
return "actions", None
if (
normalized.startswith("capability_")
or normalized.startswith("capabilities_")
or normalized.startswith("composite_capability_")
):
return "capabilities", None
if normalized.startswith("pipeline_prompt_"):
return "pipelines", "prompt"
if normalized.startswith("pipeline_run_"):
return "pipelines", "run"
if normalized.startswith("pipeline_dialog_"):
return "pipelines", "dialog"
if normalized.startswith("pipeline_") or normalized.startswith("pipelines_"):
return "pipelines", None
if normalized.startswith("execution_run_"):
return "executions", "run"
if normalized.startswith("execution_step_"):
return "executions", "step"
if normalized.startswith("execution_") or normalized.startswith("executions_"):
return "executions", None
if normalized.startswith("user_") or normalized.startswith("users_"):
return "users", None
return "other", None
def _derive_event_outcome(event: str) -> str:
    """Classify an event name as success/failure/progress/read via its suffix."""
    name = (event or "").strip().lower()
    outcome_by_suffix = {
        "success": ("_succeeded", "_created", "_updated", "_deleted", "_processed", "_finished"),
        "failure": ("_failed", "_rejected", "_blocked"),
        "progress": ("_started", "_queued", "_received"),
        "read": ("_listed", "_fetched", "_viewed"),
    }
    for outcome, suffixes in outcome_by_suffix.items():
        if name.endswith(suffixes):
            return outcome
    return "unknown"
def log_business_event(event: str, **fields: Any) -> None:
    """Emit one structured business event with derived metadata attached.

    Explicit keyword *fields* win over both derived metadata and ambient
    log context; non-primitive values are stringified for safe logging.
    """
    payload: dict[str, Any] = {
        "event": event,
        "event_schema_version": EVENT_SCHEMA_VERSION,
        "service_name": SERVICE_NAME,
    }
    group, subgroup = _derive_event_group(event)
    if "event_group" not in fields:
        payload["event_group"] = group
    if subgroup is not None and "event_subgroup" not in fields:
        payload["event_subgroup"] = subgroup
    if "event_outcome" not in fields:
        payload["event_outcome"] = _derive_event_outcome(event)
    # Ambient request context (trace_id, path, ...) unless explicitly overridden.
    for ctx_key, ctx_value in get_log_context().items():
        if ctx_key not in fields:
            payload[ctx_key] = ctx_value
    for key, value in fields.items():
        if isinstance(value, (str, int, float, bool)) or value is None:
            payload[key] = value
        else:
            payload[key] = str(value)
    business_logger.info(event, extra=payload)
+124
View File
@@ -0,0 +1,124 @@
from datetime import datetime, timezone
from typing import Any
import uuid
import logging
from fastapi import Request, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from starlette.exceptions import HTTPException
logger = logging.getLogger(__name__)
def now_iso() -> str:
    """Current UTC time in ISO-8601 with a trailing "Z" instead of "+00:00"."""
    stamp = datetime.now(timezone.utc)
    return stamp.isoformat().replace("+00:00", "Z")
async def validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
    """Convert FastAPI/pydantic validation errors into the API error envelope.

    Returns 400 for malformed JSON bodies, 422 with per-field details for
    schema validation failures.
    """
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    # Malformed JSON is reported as a generic 400 rather than field errors.
    is_json_error = any(e.get("type") in ("json_invalid", "json_decode", "value_error.jsondecode") for e in exc.errors())
    if is_json_error:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content={
                "code": "BAD_REQUEST",
                "message": "Невалидный JSON",
                "traceId": trace_id,
                "timestamp": now_iso(),
                "path": request.url.path,
                "details": {"hint": "Проверьте запятые/кавычки"},
            },
        )
    field_errors: list[dict[str, Any]] = []
    for err in exc.errors():
        loc = [str(x) for x in err.get("loc", []) if x != "body"]
        field_name = ".".join(loc) if loc else "unknown"
        msg = err.get("msg", "invalid")
        # Strip pydantic's "Value error, " prefix from custom validator messages.
        if msg.startswith("Value error, "):
            msg = msg.replace("Value error, ", "")
        rejected = err.get("input", None)
        # Fix: err["input"] may be a non-JSON-serializable object (bytes,
        # UploadFile, datetime, ...). Rendering it raw would raise inside
        # JSONResponse and turn a 422 into a 500, so stringify anything that
        # is not a JSON-native value.
        if not (rejected is None or isinstance(rejected, (str, int, float, bool, list, dict))):
            rejected = str(rejected)
        field_errors.append({
            "field": field_name,
            "issue": msg,
            "rejectedValue": rejected,
        })
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content={
            "code": "VALIDATION_FAILED",
            "message": "Некоторые поля не прошли валидацию",
            "traceId": trace_id,
            "timestamp": now_iso(),
            "path": request.url.path,
            "fieldErrors": field_errors,
        },
    )
async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
    """Render an HTTPException into the API error envelope with a stable code."""
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    message = str(exc.detail)
    details = None
    # Structured detail: {"message": ..., **extra} -> message + details payload.
    if isinstance(exc.detail, dict):
        message = exc.detail.get("message", str(exc.detail))
        extra = {k: v for k, v in exc.detail.items() if k != "message"}
        if extra:
            details = extra
    status_to_code = {
        status.HTTP_400_BAD_REQUEST: "BAD_REQUEST",
        status.HTTP_401_UNAUTHORIZED: "UNAUTHORIZED",
        status.HTTP_403_FORBIDDEN: "FORBIDDEN",
        status.HTTP_404_NOT_FOUND: "NOT_FOUND",
        status.HTTP_422_UNPROCESSABLE_ENTITY: "VALIDATION_FAILED",
        status.HTTP_423_LOCKED: "USER_INACTIVE",
    }
    if exc.status_code == status.HTTP_409_CONFLICT:
        # Conflicts are almost always duplicate-email registrations here.
        code = "EMAIL_ALREADY_EXISTS" if "email" in message.lower() else "CONFLICT"
    else:
        code = status_to_code.get(exc.status_code, "HTTP_ERROR")
    if exc.status_code == status.HTTP_404_NOT_FOUND and message == "Not Found":
        message = "Ресурс не найден"
    content: dict[str, Any] = {
        "code": code,
        "message": message,
        "traceId": trace_id,
        "timestamp": now_iso(),
        "path": request.url.path,
    }
    if details:
        content["details"] = details
    return JSONResponse(status_code=exc.status_code, content=content)
async def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse:
    """Last-resort handler: log the traceback, return an opaque 500 envelope."""
    trace_id = getattr(request.state, "traceId", str(uuid.uuid4()))
    logger.exception("Unhandled exception on %s", request.url.path, exc_info=exc)
    body = {
        "code": "INTERNAL_ERROR",
        "message": "Внутренняя ошибка сервера",
        "traceId": trace_id,
        "timestamp": now_iso(),
        "path": request.url.path,
    }
    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=body)
+16
View File
@@ -0,0 +1,16 @@
import bcrypt
def hash_password(password: str) -> str:
    """Hash *password* with bcrypt using a fresh random salt per call."""
    pwd_bytes = password.encode("utf-8")
    salt = bcrypt.gensalt()
    return bcrypt.hashpw(pwd_bytes, salt).decode("utf-8")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check *plain_password* against a stored bcrypt hash.

    Any error (malformed/legacy hash, encoding issues) is deliberately
    treated as a failed verification rather than propagated.
    """
    try:
        pwd_bytes = plain_password.encode("utf-8")
        hashed_bytes = hashed_password.encode("utf-8")
        return bcrypt.checkpw(pwd_bytes, hashed_bytes)
    except Exception:
        return False
+49
View File
@@ -0,0 +1,49 @@
from __future__ import annotations
from contextvars import ContextVar
from typing import Any
# Request-scoped logging fields; ContextVar keeps them isolated per
# asyncio task, so concurrent requests never see each other's values.
_trace_id_ctx: ContextVar[str | None] = ContextVar("trace_id", default=None)
_path_ctx: ContextVar[str | None] = ContextVar("path", default=None)
_method_ctx: ContextVar[str | None] = ContextVar("method", default=None)
_user_id_ctx: ContextVar[str | None] = ContextVar("user_id", default=None)
def set_request_context(*, trace_id: str | None, path: str | None, method: str | None) -> None:
    """Bind per-request metadata to the current task's logging context."""
    _trace_id_ctx.set(trace_id)
    _path_ctx.set(path)
    _method_ctx.set(method)
def set_user_context(*, user_id: str | None) -> None:
    """Attach the authenticated user's id to the logging context."""
    _user_id_ctx.set(user_id)
def clear_log_context() -> None:
    """Reset every request-scoped logging context variable to None."""
    for ctx_var in (_trace_id_ctx, _path_ctx, _method_ctx, _user_id_ctx):
        ctx_var.set(None)
def get_log_context() -> dict[str, Any]:
    """Snapshot the non-empty logging context fields as a flat dict."""
    candidates = (
        ("trace_id", _trace_id_ctx),
        ("path", _path_ctx),
        ("method", _method_ctx),
        ("user_id", _user_id_ctx),
    )
    payload: dict[str, Any] = {}
    for field_name, ctx_var in candidates:
        # Falsy values (None, "") are omitted, matching log-friendly output.
        if value := ctx_var.get():
            payload[field_name] = value
    return payload
+287
View File
@@ -0,0 +1,287 @@
from __future__ import annotations
import json
import os
import re
from typing import Any
def build_capability_from_action(action: Any) -> dict[str, Any]:
    """Build a capability dict for *action* via the LLM, with a local fallback."""
    llm_payload = _call_ollama_json(
        system_prompt=(
            "You convert one API action into one capability. "
            "Return only valid JSON with keys: "
            "name, description, input_schema, output_schema, data_format."
        ),
        user_prompt=_build_prompt(action),
    )
    if llm_payload is None:
        # LLM unavailable or returned garbage: degrade to a deterministic build.
        capability = _build_fallback_capability(action)
        capability["llm_payload"] = {
            "source": "fallback",
            "reason": "ollama_unavailable_or_invalid_response",
        }
        return capability
    capability = _normalize_capability_payload(llm_payload, action)
    capability["llm_payload"] = llm_payload
    return capability
def chat_json(system_prompt: str, user_prompt: str) -> dict[str, Any] | None:
    """Public thin wrapper over the Ollama JSON chat helper."""
    return _call_ollama_json(system_prompt=system_prompt, user_prompt=user_prompt)
def reset_model_session() -> None:
    """Best-effort reset of the Ollama model session; never raises.

    NOTE(review): relies on _load_headers/_reset_model_session defined
    elsewhere in this module — confirm their contracts there.
    """
    host = os.getenv("OLLAMA_HOST", "http://178.154.193.191:8067").strip()
    model = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:7b")
    headers = _load_headers()
    try:
        # Imported lazily so the module loads without the optional client installed.
        from ollama import Client
    except Exception:
        return None
    try:
        client = Client(host=host, headers=headers or None)
        _reset_model_session(client=client, model=model)
    except Exception:
        # Reset is advisory; any client/network failure is swallowed.
        return None
async def summarize_dialog_text(messages: list[dict[str, Any]]) -> str | None:
    """Summarize a dialog history in Russian via the LLM.

    Returns the stripped summary text, or None when the model is
    unavailable or returns an unusable payload.
    """
    import asyncio  # local import keeps module import-time deps unchanged

    prompt = (
        "Кратко сожми историю диалога на русском. "
        "Сохрани цель пользователя, ограничения, недостающие данные и важные решения. "
        "Ответь только текстом без markdown.\n\n"
        f"История:\n{json.dumps(messages, ensure_ascii=False)}"
    )
    # Fix: _call_ollama_json performs a blocking HTTP round-trip; calling it
    # directly from a coroutine would stall the event loop for the whole
    # request duration. Offload it to a worker thread instead.
    payload = await asyncio.to_thread(
        _call_ollama_json,
        system_prompt="Ты помощник, который сжимает диалоговый контекст для дальнейшего планирования.",
        user_prompt=prompt,
    )
    if isinstance(payload, dict):
        summary = payload.get("summary")
        if isinstance(summary, str) and summary.strip():
            return summary.strip()
    return None
def _call_ollama_json(system_prompt: str, user_prompt: str) -> dict[str, Any] | None:
    """Run one chat round-trip against Ollama and parse the reply as JSON.

    Returns the parsed dict, or None on any failure (client not installed,
    network/model error, empty reply, or a non-object JSON payload). All
    failures are intentionally silent: callers treat None as "LLM unavailable".

    NOTE(review): this is a blocking call — keep it off the event loop.
    """
    host = os.getenv("OLLAMA_HOST", "http://178.154.193.191:8067").strip()
    model = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:7b")
    headers = _load_headers()
    try:
        # Imported lazily so the module loads without the optional client installed.
        from ollama import Client
    except Exception:
        return None
    try:
        client = Client(host=host, headers=headers or None)
        response = client.chat(
            model=model,
            messages=[
                {
                    "role": "system",
                    "content": system_prompt,
                },
                {
                    "role": "user",
                    "content": user_prompt,
                },
            ],
            # temperature=0 for deterministic, parseable output.
            options={"temperature": 0},
        )
    except Exception:
        return None
    content = _extract_message_content(response)
    if not content:
        return None
    payload = _parse_json_payload(content)
    if not isinstance(payload, dict):
        return None
    return payload
def _build_prompt(action: Any) -> str:
    """Serialize the action's API surface as an indented JSON prompt."""
    method = getattr(action, "method", None)
    payload = {
        "operation_id": getattr(action, "operation_id", None),
        "method": method.value if method else None,
        "path": getattr(action, "path", None),
        "base_url": getattr(action, "base_url", None),
        "summary": getattr(action, "summary", None),
        "description": getattr(action, "description", None),
        "tags": getattr(action, "tags", None),
        "parameters_schema": getattr(action, "parameters_schema", None),
        "request_body_schema": getattr(action, "request_body_schema", None),
        "response_schema": getattr(action, "response_schema", None),
    }
    return json.dumps(payload, ensure_ascii=True, indent=2)
def _extract_message_content(response: Any) -> str | None:
if isinstance(response, dict):
message = response.get("message")
if isinstance(message, dict):
content = message.get("content")
if isinstance(content, str):
return content
content = response.get("content")
if isinstance(content, str):
return content
return None
message = getattr(response, "message", None)
if message is not None:
content = getattr(message, "content", None)
if isinstance(content, str):
return content
content = getattr(response, "content", None)
if isinstance(content, str):
return content
return None
def _parse_json_payload(content: str) -> dict[str, Any] | None:
try:
return json.loads(content)
except json.JSONDecodeError:
match = re.search(r"\{.*\}", content, re.DOTALL)
if not match:
return None
try:
return json.loads(match.group(0))
except json.JSONDecodeError:
return None
def _normalize_capability_payload(payload: dict[str, Any], action: Any) -> dict[str, Any]:
    """Merge the LLM payload with deterministic fallbacks for missing keys.

    Each field falls back to the locally-built capability when the LLM
    value is absent or malformed (non-dict schemas are rejected).
    """
    fallback = _build_fallback_capability(action)
    return {
        "name": str(payload.get("name") or fallback["name"]),
        "description": str(payload.get("description") or fallback["description"]),
        "input_schema": _normalize_schema(payload.get("input_schema")) or fallback["input_schema"],
        "output_schema": _normalize_schema(payload.get("output_schema")) or fallback["output_schema"],
        "data_format": _normalize_data_format(payload.get("data_format")) or fallback["data_format"],
    }
def _build_fallback_capability(action: Any) -> dict[str, Any]:
    """Deterministically derive a capability from action metadata (no LLM)."""
    return {
        "name": _build_capability_name(action),
        "description": _build_capability_description(action),
        "input_schema": _build_input_schema(action),
        "output_schema": getattr(action, "response_schema", None),
        "data_format": _build_data_format(action),
    }
def _build_capability_name(action: Any) -> str:
    """Capability name: operation_id when present, else a method + path slug."""
    operation_id = getattr(action, "operation_id", None)
    if operation_id:
        return str(operation_id)
    method = getattr(action, "method", None)
    verb = "call" if method is None else method.value.lower()
    raw_path = getattr(action, "path", "") or ""
    slug = re.sub(r"[^a-zA-Z0-9/]+", "_", re.sub(r"[{}]", "", raw_path).strip("/"))
    # An empty path collapses to the sentinel "root".
    slug = slug.replace("/", "_") or "root"
    return f"{verb}_{slug.lower()}"
def _build_capability_description(action: Any) -> str:
    """First non-empty of summary/description/operation_id, else the built name."""
    for candidate in (
        getattr(action, "summary", None),
        getattr(action, "description", None),
        getattr(action, "operation_id", None),
    ):
        if candidate:
            return str(candidate)
    return str(_build_capability_name(action))
def _build_input_schema(action: Any) -> dict[str, Any] | None:
parameters_schema = getattr(action, "parameters_schema", None)
request_body_schema = getattr(action, "request_body_schema", None)
if parameters_schema and request_body_schema:
return {
"type": "object",
"properties": {
"parameters": parameters_schema,
"request_body": request_body_schema,
},
}
if parameters_schema:
return parameters_schema
if request_body_schema:
return request_body_schema
return None
def _build_data_format(action: Any) -> dict[str, Any]:
parameters_schema = getattr(action, "parameters_schema", None) or {}
request_body_schema = getattr(action, "request_body_schema", None) or {}
response_schema = getattr(action, "response_schema", None) or {}
parameter_locations: list[str] = []
if isinstance(parameters_schema, dict):
properties = parameters_schema.get("properties", {})
if isinstance(properties, dict):
for property_schema in properties.values():
if not isinstance(property_schema, dict):
continue
location = property_schema.get("x-parameter-location")
if isinstance(location, str) and location not in parameter_locations:
parameter_locations.append(location)
request_content_type = request_body_schema.get("x-content-type") if isinstance(request_body_schema, dict) else None
response_content_type = response_schema.get("x-content-type") if isinstance(response_schema, dict) else None
return {
"parameter_locations": parameter_locations,
"request_content_types": [request_content_type] if isinstance(request_content_type, str) else [],
"request_schema_type": request_body_schema.get("type") if isinstance(request_body_schema, dict) else None,
"response_content_types": [response_content_type] if isinstance(response_content_type, str) else [],
"response_schema_types": [response_schema.get("type")] if isinstance(response_schema, dict) and isinstance(response_schema.get("type"), str) else [],
}
def _normalize_schema(value: Any) -> dict[str, Any] | None:
if isinstance(value, dict):
return value
return None
def _normalize_data_format(value: Any) -> dict[str, Any] | None:
    """Coerce an arbitrary payload into the canonical data-format dict.

    Non-dict inputs are rejected; list-valued fields are normalized to lists
    of strings, and ``request_schema_type`` is passed through untouched.
    """
    if not isinstance(value, dict):
        return None
    return {
        "parameter_locations": _normalize_string_list(value.get("parameter_locations")),
        "request_content_types": _normalize_string_list(value.get("request_content_types")),
        "request_schema_type": value.get("request_schema_type"),
        "response_content_types": _normalize_string_list(value.get("response_content_types")),
        "response_schema_types": _normalize_string_list(value.get("response_schema_types")),
    }
def _normalize_string_list(value: Any) -> list[str]:
if value is None:
return []
if isinstance(value, list):
return [str(item) for item in value if item is not None]
return [str(value)]
def _load_headers() -> dict[str, str]:
headers_payload = os.getenv("OLLAMA_HEADERS_JSON")
if not headers_payload:
return {}
try:
parsed = json.loads(headers_payload)
except json.JSONDecodeError:
return {}
if not isinstance(parsed, dict):
return {}
return {str(key): str(value) for key, value in parsed.items()}
+99
View File
@@ -0,0 +1,99 @@
import os
from datetime import datetime, timedelta, timezone
from typing import List
from uuid import UUID
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database.session import get_session
from app.models import User, UserRole
from app.utils.log_context import set_user_context
# Optional dependency: python-jose supplies JWT encode/decode. When it is not
# installed we degrade gracefully — `jwt` stays None and the auth helpers
# below answer with HTTP 500 instead of crashing at import time.
try:
    from jose import JWTError, jwt
except ModuleNotFoundError:
    JWTError = Exception  # keeps `except JWTError` clauses valid without jose
    jwt = None
# SECURITY NOTE(review): the hard-coded fallback secret makes tokens forgeable
# whenever JWT_SECRET is unset — require the env var in any real deployment.
JWT_SECRET = os.environ.get("JWT_SECRET", "super_secret_key_123")
JWT_ALG = "HS256"
# auto_error=False so a missing Authorization header yields None instead of an
# automatic 403, letting get_current_user raise a uniform 401.
security = HTTPBearer(auto_error=False)
def create_access_token(*, sub: str, role: str, expires_in: int = 3600) -> tuple[str, int]:
    """Issue a signed HS256 access token.

    Args:
        sub: Subject claim — the user id the token is issued for.
        role: Role claim embedded in the token.
        expires_in: Token lifetime in seconds (default: one hour). Added as a
            keyword-only parameter with the previous hard-coded value as its
            default, so existing callers are unaffected.

    Returns:
        Tuple of (encoded JWT string, lifetime in seconds).

    Raises:
        HTTPException: 500 when python-jose is not installed.
    """
    if jwt is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="JWT support is not installed",
        )
    expire = datetime.now(timezone.utc) + timedelta(seconds=expires_in)
    payload = {"sub": str(sub), "role": role, "exp": expire}
    token = jwt.encode(payload, JWT_SECRET, algorithm=JWT_ALG)
    return token, expires_in
async def get_current_user(
    creds: HTTPAuthorizationCredentials | None = Depends(security),
    session: AsyncSession = Depends(get_session),
) -> User:
    """Resolve the authenticated user from a Bearer JWT.

    Raises:
        HTTPException: 401 for a missing token, bad signature, malformed
            ``sub`` claim, or unknown user; 500 when python-jose is not
            installed; 423 when the account is deactivated.
    """
    if creds is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
    if jwt is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="JWT support is not installed",
        )
    token = creds.credentials
    # One shared 401 for every validation failure so callers cannot
    # distinguish "bad token" from "unknown user".
    auth_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        payload = jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALG])
        user_id_str: str | None = payload.get("sub")
        if user_id_str is None:
            raise auth_exception
        user_id = UUID(user_id_str)  # ValueError here funnels into the 401 below
    except (JWTError, ValueError):
        raise auth_exception
    result = await session.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise auth_exception
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_423_LOCKED,
            detail="User account is deactivated",
        )
    # Attach the user id to the logging context for downstream handlers.
    set_user_context(user_id=str(user.id))
    return user
def check_permissions(allowed_roles: List[UserRole]):
    """Build a FastAPI dependency that only admits users with an allowed role.

    Returns an async checker which yields the current user on success and
    raises HTTP 403 for any other role.
    """
    async def role_checker(current_user: User = Depends(get_current_user)):
        if current_user.role in allowed_roles:
            return current_user
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Not enough permissions",
        )
    return role_checker
+55
View File
@@ -0,0 +1,55 @@
services:
api:
image: ${DOCKER_IMAGE:-solution-api}:${TAG:-latest}
build:
context: .
dockerfile: Dockerfile
restart: always
ports:
- "8000:8000"
volumes:
- ./:/app
environment:
- DATABASE_URL=postgresql+asyncpg://user:password@db:5432/dbname
- REDIS_URL=redis://redis:6379/0
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
networks:
- shop-network
- default
db:
image: postgres:15-alpine
environment:
- POSTGRES_USER=user
- POSTGRES_PASSWORD=password
- POSTGRES_DB=dbname
ports:
- "5433:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U user -d dbname"]
interval: 5s
timeout: 5s
retries: 5
networks:
- default
redis:
image: redis:7-alpine
ports:
- "6380:6379"
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 5s
timeout: 5s
retries: 5
networks:
- default
networks:
shop-network:
external: true
+10
View File
@@ -0,0 +1,10 @@
global:
scrape_interval: 15s
evaluation_interval: 15s
scrape_configs:
- job_name: "backend-api"
metrics_path: /metrics
static_configs:
- targets:
- "api:8000"
+19
View File
@@ -0,0 +1,19 @@
fastapi
uvicorn[standard]
sqlalchemy>=2.0
asyncpg
redis
fastapi-cache2
pydantic
python-jose[cryptography]
passlib[bcrypt]==1.7.4
bcrypt==4.0.1
python-multipart
lark
pytest
pytest-asyncio
httpx
email-validator
PyYAML
ollama
prometheus-fastapi-instrumentator
View File
+77
View File
@@ -0,0 +1,77 @@
from __future__ import annotations
from types import SimpleNamespace
from uuid import uuid4
from app.services.capability_service import CapabilityService
def test_build_capability_payload_stores_rich_action_context():
    """A fully populated action yields a capability payload carrying the
    OpenAPI context (method, path, raw spec, input signals, content hints)."""
    # SimpleNamespace stands in for the ORM Action model; only attribute
    # access is required by _build_capability_payload.
    action = SimpleNamespace(
        id=uuid4(),
        operation_id="sendCampaignEmail",
        method=SimpleNamespace(value="POST"),
        path="/v1/campaigns/{campaign_id}/emails/send",
        base_url="https://api.example.com",
        summary="Send campaign email",
        description="Send email for selected users",
        tags=["campaign", "email"],
        source_filename="crm.yaml",
        parameters_schema={
            "type": "object",
            "required": ["campaign_id"],
            "properties": {
                "campaign_id": {"type": "string", "x-parameter-location": "path"},
                "segment_id": {"type": "string", "x-parameter-location": "query"},
            },
        },
        request_body_schema={
            "type": "object",
            "required": ["subject", "template_id"],
            "properties": {
                "subject": {"type": "string"},
                "template_id": {"type": "string"},
            },
            "x-content-type": "application/json",
        },
        response_schema={
            "type": "object",
            "properties": {"delivery_id": {"type": "string"}},
            "x-content-type": "application/json",
        },
        raw_spec={
            "deprecated": False,
            "security": [{"BearerAuth": []}],
            "requestBody": {
                "content": {
                    "application/json": {
                        "schema": {"type": "object"},
                    }
                }
            },
            "responses": {
                "200": {
                    "content": {
                        "application/json": {
                            "schema": {"type": "object"},
                        }
                    }
                }
            },
        },
    )
    payload = CapabilityService._build_capability_payload(action)
    llm_payload = payload["llm_payload"]
    action_context = llm_payload["action_context"]
    hints = llm_payload["openapi_hints"]
    assert payload["name"] == "sendCampaignEmail"
    assert payload["description"] == "Send campaign email"
    assert action_context["method"] == "POST"
    assert action_context["path"] == "/v1/campaigns/{campaign_id}/emails/send"
    assert action_context["raw_spec"]["responses"]["200"] is not None
    # Required inputs merge the parameter and request-body "required" lists.
    assert action_context["input_signals"]["required_inputs"] == ["campaign_id", "subject", "template_id"]
    assert hints["request_content_types"] == ["application/json"]
    assert "200" in hints["response_status_codes"]
+693
View File
@@ -0,0 +1,693 @@
from __future__ import annotations
import copy
from typing import Any
from uuid import uuid4
import pytest
from app.models import Action, HttpMethod
from app.models.capability import Capability
from app.models.execution import (
ExecutionRun,
ExecutionRunStatus,
ExecutionStepRun,
ExecutionStepStatus,
)
from app.models.pipeline import Pipeline, PipelineStatus
from app.services.execution_service import ExecutionService, StepExecutionError
class FakeSession:
    """Minimal async-session stand-in backed by an in-memory mapping.

    Objects are looked up by ``(model class, primary key)``. ExecutionStepRun
    instances added through ``add`` are also indexed by step number so tests
    can inspect per-step state; other objects are ignored.
    """

    def __init__(self, initial: dict[tuple[type[Any], Any], Any] | None = None) -> None:
        self._store = initial or {}
        self.step_runs_by_step: dict[int, ExecutionStepRun] = {}
        self.commit_calls = 0

    async def get(self, model: type[Any], key: Any) -> Any:
        return self._store.get((model, key))

    def add(self, obj: Any) -> None:
        if not isinstance(obj, ExecutionStepRun):
            return
        self.step_runs_by_step[obj.step] = obj

    def add_all(self, items: list[Any]) -> None:
        for entry in items:
            self.add(entry)

    async def commit(self) -> None:
        self.commit_calls += 1

    async def refresh(self, _obj: Any) -> None:
        return None
class FakeContextStore:
    """In-memory stand-in for the run-context store used by ExecutionService.

    Hands out deep copies on load so callers cannot mutate stored state, and
    records every saved snapshot for later assertions.
    """

    def __init__(self, initial: Any = None) -> None:
        self._context = initial
        self.saved_contexts: list[dict[str, Any]] = []

    async def load_context(self, _run_id) -> dict[str, Any]:
        # Non-dict seeds (None, corrupt data) read back as an empty context.
        if not isinstance(self._context, dict):
            return {}
        return copy.deepcopy(self._context)

    async def save_context(self, _run_id, context: dict[str, Any]) -> None:
        snapshot = copy.deepcopy(context)
        self._context = snapshot
        self.saved_contexts.append(snapshot)
def _build_action(action_id) -> Action:
    """Create a minimal GET action pointing at the shared test API base."""
    action = Action(
        id=action_id,
        method=HttpMethod.GET,
        path="/resource",
        base_url="https://api.example.com",
    )
    return action
def _build_capability(capability_id, action_id) -> Capability:
    """Create a capability bound to *action_id* with a deterministic short name."""
    short_id = capability_id.hex[:8]
    return Capability(
        id=capability_id,
        action_id=action_id,
        name=f"cap_{short_id}",
    )
def _build_node(step: int, capability_id, action_id, *, external_inputs: list[str] | None = None) -> dict[str, Any]:
return {
"step": step,
"name": f"Step {step}",
"external_inputs": external_inputs or [],
"endpoints": [
{
"capability_id": str(capability_id),
"action_id": str(action_id),
}
],
}
def test_topological_sort_linear_graph():
    """A simple chain 1->2->3 comes back in dependency order."""
    ordered = ExecutionService._topological_sort(
        steps=[1, 2, 3],
        edges=[
            {"from_step": 1, "to_step": 2, "type": "users"},
            {"from_step": 2, "to_step": 3, "type": "segments"},
        ],
    )
    assert ordered == [1, 2, 3]
def test_extract_value_from_output_by_edge_type():
    """The edge type keys directly into the producing step's output dict."""
    output = {"users": [{"id": 1}]}
    value = ExecutionService._extract_value_from_output(output, "users")
    assert value == [{"id": 1}]
def test_build_request_payload_uses_path_params_and_defaults():
    """Path params are substituted into the URL, schema defaults fill query
    params, and no required inputs are reported missing."""
    action = Action(
        method=HttpMethod.GET,
        path="/users/{user_id}",
        base_url="https://api.example.com",
        parameters_schema={
            "type": "object",
            "properties": {
                "user_id": {
                    "type": "string",
                    "x-parameter-location": "path",
                },
                "limit": {
                    "type": "integer",
                    "x-parameter-location": "query",
                    "default": 10,
                },
            },
            "required": ["user_id"],
        },
    )
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    payload = service._build_request_payload(
        action=action,
        resolved_inputs={"user_id": "abc"},
    )
    assert payload["url"] == "https://api.example.com/users/abc"
    assert payload["query_params"] == {"limit": 10}
    assert payload["missing_required"] == []
@pytest.mark.asyncio
async def test_get_action_from_node_uses_capability_action_id():
    """The capability's stored action_id wins over a stale id on the node."""
    primary_action_id = uuid4()
    stale_action_id = uuid4()
    capability_id = uuid4()
    action = _build_action(primary_action_id)
    capability = _build_capability(capability_id, primary_action_id)
    session = FakeSession(
        {
            (Capability, capability_id): capability,
            (Action, primary_action_id): action,
        }
    )
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    # Node deliberately carries an action_id that no longer matches.
    node = _build_node(step=1, capability_id=capability_id, action_id=stale_action_id)
    resolved_capability_id, resolved_action = await service._get_action_from_node(node)
    assert resolved_capability_id == capability_id
    assert resolved_action.id == primary_action_id
@pytest.mark.asyncio
async def test_get_action_from_node_raises_for_invalid_or_missing_bindings():
    """Each broken binding (bad UUID, missing capability, capability without
    action_id, dangling action_id) raises a descriptive StepExecutionError."""
    service = ExecutionService(session=FakeSession())  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match="valid capability_id"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": "invalid"}]}
        )
    missing_capability_id = uuid4()
    with pytest.raises(StepExecutionError, match=f"Capability not found: {missing_capability_id}"):
        await service._get_action_from_node(
            {
                "step": 1,
                "endpoints": [{"capability_id": str(missing_capability_id)}],
            }
        )
    capability_id = uuid4()
    capability_without_action = _build_capability(capability_id, None)
    session = FakeSession({(Capability, capability_id): capability_without_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Capability does not have action_id: {capability_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )
    missing_action_id = uuid4()
    capability_with_missing_action = _build_capability(capability_id, missing_action_id)
    session = FakeSession({(Capability, capability_id): capability_with_missing_action})
    service = ExecutionService(session=session)  # type: ignore[arg-type]
    with pytest.raises(StepExecutionError, match=f"Action not found for capability {capability_id}: {missing_action_id}"):
        await service._get_action_from_node(
            {"step": 1, "endpoints": [{"capability_id": str(capability_id)}]}
        )
def test_resolve_node_inputs_prefers_edge_values_over_step_outputs():
    """A recorded edge value shadows the producing step's raw output."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 2, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        step_outputs={"1": {"users": [{"id": 1}]}},
        edge_values={"1:2:users": [{"id": 42}]},
        run_inputs={},
    )
    assert resolved == {"users": [{"id": 42}]}
    assert missing == []
def test_resolve_node_inputs_normalizes_array_suffix_edge_types():
    """An edge typed "users[]" resolves under both the raw and stripped keys."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 3, "external_inputs": []},
        incoming_edges=[{"from_step": 1, "to_step": 3, "type": "users[]"}],
        step_outputs={"1": {"users": [{"id": 1}]}},
        edge_values={},
        run_inputs={},
    )
    assert resolved["users[]"] == [{"id": 1}]
    assert resolved["users"] == [{"id": 1}]
    assert missing == []
def test_resolve_node_inputs_maps_user_hotel_pairs_to_segments():
    """A "user_hotel_pairs" edge also exposes the upstream "segments" output
    under its original key."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    segment_payload = [
        {"segment_id": "seg_1", "hotel_id": "hotel_001", "user_ids": ["usr_001"]},
    ]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 4, "external_inputs": []},
        incoming_edges=[{"from_step": 3, "to_step": 4, "type": "user_hotel_pairs"}],
        step_outputs={"3": {"segments": segment_payload}},
        edge_values={},
        run_inputs={},
    )
    assert resolved["user_hotel_pairs"] == segment_payload
    assert resolved["segments"] == segment_payload
    assert missing == []
def test_resolve_node_inputs_maps_empty_user_hotel_pairs_to_assignments():
    """An empty edge value still counts as resolved (not missing) and maps
    onto the upstream "assignments" key."""
    service = ExecutionService(session=None)  # type: ignore[arg-type]
    resolved, missing = service._resolve_node_inputs(
        node={"step": 5, "external_inputs": []},
        incoming_edges=[{"from_step": 4, "to_step": 5, "type": "user_hotel_pairs"}],
        step_outputs={"4": {"assignments": []}},
        edge_values={"4:5:user_hotel_pairs": []},
        run_inputs={},
    )
    assert resolved["user_hotel_pairs"] == []
    assert resolved["assignments"] == []
    assert missing == []
@pytest.mark.asyncio
async def test_execute_run_linear_pipeline_succeeds_and_persists_context():
    """Happy path: a two-step pipeline succeeds end-to-end, the run summary is
    filled in, and step outputs/edge values are persisted to the context store
    even when the store was seeded with corrupt shapes."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    pipeline = Pipeline(
        id=pipeline_id,
        name="Linear pipeline",
        nodes=[
            _build_node(1, capability_1_id, action_1_id, external_inputs=["seed"]),
            _build_node(2, capability_2_id, action_2_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "users"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={"seed": "abc"},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    # Deliberately malformed seed context — execute_run must replace it with
    # well-formed dicts rather than crash.
    context_store = FakeContextStore(initial={"step_outputs": "bad", "edge_values": []})
    service = ExecutionService(session=session, context_store=context_store)  # type: ignore[arg-type]
    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        if action.id == action_1_id:
            assert request_payload["resolved_inputs"]["seed"] == "abc"
            return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}
        return {"status_code": 200, "body": {"ok": True}}, {"ok": True}
    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["total_steps"] == 2
    assert run.summary["succeeded_steps"] == 2
    assert run.summary["failed_steps"] == 0
    assert run.summary["skipped_steps"] == 0
    assert run.summary["final_output_step"] == 2
    assert run.summary["final_output"] == {"ok": True}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SUCCEEDED
    assert context_store.saved_contexts[-1]["edge_values"]["1:2:users"] == [{"id": 1}]
    assert context_store.saved_contexts[-1]["step_outputs"]["1"] == {"users": [{"id": 1}]}
@pytest.mark.asyncio
async def test_execute_run_is_fail_fast_and_marks_remaining_as_skipped():
    """When a middle step fails, the run ends PARTIAL_FAILED: earlier steps
    keep SUCCEEDED, the failing step is FAILED, and downstream steps are
    SKIPPED without being executed."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)
    pipeline = Pipeline(
        id=pipeline_id,
        name="Fail fast pipeline",
        nodes=[
            _build_node(1, capability_1_id, action_1_id),
            _build_node(2, capability_2_id, action_2_id),
            _build_node(3, capability_3_id, action_3_id),
        ],
        edges=[
            {"from_step": 1, "to_step": 2, "type": "users"},
            {"from_step": 2, "to_step": 3, "type": "segments"},
        ],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )
    # Step 2 blows up; steps 1 and 3 would succeed if reached.
    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200}, {"users": [1]}
    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.PARTIAL_FAILED
    assert run.summary is not None
    assert run.summary["total_steps"] == 3
    assert run.summary["succeeded_steps"] == 1
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    assert run.summary["final_output_step"] == 1
    assert run.summary["final_output"] == {"users": [1]}
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.SUCCEEDED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[3].status == ExecutionStepStatus.SKIPPED
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_node_executes_sequential_chain():
    """A node with two endpoints runs them in order, feeding the first
    endpoint's output into the second's resolved inputs, and records a
    per-endpoint trace on the step run."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    action_2 = Action(
        id=action_2_id,
        method=HttpMethod.GET,
        path="/segments/build",
        base_url="https://api.example.com",
        parameters_schema={
            "type": "object",
            "required": ["usersList"],
            "properties": {
                "usersList": {
                    "type": "array",
                    "x-parameter-location": "query",
                }
            },
        },
    )
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    multi_endpoint_node = {
        "step": 1,
        "name": "Multi endpoint node",
        "external_inputs": [],
        "endpoints": [
            {
                "capability_id": str(capability_1_id),
                "action_id": str(action_1_id),
            },
            {
                "capability_id": str(capability_2_id),
                "action_id": str(action_2_id),
            },
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="Multi endpoint chain",
        nodes=[multi_endpoint_node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )
    call_order: list[Any] = []
    async def fake_call_action(action: Action, request_payload: dict[str, Any]):
        call_order.append(action.id)
        if action.id == action_1_id:
            return {"status_code": 200, "body": {"users_list": [{"id": 1}]}}, {"users_list": [{"id": 1}]}
        # Second endpoint must receive the first endpoint's output
        # (snake_case "users_list" matched to camelCase "usersList").
        assert request_payload["resolved_inputs"]["usersList"] == [{"id": 1}]
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}
    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    assert call_order == [action_1_id, action_2_id]
    assert session.step_runs_by_step[1].capability_id == capability_1_id
    assert session.step_runs_by_step[1].action_id == action_1_id
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["status"] == "succeeded"
    assert trace[1]["status"] == "succeeded"
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_failure_stops_pipeline():
    """A failure on the second endpoint of a multi-endpoint node fails the
    whole step (no partial success), skips downstream steps, and keeps the
    per-endpoint trace showing succeeded-then-failed."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    action_2_id = uuid4()
    action_3_id = uuid4()
    capability_1_id = uuid4()
    capability_2_id = uuid4()
    capability_3_id = uuid4()
    action_1 = _build_action(action_1_id)
    action_2 = _build_action(action_2_id)
    action_3 = _build_action(action_3_id)
    capability_1 = _build_capability(capability_1_id, action_1_id)
    capability_2 = _build_capability(capability_2_id, action_2_id)
    capability_3 = _build_capability(capability_3_id, action_3_id)
    multi_endpoint_node = {
        "step": 1,
        "name": "Fail on second endpoint",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(capability_1_id), "action_id": str(action_1_id)},
            {"capability_id": str(capability_2_id), "action_id": str(action_2_id)},
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="Failing multi-endpoint pipeline",
        nodes=[
            multi_endpoint_node,
            _build_node(2, capability_3_id, action_3_id),
        ],
        edges=[{"from_step": 1, "to_step": 2, "type": "segments"}],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, capability_1_id): capability_1,
            (Capability, capability_2_id): capability_2,
            (Capability, capability_3_id): capability_3,
            (Action, action_1_id): action_1,
            (Action, action_2_id): action_2,
            (Action, action_3_id): action_3,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )
    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        if action.id == action_2_id:
            raise StepExecutionError("boom")
        return {"status_code": 200, "body": {"segments": [1]}}, {"segments": [1]}
    service._call_action = fake_call_action  # type: ignore[method-assign]
    await service.execute_run(run_id)
    # The only non-skipped step failed entirely, so the run is FAILED (not
    # PARTIAL_FAILED) and no step counts as succeeded.
    assert run.status == ExecutionRunStatus.FAILED
    assert run.summary is not None
    assert run.summary["succeeded_steps"] == 0
    assert run.summary["failed_steps"] == 1
    assert run.summary["skipped_steps"] == 1
    assert session.step_runs_by_step[1].status == ExecutionStepStatus.FAILED
    assert session.step_runs_by_step[2].status == ExecutionStepStatus.SKIPPED
    failed_trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(failed_trace) == 2
    assert failed_trace[0]["status"] == "succeeded"
    assert failed_trace[1]["status"] == "failed"
@pytest.mark.asyncio
async def test_execute_run_multi_endpoint_chain_supports_composite_endpoint():
    """An endpoint chain may mix an atomic capability with a COMPOSITE one
    (no action_id); the composite receives the atomic output as its resolved
    inputs and its type is recorded in the endpoint trace."""
    run_id = uuid4()
    pipeline_id = uuid4()
    action_1_id = uuid4()
    atomic_capability_id = uuid4()
    composite_capability_id = uuid4()
    action_1 = Action(
        id=action_1_id,
        method=HttpMethod.GET,
        path="/users/recent",
        base_url="https://api.example.com",
    )
    atomic_capability = _build_capability(atomic_capability_id, action_1_id)
    composite_capability = Capability(
        id=composite_capability_id,
        action_id=None,
        type="COMPOSITE",
        name="composite_cap",
        input_schema={
            "type": "object",
            "required": ["users"],
            "properties": {
                "users": {"type": "array"},
            },
        },
        recipe={"version": 1, "steps": [{"step": 1, "capability_id": str(atomic_capability_id), "inputs": {}}]},
    )
    node = {
        "step": 1,
        "name": "Atomic then composite",
        "external_inputs": [],
        "endpoints": [
            {"capability_id": str(atomic_capability_id), "action_id": str(action_1_id)},
            {"capability_id": str(composite_capability_id), "action_id": None},
        ],
    }
    pipeline = Pipeline(
        id=pipeline_id,
        name="mixed chain pipeline",
        nodes=[node],
        edges=[],
        status=PipelineStatus.READY,
    )
    run = ExecutionRun(
        id=run_id,
        pipeline_id=pipeline_id,
        status=ExecutionRunStatus.QUEUED,
        inputs={},
    )
    session = FakeSession(
        {
            (ExecutionRun, run_id): run,
            (Pipeline, pipeline_id): pipeline,
            (Capability, atomic_capability_id): atomic_capability,
            (Capability, composite_capability_id): composite_capability,
            (Action, action_1_id): action_1,
        }
    )
    service = ExecutionService(
        session=session,  # type: ignore[arg-type]
        context_store=FakeContextStore(initial={"step_outputs": {}, "edge_values": {}}),
    )
    async def fake_call_action(action: Action, _request_payload: dict[str, Any]):
        assert action.id == action_1_id
        return {"status_code": 200, "body": {"users": [{"id": 1}]}}, {"users": [{"id": 1}]}
    async def fake_execute_composite_capability(
        *,
        capability: Capability,
        resolved_inputs: dict[str, Any],
        run_inputs: dict[str, Any],
    ):
        assert capability.id == composite_capability_id
        assert resolved_inputs["users"] == [{"id": 1}]
        assert run_inputs == {}
        return {"capability_type": "COMPOSITE", "status_code": 200}, {"segments": [1]}
    service._call_action = fake_call_action  # type: ignore[method-assign]
    service._execute_composite_capability = fake_execute_composite_capability  # type: ignore[method-assign]
    await service.execute_run(run_id)
    assert run.status == ExecutionRunStatus.SUCCEEDED
    assert run.summary is not None
    assert run.summary["final_output"] == {"segments": [1]}
    trace = session.step_runs_by_step[1].response_snapshot["endpoints_trace"]  # type: ignore[index]
    assert len(trace) == 2
    assert trace[0]["capability_id"] == str(atomic_capability_id)
    assert trace[1]["capability_id"] == str(composite_capability_id)
    assert trace[1]["capability_type"] == "COMPOSITE"
@@ -0,0 +1,80 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import uuid4
from app.api.executions.get_execution import _build_step_run_response
from app.models.execution import ExecutionStepRun, ExecutionStepStatus
def _build_step_run(
    *,
    request_snapshot,
    response_snapshot,
) -> ExecutionStepRun:
    """Create a succeeded step run (step 1) carrying the given snapshots."""
    timestamp = datetime.now(timezone.utc)
    step_run = ExecutionStepRun(
        run_id=uuid4(),
        step=1,
        status=ExecutionStepStatus.SUCCEEDED,
    )
    step_run.name = "Step 1"
    step_run.request_snapshot = request_snapshot
    step_run.response_snapshot = response_snapshot
    step_run.created_at = timestamp
    step_run.updated_at = timestamp
    return step_run
def test_build_step_run_response_for_post_sets_accepted_and_output_payloads():
    """For a POST, the method is upper-cased, the request json_body surfaces
    as accepted_payload, and the response body as output_payload."""
    step_run = _build_step_run(
        request_snapshot={
            "method": "post",
            "json_body": {"subject": "Hi", "message": "Hello"},
        },
        response_snapshot={
            "status_code": 200,
            "body": {"sent": 1},
        },
    )
    response = _build_step_run_response(step_run)
    assert response.method == "POST"
    assert response.status_code == 200
    assert response.accepted_payload == {"subject": "Hi", "message": "Hello"}
    assert response.output_payload == {"sent": 1}
def test_build_step_run_response_for_get_keeps_accepted_payload_none():
    """For a GET there is no body to accept; a string status code is coerced
    to int and an empty-string body passes through as output_payload."""
    step_run = _build_step_run(
        request_snapshot={
            "method": "GET",
            "query_params": {"limit": 20},
        },
        response_snapshot={
            "status_code": "204",
            "body": "",
        },
    )
    response = _build_step_run_response(step_run)
    assert response.method == "GET"
    assert response.status_code == 204
    assert response.accepted_payload is None
    assert response.output_payload == ""
def test_build_step_run_response_handles_missing_snapshots():
    """Absent snapshots produce all-None response fields rather than errors."""
    step_run = _build_step_run(
        request_snapshot=None,
        response_snapshot=None,
    )
    response = _build_step_run_response(step_run)
    assert response.method is None
    assert response.status_code is None
    assert response.accepted_payload is None
    assert response.output_payload is None
+11
View File
@@ -0,0 +1,11 @@
from httpx import AsyncClient, ASGITransport
import pytest
from app.main import app
@pytest.mark.asyncio
async def test_ping():
    """The health endpoint answers 200 with {"status": "ok"}."""
    # Use ASGITransport for modern httpx versions.
    async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
        response = await ac.get("/api/ping")
        assert response.status_code == 200
        assert response.json() == {"status": "ok"}
+123
View File
@@ -0,0 +1,123 @@
from __future__ import annotations
from uuid import uuid4
from app.models.capability import Capability, CapabilityType
from app.services.pipeline_service import PipelineService
from app.services.semantic_selection import SelectedCapability
def _build_capability(*, name: str, required_inputs: list[str] | None = None) -> Capability:
    """Create an atomic capability; *required_inputs* becomes its input schema.

    When required_inputs is None the capability declares no input schema.
    """
    schema = None
    if required_inputs is not None:
        properties = {field: {"type": "string"} for field in required_inputs}
        schema = {
            "type": "object",
            "required": required_inputs,
            "properties": properties,
        }
    return Capability(
        id=uuid4(),
        action_id=uuid4(),
        type=CapabilityType.ATOMIC,
        name=name,
        input_schema=schema,
        output_schema={"type": "object"},
    )
def _select(capability: Capability) -> SelectedCapability:
    """Wrap *capability* as a high-confidence selection with score 1.0."""
    return SelectedCapability(
        capability=capability,
        score=1.0,
        confidence_tier="high",
    )
def test_extract_required_inputs_from_node_merges_all_endpoints():
    """Required inputs from every endpoint merge into one deduplicated,
    order-preserving list."""
    service = PipelineService(session=None)  # type: ignore[arg-type]
    first = {"input_type": {"type": "object", "required": ["users", "campaignId"]}}
    second = {"input_type": {"type": "object", "required": ["segments", "users"]}}
    node = {"step": 1, "endpoints": [first, second]}

    merged = service._extract_required_inputs_from_node(node)

    # "users" appears once, keeping first-seen order across endpoints.
    assert merged == ["users", "campaignId", "segments"]
def test_normalize_workflow_preserves_multi_endpoint_nodes():
    """A node carrying several valid endpoints survives normalization intact,
    with capability and action ids resolved per endpoint."""
    cap_users = _build_capability(name="Get users", required_inputs=["users"])
    cap_segments = _build_capability(name="Build segments", required_inputs=["users"])
    service = PipelineService(session=None)  # type: ignore[arg-type]
    raw_graph = {
        "nodes": [
            {
                "step": 1,
                "name": "Composite-like node",
                "endpoints": [
                    {"capability_id": str(cap_users.id)},
                    {"capability_id": str(cap_segments.id)},
                ],
            }
        ],
        "edges": [],
    }

    nodes, edges, issues = service._normalize_workflow(
        raw_graph, [_select(cap_users), _select(cap_segments)]
    )

    assert issues == []
    assert edges == []
    assert len(nodes) == 1
    endpoints = nodes[0]["endpoints"]
    assert len(endpoints) == 2
    # Both endpoints keep their order and gain resolved ids.
    assert [e["capability_id"] for e in endpoints] == [
        str(cap_users.id),
        str(cap_segments.id),
    ]
    assert [e["action_id"] for e in endpoints] == [
        str(cap_users.action_id),
        str(cap_segments.action_id),
    ]
def test_normalize_workflow_flags_invalid_endpoint_capability_refs():
    """Endpoints referencing unknown capability ids are dropped and reported."""
    capability = _build_capability(name="Get users", required_inputs=["users"])
    service = PipelineService(session=None)  # type: ignore[arg-type]
    node = {
        "step": 1,
        "name": "Node with invalid endpoint",
        "endpoints": [
            {"capability_id": str(uuid4())},  # not among the selected capabilities
            {"capability_id": str(capability.id)},
        ],
    }

    nodes, _edges, issues = service._normalize_workflow(
        {"nodes": [node], "edges": []}, [_select(capability)]
    )

    assert "graph:invalid_capability_ref" in issues
    assert len(nodes) == 1
    surviving = nodes[0]["endpoints"]
    assert len(surviving) == 1
    assert surviving[0]["capability_id"] == str(capability.id)
+41
View File
@@ -0,0 +1,41 @@
from __future__ import annotations
from types import SimpleNamespace
from app.services.semantic_selection import SemanticSelectionService
def test_score_maps_ru_users_query_to_en_capability_tokens():
    """A Russian "get users" query must score high against an English-named
    capability via token expansion."""
    service = SemanticSelectionService()
    tokens = service._tokenize("Хочу получить пользователей")
    expanded = service._expand_tokens(tokens)
    capability = SimpleNamespace(name="get_users", description="Get users list")

    score = service._score_capability(tokens, expanded, capability)

    assert score >= 0.45
def test_score_uses_capability_action_context_tokens():
    """Tokens drawn from the llm_payload action-context brief must contribute
    a non-zero score even when name/description alone would not match."""
    service = SemanticSelectionService()
    tokens = service._tokenize("Отправь email по кампании")
    expanded = service._expand_tokens(tokens)
    context_brief = {
        "method": "POST",
        "path": "/v1/campaigns/emails/send",
        "tags": ["campaign", "email"],
        "summary": "Send campaign emails",
    }
    capability = SimpleNamespace(
        name="execute_action",
        description="General API action",
        llm_payload={"action_context_brief": context_brief},
    )

    score = service._score_capability(tokens, expanded, capability)

    assert score > 0.0
@@ -0,0 +1,248 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import UUID, uuid4
import pytest
from httpx import ASGITransport, AsyncClient, Response
from app.core.database.session import get_session
from app.main import app
from app.models import Pipeline, PipelineStatus, User, UserRole
from app.utils.token_manager import get_current_user
class FakeSession:
    """Minimal async-session stand-in that can serve at most one pipeline row."""

    def __init__(self, pipeline: Pipeline | None):
        self.pipeline = pipeline  # the single row this "session" knows about
        self.committed = False    # flipped once commit() has run

    async def get(self, model, key: UUID):
        """Return the stored pipeline only for a matching model/key pair."""
        found = (
            model is Pipeline
            and self.pipeline is not None
            and key == self.pipeline.id
        )
        return self.pipeline if found else None

    async def commit(self):
        """Record the commit and bump updated_at, like a real flush would."""
        self.committed = True
        if self.pipeline is not None:
            self.pipeline.updated_at = datetime.now(timezone.utc)

    async def refresh(self, _obj):
        """No-op: in-memory objects have nothing to reload."""
        return None
@pytest.fixture(autouse=True)
def clear_dependency_overrides():
    """Reset FastAPI dependency overrides before and after every test here."""
    overrides = app.dependency_overrides
    overrides.clear()  # start from a clean slate
    yield
    overrides.clear()  # drop whatever the test installed
def _build_user(*, user_id: UUID, role: UserRole = UserRole.USER) -> User:
    """Construct an active user with deterministic fields and fresh timestamps."""
    now = datetime.now(timezone.utc)
    user = User(
        id=user_id,
        email=f"{user_id}@example.com",
        hashed_password="hashed",
        role=role,
        is_active=True,
    )
    user.created_at = now
    user.updated_at = now
    return user
def _build_pipeline(*, pipeline_id: UUID, owner_id: UUID) -> Pipeline:
    """Build a two-step draft pipeline owned by *owner_id*.

    Step 1 carries stale connection refs (99/98) so tests can verify that a
    graph PATCH rewrites them from the submitted edges.
    """

    def node(step: int, name: str, inputs_from: list[int], outputs_to: list[int]) -> dict:
        # One pipeline node in the shape the graph endpoint expects.
        return {
            "step": step,
            "name": name,
            "description": None,
            "input_connected_from": inputs_from,
            "output_connected_to": outputs_to,
            "input_data_type_from_previous": [],
            "external_inputs": [],
            "endpoints": [],
        }

    pipeline = Pipeline(
        id=pipeline_id,
        name="Travel pipeline",
        description=None,
        user_prompt=None,
        nodes=[
            node(1, "Get users", [99], [98]),
            node(2, "Segment users", [], []),
        ],
        edges=[],
        status=PipelineStatus.DRAFT,
        created_by=owner_id,
    )
    now = datetime.now(timezone.utc)
    pipeline.created_at = now
    pipeline.updated_at = now
    return pipeline
async def _patch_graph(pipeline_id: UUID, payload: dict) -> Response:
    """Send *payload* to the pipeline-graph PATCH route via an in-process client."""
    transport = ASGITransport(app=app)
    url = f"/api/v1/pipelines/{pipeline_id}/graph"
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        return await client.patch(url, json=payload)
@pytest.mark.asyncio
async def test_patch_graph_success_for_owner_normalizes_connections():
    """The owner can PATCH the graph; node connections are rebuilt from edges."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    pipeline = _build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id)
    fake_session = FakeSession(pipeline)

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    edges = [{"from_step": 1, "to_step": 2, "type": "users"}]
    response = await _patch_graph(pipeline_id, {"nodes": pipeline.nodes, "edges": edges})

    assert response.status_code == 200
    body = response.json()
    assert body["pipeline_id"] == str(pipeline_id)
    assert body["edges"] == edges
    # Connection fields are derived from the submitted edges, replacing the
    # stale 99/98 values seeded by _build_pipeline.
    assert body["nodes"][0]["output_connected_to"] == [2]
    assert body["nodes"][1]["input_connected_from"] == [1]
    assert body["nodes"][1]["input_data_type_from_previous"] == [
        {"from_step": 1, "type": "users"}
    ]
    assert isinstance(body["updated_at"], str)
    assert fake_session.committed is True
@pytest.mark.asyncio
async def test_patch_graph_returns_404_for_non_owner():
    """A user who does not own the pipeline gets a 404, not the graph."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    pipeline = _build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id)
    fake_session = FakeSession(pipeline)

    async def override_session():
        yield fake_session

    async def override_user():
        # Authenticated, but not the pipeline's owner.
        return _build_user(user_id=uuid4())

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    response = await _patch_graph(
        pipeline_id,
        {
            "nodes": pipeline.nodes,
            "edges": [{"from_step": 1, "to_step": 2, "type": "users"}],
        },
    )

    assert response.status_code == 404
@pytest.mark.asyncio
async def test_patch_graph_rejects_cycle():
    """Edges forming a cycle fail validation with a 422 and a cycle error."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    pipeline = _build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id)
    fake_session = FakeSession(pipeline)

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    cyclic_edges = [
        {"from_step": 1, "to_step": 2, "type": "users"},
        {"from_step": 2, "to_step": 1, "type": "segments"},
    ]
    response = await _patch_graph(
        pipeline_id, {"nodes": pipeline.nodes, "edges": cyclic_edges}
    )

    assert response.status_code == 422
    body = response.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: cycle" in body["details"]["errors"]
@pytest.mark.asyncio
async def test_patch_graph_rejects_edge_to_missing_node():
    """An edge pointing at a nonexistent step is rejected with a 422."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    pipeline = _build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id)
    fake_session = FakeSession(pipeline)

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    # Step 999 does not exist in the two-node pipeline.
    dangling_edges = [{"from_step": 1, "to_step": 999, "type": "users"}]
    response = await _patch_graph(
        pipeline_id, {"nodes": pipeline.nodes, "edges": dangling_edges}
    )

    assert response.status_code == 422
    body = response.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: edge_to_missing_node:1->999" in body["details"]["errors"]
@pytest.mark.asyncio
async def test_patch_graph_rejects_duplicate_edge_triplets():
    """Two edges with the same (from, to, type) triplet fail validation."""
    owner_id = uuid4()
    pipeline_id = uuid4()
    pipeline = _build_pipeline(pipeline_id=pipeline_id, owner_id=owner_id)
    fake_session = FakeSession(pipeline)

    async def override_session():
        yield fake_session

    async def override_user():
        return _build_user(user_id=owner_id)

    app.dependency_overrides[get_session] = override_session
    app.dependency_overrides[get_current_user] = override_user

    duplicate = {"from_step": 1, "to_step": 2, "type": "users"}
    response = await _patch_graph(
        pipeline_id, {"nodes": pipeline.nodes, "edges": [duplicate, dict(duplicate)]}
    )

    assert response.status_code == 422
    body = response.json()
    assert body["code"] == "VALIDATION_FAILED"
    assert "graph: duplicate_edge:1->2:users" in body["details"]["errors"]