feat(taskflow): add core task API, storage persistence, csv export, stats page, and test coverage

This commit is contained in:
Alexander Kalinovsky
2026-04-01 17:56:03 +03:00
commit 19d659df6b
31 changed files with 4197 additions and 0 deletions

1
app/__init__.py Normal file
View File

@@ -0,0 +1 @@

6
app/api/__init__.py Normal file
View File

@@ -0,0 +1,6 @@
from app.api.health import router as health_router
from app.api.stats import router as stats_router
from app.api.tasks import router as tasks_router
__all__ = ["health_router", "stats_router", "tasks_router"]

22
app/api/dto/__init__.py Normal file
View File

@@ -0,0 +1,22 @@
from app.api.dto.tasks import (
ErrorDTO,
TaskCreateDTO,
TaskDTO,
TaskExportRowDTO,
TaskListDTO,
TaskListItemDTO,
TaskQueryDTO,
TaskUpdateDTO,
)
__all__ = [
"ErrorDTO",
"TaskCreateDTO",
"TaskDTO",
"TaskExportRowDTO",
"TaskListDTO",
"TaskListItemDTO",
"TaskQueryDTO",
"TaskUpdateDTO",
]

142
app/api/dto/tasks.py Normal file
View File

@@ -0,0 +1,142 @@
from __future__ import annotations
from datetime import datetime
from typing import Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field, computed_field, field_validator
TaskStatus = Literal["backlog", "in_progress", "done"]
class ErrorDTO(BaseModel):
    """Standard error body: a machine-readable code plus a human-readable message."""
    error: str = Field(..., examples=["invalid_id"])
    message: str = Field(..., examples=["Task was not found"])
class TaskCreateDTO(BaseModel):
    """Payload for creating a new task; unknown fields are rejected."""
    model_config = ConfigDict(extra="forbid")
    title: str = Field(
        ...,
        min_length=1,
        max_length=100,
        description="Task title",
        examples=["Implement CSV export"],
    )
    @field_validator("title")
    @classmethod
    def validate_title(cls, value: str) -> str:
        """Strip surrounding whitespace and reject titles that become blank."""
        stripped = value.strip()
        if stripped:
            return stripped
        raise ValueError("Title must not be empty")
class TaskUpdateDTO(BaseModel):
    """Partial update payload; only the title can be changed."""
    model_config = ConfigDict(extra="forbid")
    title: str | None = Field(
        default=None,
        min_length=1,
        max_length=100,
        description="Updated task title",
        examples=["Fix workflow metrics"],
    )
    @field_validator("title")
    @classmethod
    def validate_title(cls, value: str | None) -> str | None:
        """Strip whitespace; None means the title is left unchanged."""
        if value is None:
            return None
        stripped = value.strip()
        if not stripped:
            raise ValueError("Title must not be empty")
        return stripped
class TaskDTO(BaseModel):
    """Full task representation returned by single-task endpoints.

    `status` is not stored; it is derived from the timestamps:
    done_at set -> "done", else started_at set -> "in_progress",
    else "backlog".
    """
    model_config = ConfigDict(
        extra="forbid",
        frozen=True,
        populate_by_name=True,
    )
    id: UUID
    title: str = Field(..., min_length=1, max_length=100)
    created_at: datetime
    started_at: datetime | None = None
    done_at: datetime | None = None
    @computed_field(return_type=str)
    @property
    def status(self) -> TaskStatus:
        # Derived workflow state; keep in sync with TaskListItemDTO.status.
        if self.done_at is not None:
            return "done"
        if self.started_at is not None:
            return "in_progress"
        return "backlog"
class TaskListItemDTO(BaseModel):
    """Task representation used inside list responses.

    NOTE(review): currently field-for-field identical to TaskDTO; kept as a
    separate type so the list shape can diverge (e.g. drop fields) later.
    """
    model_config = ConfigDict(
        extra="forbid",
        frozen=True,
        populate_by_name=True,
    )
    id: UUID
    title: str = Field(..., min_length=1, max_length=100)
    created_at: datetime
    started_at: datetime | None = None
    done_at: datetime | None = None
    @computed_field(return_type=str)
    @property
    def status(self) -> TaskStatus:
        # Derived workflow state; keep in sync with TaskDTO.status.
        if self.done_at is not None:
            return "done"
        if self.started_at is not None:
            return "in_progress"
        return "backlog"
class TaskListDTO(BaseModel):
    """One page of tasks plus paging metadata."""
    model_config = ConfigDict(extra="forbid", frozen=True)
    items: list[TaskListItemDTO]
    # total counts all matches after filtering, not just this page.
    total: int = Field(..., ge=0)
    limit: int = Field(..., ge=1, le=1000)
    offset: int = Field(..., ge=0)
class TaskQueryDTO(BaseModel):
    """Validated query parameters for the task-list endpoint."""
    model_config = ConfigDict(extra="forbid")
    limit: int = Field(default=100, ge=1, le=1000)
    offset: int = Field(default=0, ge=0)
    status: TaskStatus | None = Field(default=None)
    search: str | None = Field(default=None, max_length=100)
    @field_validator("search")
    @classmethod
    def normalize_search(cls, value: str | None) -> str | None:
        """Trim whitespace; a blank search collapses to None (no filter)."""
        if value is None:
            return None
        value = value.strip()
        return value or None
class TaskExportRowDTO(BaseModel):
    """One row of the CSV export; status is a stored field here, not computed."""
    model_config = ConfigDict(extra="forbid", frozen=True)
    id: UUID
    title: str
    status: TaskStatus
    created_at: datetime
    started_at: datetime | None = None
    done_at: datetime | None = None

18
app/api/health.py Normal file
View File

@@ -0,0 +1,18 @@
from datetime import datetime, UTC
from fastapi import APIRouter
router = APIRouter()
@router.get("/health")
def health() -> dict:
    """Health probe: reports liveness and the current UTC server time (ISO-8601)."""
    now = datetime.now(UTC)
    return {"status": "ok", "server_time": now.isoformat()}

80
app/api/stats.py Normal file
View File

@@ -0,0 +1,80 @@
from __future__ import annotations
from pathlib import Path
from fastapi import APIRouter, Depends, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from app.api.tasks import get_task_repository
from app.storage import JsonFileTaskRepository, StoredTask
router = APIRouter(tags=["stats"])
templates = Jinja2Templates(directory=str(Path(__file__).resolve().parents[2] / "templates"))
def get_task_status(task: StoredTask) -> str:
    """Derive the workflow column from a task's timestamps."""
    if task.done_at is not None:
        return "done"
    return "in_progress" if task.started_at is not None else "backlog"
def format_dt(value: object) -> str:
    """Render an optional value for display; None becomes the empty string."""
    return "" if value is None else str(value)
def format_duration(task: StoredTask) -> str:
    """Format the started->done cycle time as "Xh Ym Zs"; empty when incomplete."""
    started, done = task.started_at, task.done_at
    if started is None or done is None:
        return ""
    total = int((done - started).total_seconds())
    whole_minutes, secs = divmod(total, 60)
    hours, mins = divmod(whole_minutes, 60)
    return f"{hours}h {mins}m {secs}s"
def serialize_task(task: StoredTask) -> dict[str, str]:
    """Flatten a stored task into template-ready display strings."""
    def iso_or_empty(value):
        return value.isoformat() if value is not None else ""
    return {
        "id": str(task.id),
        "title": task.title,
        "status": get_task_status(task),
        "created_at": task.created_at.isoformat(),
        "started_at": iso_or_empty(task.started_at),
        "done_at": iso_or_empty(task.done_at),
        "cycle_time": format_duration(task),
    }
@router.get("/stats", response_class=HTMLResponse)
def stats_page(
    request: Request,
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> HTMLResponse:
    """Render the stats page with tasks grouped by workflow column."""
    tasks = repo.list_tasks()
    buckets: dict[str, list[dict[str, str]]] = {
        "backlog": [],
        "in_progress": [],
        "done": [],
    }
    for task in tasks:
        buckets[get_task_status(task)].append(serialize_task(task))
    # The first stored task (if any) is pre-selected for the detail panel.
    selected_task = serialize_task(tasks[0]) if tasks else None
    return templates.TemplateResponse(
        request=request,
        name="stats.html",
        context={
            "selected_task": selected_task,
            "backlog_tasks": buckets["backlog"],
            "in_progress_tasks": buckets["in_progress"],
            "done_tasks": buckets["done"],
        },
    )

338
app/api/tasks.py Normal file
View File

@@ -0,0 +1,338 @@
from __future__ import annotations
import csv
from io import StringIO
from datetime import UTC, datetime
from functools import lru_cache
from uuid import UUID, uuid4
from fastapi import APIRouter, Body, Depends, Path, Query, Response, status
from fastapi.responses import JSONResponse
from app.api.dto.tasks import (
ErrorDTO,
TaskCreateDTO,
TaskDTO,
TaskListDTO,
TaskListItemDTO,
TaskQueryDTO,
TaskUpdateDTO,
)
from app.domain import InvalidTransitionError, complete_task, start_task
from app.storage import JsonFileTaskRepository, StoredTask, create_task_repository
router = APIRouter(prefix="/api/tasks", tags=["tasks"])
@lru_cache(maxsize=1)
def get_task_repository() -> JsonFileTaskRepository:
    """Return the process-wide task repository (lru_cache makes it a singleton)."""
    return create_task_repository()
def error_response(
    *,
    status_code: int,
    error: str,
    message: str,
) -> JSONResponse:
    """Build the standard JSON error body ({error, message}) with the given HTTP status."""
    body = ErrorDTO(error=error, message=message).model_dump(mode="json")
    return JSONResponse(status_code=status_code, content=body)
def parse_task_id(task_id: str) -> UUID | None:
    """Parse *task_id* as a UUID; return None when it is not a valid one."""
    try:
        parsed = UUID(task_id)
    except (TypeError, ValueError):
        return None
    return parsed
def to_task_dto(task: StoredTask) -> TaskDTO:
    """Convert a stored task to the single-task API representation."""
    return TaskDTO.model_validate(task.model_dump(mode="python"))
def to_task_list_item_dto(task: StoredTask) -> TaskListItemDTO:
    """Convert a stored task to the list-item API representation."""
    return TaskListItemDTO.model_validate(task.model_dump(mode="python"))
def protect_csv_cell(value: str) -> str:
    """Neutralize spreadsheet formula injection in a CSV cell.

    Cells whose first non-whitespace character is a formula trigger
    (=, +, -, @) are prefixed with a single quote so spreadsheet tools
    render them as text. A leading tab or carriage return is also
    escaped, since some tools treat those as formula triggers too
    (per OWASP CSV-injection guidance).
    """
    # Tab/CR would be removed by lstrip below, so check them on the raw value.
    if value.startswith(("\t", "\r")):
        return f"'{value}"
    stripped = value.lstrip()
    if stripped.startswith(("=", "+", "-", "@")):
        return f"'{value}"
    return value
def build_tasks_csv(tasks: list[StoredTask]) -> str:
    """Serialize tasks into CSV text: header row plus one row per task."""
    def iso_or_empty(value):
        return value.isoformat() if value is not None else ""
    out = StringIO()
    writer = csv.writer(out, lineterminator="\n")
    writer.writerow(["id", "title", "status", "created_at", "started_at", "done_at"])
    writer.writerows(
        [
            str(task.id),
            protect_csv_cell(task.title),
            to_task_list_item_dto(task).status,
            task.created_at.isoformat(),
            iso_or_empty(task.started_at),
            iso_or_empty(task.done_at),
        ]
        for task in tasks
    )
    return out.getvalue()
def get_task_or_error(
    *,
    repo: JsonFileTaskRepository,
    task_id: str,
) -> tuple[StoredTask | None, JSONResponse | None]:
    """Resolve *task_id* to a stored task or the matching error response.

    Returns (task, None) on success; (None, 400-response) for a malformed
    id; (None, 404-response) when no task has that id.
    """
    parsed = parse_task_id(task_id)
    if parsed is None:
        response = error_response(
            status_code=status.HTTP_400_BAD_REQUEST,
            error="invalid_id",
            message="Task id must be a valid UUID",
        )
        return None, response
    found = repo.get_task(parsed)
    if found is None:
        response = error_response(
            status_code=status.HTTP_404_NOT_FOUND,
            error="invalid_id",
            message="Task was not found",
        )
        return None, response
    return found, None
@router.get(
    "",
    response_model=TaskListDTO,
    responses={
        400: {"model": ErrorDTO},
    },
)
def list_tasks(
    limit: int = Query(default=100, ge=1, le=1000),
    offset: int = Query(default=0, ge=0),
    status_filter: str | None = Query(default=None, alias="status"),
    search: str | None = Query(default=None, max_length=100),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskListDTO | JSONResponse:
    """List tasks with optional status/search filtering and pagination.

    Query params are re-validated through TaskQueryDTO so an unknown
    status or out-of-range value yields the standard 400 error body.
    """
    try:
        query = TaskQueryDTO(
            limit=limit,
            offset=offset,
            status=status_filter,
            search=search,
        )
    # pydantic's ValidationError subclasses ValueError; catching ValueError
    # instead of the previous bare `except Exception` keeps this handler
    # from masking unrelated bugs (e.g. AttributeError) as a 400.
    except ValueError:
        return error_response(
            status_code=status.HTTP_400_BAD_REQUEST,
            error="invalid_payload",
            message="Invalid query parameters",
        )
    tasks = repo.list_tasks()
    if query.status is not None:
        # Status is derived from timestamps by the DTO's computed field.
        tasks = [
            task for task in tasks if to_task_list_item_dto(task).status == query.status
        ]
    if query.search is not None:
        # Case-insensitive substring match; casefold handles non-ASCII titles.
        needle = query.search.casefold()
        tasks = [task for task in tasks if needle in task.title.casefold()]
    total = len(tasks)
    items = tasks[query.offset : query.offset + query.limit]
    return TaskListDTO(
        items=[to_task_list_item_dto(task) for task in items],
        total=total,
        limit=query.limit,
        offset=query.offset,
    )
@router.get("/export")
def export_tasks_csv(
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> Response:
    """Download all tasks as a CSV file attachment."""
    tasks = repo.list_tasks()
    headers = {"Content-Disposition": 'attachment; filename="tasks.csv"'}
    return Response(
        content=build_tasks_csv(tasks),
        media_type="text/csv",
        headers=headers,
    )
@router.get(
    "/{task_id}",
    response_model=TaskDTO,
    responses={
        400: {"model": ErrorDTO},
        404: {"model": ErrorDTO},
    },
)
def get_task(
    task_id: str = Path(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskDTO | JSONResponse:
    """Fetch one task by id (400 on malformed id, 404 when absent)."""
    found, failure = get_task_or_error(repo=repo, task_id=task_id)
    if failure is not None:
        return failure
    return to_task_dto(found)
@router.post(
    "",
    response_model=TaskDTO,
    status_code=status.HTTP_201_CREATED,
    responses={
        400: {"model": ErrorDTO},
    },
)
def create_task(
    payload: TaskCreateDTO = Body(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskDTO | JSONResponse:
    """Create a new backlog task from the validated payload (201)."""
    new_task = StoredTask(
        id=uuid4(),
        title=payload.title,
        created_at=datetime.now(UTC),
        started_at=None,
        done_at=None,
    )
    return to_task_dto(repo.create_task(new_task))
@router.patch(
    "/{task_id}",
    response_model=TaskDTO,
    responses={
        400: {"model": ErrorDTO},
        404: {"model": ErrorDTO},
    },
)
def update_task(
    task_id: str = Path(...),
    payload: TaskUpdateDTO = Body(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskDTO | JSONResponse:
    """Rename a task; timestamps and workflow state are left untouched."""
    current, failure = get_task_or_error(repo=repo, task_id=task_id)
    if failure is not None:
        return failure
    new_title = current.title if payload.title is None else payload.title
    renamed = StoredTask(
        id=current.id,
        title=new_title,
        created_at=current.created_at,
        started_at=current.started_at,
        done_at=current.done_at,
    )
    repo.update_task(renamed)
    return to_task_dto(renamed)
@router.delete(
    "/{task_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    response_class=Response,
    responses={
        400: {"model": ErrorDTO},
        404: {"model": ErrorDTO},
    },
)
def delete_task(
    task_id: str = Path(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> Response:
    """Delete a task: 204 on success, 400 for a bad id, 404 when unknown."""
    parsed = parse_task_id(task_id)
    if parsed is None:
        return error_response(
            status_code=status.HTTP_400_BAD_REQUEST,
            error="invalid_id",
            message="Task id must be a valid UUID",
        )
    if not repo.delete_task(parsed):
        return error_response(
            status_code=status.HTTP_404_NOT_FOUND,
            error="invalid_id",
            message="Task was not found",
        )
    return Response(status_code=status.HTTP_204_NO_CONTENT)
@router.post(
    "/{task_id}/start",
    response_model=TaskDTO,
    responses={
        400: {"model": ErrorDTO},
        404: {"model": ErrorDTO},
        409: {"model": ErrorDTO},
    },
)
def start_task_endpoint(
    task_id: str = Path(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskDTO | JSONResponse:
    """Transition a task to in_progress; 409 when the transition is invalid."""
    task, error = get_task_or_error(repo=repo, task_id=task_id)
    if error is not None:
        return error
    try:
        updated_task = start_task(task)
    except InvalidTransitionError as exc:
        # NOTE(review): wire code says "invalid_transaction" but the exception
        # is InvalidTransitionError — likely a typo for "invalid_transition".
        # Renaming changes the public API; confirm with consumers first.
        return error_response(
            status_code=status.HTTP_409_CONFLICT,
            error="invalid_transaction",
            message=str(exc),
        )
    repo.update_task(updated_task)
    return to_task_dto(updated_task)
@router.post(
    "/{task_id}/done",
    response_model=TaskDTO,
    responses={
        400: {"model": ErrorDTO},
        404: {"model": ErrorDTO},
        409: {"model": ErrorDTO},
    },
)
def complete_task_endpoint(
    task_id: str = Path(...),
    repo: JsonFileTaskRepository = Depends(get_task_repository),
) -> TaskDTO | JSONResponse:
    """Transition a task to done; 409 when it was never started."""
    task, error = get_task_or_error(repo=repo, task_id=task_id)
    if error is not None:
        return error
    try:
        updated_task = complete_task(task)
    except InvalidTransitionError as exc:
        # NOTE(review): wire code says "invalid_transaction" but the exception
        # is InvalidTransitionError — likely a typo for "invalid_transition".
        # Renaming changes the public API; confirm with consumers first.
        return error_response(
            status_code=status.HTTP_409_CONFLICT,
            error="invalid_transaction",
            message=str(exc),
        )
    repo.update_task(updated_task)
    return to_task_dto(updated_task)

8
app/domain/__init__.py Normal file
View File

@@ -0,0 +1,8 @@
from app.domain.tasks import InvalidTransitionError, complete_task, start_task
__all__ = [
"InvalidTransitionError",
"start_task",
"complete_task",
]

60
app/domain/tasks.py Normal file
View File

@@ -0,0 +1,60 @@
from __future__ import annotations
from datetime import datetime, UTC
from app.storage.models import StoredTask
class InvalidTransitionError(Exception):
    """Raised when a task workflow transition is invalid (starting a done
    task, or completing a task that was never started)."""
def start_task(task: StoredTask) -> StoredTask:
    """Transition a task backlog → in_progress.

    Idempotent for already-started tasks (started_at is preserved);
    raises InvalidTransitionError for tasks that are already done.
    """
    if task.done_at is not None:
        # Starting a completed task is not a legal transition.
        raise InvalidTransitionError("invalid_transaction")
    if task.started_at is not None:
        # Repeated start: keep the original started_at untouched.
        return task
    # done_at is known to be None here.
    return StoredTask(
        id=task.id,
        title=task.title,
        created_at=task.created_at,
        started_at=datetime.now(UTC),
        done_at=None,
    )
def complete_task(task: StoredTask) -> StoredTask:
    """Transition a task in_progress → done.

    Idempotent for already-done tasks (done_at is preserved);
    raises InvalidTransitionError for tasks that were never started.
    """
    if task.started_at is None:
        # Cannot finish work that was never started.
        raise InvalidTransitionError("invalid_transaction")
    if task.done_at is not None:
        # Repeated completion: keep the original done_at untouched.
        return task
    return StoredTask(
        id=task.id,
        title=task.title,
        created_at=task.created_at,
        started_at=task.started_at,
        done_at=datetime.now(UTC),
    )

22
app/main.py Normal file
View File

@@ -0,0 +1,22 @@
from fastapi import FastAPI
from app.api.health import router as health_router
from app.api.stats import router as stats_router
from app.api.tasks import router as tasks_router
def create_app() -> FastAPI:
    """Application factory: build the FastAPI app and mount all routers."""
    application = FastAPI(
        title="TaskFlow",
        version="1.0.0",
    )
    # Order preserved: health first, then the task API, then the stats page.
    for api_router in (health_router, tasks_router, stats_router):
        application.include_router(api_router)
    return application
app = create_app()

15
app/storage/__init__.py Normal file
View File

@@ -0,0 +1,15 @@
from app.storage.factory import DEFAULT_TASKS_FILE, create_task_repository
from app.storage.models import DATA_FORMAT_VERSION, StoragePayloadV1, StoredTask
from app.storage.repository import JsonFileTaskRepository, StorageError, StorageTaskNotFoundError
__all__ = [
"DATA_FORMAT_VERSION",
"DEFAULT_TASKS_FILE",
"JsonFileTaskRepository",
"StorageError",
"StoragePayloadV1",
"StorageTaskNotFoundError",
"StoredTask",
"create_task_repository",
]

16
app/storage/factory.py Normal file
View File

@@ -0,0 +1,16 @@
from __future__ import annotations
from pathlib import Path
from app.storage.repository import JsonFileTaskRepository
DEFAULT_DATA_DIR = Path("data")
DEFAULT_TASKS_FILE = DEFAULT_DATA_DIR / "tasks.json"
def create_task_repository(
    file_path: str | Path = DEFAULT_TASKS_FILE,
) -> JsonFileTaskRepository:
    """Build a JSON-file-backed task repository (defaults to data/tasks.json)."""
    return JsonFileTaskRepository(file_path=file_path)

58
app/storage/models.py Normal file
View File

@@ -0,0 +1,58 @@
from __future__ import annotations
from datetime import datetime
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
DATA_FORMAT_VERSION = 1
class StoredTask(BaseModel):
    """Immutable persisted task record.

    Workflow status is not stored; it is derived elsewhere from the
    started_at/done_at timestamps (None until the respective transition).
    """
    model_config = ConfigDict(extra="forbid", frozen=True)
    id: UUID
    title: str = Field(..., min_length=1, max_length=100)
    created_at: datetime
    started_at: datetime | None = None
    done_at: datetime | None = None
class StoragePayloadV1(BaseModel):
    """Version-1 on-disk payload: the format version plus the task list."""
    model_config = ConfigDict(extra="forbid")
    version: int = Field(default=DATA_FORMAT_VERSION)
    tasks: list[StoredTask] = Field(default_factory=list)
class StoragePayload(BaseModel):
    """
    Version envelope for the storage payload.

    Parses only the top-level version field (extra keys allowed) so a
    concrete versioned schema can be chosen before full validation.
    """
    model_config = ConfigDict(extra="allow")
    version: int
def upgrade_payload(raw_data: dict) -> StoragePayloadV1:
    """
    Upgrade any supported payload version to the latest schema.

    Raises ValueError for an unknown version number.
    """
    envelope = StoragePayload.model_validate(raw_data)
    if envelope.version != 1:
        raise ValueError(
            f"Unsupported storage payload version: {envelope.version}. "
            f"Supported versions: 1..{DATA_FORMAT_VERSION}",
        )
    parsed = StoragePayloadV1.model_validate(raw_data)
    return StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=parsed.tasks)

167
app/storage/repository.py Normal file
View File

@@ -0,0 +1,167 @@
from __future__ import annotations
import json
import shutil
from pathlib import Path
from threading import RLock
from typing import Iterable
from uuid import UUID
from app.storage.models import DATA_FORMAT_VERSION, StoragePayloadV1, StoredTask, upgrade_payload
class StorageError(Exception):
    """Base class for all storage-layer errors."""
class StorageTaskNotFoundError(StorageError):
    """Requested task id does not exist in storage."""
class JsonFileTaskRepository:
    """
    File-based repository with:
    - versioned payload format
    - atomic writes via temporary file + replace
    - corruption handling with backup + reset

    Every public method holds an RLock, so one instance is safe to share
    across threads in a single process. The lock does NOT guard against
    other processes writing the same file concurrently.
    """
    def __init__(self, file_path: str | Path) -> None:
        """Bind to *file_path*, creating the parent dir and seeding an empty store if absent."""
        self._file_path = Path(file_path)
        self._lock = RLock()
        self._ensure_parent_dir()
        self._ensure_storage_exists()
    @property
    def data_format_version(self) -> int:
        """Schema version this repository reads and writes."""
        return DATA_FORMAT_VERSION
    def list_tasks(self) -> list[StoredTask]:
        """Return all tasks in stored order (a fresh list each call)."""
        with self._lock:
            payload = self._load_payload()
            return list(payload.tasks)
    def get_task(self, task_id: UUID) -> StoredTask | None:
        """Return the task with *task_id*, or None when not present."""
        with self._lock:
            payload = self._load_payload()
            for task in payload.tasks:
                if task.id == task_id:
                    return task
            return None
    def create_task(self, task: StoredTask) -> StoredTask:
        """Append *task* to the store, persist, and return the task unchanged."""
        with self._lock:
            payload = self._load_payload()
            payload.tasks.append(task)
            self._save_payload(payload)
            return task
    def update_task(self, task: StoredTask) -> StoredTask:
        """Replace the stored task that shares *task*'s id.

        Raises StorageTaskNotFoundError when no stored task has that id.
        """
        with self._lock:
            payload = self._load_payload()
            updated = False
            new_tasks: list[StoredTask] = []
            for existing_task in payload.tasks:
                if existing_task.id == task.id:
                    new_tasks.append(task)
                    updated = True
                else:
                    new_tasks.append(existing_task)
            if not updated:
                raise StorageTaskNotFoundError(f"Task {task.id} not found")
            payload.tasks = new_tasks
            self._save_payload(payload)
            return task
    def delete_task(self, task_id: UUID) -> bool:
        """Remove the task with *task_id*; returns True when something was deleted."""
        with self._lock:
            payload = self._load_payload()
            initial_count = len(payload.tasks)
            payload.tasks = [task for task in payload.tasks if task.id != task_id]
            if len(payload.tasks) == initial_count:
                # Nothing matched — skip the disk write entirely.
                return False
            self._save_payload(payload)
            return True
    def replace_all(self, tasks: Iterable[StoredTask]) -> None:
        """Overwrite the whole store with *tasks*."""
        with self._lock:
            payload = StoragePayloadV1(
                version=DATA_FORMAT_VERSION,
                tasks=list(tasks),
            )
            self._save_payload(payload)
    def _ensure_parent_dir(self) -> None:
        # Create the data directory (and any intermediate dirs) on first use.
        self._file_path.parent.mkdir(parents=True, exist_ok=True)
    def _ensure_storage_exists(self) -> None:
        # Seed an empty versioned payload so later reads never hit a missing file.
        if self._file_path.exists():
            return
        self._save_payload(
            StoragePayloadV1(
                version=DATA_FORMAT_VERSION,
                tasks=[],
            ),
        )
    def _load_payload(self) -> StoragePayloadV1:
        """Read and upgrade the stored payload.

        Missing file → recreate an empty store. Unreadable or invalid
        file → back up the corrupted copy, then reset to an empty store.
        """
        if not self._file_path.exists():
            payload = StoragePayloadV1(
                version=DATA_FORMAT_VERSION,
                tasks=[],
            )
            self._save_payload(payload)
            return payload
        try:
            raw_text = self._file_path.read_text(encoding="utf-8")
            raw_data = json.loads(raw_text)
            return upgrade_payload(raw_data)
        except Exception:
            # Deliberately broad: any read/parse/validation failure counts as
            # corruption. The original bytes are preserved in a .corrupted
            # backup before the store is reset.
            self._backup_corrupted_file()
            reset_payload = StoragePayloadV1(
                version=DATA_FORMAT_VERSION,
                tasks=[],
            )
            self._save_payload(reset_payload)
            return reset_payload
    def _save_payload(self, payload: StoragePayloadV1) -> None:
        # Atomic write: serialize to a sibling .tmp file, then replace() it
        # over the target so readers never observe a partially written file.
        tmp_path = self._file_path.with_name(f"{self._file_path.name}.tmp")
        serialized = json.dumps(
            payload.model_dump(mode="json"),
            ensure_ascii=False,
            indent=2,
        )
        tmp_path.write_text(serialized + "\n", encoding="utf-8")
        tmp_path.replace(self._file_path)
    def _backup_corrupted_file(self) -> None:
        # Copy (not move) the broken file to the next free .corrupted name.
        if not self._file_path.exists():
            return
        backup_path = self._next_backup_path()
        shutil.copy2(self._file_path, backup_path)
    def _next_backup_path(self) -> Path:
        """Return <name>.corrupted, or <name>.corrupted.N for the first free N."""
        base_name = f"{self._file_path.name}.corrupted"
        candidate = self._file_path.with_name(base_name)
        if not candidate.exists():
            return candidate
        index = 1
        while True:
            candidate = self._file_path.with_name(f"{base_name}.{index}")
            if not candidate.exists():
                return candidate
            index += 1