feat(taskflow): add core task API, storage persistence, csv export, stats page, and test coverage

This commit is contained in:
Alexander Kalinovsky
2026-04-01 17:56:03 +03:00
commit 19d659df6b
31 changed files with 4197 additions and 0 deletions

15
app/storage/__init__.py Normal file
View File

@@ -0,0 +1,15 @@
"""Public surface of the storage package.

Re-exports the factory, persistence models, and repository so callers can
write ``from app.storage import create_task_repository`` without knowing
the internal module layout.
"""

from app.storage.factory import DEFAULT_TASKS_FILE, create_task_repository
from app.storage.models import DATA_FORMAT_VERSION, StoragePayloadV1, StoredTask
from app.storage.repository import JsonFileTaskRepository, StorageError, StorageTaskNotFoundError

# Explicit public API, kept alphabetically sorted.
__all__ = [
    "DATA_FORMAT_VERSION",
    "DEFAULT_TASKS_FILE",
    "JsonFileTaskRepository",
    "StorageError",
    "StoragePayloadV1",
    "StorageTaskNotFoundError",
    "StoredTask",
    "create_task_repository",
]

16
app/storage/factory.py Normal file
View File

@@ -0,0 +1,16 @@
from __future__ import annotations
from pathlib import Path
from app.storage.repository import JsonFileTaskRepository
# Default on-disk location: ./data/tasks.json relative to the working directory.
DEFAULT_DATA_DIR = Path("data")
DEFAULT_TASKS_FILE = DEFAULT_DATA_DIR / "tasks.json"


def create_task_repository(
    file_path: str | Path = DEFAULT_TASKS_FILE,
) -> JsonFileTaskRepository:
    """Build the JSON-file-backed task repository for *file_path*.

    Constructing the repository creates the file (and its parent
    directory) if they do not yet exist.
    """
    repository = JsonFileTaskRepository(file_path=file_path)
    return repository

58
app/storage/models.py Normal file
View File

@@ -0,0 +1,58 @@
from __future__ import annotations
from datetime import datetime
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
DATA_FORMAT_VERSION = 1
class StoredTask(BaseModel):
    """On-disk representation of a single task.

    ``created_at`` is always required; ``started_at`` and ``done_at``
    default to ``None``.
    """

    # extra="forbid" rejects unknown keys; frozen=True makes records immutable.
    model_config = ConfigDict(extra="forbid", frozen=True)

    id: UUID
    # Title length (1..100) is enforced by pydantic on every validation.
    title: str = Field(..., min_length=1, max_length=100)
    created_at: datetime
    started_at: datetime | None = None
    done_at: datetime | None = None
class StoragePayloadV1(BaseModel):
    """Version 1 of the on-disk payload: a format version plus the task list."""

    # Unknown top-level keys are rejected so silent schema drift is caught early.
    model_config = ConfigDict(extra="forbid")

    version: int = Field(default=DATA_FORMAT_VERSION)
    tasks: list[StoredTask] = Field(default_factory=list)
class StoragePayload(BaseModel):
    """
    Version-probe model for the raw on-disk payload.

    Parses only the top-level ``version`` field (extra keys are allowed)
    so ``upgrade_payload`` can inspect the version safely before
    delegating to a concrete versioned schema.
    """

    model_config = ConfigDict(extra="allow")

    version: int
def upgrade_payload(raw_data: dict) -> StoragePayloadV1:
    """
    Upgrade any supported payload version to the latest schema.

    Raises:
        ValueError: if ``raw_data`` carries an unsupported version
            (pydantic validation errors also surface as ``ValueError``).
    """
    # Probe only the version field first; full validation depends on it.
    envelope = StoragePayload.model_validate(raw_data)
    if envelope.version != 1:
        raise ValueError(
            f"Unsupported storage payload version: {envelope.version}. "
            f"Supported versions: 1..{DATA_FORMAT_VERSION}",
        )
    parsed = StoragePayloadV1.model_validate(raw_data)
    # Re-wrap so the returned payload always carries the latest version tag.
    return StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=parsed.tasks)

167
app/storage/repository.py Normal file
View File

@@ -0,0 +1,167 @@
from __future__ import annotations
import json
import shutil
from pathlib import Path
from threading import RLock
from typing import Iterable
from uuid import UUID
from app.storage.models import DATA_FORMAT_VERSION, StoragePayloadV1, StoredTask, upgrade_payload
class StorageError(Exception):
    """Base class for all storage-layer errors."""
class StorageTaskNotFoundError(StorageError):
    """Requested task id was not found in storage."""
class JsonFileTaskRepository:
    """
    File-based task repository.

    Guarantees:
    - versioned payload format (delegates migration to ``upgrade_payload``)
    - atomic writes via temporary file + ``Path.replace``
    - corruption handling: the damaged file is backed up and storage is
      reset to an empty payload instead of raising to the caller
    - every public operation holds a re-entrant lock, so the repository
      is safe to share between threads of one process (concurrent
      processes are NOT coordinated)
    """

    def __init__(self, file_path: str | Path) -> None:
        """Open — and create, if needed — the storage file at *file_path*."""
        self._file_path = Path(file_path)
        self._lock = RLock()
        self._ensure_parent_dir()
        self._ensure_storage_exists()

    @property
    def data_format_version(self) -> int:
        """Latest payload schema version this repository writes."""
        return DATA_FORMAT_VERSION

    def list_tasks(self) -> list[StoredTask]:
        """Return all stored tasks as a fresh list (safe for callers to mutate)."""
        with self._lock:
            return list(self._load_payload().tasks)

    def get_task(self, task_id: UUID) -> StoredTask | None:
        """Return the task with *task_id*, or ``None`` when absent."""
        with self._lock:
            payload = self._load_payload()
            return next((t for t in payload.tasks if t.id == task_id), None)

    def create_task(self, task: StoredTask) -> StoredTask:
        """Append *task* to storage and return it unchanged."""
        with self._lock:
            payload = self._load_payload()
            payload.tasks.append(task)
            self._save_payload(payload)
            return task

    def update_task(self, task: StoredTask) -> StoredTask:
        """
        Replace the stored task that has the same id as *task*.

        Raises:
            StorageTaskNotFoundError: if no stored task has ``task.id``.
        """
        with self._lock:
            payload = self._load_payload()
            if all(existing.id != task.id for existing in payload.tasks):
                raise StorageTaskNotFoundError(f"Task {task.id} not found")
            payload.tasks = [
                task if existing.id == task.id else existing
                for existing in payload.tasks
            ]
            self._save_payload(payload)
            return task

    def delete_task(self, task_id: UUID) -> bool:
        """Delete the task with *task_id*; return True iff something was removed."""
        with self._lock:
            payload = self._load_payload()
            remaining = [t for t in payload.tasks if t.id != task_id]
            if len(remaining) == len(payload.tasks):
                return False  # nothing matched; skip the pointless rewrite
            payload.tasks = remaining
            self._save_payload(payload)
            return True

    def replace_all(self, tasks: Iterable[StoredTask]) -> None:
        """Overwrite storage with exactly *tasks* (order preserved)."""
        with self._lock:
            self._save_payload(
                StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=list(tasks)),
            )

    def _ensure_parent_dir(self) -> None:
        # exist_ok avoids a race when several repositories start at once.
        self._file_path.parent.mkdir(parents=True, exist_ok=True)

    def _ensure_storage_exists(self) -> None:
        """Create an empty payload file on first use."""
        if self._file_path.exists():
            return
        self._save_payload(StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=[]))

    def _load_payload(self) -> StoragePayloadV1:
        """
        Read and validate the storage file, upgrading old versions.

        A missing file is recreated empty.  A corrupted file (unreadable,
        invalid JSON, failed validation, or unsupported version) is backed
        up and storage is reset to an empty payload — corruption never
        raises to the caller.
        """
        if not self._file_path.exists():
            payload = StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=[])
            self._save_payload(payload)
            return payload
        try:
            raw_data = json.loads(self._file_path.read_text(encoding="utf-8"))
            return upgrade_payload(raw_data)
        except (OSError, ValueError):
            # ValueError covers json.JSONDecodeError, pydantic's
            # ValidationError, and upgrade_payload's version error;
            # OSError covers unreadable files.  Anything else is a
            # programming bug and must propagate instead of silently
            # wiping user data (the previous bare `except Exception`
            # reset storage on arbitrary bugs too).
            self._backup_corrupted_file()
            reset_payload = StoragePayloadV1(version=DATA_FORMAT_VERSION, tasks=[])
            self._save_payload(reset_payload)
            return reset_payload

    def _save_payload(self, payload: StoragePayloadV1) -> None:
        """Atomically serialize *payload* to the storage file."""
        tmp_path = self._file_path.with_name(f"{self._file_path.name}.tmp")
        serialized = json.dumps(
            payload.model_dump(mode="json"),
            ensure_ascii=False,
            indent=2,
        )
        try:
            tmp_path.write_text(serialized + "\n", encoding="utf-8")
            # Atomic on POSIX and Windows: readers see the old file or the
            # new one, never a partial write.
            tmp_path.replace(self._file_path)
        except BaseException:
            # Don't leave a stale .tmp file behind after a failed write.
            tmp_path.unlink(missing_ok=True)
            raise

    def _backup_corrupted_file(self) -> None:
        """Copy the damaged storage file to a ``.corrupted[.N]`` sibling."""
        if not self._file_path.exists():
            return
        shutil.copy2(self._file_path, self._next_backup_path())

    def _next_backup_path(self) -> Path:
        """Return the first free backup path: name.corrupted, name.corrupted.1, ..."""
        base_name = f"{self._file_path.name}.corrupted"
        candidate = self._file_path.with_name(base_name)
        index = 1
        while candidate.exists():
            candidate = self._file_path.with_name(f"{base_name}.{index}")
            index += 1
        return candidate