weeee💃

This commit is contained in:
mohamad 2025-03-30 16:02:49 +02:00
commit 4b7415e1c3
49 changed files with 3890 additions and 0 deletions

30
be/.dockerignore Normal file

@ -0,0 +1,30 @@
# Git files
.git
.gitignore
# Virtual environment
.venv
venv/
env/
ENV/
# Ignore local .env files within the backend directory if any
*.env
# Python cache
__pycache__/
*.py[cod]
*$py.class
# IDE files
.idea/
.vscode/
# Test artifacts
.pytest_cache/
htmlcov/
.coverage*
# Other build/temp files
*.egg-info/
dist/
build/
# e.g., sqlite temp dbs
*.db

141
be/.gitignore vendored Normal file

@ -0,0 +1,141 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# PEP 582; used by PDM, Flit and potentially other tools.
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv/
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static analysis results
.pytype/
# Alembic / local temp databases (e.g. SQLite files used locally for Alembic versions)
*.db
# Alembic autogenerate may create temporary files; adjust for your DB if necessary
# *.sql.tmp
# IDE files
.idea/
.vscode/
# OS generated files
.DS_Store
Thumbs.db

35
be/Dockerfile Normal file

@ -0,0 +1,35 @@
# be/Dockerfile
# Choose a suitable Python base image
FROM python:3.11-slim
# Set environment variables
# Prevent Python from writing .pyc files
ENV PYTHONDONTWRITEBYTECODE=1
# Keep stdout/stderr unbuffered
ENV PYTHONUNBUFFERED=1
# Set the working directory in the container
WORKDIR /app
# Install system dependencies if needed (e.g., for psycopg2 build)
# RUN apt-get update && apt-get install -y --no-install-recommends gcc build-essential libpq-dev && rm -rf /var/lib/apt/lists/*
# Install Python dependencies
# Upgrade pip first
RUN pip install --no-cache-dir --upgrade pip
# Copy only requirements first to leverage Docker cache
COPY requirements.txt requirements.txt
# Install dependencies
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application code into the working directory
COPY . .
# This includes your 'app/' directory, alembic.ini, etc.
# Expose the port the app runs on
EXPOSE 8000
# Command to run the application using uvicorn
# The default command for production (can be overridden in docker-compose for development)
# Note: Make sure 'app.main:app' correctly points to your FastAPI app instance
# relative to the WORKDIR (/app). If your main.py is directly in /app, this is correct.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

119
be/alembic.ini Normal file

@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# sqlalchemy.url is set at runtime by env.py from the application settings
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
be/alembic/README Normal file

@ -0,0 +1 @@
Generic single-database configuration.

96
be/alembic/env.py Normal file

@ -0,0 +1,96 @@
from logging.config import fileConfig
import os
import sys
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# Ensure the 'app' directory is in the Python path
# Adjust the path if your project structure is different
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..')))
# Import your app's Base and settings
from app.models import Base # Import Base from your models module
from app.config import settings # Import settings to get DATABASE_URL
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Set the sqlalchemy.url from your application settings
# Use a synchronous version of the URL for Alembic's operations
sync_db_url = settings.DATABASE_URL.replace("+asyncpg", "") if settings.DATABASE_URL else None
if not sync_db_url:
raise ValueError("DATABASE_URL not found in settings for Alembic.")
config.set_main_option('sqlalchemy.url', sync_db_url)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
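Migrations are normally applied with the Alembic CLI (`alembic upgrade head`) from the `be/` directory. The sketch below is a hedged, optional illustration of doing the same through Alembic's Python API, which runs through this env.py exactly as the CLI does; the `upgrade_to_head` helper and its file are hypothetical, not part of this commit.

```python
# Hedged sketch: apply migrations programmatically instead of via the CLI.
# Assumes it runs from the be/ directory so alembic.ini and the app package resolve.
from alembic import command
from alembic.config import Config

def upgrade_to_head() -> None:
    alembic_cfg = Config("alembic.ini")   # env.py injects sqlalchemy.url from app settings
    command.upgrade(alembic_cfg, "head")  # equivalent to `alembic upgrade head`

if __name__ == "__main__":
    upgrade_to_head()
```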

28
be/alembic/script.py.mako Normal file

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,32 @@
"""Initial database setup
Revision ID: 643956b3f4de
Revises:
Create Date: 2025-03-29 20:49:01.018626
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '643956b3f4de'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View File

@ -0,0 +1,72 @@
"""Add User, Group, UserGroup models
Revision ID: 85a3c075e73a
Revises: c6cbef99588b
Create Date: 2025-03-30 12:46:07.322285
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '85a3c075e73a'
down_revision: Union[str, None] = 'c6cbef99588b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(), nullable=False),
sa.Column('password_hash', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
op.create_index(op.f('ix_users_name'), 'users', ['name'], unique=False)
op.create_table('groups',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('created_by_id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_groups_id'), 'groups', ['id'], unique=False)
op.create_index(op.f('ix_groups_name'), 'groups', ['name'], unique=False)
op.create_table('user_groups',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.Column('role', sa.Enum('owner', 'member', name='userroleenum'), nullable=False),
sa.Column('joined_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='uq_user_group')
)
op.create_index(op.f('ix_user_groups_id'), 'user_groups', ['id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_groups_id'), table_name='user_groups')
op.drop_table('user_groups')
op.drop_index(op.f('ix_groups_name'), table_name='groups')
op.drop_index(op.f('ix_groups_id'), table_name='groups')
op.drop_table('groups')
op.drop_index(op.f('ix_users_name'), table_name='users')
op.drop_index(op.f('ix_users_id'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
# ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""Initial database setup
Revision ID: c6cbef99588b
Revises: 643956b3f4de
Create Date: 2025-03-30 12:18:51.207858
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c6cbef99588b'
down_revision: Union[str, None] = '643956b3f4de'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

0
be/app/__init__.py Normal file

0
be/app/api/__init__.py Normal file

12
be/app/api/api_router.py Normal file

@ -0,0 +1,12 @@
# app/api/api_router.py
from fastapi import APIRouter
from app.api.v1.api import api_router_v1 # Import the v1 router
api_router = APIRouter()
# Include versioned routers here, adding the /api prefix
api_router.include_router(api_router_v1, prefix="/v1") # Mounts v1 endpoints under /api/v1/...
# Add other API versions later
# e.g., api_router.include_router(api_router_v2, prefix="/v2")

View File

12
be/app/api/v1/api.py Normal file

@ -0,0 +1,12 @@
# app/api/v1/api.py
from fastapi import APIRouter
from app.api.v1.endpoints import health # Import the health endpoint router
api_router_v1 = APIRouter()
# Include endpoint routers here, adding the desired prefix for v1
api_router_v1.include_router(health.router) # The path "/health" is defined inside health.router
# Add other v1 endpoint routers here later
# e.g., api_router_v1.include_router(users.router, prefix="/users", tags=["Users"])

View File

View File

@ -0,0 +1,45 @@
# app/api/v1/endpoints/health.py
import logging
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.sql import text
from app.database import get_db # Import the dependency function
from app.schemas.health import HealthStatus # Import the response schema
logger = logging.getLogger(__name__)
router = APIRouter()
@router.get(
"/health",
response_model=HealthStatus,
summary="Perform a Health Check",
description="Checks the operational status of the API and its connection to the database.",
tags=["Health"] # Group this endpoint in Swagger UI
)
async def check_health(db: AsyncSession = Depends(get_db)):
"""
Health check endpoint. Verifies API reachability and database connection.
"""
try:
# Try executing a simple query to check DB connection
result = await db.execute(text("SELECT 1"))
if result.scalar_one() == 1:
logger.info("Health check successful: Database connection verified.")
return HealthStatus(status="ok", database="connected")
else:
# This case should ideally not happen with 'SELECT 1'
logger.error("Health check failed: Database connection check returned unexpected result.")
# Raise 503 Service Unavailable
raise HTTPException(
status_code=503,
detail="Database connection error: Unexpected result"
)
except HTTPException:
# Propagate the 503 raised above without wrapping it in another exception
raise
except Exception as e:
logger.error(f"Health check failed: Database connection error - {e}", exc_info=True) # Log stack trace
# Raise 503 Service Unavailable
raise HTTPException(
status_code=503,
detail=f"Database connection error: {e}"
)
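A hedged sketch of exercising this endpoint end to end. It assumes pytest, pytest-asyncio and httpx (none of which are in requirements.txt yet) and a reachable database, or an overridden `get_db` dependency; the file name and test name are hypothetical.

```python
# test_health.py -- hypothetical test sketch, not part of this commit
import pytest
import httpx
from app.main import app

@pytest.mark.asyncio
async def test_health_check():
    transport = httpx.ASGITransport(app=app)  # call the FastAPI app in-process
    async with httpx.AsyncClient(transport=transport, base_url="http://test") as client:
        response = await client.get("/api/v1/health")
    assert response.status_code == 200
    assert response.json() == {"status": "ok", "database": "connected"}
```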

24
be/app/config.py Normal file

@ -0,0 +1,24 @@
# app/config.py
import os
from pydantic_settings import BaseSettings
from dotenv import load_dotenv
load_dotenv()
class Settings(BaseSettings):
DATABASE_URL: str | None = None
class Config:
env_file = ".env"
env_file_encoding = 'utf-8'
extra = "ignore"
settings = Settings()
# Basic validation to ensure DATABASE_URL is set
if settings.DATABASE_URL is None:
print("Error: DATABASE_URL environment variable not set.")
# Consider raising an exception for clearer failure
# raise ValueError("DATABASE_URL environment variable not set.")
# else: # Optional: Log the URL being used (without credentials ideally) for debugging
# print(f"DATABASE_URL loaded: {settings.DATABASE_URL[:settings.DATABASE_URL.find('@')] if '@' in settings.DATABASE_URL else 'URL structure unexpected'}")

47
be/app/database.py Normal file

@ -0,0 +1,47 @@
# app/database.py
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base
from app.config import settings
# Ensure DATABASE_URL is set before proceeding
if not settings.DATABASE_URL:
raise ValueError("DATABASE_URL is not configured in settings.")
# Create the SQLAlchemy async engine
# pool_recycle=3600 helps prevent stale connections on some DBs
engine = create_async_engine(
settings.DATABASE_URL,
echo=True, # Log SQL queries (useful for debugging)
future=True, # Use SQLAlchemy 2.0 style features
pool_recycle=3600 # Optional: recycle connections after 1 hour
)
# Create a configured "Session" class
# expire_on_commit=False prevents attributes from expiring after commit
AsyncSessionLocal = sessionmaker(
bind=engine,
class_=AsyncSession,
expire_on_commit=False,
autoflush=False,
autocommit=False,
)
# Base class for our ORM models
Base = declarative_base()
# Dependency to get DB session in path operations
async def get_db() -> AsyncSession: # type: ignore
"""
Dependency function that yields an AsyncSession.
Ensures the session is closed after the request.
"""
async with AsyncSessionLocal() as session:
try:
yield session
# Optionally commit if your endpoints modify data directly
# await session.commit() # Usually commit happens within endpoint logic
except Exception:
await session.rollback()
raise
finally:
await session.close() # Not strictly necessary with async context manager, but explicit
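For completeness, a minimal sketch of using `AsyncSessionLocal` outside the request/`get_db` path, e.g. in a one-off script; the `ping_db` helper is hypothetical and assumes DATABASE_URL is configured.

```python
# Hypothetical script-style usage of the session factory defined above
import asyncio
from sqlalchemy import text
from app.database import AsyncSessionLocal

async def ping_db() -> None:
    async with AsyncSessionLocal() as session:
        result = await session.execute(text("SELECT 1"))
        print("DB says:", result.scalar_one())

if __name__ == "__main__":
    asyncio.run(ping_db())
```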

90
be/app/main.py Normal file

@ -0,0 +1,90 @@
# app/main.py
import logging
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.api.api_router import api_router # Import the main combined router
# Import database and models if needed for startup/shutdown events later
# from . import database, models
# --- Logging Setup ---
# Configure logging (can be more sophisticated later, e.g., using logging.yaml)
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- FastAPI App Instance ---
app = FastAPI(
title="Shared Lists API",
description="API for managing shared shopping lists, OCR, and cost splitting.",
version="0.1.0",
openapi_url="/api/openapi.json", # Place OpenAPI spec under /api
docs_url="/api/docs", # Place Swagger UI under /api
redoc_url="/api/redoc" # Place ReDoc under /api
)
# --- CORS Middleware ---
# Define allowed origins. Be specific in production!
# Use ["*"] for wide open access during early development if needed,
# but restrict it as soon as possible.
# SvelteKit default dev port is 5173
origins = [
"http://localhost:5173",
"http://localhost:8000", # Allow requests from the API itself (e.g., Swagger UI)
# Add your deployed frontend URL here later
# "https://your-frontend-domain.com",
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins, # List of origins that are allowed to make requests
allow_credentials=True, # Allow cookies to be included in requests
allow_methods=["*"], # Allow all methods (GET, POST, PUT, DELETE, etc.)
allow_headers=["*"], # Allow all headers
)
# --- End CORS Middleware ---
# --- Include API Routers ---
# All API endpoints will be prefixed with /api
app.include_router(api_router, prefix="/api")
# --- End Include API Routers ---
# --- Root Endpoint (Optional - outside the main API structure) ---
@app.get("/", tags=["Root"])
async def read_root():
"""
Provides a simple welcome message at the root path.
Useful for basic reachability checks.
"""
logger.info("Root endpoint '/' accessed.")
# You could redirect to the docs or return a simple message
# from fastapi.responses import RedirectResponse
# return RedirectResponse(url="/api/docs")
return {"message": "Welcome to the Shared Lists API! Docs available at /api/docs"}
# --- End Root Endpoint ---
# --- Application Startup/Shutdown Events (Optional) ---
# @app.on_event("startup")
# async def startup_event():
# logger.info("Application startup: Connecting to database...")
# # You might perform initial checks or warm-up here
# # await database.engine.connect() # Example check (get_db handles sessions per request)
# logger.info("Application startup complete.")
# @app.on_event("shutdown")
# async def shutdown_event():
# logger.info("Application shutdown: Disconnecting from database...")
# # await database.engine.dispose() # Close connection pool
# logger.info("Application shutdown complete.")
# --- End Events ---
# --- Direct Run (for simple local testing if needed) ---
# It's better to use `uvicorn app.main:app --reload` from the terminal
# if __name__ == "__main__":
# logger.info("Starting Uvicorn server directly from main.py")
# uvicorn.run(app, host="0.0.0.0", port=8000)
# ------------------------------------------------------

108
be/app/models.py Normal file

@ -0,0 +1,108 @@
# app/models.py
import enum
from datetime import datetime
from sqlalchemy import (
Column,
Integer,
String,
DateTime,
ForeignKey,
Boolean,
Enum as SAEnum, # Renamed to avoid clash with Python's enum
UniqueConstraint,
event,
DDL
)
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func # For server_default=func.now()
from app.database import Base # Import Base from database setup
# Define Enum for User Roles in Groups
class UserRoleEnum(enum.Enum):
owner = "owner"
member = "member"
# --- User Model ---
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True, index=True)
email = Column(String, unique=True, index=True, nullable=False)
password_hash = Column(String, nullable=False)
name = Column(String, index=True, nullable=True) # Allow nullable name initially
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
# Relationships
# Groups created by this user
created_groups = relationship("Group", back_populates="creator")
# Association object for group membership
group_associations = relationship("UserGroup", back_populates="user", cascade="all, delete-orphan")
# Items added by this user (Add later when Item model is defined)
# added_items = relationship("Item", foreign_keys="[Item.added_by_id]", back_populates="added_by_user")
# Items completed by this user (Add later)
# completed_items = relationship("Item", foreign_keys="[Item.completed_by_id]", back_populates="completed_by_user")
# Expense shares for this user (Add later)
# expense_shares = relationship("ExpenseShare", back_populates="user")
# Lists created by this user (Add later)
# created_lists = relationship("List", foreign_keys="[List.created_by_id]", back_populates="creator")
# --- Group Model ---
class Group(Base):
__tablename__ = "groups"
id = Column(Integer, primary_key=True, index=True)
name = Column(String, index=True, nullable=False)
created_by_id = Column(Integer, ForeignKey("users.id"), nullable=False)
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
# Relationships
# The user who created this group
creator = relationship("User", back_populates="created_groups")
# Association object for group membership
member_associations = relationship("UserGroup", back_populates="group", cascade="all, delete-orphan")
# Lists belonging to this group (Add later)
# lists = relationship("List", back_populates="group")
# --- UserGroup Association Model ---
class UserGroup(Base):
__tablename__ = "user_groups"
__table_args__ = (UniqueConstraint('user_id', 'group_id', name='uq_user_group'),) # Ensure user cannot be in same group twice
id = Column(Integer, primary_key=True, index=True) # Surrogate primary key
user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
group_id = Column(Integer, ForeignKey("groups.id", ondelete="CASCADE"), nullable=False)
role = Column(SAEnum(UserRoleEnum), nullable=False, default=UserRoleEnum.member)
joined_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
# Relationships back to User and Group
user = relationship("User", back_populates="group_associations")
group = relationship("Group", back_populates="member_associations")
# --- Add other models below when needed ---
# class List(Base): ...
# class Item(Base): ...
# class Expense(Base): ...
# class ExpenseShare(Base): ...
# Optional: Trigger for automatically creating an 'owner' UserGroup entry when a Group is created.
# This requires importing event and DDL. It's advanced and DB-specific, might be simpler to handle in application logic.
# Example for PostgreSQL (might need adjustment):
# group_owner_trigger = DDL("""
# CREATE OR REPLACE FUNCTION add_group_owner()
# RETURNS TRIGGER AS $$
# BEGIN
# INSERT INTO user_groups (user_id, group_id, role, joined_at)
# VALUES (NEW.created_by_id, NEW.id, 'owner', NOW());
# RETURN NEW;
# END;
# $$ LANGUAGE plpgsql;
#
# CREATE TRIGGER trg_add_group_owner
# AFTER INSERT ON groups
# FOR EACH ROW EXECUTE FUNCTION add_group_owner();
# """)
# event.listen(Group.__table__, 'after_create', group_owner_trigger)
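As the comment above suggests, the owner membership can instead be created in application logic. A hedged sketch, assuming a hypothetical `create_group_with_owner` helper (not part of this commit), of doing it in a single transaction:

```python
# Hypothetical application-logic alternative to the DB trigger sketched above
from sqlalchemy.ext.asyncio import AsyncSession
from app.models import Group, UserGroup, UserRoleEnum

async def create_group_with_owner(db: AsyncSession, name: str, creator_id: int) -> Group:
    group = Group(name=name, created_by_id=creator_id)
    db.add(group)
    await db.flush()  # assigns group.id without committing yet
    db.add(UserGroup(user_id=creator_id, group_id=group.id, role=UserRoleEnum.owner))
    await db.commit()
    await db.refresh(group)
    return group
```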

View File

9
be/app/schemas/health.py Normal file

@ -0,0 +1,9 @@
# app/schemas/health.py
from pydantic import BaseModel
class HealthStatus(BaseModel):
"""
Response model for the health check endpoint.
"""
status: str = "ok" # Provide a default value
database: str

8
be/requirements.txt Normal file

@ -0,0 +1,8 @@
fastapi>=0.95.0
uvicorn[standard]>=0.20.0
sqlalchemy[asyncio]>=2.0.0 # Core ORM + Async support
asyncpg>=0.27.0 # Async PostgreSQL driver
psycopg2-binary>=2.9.0 # Often needed by Alembic even if app uses asyncpg
alembic>=1.9.0 # Database migrations
pydantic-settings>=2.0.0 # For loading settings from .env
python-dotenv>=1.0.0 # To load .env file for scripts/alembic

65
docker-compose.yml Normal file

@ -0,0 +1,65 @@
# docker-compose.yml (in project root)
version: '3.8'
services:
db:
image: postgres:15 # Use a specific PostgreSQL version
container_name: postgres_db
environment:
POSTGRES_USER: dev_user # Define DB user
POSTGRES_PASSWORD: dev_password # Define DB password
POSTGRES_DB: dev_db # Define Database name
volumes:
- postgres_data:/var/lib/postgresql/data # Persist data using a named volume
ports:
- "5432:5432" # Expose PostgreSQL port to host (optional, for direct access)
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
start_period: 10s
restart: unless-stopped
backend:
container_name: fastapi_backend
build:
context: ./be # Path to the directory containing the Dockerfile
dockerfile: Dockerfile
volumes:
# Mount local code into the container for development hot-reloading
# The code inside the container at /app will mirror your local ./be directory
- ./be:/app
ports:
- "8000:8000" # Map container port 8000 to host port 8000
environment:
# Pass the database URL to the backend container
# Uses the service name 'db' as the host, and credentials defined above
# IMPORTANT: Use the correct async driver prefix if your app needs it!
- DATABASE_URL=postgresql+asyncpg://dev_user:dev_password@db:5432/dev_db
# Add other environment variables needed by the backend here
# - SOME_OTHER_VAR=some_value
depends_on:
db: # Wait for the db service to be healthy before starting backend
condition: service_healthy
command: ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] # Override CMD for development reload
restart: unless-stopped
pgadmin: # Optional service for database administration
image: dpage/pgadmin4:latest
container_name: pgadmin4_server
environment:
PGADMIN_DEFAULT_EMAIL: admin@example.com # Change as needed
PGADMIN_DEFAULT_PASSWORD: admin_password # Change to a secure password
PGADMIN_CONFIG_SERVER_MODE: 'False' # Run in Desktop mode for easier local dev server setup
volumes:
- pgadmin_data:/var/lib/pgadmin # Persist pgAdmin configuration
ports:
- "5050:80" # Map container port 80 to host port 5050
depends_on:
- db # Depends on the database service
restart: unless-stopped
volumes: # Define named volumes for data persistence
postgres_data:
pgadmin_data:

23
fe/.gitignore vendored Normal file

@ -0,0 +1,23 @@
node_modules
# Output
.output
.vercel
.netlify
.wrangler
/.svelte-kit
/build
# OS
.DS_Store
Thumbs.db
# Env
.env
.env.*
!.env.example
!.env.test
# Vite
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

1
fe/.npmrc Normal file

@ -0,0 +1 @@
engine-strict=true

6
fe/.prettierignore Normal file

@ -0,0 +1,6 @@
# Package Managers
package-lock.json
pnpm-lock.yaml
yarn.lock
bun.lock
bun.lockb

15
fe/.prettierrc Normal file

@ -0,0 +1,15 @@
{
"useTabs": true,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100,
"plugins": ["prettier-plugin-svelte", "prettier-plugin-tailwindcss"],
"overrides": [
{
"files": "*.svelte",
"options": {
"parser": "svelte"
}
}
]
}

38
fe/README.md Normal file

@ -0,0 +1,38 @@
# sv
Everything you need to build a Svelte project, powered by [`sv`](https://github.com/sveltejs/cli).
## Creating a project
If you're seeing this, you've probably already done this step. Congrats!
```bash
# create a new project in the current directory
npx sv create
# create a new project in my-app
npx sv create my-app
```
## Developing
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
```bash
npm run dev
# or start the server and open the app in a new browser tab
npm run dev -- --open
```
## Building
To create a production version of your app:
```bash
npm run build
```
You can preview the production build with `npm run preview`.
> To deploy your app, you may need to install an [adapter](https://svelte.dev/docs/kit/adapters) for your target environment.

2401
fe/package-lock.json generated Normal file

File diff suppressed because it is too large

31
fe/package.json Normal file

@ -0,0 +1,31 @@
{
"name": "fe",
"private": true,
"version": "0.0.1",
"type": "module",
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview",
"prepare": "svelte-kit sync || echo ''",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"format": "prettier --write .",
"lint": "prettier --check . && eslint ."
},
"devDependencies": {
"@sveltejs/adapter-node": "^5.2.11",
"@sveltejs/kit": "^2.16.0",
"@sveltejs/vite-plugin-svelte": "^5.0.0",
"@tailwindcss/forms": "^0.5.9",
"@tailwindcss/vite": "^4.0.0",
"prettier": "^3.4.2",
"prettier-plugin-svelte": "^3.3.3",
"prettier-plugin-tailwindcss": "^0.6.11",
"svelte": "^5.0.0",
"svelte-check": "^4.0.0",
"tailwindcss": "^4.0.0",
"typescript": "^5.0.0",
"vite": "^6.0.0"
}
}

2
fe/src/app.css Normal file

@ -0,0 +1,2 @@
@import 'tailwindcss';
@plugin '@tailwindcss/forms';

13
fe/src/app.d.ts vendored Normal file

@ -0,0 +1,13 @@
// See https://svelte.dev/docs/kit/types#app.d.ts
// for information about these interfaces
declare global {
namespace App {
// interface Error {}
// interface Locals {}
// interface PageData {}
// interface PageState {}
// interface Platform {}
}
}
export {};

17
fe/src/app.html Normal file

@ -0,0 +1,17 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" content="#4a90e2">
<meta name="viewport" content="width=device-width, initial-scale=1" />
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
</body>
</html>

163
fe/src/lib/apiClient.ts Normal file

@ -0,0 +1,163 @@
// src/lib/apiClient.ts
import { error } from '@sveltejs/kit'; // SvelteKit's error helper
// --- Configuration ---
// Get the base URL from environment variables provided by Vite/SvelteKit
// Ensure VITE_API_BASE_URL is set in your .env file (e.g., VITE_API_BASE_URL=http://localhost:8000/api)
const BASE_URL = import.meta.env.VITE_API_BASE_URL;
if (!BASE_URL) {
console.error('VITE_API_BASE_URL is not defined. Please set it in your .env file.');
// In a real app, you might throw an error here or have a default,
// but logging is often sufficient during development.
}
export class ApiClientError extends Error {
status: number;
errorData: unknown;
constructor(message: string, status: number, errorData: unknown = null) {
super(message);
this.name = 'ApiClientError';
this.status = status;
this.errorData = errorData;
// --- Corrected Conditional Check ---
// Check if the static method exists on the Error constructor object
if (typeof (Error as any).captureStackTrace === 'function') {
// Call it if it exists, casting Error to 'any' to bypass static type check
(Error as any).captureStackTrace(this, ApiClientError);
}
// else {
// Optional: Fallback if captureStackTrace is not available
// You might assign the stack from a new error instance,
// though `super(message)` often handles basic stack creation.
// this.stack = new Error(message).stack;
// }
// --- End Corrected Check ---
}
}
// --- Core Fetch Function ---
interface RequestOptions extends Omit<RequestInit, 'body'> {
// Can add custom options here if needed later
}
async function request<T = unknown>( // Generic type T for expected response data
method: string,
path: string, // Relative path (e.g., /v1/health)
data?: unknown, // Optional request body data
options: RequestOptions = {} // Optional fetch options (headers, etc.)
): Promise<T> {
if (!BASE_URL) {
// Or use SvelteKit's error helper for server-side/universal loads
// error(500, 'API Base URL is not configured.');
throw new Error('API Base URL (VITE_API_BASE_URL) is not configured.');
}
// Construct the full URL, handling potential leading/trailing slashes
const cleanBase = BASE_URL.replace(/\/$/, ''); // Remove trailing slash from base
const cleanPath = path.replace(/^\//, ''); // Remove leading slash from path
const url = `${cleanBase}/${cleanPath}`;
// Default headers
const headers = new Headers({
Accept: 'application/json',
...options.headers // Spread custom headers from options
});
// Fetch options
const fetchOptions: RequestInit = {
method: method.toUpperCase(),
headers,
...options // Spread other custom options (credentials, mode, cache, etc.)
};
// Add body and Content-Type header if data is provided
if (data !== undefined && data !== null) {
headers.set('Content-Type', 'application/json');
fetchOptions.body = JSON.stringify(data);
}
// Add credentials option if needed for cookies/auth later
// fetchOptions.credentials = 'include';
try {
const response = await fetch(url, fetchOptions);
// Check if the response is successful (status code 200-299)
if (!response.ok) {
let errorJson: unknown = null;
try {
// Try to parse error details from the response body
errorJson = await response.json();
} catch (e) {
// Ignore if response body isn't valid JSON
console.warn('API Error response was not valid JSON.', response.status, response.statusText)
}
// Throw a custom error with status and potentially parsed error data
throw new ApiClientError(
`API request failed: ${response.status} ${response.statusText}`,
response.status,
errorJson
);
}
// Handle successful responses with no content (e.g., 204 No Content)
if (response.status === 204) {
// Type assertion needed because Promise<T> expects a value,
// but 204 has no body. We return null. Adjust T if needed.
return null as T;
}
// Parse successful JSON response
const responseData = await response.json();
return responseData as T; // Assert the type based on the generic T
} catch (err) {
// Handle network errors or errors thrown above
console.error(`API Client request error: ${method} ${path}`, err);
// Re-throw the error so calling code can handle it
// If it's already our custom error, re-throw it directly
if (err instanceof ApiClientError) {
throw err;
}
// Otherwise, wrap network or other errors
throw new ApiClientError(
`Network or unexpected error during API request: ${err instanceof Error ? err.message : String(err)}`,
0, // Use 0 or a specific code for network errors
err
);
}
}
// --- Helper Methods ---
export const apiClient = {
get: <T = unknown>(path: string, options: RequestOptions = {}): Promise<T> => {
return request<T>('GET', path, undefined, options);
},
post: <T = unknown>(path: string, data: unknown, options: RequestOptions = {}): Promise<T> => {
return request<T>('POST', path, data, options);
},
put: <T = unknown>(path: string, data: unknown, options: RequestOptions = {}): Promise<T> => {
return request<T>('PUT', path, data, options);
},
delete: <T = unknown>(path: string, options: RequestOptions = {}): Promise<T> => {
// Note: DELETE requests might have a body, but often don't. Adjust if needed.
return request<T>('DELETE', path, undefined, options);
},
patch: <T = unknown>(path: string, data: unknown, options: RequestOptions = {}): Promise<T> => {
return request<T>('PATCH', path, data, options);
}
// Can add other methods (HEAD, OPTIONS) if necessary
};
// Default export can sometimes be convenient, but named export is clear
// export default apiClient;

1
fe/src/lib/index.ts Normal file

@ -0,0 +1 @@
// place files you want to import through the `$lib` alias in this folder.

View File

@ -0,0 +1,4 @@
export interface HealthStatus {
status: string;
database: string;
}

View File

@ -0,0 +1,41 @@
<script lang="ts">
// Import global styles: src/app.css, where Tailwind is configured (via @import 'tailwindcss').
import '../app.css';
console.log('Root layout loaded'); // For debugging in browser console
</script>
<div class="flex min-h-screen flex-col bg-gray-50">
<!-- Header Placeholder -->
<header class="bg-gradient-to-r from-blue-600 to-indigo-700 p-4 text-white shadow-md">
<div class="container mx-auto flex items-center justify-between">
<h1 class="text-xl font-bold">Shared Lists App</h1>
<!-- Navigation Placeholder -->
<nav class="space-x-4">
<a href="/" class="hover:text-blue-200 hover:underline">Home</a>
<a href="/login" class="hover:text-blue-200 hover:underline">Login</a>
<!-- Add other basic links later -->
</nav>
</div>
</header>
<!-- Main Content Area -->
<main class="container mx-auto flex-grow p-4 md:p-8">
<!-- The <slot /> component renders the content of the current page (+page.svelte) -->
<slot />
</main>
<!-- Footer Placeholder -->
<footer class="mt-auto bg-gray-200 p-4 text-center text-sm text-gray-600">
<p>© {new Date().getFullYear()} Shared Lists App. All rights reserved.</p>
</footer>
</div>
<style lang="postcss">
/* You can add global non-utility styles here if needed, */
/* but Tailwind is generally preferred for component styling. */
/* Example: */
/* :global(body) { */
/* font-family: 'Inter', sans-serif; */
/* } */
</style>

View File

64
fe/src/service-worker.ts Normal file

@ -0,0 +1,64 @@
/// <reference types="@sveltejs/kit" />
// REMOVED: /// <reference types="@types/workbox-sw" />
/// <reference lib="webworker" />
// Import SvelteKit-provided variables ONLY
import { build, files, version } from '$service-worker';
declare let self: ServiceWorkerGlobalScope;
// Declare 'workbox' as any so TypeScript accepts the global without @types/workbox-sw.
// Note: this global is not provided automatically; Workbox must be loaded by the service
// worker itself (e.g. via the workbox-* packages or workbox-sw) before the calls below run.
declare const workbox: any;
console.log(`[Service Worker] Version: ${version}`);
// --- Precaching ---
// Use the global workbox object (see the note above about making it available)
workbox.precaching.precacheAndRoute(build);
workbox.precaching.precacheAndRoute(files.map(f => ({ url: f, revision: null })));
// --- Runtime Caching ---
// Google Fonts
workbox.routing.registerRoute(
({ url }) => url.origin === 'https://fonts.googleapis.com' || url.origin === 'https://fonts.gstatic.com',
new workbox.strategies.StaleWhileRevalidate({
cacheName: 'google-fonts',
plugins: [
new workbox.cacheableResponse.CacheableResponsePlugin({ statuses: [0, 200] }),
new workbox.expiration.ExpirationPlugin({ maxEntries: 20, maxAgeSeconds: 30 * 24 * 60 * 60 }),
],
}),
);
// Images from origin
workbox.routing.registerRoute(
({ request, url }) => !!request && request.destination === 'image' && url.origin === self.location.origin,
new workbox.strategies.CacheFirst({
cacheName: 'images',
plugins: [
new workbox.cacheableResponse.CacheableResponsePlugin({ statuses: [0, 200] }),
new workbox.expiration.ExpirationPlugin({
maxEntries: 50,
maxAgeSeconds: 30 * 24 * 60 * 60,
purgeOnQuotaError: true,
}),
],
}),
);
// --- Lifecycle ---
self.addEventListener('install', (event) => {
console.log('[Service Worker] Install event');
// event.waitUntil(self.skipWaiting());
});
self.addEventListener('activate', (event) => {
const extendableEvent = event as ExtendableEvent;
console.log('[Service Worker] Activate event');
extendableEvent.waitUntil(workbox.precaching.cleanupOutdatedCaches());
// event.waitUntil(self.clients.claim());
});
self.addEventListener('fetch', (event) => {
// console.log(`[Service Worker] Fetching: ${event.request.url}`);
});

BIN
fe/static/favicon.png Normal file (binary, not shown; 1.5 KiB)

BIN
fe/static/icon-144x144.png Normal file (binary, not shown; 7.1 KiB)

BIN
fe/static/icon-192x192.png Normal file (binary, not shown; 7.1 KiB)

BIN
fe/static/icon-512x512.png Normal file (binary, not shown; 19 KiB)

29
fe/static/manifest.json Normal file

@ -0,0 +1,29 @@
{
"name": "Shared Household Lists",
"short_name": "SharedLists",
"description": "Collaborative shopping lists, OCR, and cost splitting for households.",
"start_url": "/",
"display": "standalone",
"background_color": "#f3f3f3",
"theme_color": "#c0377b",
"icons": [
{
"src": "/icon-192x192.png",
"sizes": "192x192",
"type": "image/png",
"purpose": "any maskable"
},
{
"src": "/icon-512x512.png",
"sizes": "512x512",
"type": "image/png",
"purpose": "any maskable"
},
{
"src": "/icon-144x144.png",
"sizes": "144x144",
"type": "image/png",
"purpose": "any maskable"
}
]
}

9
fe/svelte.config.js Normal file

@ -0,0 +1,9 @@
import adapter from '@sveltejs/adapter-node';
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
const config = {
preprocess: vitePreprocess(),
kit: { adapter: adapter() }
};
export default config;

19
fe/tsconfig.json Normal file

@ -0,0 +1,19 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"moduleResolution": "bundler"
}
// Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias
// except $lib which is handled by https://svelte.dev/docs/kit/configuration#files
//
// If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes
// from the referenced tsconfig.json - TypeScript does not merge them in
}

7
fe/vite.config.ts Normal file

@ -0,0 +1,7 @@
import tailwindcss from '@tailwindcss/vite';
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
export default defineConfig({
plugins: [tailwindcss(), sveltekit()]
});