ph4 #38

Merged
mo merged 5 commits from ph4 into prod 2025-06-01 19:19:21 +02:00
6 changed files with 19 additions and 88 deletions

View File

@@ -49,6 +49,8 @@ COPY --chown=appuser:appuser alembic/ ./alembic/
 COPY --chown=appuser:appuser alembic.ini ./
 COPY --chown=appuser:appuser *.py ./
 COPY --chown=appuser:appuser requirements.txt ./
+COPY --chown=appuser:appuser entrypoint.sh /app/entrypoint.sh
+RUN chmod +x /app/entrypoint.sh
 
 # Create logs directory
 RUN mkdir -p /app/logs && chown -R appuser:appuser /app
@@ -64,6 +66,7 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
 EXPOSE 8000
 
 # Production command
+ENTRYPOINT ["/app/entrypoint.sh"]
 CMD ["uvicorn", "app.main:app", \
     "--host", "0.0.0.0", \
     "--port", "8000", \

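With both an ENTRYPOINT and a CMD defined, Docker appends the CMD array to the entrypoint as arguments, so at startup the container effectively runs:

    /app/entrypoint.sh uvicorn app.main:app --host 0.0.0.0 --port 8000

The CMD stays overridable at run time; for example `docker run <image> alembic current` (image name is a placeholder) would report the migration state instead of starting uvicorn.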
View File

@@ -25,7 +25,7 @@ recurrence_type_enum = postgresql.ENUM('DAILY', 'WEEKLY', 'MONTHLY', 'YEARLY', n
 chore_frequency_enum = postgresql.ENUM('one_time', 'daily', 'weekly', 'monthly', 'custom', name='chorefrequencyenum', create_type=False)
 chore_type_enum = postgresql.ENUM('personal', 'group', name='choretypeenum', create_type=False)
 
-def upgrade() -> None:
+def upgrade(context=None) -> None:  # Add context=None for compatibility, real arg passed by Alembic
     # Create enums
     user_role_enum.create(op.get_bind(), checkfirst=True)
     split_type_enum.create(op.get_bind(), checkfirst=True)
@@ -280,7 +280,7 @@ def upgrade() -> None:
     op.create_index(op.f('ix_settlement_activities_paid_by_user_id'), 'settlement_activities', ['paid_by_user_id'], unique=False)
 
-def downgrade() -> None:
+def downgrade(context=None) -> None:  # Add context=None for compatibility, real arg passed by Alembic
     op.drop_table('settlement_activities')
     op.drop_table('settlements')
     op.drop_table('expense_splits')
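The revision code itself is unchanged apart from the two signatures; the same steps can be exercised by hand with the standard Alembic CLI that entrypoint.sh now drives:

    alembic upgrade head    # apply all pending revisions (what the entrypoint runs)
    alembic current         # show which revision the database is currently at
    alembic downgrade -1    # step back one revision, exercising downgrade()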

View File

@@ -242,7 +242,7 @@ async def startup_event():
     logger.info(f"Application startup in {settings.ENVIRONMENT} environment...")
 
     # Run database migrations
-    await run_migrations()
+    # await run_migrations()
 
     # Initialize scheduler
     init_scheduler()
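Commenting out run_migrations() moves schema management out of the FastAPI startup hook and into the container entrypoint, so migrations run exactly once per container start, before the application begins serving; presumably this also sidesteps concurrent upgrade attempts when several workers boot at once.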

be/entrypoint.sh Executable file
View File

@@ -0,0 +1,10 @@
+#!/bin/sh
+set -e
+
+# Run database migrations
+echo "Running database migrations..."
+alembic upgrade head
+
+# Execute the command passed as arguments to this script
+echo "Starting application..."
+exec "$@"
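Because the script ends with exec "$@", the shell process is replaced by uvicorn, which then runs as PID 1 and receives SIGTERM directly on docker stop, while set -e aborts the container if the migration fails. A common hardening step that is not part of this PR is to retry the upgrade a bounded number of times while the database finishes booting; a minimal sketch:

    #!/bin/sh
    set -e
    # Hypothetical variant: retry migrations up to 5 times before giving up.
    tries=0
    until alembic upgrade head; do
      tries=$((tries + 1))
      if [ "$tries" -ge 5 ]; then
        echo "Migrations still failing after 5 attempts, giving up." >&2
        exit 1
      fi
      echo "Database not ready, retrying in 3s... ($tries/5)"
      sleep 3
    done
    echo "Starting application..."
    exec "$@"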

View File

@@ -1,30 +1,4 @@
 services:
-  db:
-    image: postgres:17-alpine
-    container_name: postgres_db_prod
-    environment:
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_DB: ${POSTGRES_DB}
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-      - ./be/init-db.sql:/docker-entrypoint-initdb.d/init-db.sql:ro
-    networks:
-      - app-network
-    healthcheck:
-      test: [ "CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}" ]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-      start_period: 30s
-    restart: unless-stopped
-    deploy:
-      resources:
-        limits:
-          memory: 512M
-        reservations:
-          memory: 256M
-
   backend:
     container_name: fastapi_backend_prod
     build:
@@ -41,24 +15,7 @@ services:
       - ENVIRONMENT=production
       - CORS_ORIGINS=${CORS_ORIGINS}
       - FRONTEND_URL=${FRONTEND_URL}
-    networks:
-      - app-network
-    depends_on:
-      db:
-        condition: service_healthy
     restart: unless-stopped
-    deploy:
-      resources:
-        limits:
-          memory: 1G
-        reservations:
-          memory: 512M
-    healthcheck:
-      test: [ "CMD", "curl", "-f", "http://localhost:8000/health" ]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-      start_period: 30s
 
   frontend:
     container_name: frontend_prod
@@ -67,44 +24,5 @@ services:
       dockerfile: Dockerfile.prod
       target: production
     environment:
-      - VITE_API_URL=${VITE_API_URL}
-      - VITE_SENTRY_DSN=${VITE_SENTRY_DSN}
-    ports:
-      - "80:3000"
-    networks:
-      - app-network
-    depends_on:
-      - backend
+      - VITE_API_URL=https://mitlistbe.mohamad.dev
     restart: unless-stopped
-    deploy:
-      resources:
-        limits:
-          memory: 256M
-        reservations:
-          memory: 128M
-
-  redis:
-    image: redis:7-alpine
-    container_name: redis_prod
-    command: redis-server --appendonly yes --requirepass ${REDIS_PASSWORD}
-    volumes:
-      - redis_data:/data
-    networks:
-      - app-network
-    restart: unless-stopped
-    deploy:
-      resources:
-        limits:
-          memory: 256M
-        reservations:
-          memory: 128M
-
-volumes:
-  postgres_data:
-    driver: local
-  redis_data:
-    driver: local
-
-networks:
-  app-network:
-    driver: bridge
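The production stack is now just backend and frontend: the Postgres and Redis containers, their named volumes, and the bridge network are all gone, which implies the production database lives outside compose (the compose file in the next diff points at a Neon pooler) and nothing in this stack provides Redis any more. VITE_API_URL is likewise hard-coded to the public backend URL rather than read from the environment. Redeploying the slimmed stack would be the usual one-liner (the compose file name is an assumption):

    docker compose -f docker-compose.prod.yml up -d --build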

View File

@@ -33,7 +33,7 @@ services:
       # Pass the database URL to the backend container
       # Uses the service name 'db' as the host, and credentials defined above
       # IMPORTANT: Use the correct async driver prefix if your app needs it!
-      - DATABASE_URL=xxx
+      - DATABASE_URL=postgresql+asyncpg://mitlist_owner:npg_p0SkmyJ6BPWO@ep-small-sound-a9ketcef-pooler.gwc.azure.neon.tech/testnewmig
       - GEMINI_API_KEY=xxx
       - SECRET_KEY=xxx
       # Add other environment variables needed by the backend here
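The postgresql+asyncpg:// prefix is SQLAlchemy's dialect+driver URL form, matching the IMPORTANT comment above it. Plain Postgres tooling expects the bare scheme, so a quick connectivity check against the Neon pooler would drop the +asyncpg suffix (a sketch, password elided):

    psql "postgresql://mitlist_owner:<password>@ep-small-sound-a9ketcef-pooler.gwc.azure.neon.tech/testnewmig" -c "select 1;"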