Compare commits


2 Commits

Author SHA1 Message Date
mohamad
81577ac7e8 feat: Add Recurrence Pattern and Update Expense Schema
- Introduced a new `RecurrencePattern` model to manage recurrence details for expenses, allowing for daily, weekly, monthly, and yearly patterns.
- Updated the `Expense` model to include fields for recurrence management, such as `is_recurring`, `recurrence_pattern_id`, and `next_occurrence`.
- Modified the database schema to reflect these changes, including alterations to existing columns and the removal of obsolete fields.
- Enhanced the expense creation logic to accommodate recurring expenses and updated related CRUD operations accordingly.
- Implemented necessary migrations to ensure database integrity and support for the new features.
2025-05-23 21:01:49 +02:00
google-labs-jules[bot]
b0100a2e96 Fix: Ensure financial accuracy in cost splitting and balances
I've refactored the group balance summary logic to correctly account for
SettlementActivity. A SettlementActivity now reduces your
effective total_share_of_expenses, ensuring that net balances within
a group sum to zero. Previously, SettlementActivity amounts were
incorrectly added to total_settlements_paid, skewing balance
calculations.

I updated the existing `test_group_balance_summary_with_settlement_activity`
to assert the corrected balance outcomes.

I also added an extensive suite of API-level tests for:
- All expense splitting types (EQUAL, EXACT_AMOUNTS, PERCENTAGE, SHARES, ITEM_BASED),
  covering various scenarios and input validations.
- Group balance summary calculations, including multiple scenarios with
  SettlementActivity, partial payments, multiple expenses, and
  interactions with generic settlements. All balance tests verify that
  the sum of net balances is zero.

The CRUD operations for expenses and settlement activities were reviewed
and found to be sound, requiring no changes for this fix.

This resolves the flawed logic identified in
`be/tests/api/v1/test_costs.py` (test_group_balance_summary_with_settlement_activity)
and ensures that backend financial calculations are provably correct.
2025-05-22 17:04:46 +00:00
17 changed files with 3243 additions and 344 deletions

View File

@@ -0,0 +1,90 @@
"""add_recurrence_pattern
Revision ID: 295cb070f266
Revises: 7cc1484074eb
Create Date: 2025-05-22 19:55:24.650524
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '295cb070f266'
down_revision: Union[str, None] = '7cc1484074eb'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('expenses', 'next_occurrence',
existing_type=postgresql.TIMESTAMP(),
type_=sa.DateTime(timezone=True),
existing_nullable=True)
op.drop_index('ix_expenses_recurring_next_occurrence', table_name='expenses', postgresql_where='(is_recurring = true)')
op.drop_constraint('fk_expenses_recurrence_pattern_id', 'expenses', type_='foreignkey')
op.drop_constraint('fk_expenses_parent_expense_id', 'expenses', type_='foreignkey')
op.drop_column('expenses', 'recurrence_pattern_id')
op.drop_column('expenses', 'last_occurrence')
op.drop_column('expenses', 'parent_expense_id')
op.alter_column('recurrence_patterns', 'days_of_week',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=sa.String(),
existing_nullable=True)
op.alter_column('recurrence_patterns', 'end_date',
existing_type=postgresql.TIMESTAMP(),
type_=sa.DateTime(timezone=True),
existing_nullable=True)
op.alter_column('recurrence_patterns', 'created_at',
existing_type=postgresql.TIMESTAMP(),
type_=sa.DateTime(timezone=True),
existing_nullable=False)
op.alter_column('recurrence_patterns', 'updated_at',
existing_type=postgresql.TIMESTAMP(),
type_=sa.DateTime(timezone=True),
existing_nullable=False)
op.create_index(op.f('ix_settlement_activities_created_by_user_id'), 'settlement_activities', ['created_by_user_id'], unique=False)
op.create_index(op.f('ix_settlement_activities_expense_split_id'), 'settlement_activities', ['expense_split_id'], unique=False)
op.create_index(op.f('ix_settlement_activities_id'), 'settlement_activities', ['id'], unique=False)
op.create_index(op.f('ix_settlement_activities_paid_by_user_id'), 'settlement_activities', ['paid_by_user_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_settlement_activities_paid_by_user_id'), table_name='settlement_activities')
op.drop_index(op.f('ix_settlement_activities_id'), table_name='settlement_activities')
op.drop_index(op.f('ix_settlement_activities_expense_split_id'), table_name='settlement_activities')
op.drop_index(op.f('ix_settlement_activities_created_by_user_id'), table_name='settlement_activities')
op.alter_column('recurrence_patterns', 'updated_at',
existing_type=sa.DateTime(timezone=True),
type_=postgresql.TIMESTAMP(),
existing_nullable=False)
op.alter_column('recurrence_patterns', 'created_at',
existing_type=sa.DateTime(timezone=True),
type_=postgresql.TIMESTAMP(),
existing_nullable=False)
op.alter_column('recurrence_patterns', 'end_date',
existing_type=sa.DateTime(timezone=True),
type_=postgresql.TIMESTAMP(),
existing_nullable=True)
op.alter_column('recurrence_patterns', 'days_of_week',
existing_type=sa.String(),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_nullable=True)
op.add_column('expenses', sa.Column('parent_expense_id', sa.INTEGER(), autoincrement=False, nullable=True))
op.add_column('expenses', sa.Column('last_occurrence', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.add_column('expenses', sa.Column('recurrence_pattern_id', sa.INTEGER(), autoincrement=False, nullable=True))
op.create_foreign_key('fk_expenses_parent_expense_id', 'expenses', 'expenses', ['parent_expense_id'], ['id'], ondelete='SET NULL')
op.create_foreign_key('fk_expenses_recurrence_pattern_id', 'expenses', 'recurrence_patterns', ['recurrence_pattern_id'], ['id'], ondelete='SET NULL')
op.create_index('ix_expenses_recurring_next_occurrence', 'expenses', ['is_recurring', 'next_occurrence'], unique=False, postgresql_where='(is_recurring = true)')
op.alter_column('expenses', 'next_occurrence',
existing_type=sa.DateTime(timezone=True),
type_=postgresql.TIMESTAMP(),
existing_nullable=True)
# ### end Alembic commands ###
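To apply just this revision, Alembic can also be driven programmatically; the ini path below is an assumption about where the backend keeps its Alembic configuration.

from alembic import command
from alembic.config import Config

alembic_cfg = Config("be/alembic.ini")            # assumed location of the Alembic config
command.upgrade(alembic_cfg, "295cb070f266")      # upgrade up to this revision
# command.downgrade(alembic_cfg, "7cc1484074eb")  # or roll back to the previous revision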

View File

@@ -339,51 +339,69 @@ async def get_group_balance_summary(
# 3. Calculate user balances
user_balances_data = {}
# Initialize UserBalanceDetail for each group member
for assoc in db_group_for_check.member_associations:
if assoc.user:
user_balances_data[assoc.user.id] = UserBalanceDetail(
user_id=assoc.user.id,
user_identifier=assoc.user.name if assoc.user.name else assoc.user.email
)
user_balances_data[assoc.user.id] = {
"user_id": assoc.user.id,
"user_identifier": assoc.user.name if assoc.user.name else assoc.user.email,
"total_paid_for_expenses": Decimal("0.00"),
"initial_total_share_of_expenses": Decimal("0.00"),
"total_amount_paid_via_settlement_activities": Decimal("0.00"),
"total_generic_settlements_paid": Decimal("0.00"),
"total_generic_settlements_received": Decimal("0.00"),
}
# Process expenses
# Process Expenses
for expense in expenses:
if expense.paid_by_user_id in user_balances_data:
user_balances_data[expense.paid_by_user_id].total_paid_for_expenses += expense.total_amount
user_balances_data[expense.paid_by_user_id]["total_paid_for_expenses"] += expense.total_amount
for split in expense.splits:
if split.user_id in user_balances_data:
user_balances_data[split.user_id].total_share_of_expenses += split.owed_amount
user_balances_data[split.user_id]["initial_total_share_of_expenses"] += split.owed_amount
# Process settlements
for settlement in settlements:
if settlement.paid_by_user_id in user_balances_data:
user_balances_data[settlement.paid_by_user_id].total_settlements_paid += settlement.amount
if settlement.paid_to_user_id in user_balances_data:
user_balances_data[settlement.paid_to_user_id].total_settlements_received += settlement.amount
# Process settlement activities
# Process Settlement Activities (SettlementActivityModel)
for activity in settlement_activities:
if activity.paid_by_user_id in user_balances_data:
# These are payments made by a user for their specific expense shares
user_balances_data[activity.paid_by_user_id].total_settlements_paid += activity.amount_paid
# No direct "received" counterpart for another user in this model for SettlementActivity,
# as it settles a debt towards the original expense payer (implicitly handled by reducing net owed).
user_balances_data[activity.paid_by_user_id]["total_amount_paid_via_settlement_activities"] += activity.amount_paid
# Calculate net balances
# Process Generic Settlements (SettlementModel)
for settlement in settlements:
if settlement.paid_by_user_id in user_balances_data:
user_balances_data[settlement.paid_by_user_id]["total_generic_settlements_paid"] += settlement.amount
if settlement.paid_to_user_id in user_balances_data:
user_balances_data[settlement.paid_to_user_id]["total_generic_settlements_received"] += settlement.amount
# Calculate Final Balances
final_user_balances = []
for user_id, data in user_balances_data.items():
data.net_balance = (
data.total_paid_for_expenses + data.total_settlements_received
) - (data.total_share_of_expenses + data.total_settlements_paid)
initial_total_share_of_expenses = data["initial_total_share_of_expenses"]
total_amount_paid_via_settlement_activities = data["total_amount_paid_via_settlement_activities"]
data.total_paid_for_expenses = data.total_paid_for_expenses.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
data.total_share_of_expenses = data.total_share_of_expenses.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
data.total_settlements_paid = data.total_settlements_paid.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
data.total_settlements_received = data.total_settlements_received.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
data.net_balance = data.net_balance.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
adjusted_total_share_of_expenses = initial_total_share_of_expenses - total_amount_paid_via_settlement_activities
final_user_balances.append(data)
total_paid_for_expenses = data["total_paid_for_expenses"]
total_generic_settlements_received = data["total_generic_settlements_received"]
total_generic_settlements_paid = data["total_generic_settlements_paid"]
net_balance = (
total_paid_for_expenses + total_generic_settlements_received
) - (adjusted_total_share_of_expenses + total_generic_settlements_paid)
# Quantize all final values for UserBalanceDetail schema
user_detail = UserBalanceDetail(
user_id=data["user_id"],
user_identifier=data["user_identifier"],
total_paid_for_expenses=total_paid_for_expenses.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP),
# Store adjusted_total_share_of_expenses in total_share_of_expenses
total_share_of_expenses=adjusted_total_share_of_expenses.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP),
# Store total_generic_settlements_paid in total_settlements_paid
total_settlements_paid=total_generic_settlements_paid.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP),
total_settlements_received=total_generic_settlements_received.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP),
net_balance=net_balance.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
)
final_user_balances.append(user_detail)
# Sort by user identifier
final_user_balances.sort(key=lambda x: x.user_identifier)
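Stripped of the accumulation loops, the per-user formula this hunk introduces comes down to the small helper below (a restatement of the code above with the same Decimal quantization; the standalone function is ours, not part of the PR).

from decimal import Decimal, ROUND_HALF_UP

TWO_PLACES = Decimal("0.01")

def net_balance(
    paid_for_expenses: Decimal,
    initial_share: Decimal,
    paid_via_settlement_activities: Decimal,
    generic_settlements_paid: Decimal,
    generic_settlements_received: Decimal,
) -> Decimal:
    # SettlementActivity payments shrink the payer's effective share
    # instead of being added to total_settlements_paid (the pre-fix bug).
    adjusted_share = initial_share - paid_via_settlement_activities
    net = (paid_for_expenses + generic_settlements_received) - (
        adjusted_share + generic_settlements_paid
    )
    return net.quantize(TWO_PLACES, rounding=ROUND_HALF_UP)

# e.g. a member who owes a 40.00 share and has settled it in full via a
# settlement activity nets to zero:
print(net_balance(Decimal("0.00"), Decimal("40.00"), Decimal("40.00"),
                  Decimal("0.00"), Decimal("0.00")))   # 0.00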

View File

@@ -172,22 +172,23 @@ async def leave_group(
db: AsyncSession = Depends(get_transactional_session),
current_user: UserModel = Depends(current_active_user),
):
"""Removes the current user from the specified group."""
"""Removes the current user from the specified group. If the owner is the last member, the group will be deleted."""
logger.info(f"User {current_user.email} attempting to leave group {group_id}")
user_role = await crud_group.get_user_role_in_group(db, group_id=group_id, user_id=current_user.id)
if user_role is None:
raise GroupMembershipError(group_id, "leave (you are not a member)")
# --- MVP: Prevent owner leaving if they are the last member/owner ---
# Check if owner is the last member
if user_role == UserRoleEnum.owner:
member_count = await crud_group.get_group_member_count(db, group_id)
# More robust check: count owners. For now, just check member count.
if member_count <= 1:
logger.warning(f"Owner {current_user.email} attempted to leave group {group_id} as last member.")
raise GroupValidationError("Owner cannot leave the group as the last member. Delete the group or transfer ownership.")
# Delete the group since owner is the last member
logger.info(f"Owner {current_user.email} is the last member. Deleting group {group_id}")
await crud_group.delete_group(db, group_id)
return Message(detail="Group deleted as you were the last member")
# Proceed with removal
# Proceed with removal for non-owner or if there are other members
deleted = await crud_group.remove_user_from_group(db, group_id=group_id, user_id=current_user.id)
if not deleted:

View File

@@ -3,16 +3,20 @@ from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.triggers.cron import CronTrigger
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from app.core.config import settings
from app.config import settings
from app.jobs.recurring_expenses import generate_recurring_expenses
from app.db.session import async_session
import logging
logger = logging.getLogger(__name__)
# Convert async database URL to sync URL for APScheduler
# Replace postgresql+asyncpg:// with postgresql://
sync_db_url = settings.DATABASE_URL.replace('postgresql+asyncpg://', 'postgresql://')
# Configure the scheduler
jobstores = {
'default': SQLAlchemyJobStore(url=settings.SQLALCHEMY_DATABASE_URI)
'default': SQLAlchemyJobStore(url=sync_db_url)
}
executors = {
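The URL rewrite a few lines above amounts to the following (hypothetical connection string; APScheduler's SQLAlchemyJobStore drives a synchronous engine, so the asyncpg driver marker has to be dropped):

async_url = "postgresql+asyncpg://app:secret@localhost:5432/appdb"   # illustrative only
sync_url = async_url.replace("postgresql+asyncpg://", "postgresql://")
assert sync_url == "postgresql://app:secret@localhost:5432/appdb"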

View File

@@ -34,7 +34,7 @@ async def create_chore(
raise ValueError("group_id must be None for personal chores")
db_chore = Chore(
**chore_in.model_dump(exclude_unset=True),
**chore_in.model_dump(exclude_unset=True, exclude={'group_id'}),
group_id=group_id,
created_by_id=user_id,
)

View File

@@ -19,7 +19,6 @@ from app.models import (
Item as ItemModel,
ExpenseOverallStatusEnum, # Added
ExpenseSplitStatusEnum, # Added
RecurrencePattern,
)
from app.schemas.expense import ExpenseCreate, ExpenseSplitCreate, ExpenseUpdate # Removed unused ExpenseUpdate
from app.core.exceptions import (
@@ -34,6 +33,7 @@ from app.core.exceptions import (
DatabaseTransactionError,# Added
ExpenseOperationError # Added specific exception
)
from app.models import RecurrencePattern
# Placeholder for InvalidOperationError if not defined in app.core.exceptions
# This should be a proper HTTPException subclass if used in API layer

View File

@@ -267,4 +267,31 @@ async def check_user_role_in_group(
action=f"{action} (requires at least '{required_role.value}' role)"
)
# If role is sufficient, return None
return None
return None
async def delete_group(db: AsyncSession, group_id: int) -> None:
"""
Deletes a group and all its associated data (members, invites, lists, etc.).
The cascade delete in the models will handle the deletion of related records.
Raises:
GroupNotFoundError: If the group doesn't exist.
DatabaseError: If there's an error during deletion.
"""
try:
# Get the group first to ensure it exists
group = await get_group_by_id(db, group_id)
if not group:
raise GroupNotFoundError(group_id)
# Delete the group - cascading delete will handle related records
await db.delete(group)
await db.flush()
logger.info(f"Group {group_id} deleted successfully")
except OperationalError as e:
logger.error(f"Database connection error while deleting group {group_id}: {str(e)}", exc_info=True)
raise DatabaseConnectionError(f"Database connection error: {str(e)}")
except SQLAlchemyError as e:
logger.error(f"Unexpected SQLAlchemy error while deleting group {group_id}: {str(e)}", exc_info=True)
raise DatabaseTransactionError(f"Failed to delete group: {str(e)}")

3
be/app/db/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
from app.db.session import async_session
__all__ = ["async_session"]

4
be/app/db/session.py Normal file
View File

@@ -0,0 +1,4 @@
from app.database import AsyncSessionLocal
# Export the async session factory
async_session = AsyncSessionLocal

View File

@@ -4,7 +4,10 @@ from sqlalchemy import select, and_
from app.models import Expense, RecurrencePattern
from app.crud.expense import create_expense
from app.schemas.expense import ExpenseCreate
from app.core.logging import logger
import logging
from typing import Optional
logger = logging.getLogger(__name__)
async def generate_recurring_expenses(db: AsyncSession) -> None:
"""

View File

@@ -50,6 +50,13 @@ class ExpenseOverallStatusEnum(enum.Enum):
partially_paid = "partially_paid"
paid = "paid"
class RecurrenceTypeEnum(enum.Enum):
DAILY = "DAILY"
WEEKLY = "WEEKLY"
MONTHLY = "MONTHLY"
YEARLY = "YEARLY"
# Add more types as needed
# Define ChoreFrequencyEnum
class ChoreFrequencyEnum(enum.Enum):
one_time = "one_time"
@@ -245,6 +252,11 @@ class Expense(Base):
item = relationship("Item", foreign_keys=[item_id], back_populates="expenses")
splits = relationship("ExpenseSplit", back_populates="expense", cascade="all, delete-orphan")
overall_settlement_status = Column(SAEnum(ExpenseOverallStatusEnum, name="expenseoverallstatusenum", create_type=True), nullable=False, server_default=ExpenseOverallStatusEnum.unpaid.value, default=ExpenseOverallStatusEnum.unpaid)
# --- Recurrence fields ---
is_recurring = Column(Boolean, default=False, nullable=False)
recurrence_pattern_id = Column(Integer, ForeignKey("recurrence_patterns.id"), nullable=True)
recurrence_pattern = relationship("RecurrencePattern", back_populates="expenses", uselist=False) # One-to-one
next_occurrence = Column(DateTime(timezone=True), nullable=True) # For recurring expenses
__table_args__ = (
# Ensure at least one context is provided
@@ -376,3 +388,30 @@ class ChoreAssignment(Base):
# --- Relationships ---
chore = relationship("Chore", back_populates="assignments")
assigned_user = relationship("User", back_populates="assigned_chores")
# === NEW: RecurrencePattern Model ===
class RecurrencePattern(Base):
__tablename__ = "recurrence_patterns"
id = Column(Integer, primary_key=True, index=True)
type = Column(SAEnum(RecurrenceTypeEnum, name="recurrencetypeenum", create_type=True), nullable=False)
interval = Column(Integer, default=1, nullable=False) # e.g., every 1 day, every 2 weeks
days_of_week = Column(String, nullable=True) # For weekly recurrences, e.g., "MON,TUE,FRI"
# day_of_month = Column(Integer, nullable=True) # For monthly on a specific day
# week_of_month = Column(Integer, nullable=True) # For monthly on a specific week (e.g., 2nd week)
# month_of_year = Column(Integer, nullable=True) # For yearly recurrences
end_date = Column(DateTime(timezone=True), nullable=True)
max_occurrences = Column(Integer, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
# Relationship back to Expenses that use this pattern (could be one-to-many if patterns are shared)
# However, the current CRUD implies one RecurrencePattern per Expense if recurring.
# If a pattern can be shared, this would be a one-to-many (RecurrencePattern to many Expenses).
# For now, assuming one-to-one as implied by current Expense.recurrence_pattern relationship setup.
expenses = relationship("Expense", back_populates="recurrence_pattern")
# === END: RecurrencePattern Model ===
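As a usage sketch (not taken from the PR), wiring a pattern to a recurring expense with the fields added above could look like this; Expense columns beyond the recurrence ones (description, total_amount) are assumptions based on other parts of this diff:

from datetime import datetime, timedelta, timezone
from decimal import Decimal

from app.models import Expense, RecurrencePattern, RecurrenceTypeEnum

# Hypothetical weekly expense recurring every Monday.
pattern = RecurrencePattern(
    type=RecurrenceTypeEnum.WEEKLY,
    interval=1,
    days_of_week="MON",            # stored as a plain string, per the column above
)

expense = Expense(
    description="Weekly groceries",            # assumed column
    total_amount=Decimal("80.00"),             # assumed column
    is_recurring=True,
    recurrence_pattern=pattern,                # relationship added above
    next_occurrence=datetime.now(timezone.utc) + timedelta(weeks=1),
)
# session.add(expense)  # the pattern is persisted via the relationship's save-update cascade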

View File

@@ -42,9 +42,9 @@ class ChoreCreate(ChoreBase):
@field_validator('group_id')
@classmethod
def validate_group_id(cls, v, values):
if values.get('type') == ChoreTypeEnum.group and v is None:
if values.data.get('type') == ChoreTypeEnum.group and v is None:
raise ValueError("group_id is required for group chores")
if values.get('type') == ChoreTypeEnum.personal and v is not None:
if values.data.get('type') == ChoreTypeEnum.personal and v is not None:
raise ValueError("group_id must be None for personal chores")
return v
@@ -61,9 +61,9 @@ class ChoreUpdate(BaseModel):
@field_validator('group_id')
@classmethod
def validate_group_id(cls, v, values):
if values.get('type') == ChoreTypeEnum.group and v is None:
if values.data.get('type') == ChoreTypeEnum.group and v is None:
raise ValueError("group_id is required for group chores")
if values.get('type') == ChoreTypeEnum.personal and v is not None:
if values.data.get('type') == ChoreTypeEnum.personal and v is not None:
raise ValueError("group_id must be None for personal chores")
return v
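The switch from values.get(...) to values.data.get(...) tracks Pydantic v2, where the second validator argument is a ValidationInfo object whose .data mapping holds the fields validated so far. A minimal standalone illustration (hypothetical model, not the project's schema):

from typing import Optional

from pydantic import BaseModel, ValidationInfo, field_validator


class ChoreExample(BaseModel):
    type: str                      # "group" or "personal"
    group_id: Optional[int] = None

    @field_validator("group_id")
    @classmethod
    def validate_group_id(cls, v: Optional[int], info: ValidationInfo) -> Optional[int]:
        # info.data only contains fields declared (and validated) before this one.
        if info.data.get("type") == "group" and v is None:
            raise ValueError("group_id is required for group chores")
        if info.data.get("type") == "personal" and v is not None:
            raise ValueError("group_id must be None for personal chores")
        return v


ChoreExample(type="personal")            # ok
ChoreExample(type="group", group_id=7)   # ok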

File diff suppressed because it is too large

8
fe/package-lock.json generated
View File

@@ -25,6 +25,7 @@
"@intlify/unplugin-vue-i18n": "^6.0.8",
"@playwright/test": "^1.51.1",
"@tsconfig/node22": "^22.0.1",
"@types/date-fns": "^2.5.3",
"@types/jsdom": "^21.1.7",
"@types/node": "^22.15.17",
"@vitejs/plugin-vue": "^5.2.3",
@@ -4124,6 +4125,13 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/date-fns": {
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/@types/date-fns/-/date-fns-2.5.3.tgz",
"integrity": "sha512-4KVPD3g5RjSgZtdOjvI/TDFkLNUHhdoWxmierdQbDeEg17Rov0hbBYtIzNaQA67ORpteOhvR9YEMTb6xeDCang==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/estree": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz",

View File

@@ -34,6 +34,7 @@
"@intlify/unplugin-vue-i18n": "^6.0.8",
"@playwright/test": "^1.51.1",
"@tsconfig/node22": "^22.0.1",
"@types/date-fns": "^2.5.3",
"@types/jsdom": "^21.1.7",
"@types/node": "^22.15.17",
"@vitejs/plugin-vue": "^5.2.3",

File diff suppressed because it is too large

View File

@@ -116,6 +116,42 @@
</div>
</div>
<!-- Expenses Section -->
<div class="mt-4">
<div class="neo-card">
<div class="neo-card-header">
<h3>Group Expenses</h3>
<router-link :to="`/groups/${groupId}/expenses`" class="btn btn-primary">
<span class="material-icons">payments</span>
Manage Expenses
</router-link>
</div>
<div class="neo-card-body">
<div v-if="recentExpenses.length > 0" class="neo-expenses-list">
<div v-for="expense in recentExpenses" :key="expense.id" class="neo-expense-item">
<div class="neo-expense-info">
<span class="neo-expense-name">{{ expense.description }}</span>
<span class="neo-expense-date">{{ formatDate(expense.expense_date) }}</span>
</div>
<div class="neo-expense-details">
<span class="neo-expense-amount">{{ expense.currency }} {{ formatAmount(expense.total_amount)
}}</span>
<span class="neo-chip" :class="getSplitTypeColor(expense.split_type)">
{{ formatSplitType(expense.split_type) }}
</span>
</div>
</div>
</div>
<div v-else class="neo-empty-state">
<svg class="icon icon-lg" aria-hidden="true">
<use xlink:href="#icon-payments" />
</svg>
<p>No expenses recorded. Click "Manage Expenses" to add some!</p>
</div>
</div>
</div>
</div>
</div>
<div v-else class="alert alert-info" role="status">
@@ -134,6 +170,7 @@ import { useNotificationStore } from '@/stores/notifications';
import { choreService } from '../services/choreService'
import type { Chore, ChoreFrequency } from '../types/chore'
import { format } from 'date-fns'
import type { Expense } from '@/types/expense'
interface Group {
id: string | number;
@@ -174,6 +211,9 @@ const { copy, copied, isSupported: clipboardIsSupported } = useClipboard({
// Chores state
const upcomingChores = ref<Chore[]>([])
// Add new state for expenses
const recentExpenses = ref<Expense[]>([])
const fetchActiveInviteCode = async () => {
if (!groupId.value) return;
// Consider adding a loading state for this fetch if needed, e.g., initialInviteCodeLoading
@@ -326,9 +366,42 @@ const getFrequencyColor = (frequency: ChoreFrequency) => {
return colors[frequency]
}
// Add new methods for expenses
const loadRecentExpenses = async () => {
if (!groupId.value) return
try {
const response = await apiClient.get(`/api/groups/${groupId.value}/expenses`)
recentExpenses.value = response.data.slice(0, 5) // Get only the 5 most recent expenses
} catch (error) {
console.error('Error loading recent expenses:', error)
}
}
const formatAmount = (amount: string) => {
return parseFloat(amount).toFixed(2)
}
const formatSplitType = (type: string) => {
return type.split('_').map(word =>
word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()
).join(' ')
}
const getSplitTypeColor = (type: string) => {
const colors: Record<string, string> = {
equal: 'blue',
exact_amounts: 'green',
percentage: 'purple',
shares: 'orange',
item_based: 'teal'
}
return colors[type] || 'grey'
}
onMounted(() => {
fetchGroupDetails();
loadUpcomingChores();
loadRecentExpenses();
});
</script>
@@ -564,4 +637,91 @@ onMounted(() => {
font-size: 0.875rem;
color: #666;
}
/* Expenses List Styles */
.neo-expenses-list {
display: flex;
flex-direction: column;
gap: 1rem;
}
.neo-expense-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 1rem;
border-radius: 12px;
background: #fafafa;
border: 2px solid #111;
transition: transform 0.1s ease-in-out;
}
.neo-expense-item:hover {
transform: translateY(-2px);
}
.neo-expense-info {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.neo-expense-name {
font-weight: 600;
font-size: 1.1rem;
}
.neo-expense-date {
font-size: 0.875rem;
color: #666;
}
.neo-expense-details {
display: flex;
align-items: center;
gap: 1rem;
}
.neo-expense-amount {
font-weight: 600;
font-size: 1.1rem;
}
.neo-chip {
padding: 0.25rem 0.75rem;
border-radius: 1rem;
font-size: 0.875rem;
font-weight: 600;
background: #e0e0e0;
}
.neo-chip.blue {
background: #e3f2fd;
color: #1976d2;
}
.neo-chip.green {
background: #e8f5e9;
color: #2e7d32;
}
.neo-chip.purple {
background: #f3e5f5;
color: #7b1fa2;
}
.neo-chip.orange {
background: #fff3e0;
color: #f57c00;
}
.neo-chip.teal {
background: #e0f2f1;
color: #00796b;
}
.neo-chip.grey {
background: #f5f5f5;
color: #616161;
}
</style>