state: Partial Backend Rebuild
This commit is contained in:
parent
ad697e5f54
commit
b41aa20948
215
CHANGES.md
Normal file
215
CHANGES.md
Normal file
@ -0,0 +1,215 @@
|
|||||||
|
# Changes Summary - Dynamic Application System
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
The application system has been completely redesigned to be fully dynamic and configurable. All application types, fields, statuses, and workflows are now defined in the database rather than hardcoded.
|
||||||
|
|
||||||
|
## Major Changes
|
||||||
|
|
||||||
|
### 1. Database Architecture
|
||||||
|
|
||||||
|
#### New Tables
|
||||||
|
- `application_types` - Defines application type templates
|
||||||
|
- `application_fields` - Field definitions for each type
|
||||||
|
- `application_type_statuses` - Status definitions per type
|
||||||
|
- `status_transitions` - Workflow transition rules
|
||||||
|
- `dynamic_applications` - Application instances
|
||||||
|
- `application_history_v2` - Complete audit trail
|
||||||
|
- `application_attachments_v2` - File attachments
|
||||||
|
- `application_transition_logs` - Status change tracking
|
||||||
|
- `application_approvals` - Approval decisions
|
||||||
|
|
||||||
|
#### Removed Tables
|
||||||
|
- `applications` (old fixed structure)
|
||||||
|
- `form_templates` (replaced by application_types)
|
||||||
|
- `field_mappings` (integrated into application_fields)
|
||||||
|
|
||||||
|
### 2. Core Features
|
||||||
|
|
||||||
|
#### Dynamic Field System
|
||||||
|
- 18+ field types (text, date, amount, etc.)
|
||||||
|
- Conditional display rules
|
||||||
|
- Custom validation per field
|
||||||
|
- Section grouping
|
||||||
|
- Default values and placeholders
|
||||||
|
|
||||||
|
#### Flexible Workflow
|
||||||
|
- Custom statuses with colors/icons
|
||||||
|
- Configurable transitions between statuses
|
||||||
|
- Multiple trigger types:
|
||||||
|
- User approval (with role requirements)
|
||||||
|
- Applicant actions
|
||||||
|
- Time-based triggers
|
||||||
|
- Condition-based triggers
|
||||||
|
- Automatic transitions
|
||||||
|
|
||||||
|
#### Enhanced Cost Management
|
||||||
|
- Up to 100 cost positions (previously 24)
|
||||||
|
- Up to 100 comparison offers (previously 24)
|
||||||
|
- Categories and notes per position
|
||||||
|
- Automatic total calculation
|
||||||
|
|
||||||
|
### 3. API Changes
|
||||||
|
|
||||||
|
#### New Endpoints
|
||||||
|
|
||||||
|
**Application Types:**
|
||||||
|
- `GET /api/application-types` - List all types
|
||||||
|
- `GET /api/application-types/{id}` - Get specific type
|
||||||
|
- `POST /api/application-types` - Create new type (admin)
|
||||||
|
- `PUT /api/application-types/{id}` - Update type (admin)
|
||||||
|
- `DELETE /api/application-types/{id}` - Delete/deactivate type
|
||||||
|
- `POST /api/application-types/{id}/pdf-template` - Upload PDF
|
||||||
|
|
||||||
|
**Dynamic Applications:**
|
||||||
|
- `GET /api/applications` - List with advanced filtering
|
||||||
|
- `GET /api/applications/{id}` - Get with access key support
|
||||||
|
- `POST /api/applications` - Create with type selection
|
||||||
|
- `PUT /api/applications/{id}` - Update with validation
|
||||||
|
- `POST /api/applications/{id}/submit` - Submit for review
|
||||||
|
- `POST /api/applications/{id}/transition` - Status change
|
||||||
|
- `POST /api/applications/{id}/approve` - Approval actions
|
||||||
|
- `GET /api/applications/{id}/history` - Audit trail
|
||||||
|
- `POST /api/applications/{id}/generate-pdf` - PDF generation
|
||||||
|
|
||||||
|
#### Removed Endpoints
|
||||||
|
- All QSM/VSM specific endpoints
|
||||||
|
- Fixed form template endpoints
|
||||||
|
- Legacy PDF processing endpoints
|
||||||
|
|
||||||
|
### 4. Models & Types
|
||||||
|
|
||||||
|
#### New TypeScript Types (`frontend/src/types/dynamic.ts`)
|
||||||
|
- `ApplicationType` - Type definition
|
||||||
|
- `FieldDefinition` - Field configuration
|
||||||
|
- `StatusDefinition` - Status configuration
|
||||||
|
- `TransitionDefinition` - Workflow rules
|
||||||
|
- `DynamicApplication` - Application instance
|
||||||
|
- `CostPosition` - Cost item structure
|
||||||
|
- `ComparisonOffer` - Vendor offer structure
|
||||||
|
|
||||||
|
#### New Python Models (`backend/src/models/application_type.py`)
|
||||||
|
- `ApplicationType` - Type ORM model
|
||||||
|
- `ApplicationField` - Field ORM model
|
||||||
|
- `ApplicationTypeStatus` - Status ORM model
|
||||||
|
- `StatusTransition` - Transition ORM model
|
||||||
|
- `DynamicApplication` - Application ORM model
|
||||||
|
- Supporting models for history, attachments, approvals
|
||||||
|
|
||||||
|
### 5. Services
|
||||||
|
|
||||||
|
#### New Services
|
||||||
|
- `NotificationService` - Email notifications with templates
|
||||||
|
- `PDFService` - Dynamic PDF generation
|
||||||
|
- `AuthService` - Enhanced authentication with roles
|
||||||
|
|
||||||
|
#### Enhanced Services
|
||||||
|
- Field validation with type-specific rules
|
||||||
|
- PDF template mapping and filling
|
||||||
|
- Workflow engine for transitions
|
||||||
|
- Audit logging for all changes
|
||||||
|
|
||||||
|
### 6. Frontend Updates
|
||||||
|
|
||||||
|
#### New Components (to be implemented)
|
||||||
|
- Dynamic field renderer
|
||||||
|
- Visual workflow designer
|
||||||
|
- Application type builder
|
||||||
|
- Status badge with colors
|
||||||
|
- Cost position manager (100 items)
|
||||||
|
- Comparison offer manager (100 items)
|
||||||
|
|
||||||
|
#### API Client (`frontend/src/api/dynamicClient.ts`)
|
||||||
|
- Full TypeScript support
|
||||||
|
- Automatic token refresh
|
||||||
|
- Public access support
|
||||||
|
- Error handling
|
||||||
|
- File upload support
|
||||||
|
|
||||||
|
### 7. Migration
|
||||||
|
|
||||||
|
#### Data Migration (`backend/scripts/migrate_to_dynamic.py`)
|
||||||
|
- Creates QSM and VSM as dynamic types
|
||||||
|
- Migrates existing applications
|
||||||
|
- Preserves all data and relationships
|
||||||
|
- Maintains audit trail
|
||||||
|
|
||||||
|
#### Migration Steps
|
||||||
|
1. Run database migration to create new tables
|
||||||
|
2. Execute migration script to create default types
|
||||||
|
3. Verify data integrity
|
||||||
|
4. Update frontend to use new endpoints
|
||||||
|
5. Remove old code and tables
|
||||||
|
|
||||||
|
### 8. Configuration
|
||||||
|
|
||||||
|
#### Environment Variables
|
||||||
|
```env
|
||||||
|
# New/Updated
|
||||||
|
MAX_COST_POSITIONS=100
|
||||||
|
MAX_COMPARISON_OFFERS=100
|
||||||
|
PDF_TEMPLATE_STORAGE=database
|
||||||
|
DYNAMIC_FIELD_VALIDATION=true
|
||||||
|
WORKFLOW_ENGINE_ENABLED=true
|
||||||
|
AUDIT_LOGGING_LEVEL=detailed
|
||||||
|
```
|
||||||
|
|
||||||
|
### 9. Benefits
|
||||||
|
|
||||||
|
#### For Administrators
|
||||||
|
- Create new application types without coding
|
||||||
|
- Visual workflow designer
|
||||||
|
- Flexible field configuration
|
||||||
|
- PDF template management
|
||||||
|
- Role-based access control
|
||||||
|
|
||||||
|
#### For Users
|
||||||
|
- Consistent interface across all types
|
||||||
|
- Better validation and help text
|
||||||
|
- Public access with keys
|
||||||
|
- Enhanced cost management
|
||||||
|
- Real-time status tracking
|
||||||
|
|
||||||
|
#### For Developers
|
||||||
|
- No hardcoded logic
|
||||||
|
- Extensible field types
|
||||||
|
- Clean separation of concerns
|
||||||
|
- Full TypeScript support
|
||||||
|
- Comprehensive audit trail
|
||||||
|
|
||||||
|
### 10. Breaking Changes
|
||||||
|
|
||||||
|
#### Backend
|
||||||
|
- All application endpoints changed
|
||||||
|
- Database schema completely redesigned
|
||||||
|
- Old models removed
|
||||||
|
- API response format changed
|
||||||
|
|
||||||
|
#### Frontend
|
||||||
|
- New type system required
|
||||||
|
- API client rewritten
|
||||||
|
- Component props changed
|
||||||
|
- State management updated
|
||||||
|
|
||||||
|
### 11. Upgrade Path
|
||||||
|
|
||||||
|
1. **Backup** all existing data
|
||||||
|
2. **Deploy** new backend with migrations
|
||||||
|
3. **Run** migration script
|
||||||
|
4. **Update** frontend to new API
|
||||||
|
5. **Test** thoroughly
|
||||||
|
6. **Remove** old code and tables
|
||||||
|
|
||||||
|
### 12. Future Enhancements
|
||||||
|
|
||||||
|
- Form templates and presets
|
||||||
|
- Batch operations
|
||||||
|
- Advanced reporting
|
||||||
|
- Mobile app support
|
||||||
|
- Webhook integrations
|
||||||
|
- Custom field types via plugins
|
||||||
|
- Multi-language support
|
||||||
|
- Advanced PDF templates with conditionals
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
This update transforms the application system from a fixed, hardcoded structure to a fully dynamic, database-driven system. While this is a major breaking change, it provides unlimited flexibility for future requirements without code changes.
|
||||||
374
DYNAMIC_SYSTEM_ARCHITECTURE.md
Normal file
374
DYNAMIC_SYSTEM_ARCHITECTURE.md
Normal file
@ -0,0 +1,374 @@
|
|||||||
|
# Dynamic Application System Architecture
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document describes the new fully dynamic application system that replaces the previous fixed QSM/VSM structure. The system now allows administrators to define any type of application with custom fields, statuses, and workflows.
|
||||||
|
|
||||||
|
## Core Concepts
|
||||||
|
|
||||||
|
### 1. Application Types
|
||||||
|
|
||||||
|
Application types are fully configurable templates that define:
|
||||||
|
- **Fields**: Dynamic field definitions with types, validation, and display rules
|
||||||
|
- **Statuses**: Custom status workflow with transitions
|
||||||
|
- **PDF Templates**: Optional PDF template with field mapping
|
||||||
|
- **Access Control**: Role-based access restrictions
|
||||||
|
- **Limits**: Maximum cost positions and comparison offers
|
||||||
|
|
||||||
|
### 2. Fields
|
||||||
|
|
||||||
|
Fields are the building blocks of applications with the following types:
|
||||||
|
- `text_short`: Short text input (max 255 chars)
|
||||||
|
- `text_long`: Long text/textarea
|
||||||
|
- `options`: Single selection from predefined options
|
||||||
|
- `yesno`: Boolean yes/no field
|
||||||
|
- `mail`: Email address with validation
|
||||||
|
- `date`: Date picker
|
||||||
|
- `datetime`: Date and time picker
|
||||||
|
- `amount`: Numeric amount field
|
||||||
|
- `currency_eur`: EUR currency field with formatting
|
||||||
|
- `number`: General numeric field
|
||||||
|
- `file`: File upload
|
||||||
|
- `signature`: Digital signature field
|
||||||
|
- `phone`: Phone number with validation
|
||||||
|
- `url`: URL with validation
|
||||||
|
- `checkbox`: Single checkbox
|
||||||
|
- `radio`: Radio button group
|
||||||
|
- `select`: Dropdown selection
|
||||||
|
- `multiselect`: Multiple selection
|
||||||
|
|
||||||
|
Each field supports:
|
||||||
|
- **Validation Rules**: min/max values, patterns, required status
|
||||||
|
- **Display Conditions**: Show/hide based on other field values
|
||||||
|
- **Default Values**: Pre-filled values
|
||||||
|
- **Placeholders & Help Text**: User guidance
|
||||||
|
|
||||||
|
### 3. Status System
|
||||||
|
|
||||||
|
Statuses define the workflow states:
|
||||||
|
- **Editability**: Whether the application can be edited
|
||||||
|
- **Visual Style**: Color and icon for UI
|
||||||
|
- **Notifications**: Email templates for status changes
|
||||||
|
- **Transitions**: Rules for moving between statuses
|
||||||
|
|
||||||
|
### 4. Transitions
|
||||||
|
|
||||||
|
Transitions define how applications move between statuses:
|
||||||
|
|
||||||
|
**Trigger Types:**
|
||||||
|
- `user_approval`: Requires N users with specific role to approve
|
||||||
|
- `applicant_action`: Button/action by the applicant
|
||||||
|
- `deadline_expired`: Automatic when a deadline passes
|
||||||
|
- `time_elapsed`: After a specific time period
|
||||||
|
- `condition_met`: When field conditions are satisfied
|
||||||
|
- `automatic`: Immediate automatic transition
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
### Core Tables
|
||||||
|
|
||||||
|
1. **application_types**
|
||||||
|
- Defines application type templates
|
||||||
|
- Stores PDF template as BLOB
|
||||||
|
- Contains field mapping configuration
|
||||||
|
|
||||||
|
2. **application_fields**
|
||||||
|
- Field definitions for each type
|
||||||
|
- Validation rules as JSON
|
||||||
|
- Display conditions as JSON
|
||||||
|
|
||||||
|
3. **application_type_statuses**
|
||||||
|
- Status definitions per type
|
||||||
|
- Visual configuration (color, icon)
|
||||||
|
- Notification templates
|
||||||
|
|
||||||
|
4. **status_transitions**
|
||||||
|
- Transition rules between statuses
|
||||||
|
- Trigger configuration
|
||||||
|
- Conditions and actions
|
||||||
|
|
||||||
|
5. **dynamic_applications**
|
||||||
|
- Actual application instances
|
||||||
|
- Common fields (email, title, names)
|
||||||
|
- Dynamic field_data as JSON
|
||||||
|
- Cost positions and comparison offers as JSON
|
||||||
|
|
||||||
|
6. **application_history_v2**
|
||||||
|
- Complete audit trail
|
||||||
|
- Field-level change tracking
|
||||||
|
- User and IP tracking
|
||||||
|
|
||||||
|
7. **application_approvals**
|
||||||
|
- Approval decisions by role
|
||||||
|
- Comments and timestamps
|
||||||
|
|
||||||
|
## API Endpoints
|
||||||
|
|
||||||
|
### Application Types Management
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/application-types - List all types
|
||||||
|
GET /api/application-types/{id} - Get specific type
|
||||||
|
POST /api/application-types - Create new type (admin)
|
||||||
|
PUT /api/application-types/{id} - Update type (admin)
|
||||||
|
DELETE /api/application-types/{id} - Delete type (admin)
|
||||||
|
POST /api/application-types/{id}/pdf-template - Upload PDF template
|
||||||
|
```
|
||||||
|
|
||||||
|
### Dynamic Applications
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /api/applications - List applications
|
||||||
|
GET /api/applications/{id} - Get application details
|
||||||
|
POST /api/applications - Create new application
|
||||||
|
PUT /api/applications/{id} - Update application
|
||||||
|
POST /api/applications/{id}/submit - Submit for review
|
||||||
|
POST /api/applications/{id}/transition - Change status (admin)
|
||||||
|
POST /api/applications/{id}/approve - Approve/reject
|
||||||
|
GET /api/applications/{id}/history - Get history
|
||||||
|
POST /api/applications/{id}/generate-pdf - Generate PDF
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Fields
|
||||||
|
|
||||||
|
The following fields are always present (not dynamic):
|
||||||
|
|
||||||
|
1. **Email**: Applicant's email address
|
||||||
|
2. **Status**: Current workflow status
|
||||||
|
3. **Type**: Application type reference
|
||||||
|
4. **Title**: Application title/subject
|
||||||
|
5. **First Name**: Applicant's first name
|
||||||
|
6. **Last Name**: Applicant's last name
|
||||||
|
7. **Timestamps**: Created, submitted, status changed, completed
|
||||||
|
8. **Cost Positions**: Up to 100 items with description, amount, category
|
||||||
|
9. **Comparison Offers**: Up to 100 vendor offers
|
||||||
|
|
||||||
|
## Frontend Components
|
||||||
|
|
||||||
|
### Dynamic Field Renderer
|
||||||
|
|
||||||
|
The frontend includes a dynamic field rendering system that:
|
||||||
|
- Renders fields based on type
|
||||||
|
- Applies validation rules
|
||||||
|
- Handles display conditions
|
||||||
|
- Manages field dependencies
|
||||||
|
|
||||||
|
### Status Workflow UI
|
||||||
|
|
||||||
|
Visual workflow display showing:
|
||||||
|
- Current status with color/icon
|
||||||
|
- Available actions
|
||||||
|
- Transition history
|
||||||
|
- Approval tracking
|
||||||
|
|
||||||
|
### Admin Interface
|
||||||
|
|
||||||
|
Application type builder with:
|
||||||
|
- Drag-and-drop field designer
|
||||||
|
- Visual workflow editor
|
||||||
|
- PDF template mapper
|
||||||
|
- Role management
|
||||||
|
|
||||||
|
## Migration from Old System
|
||||||
|
|
||||||
|
### Data Migration Steps
|
||||||
|
|
||||||
|
1. **Create new tables** - Run migration script
|
||||||
|
2. **Define standard types** - Create QSM/VSM as dynamic types
|
||||||
|
3. **Map existing data** - Convert old applications to new format
|
||||||
|
4. **Update references** - Point to new tables
|
||||||
|
5. **Remove old tables** - Clean up after verification
|
||||||
|
|
||||||
|
### Field Mapping
|
||||||
|
|
||||||
|
Old QSM/VSM fields map to dynamic fields:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"project.name": "project_name",
|
||||||
|
"applicant.name.first": "first_name",
|
||||||
|
"applicant.name.last": "last_name",
|
||||||
|
"applicant.contact.email": "email",
|
||||||
|
"project.costs": "cost_positions",
|
||||||
|
"project.totals.requestedAmountEur": "total_amount"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security & Access Control
|
||||||
|
|
||||||
|
### Role-Based Access
|
||||||
|
|
||||||
|
- **Admin**: Full access to type management and all applications
|
||||||
|
- **Budget Reviewer**: Review and approve budget-related applications
|
||||||
|
- **Finance Reviewer**: Financial review and approval
|
||||||
|
- **AStA Member**: Voting rights on applications
|
||||||
|
- **Applicant**: Create and edit own applications
|
||||||
|
|
||||||
|
### Public Access
|
||||||
|
|
||||||
|
Applications can be accessed via:
|
||||||
|
- **Authenticated**: Full access based on role
|
||||||
|
- **Access Key**: Limited access with unique key
|
||||||
|
- **Public Link**: Read-only access if configured
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
```env
|
||||||
|
# Database
|
||||||
|
DATABASE_URL=mysql://user:pass@localhost/stupa_db
|
||||||
|
|
||||||
|
# Email
|
||||||
|
SMTP_HOST=smtp.example.com
|
||||||
|
SMTP_PORT=587
|
||||||
|
SMTP_USER=noreply@example.com
|
||||||
|
SMTP_PASSWORD=secret
|
||||||
|
FROM_EMAIL=noreply@example.com
|
||||||
|
FROM_NAME=Application System
|
||||||
|
|
||||||
|
# Security
|
||||||
|
JWT_SECRET_KEY=your-secret-key
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES=30
|
||||||
|
|
||||||
|
# Storage
|
||||||
|
PDF_OUTPUT_DIR=./uploads/pdfs
|
||||||
|
ATTACHMENT_DIR=./uploads/attachments
|
||||||
|
MAX_UPLOAD_SIZE=10485760
|
||||||
|
|
||||||
|
# Frontend
|
||||||
|
BASE_URL=https://applications.example.com
|
||||||
|
```
|
||||||
|
|
||||||
|
### Application Type Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"type_id": "travel_grant",
|
||||||
|
"name": "Travel Grant Application",
|
||||||
|
"description": "Apply for travel funding",
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"field_id": "destination",
|
||||||
|
"field_type": "text_short",
|
||||||
|
"name": "Destination",
|
||||||
|
"is_required": true,
|
||||||
|
"validation_rules": {
|
||||||
|
"max_length": 100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field_id": "purpose",
|
||||||
|
"field_type": "text_long",
|
||||||
|
"name": "Purpose of Travel",
|
||||||
|
"is_required": true,
|
||||||
|
"validation_rules": {
|
||||||
|
"min_length": 50,
|
||||||
|
"max_length": 500
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field_id": "travel_date",
|
||||||
|
"field_type": "date",
|
||||||
|
"name": "Travel Date",
|
||||||
|
"is_required": true,
|
||||||
|
"validation_rules": {
|
||||||
|
"min_date": "2024-01-01"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"field_id": "amount_requested",
|
||||||
|
"field_type": "currency_eur",
|
||||||
|
"name": "Amount Requested",
|
||||||
|
"is_required": true,
|
||||||
|
"validation_rules": {
|
||||||
|
"min": 0,
|
||||||
|
"max": 5000
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"statuses": [
|
||||||
|
{
|
||||||
|
"status_id": "draft",
|
||||||
|
"name": "Draft",
|
||||||
|
"is_editable": true,
|
||||||
|
"color": "#6B7280",
|
||||||
|
"is_initial": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "submitted",
|
||||||
|
"name": "Submitted",
|
||||||
|
"is_editable": false,
|
||||||
|
"color": "#3B82F6",
|
||||||
|
"send_notification": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "approved",
|
||||||
|
"name": "Approved",
|
||||||
|
"is_editable": false,
|
||||||
|
"color": "#10B981",
|
||||||
|
"is_final": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "rejected",
|
||||||
|
"name": "Rejected",
|
||||||
|
"is_editable": false,
|
||||||
|
"color": "#EF4444",
|
||||||
|
"is_final": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"from_status_id": "draft",
|
||||||
|
"to_status_id": "submitted",
|
||||||
|
"name": "Submit Application",
|
||||||
|
"trigger_type": "applicant_action"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from_status_id": "submitted",
|
||||||
|
"to_status_id": "approved",
|
||||||
|
"name": "Approve",
|
||||||
|
"trigger_type": "user_approval",
|
||||||
|
"trigger_config": {
|
||||||
|
"role": "admin",
|
||||||
|
"required_approvals": 1
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from_status_id": "submitted",
|
||||||
|
"to_status_id": "rejected",
|
||||||
|
"name": "Reject",
|
||||||
|
"trigger_type": "user_approval",
|
||||||
|
"trigger_config": {
|
||||||
|
"role": "admin",
|
||||||
|
"required_approvals": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Advantages of Dynamic System
|
||||||
|
|
||||||
|
1. **Flexibility**: Create any type of application without code changes
|
||||||
|
2. **Maintainability**: All configuration in database, no hardcoded logic
|
||||||
|
3. **Scalability**: Same infrastructure handles all application types
|
||||||
|
4. **User Experience**: Consistent interface across all applications
|
||||||
|
5. **Compliance**: Built-in audit trail and approval workflows
|
||||||
|
6. **Integration**: PDF generation works with any template
|
||||||
|
7. **Future-Proof**: Easy to add new field types and features
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
- **JSON Fields**: Indexed for fast searching
|
||||||
|
- **Caching**: Application types cached in memory
|
||||||
|
- **Lazy Loading**: Field data loaded on demand
|
||||||
|
- **Batch Operations**: Support for bulk status changes
|
||||||
|
- **Async Processing**: PDF generation in background
|
||||||
|
|
||||||
|
## Backup and Recovery
|
||||||
|
|
||||||
|
- **Daily Backups**: Automated database backups
|
||||||
|
- **Version History**: All changes tracked in history tables
|
||||||
|
- **Soft Deletes**: Applications marked as deleted, not removed
|
||||||
|
- **Export/Import**: JSON format for data portability
|
||||||
@ -10,6 +10,7 @@ RUN apt-get update && apt-get install -y \
|
|||||||
pkg-config \
|
pkg-config \
|
||||||
wget \
|
wget \
|
||||||
curl \
|
curl \
|
||||||
|
netcat-openbsd \
|
||||||
# PDF processing tools
|
# PDF processing tools
|
||||||
poppler-utils \
|
poppler-utils \
|
||||||
# Clean up
|
# Clean up
|
||||||
@ -38,14 +39,19 @@ RUN pip install --no-cache-dir \
|
|||||||
|
|
||||||
# Copy application code
|
# Copy application code
|
||||||
COPY src/ ./src/
|
COPY src/ ./src/
|
||||||
COPY assets/ ./assets/
|
# Copy entrypoint script
|
||||||
|
COPY docker-entrypoint.sh /app/
|
||||||
|
RUN chmod +x /app/docker-entrypoint.sh
|
||||||
|
# Copy assets if they exist (currently no assets needed after removing LaTeX)
|
||||||
|
# COPY assets/ ./assets/
|
||||||
|
|
||||||
# Create necessary directories
|
# Create necessary directories
|
||||||
RUN mkdir -p /app/uploads \
|
RUN mkdir -p /app/uploads \
|
||||||
/app/templates \
|
/app/templates \
|
||||||
/app/attachments \
|
/app/attachments \
|
||||||
/app/pdf_forms \
|
/app/pdf_forms \
|
||||||
/app/logs
|
/app/logs \
|
||||||
|
/app/assets
|
||||||
|
|
||||||
# Set permissions
|
# Set permissions
|
||||||
RUN chmod -R 755 /app
|
RUN chmod -R 755 /app
|
||||||
@ -58,4 +64,4 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
|
|||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
# Run the application
|
# Run the application
|
||||||
CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
ENTRYPOINT ["/app/docker-entrypoint.sh"]
|
||||||
|
|||||||
34
backend/docker-entrypoint.sh
Normal file
34
backend/docker-entrypoint.sh
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "Starting STUPA PDF API Backend..."
|
||||||
|
|
||||||
|
# Wait for database to be ready
|
||||||
|
echo "Waiting for database..."
|
||||||
|
while ! nc -z ${MYSQL_HOST:-db} ${MYSQL_PORT:-3306}; do
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "Database is ready!"
|
||||||
|
|
||||||
|
# Run database initialization
|
||||||
|
echo "Initializing database..."
|
||||||
|
python -m src.startup || {
|
||||||
|
echo "Warning: Database initialization failed or already initialized"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run migrations if alembic is available
|
||||||
|
if [ -f "alembic.ini" ]; then
|
||||||
|
echo "Running database migrations..."
|
||||||
|
alembic upgrade head || {
|
||||||
|
echo "Warning: Migrations failed or not configured"
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Start the application
|
||||||
|
echo "Starting application server..."
|
||||||
|
exec uvicorn src.main:app \
|
||||||
|
--host 0.0.0.0 \
|
||||||
|
--port ${PORT:-8000} \
|
||||||
|
--workers ${WORKERS:-1} \
|
||||||
|
--reload-dir /app/src \
|
||||||
|
${UVICORN_ARGS}
|
||||||
@ -25,6 +25,7 @@ requests-oauthlib==1.3.1
|
|||||||
|
|
||||||
# PDF Processing
|
# PDF Processing
|
||||||
pypdf==3.17.4
|
pypdf==3.17.4
|
||||||
|
PyPDF2==3.0.1
|
||||||
PyMuPDF==1.23.16
|
PyMuPDF==1.23.16
|
||||||
reportlab==4.0.8
|
reportlab==4.0.8
|
||||||
pillow==10.2.0
|
pillow==10.2.0
|
||||||
@ -37,7 +38,6 @@ email-validator==2.1.0.post1
|
|||||||
|
|
||||||
# Caching & Sessions
|
# Caching & Sessions
|
||||||
redis==5.0.1
|
redis==5.0.1
|
||||||
python-redis==0.6.0
|
|
||||||
|
|
||||||
# Template Processing
|
# Template Processing
|
||||||
jinja2==3.1.3
|
jinja2==3.1.3
|
||||||
@ -60,7 +60,6 @@ openapi-schema-pydantic==1.2.4
|
|||||||
pytest==7.4.4
|
pytest==7.4.4
|
||||||
pytest-asyncio==0.23.3
|
pytest-asyncio==0.23.3
|
||||||
pytest-cov==4.1.0
|
pytest-cov==4.1.0
|
||||||
httpx-mock==0.4.0
|
|
||||||
faker==22.0.0
|
faker==22.0.0
|
||||||
|
|
||||||
# Development Tools
|
# Development Tools
|
||||||
@ -74,13 +73,10 @@ python-json-logger==2.0.7
|
|||||||
sentry-sdk[fastapi]==1.39.2
|
sentry-sdk[fastapi]==1.39.2
|
||||||
|
|
||||||
# Data Validation & Serialization
|
# Data Validation & Serialization
|
||||||
marshmallow==3.20.2
|
|
||||||
pyyaml==6.0.1
|
pyyaml==6.0.1
|
||||||
|
|
||||||
# Background Tasks (optional)
|
# Background Tasks (optional)
|
||||||
celery==5.3.6
|
celery==5.3.6
|
||||||
kombu==5.3.5
|
|
||||||
flower==2.0.1
|
|
||||||
|
|
||||||
# Rate Limiting
|
# Rate Limiting
|
||||||
slowapi==0.1.9
|
slowapi==0.1.9
|
||||||
|
|||||||
500
backend/scripts/migrate_to_dynamic.py
Normal file
500
backend/scripts/migrate_to_dynamic.py
Normal file
@ -0,0 +1,500 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Migration script to convert old application system to dynamic application system
|
||||||
|
|
||||||
|
This script:
|
||||||
|
1. Creates default QSM and VSM application types
|
||||||
|
2. Migrates existing applications to the new dynamic format
|
||||||
|
3. Preserves all data and relationships
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
|
||||||
|
# Add parent directory to path for imports
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
from sqlalchemy import create_engine, text
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from src.models.application_type import (
|
||||||
|
ApplicationType, ApplicationField, ApplicationTypeStatus,
|
||||||
|
StatusTransition, DynamicApplication, ApplicationHistory,
|
||||||
|
FieldType, TransitionTriggerType
|
||||||
|
)
|
||||||
|
from src.models.base import Base
|
||||||
|
from src.config.database import get_database_url
|
||||||
|
|
||||||
|
# Configure logging
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format='%(asctime)s - %(levelname)s - %(message)s'
|
||||||
|
)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_qsm_application_type(session) -> ApplicationType:
|
||||||
|
"""Create QSM application type with all fields"""
|
||||||
|
logger.info("Creating QSM application type...")
|
||||||
|
|
||||||
|
qsm_type = ApplicationType(
|
||||||
|
type_id="qsm",
|
||||||
|
name="QSM - Qualitätssicherungsmittel",
|
||||||
|
description="Antrag für Qualitätssicherungsmittel zur Verbesserung der Lehre",
|
||||||
|
is_active=True,
|
||||||
|
is_public=True,
|
||||||
|
max_cost_positions=100,
|
||||||
|
max_comparison_offers=100
|
||||||
|
)
|
||||||
|
session.add(qsm_type)
|
||||||
|
session.flush()
|
||||||
|
|
||||||
|
# Define QSM fields
|
||||||
|
qsm_fields = [
|
||||||
|
# Institution fields
|
||||||
|
{"field_id": "institution_type", "type": FieldType.SELECT, "name": "Art der Institution",
|
||||||
|
"options": ["Fachschaft", "STUPA-Referat", "Studentische Hochschulgruppe", "Fakultät", "Hochschuleinrichtung"],
|
||||||
|
"required": True, "order": 10},
|
||||||
|
{"field_id": "institution_name", "type": FieldType.TEXT_SHORT, "name": "Name der Institution",
|
||||||
|
"required": True, "order": 11},
|
||||||
|
|
||||||
|
# Applicant fields
|
||||||
|
{"field_id": "applicant_type", "type": FieldType.SELECT, "name": "Antragsteller",
|
||||||
|
"options": ["Person", "Institution"], "required": True, "order": 20},
|
||||||
|
{"field_id": "course", "type": FieldType.SELECT, "name": "Studiengang",
|
||||||
|
"options": ["INF", "ESB", "LS", "TEC", "TEX", "NXT"], "order": 21},
|
||||||
|
{"field_id": "role", "type": FieldType.SELECT, "name": "Rolle",
|
||||||
|
"options": ["Student", "Professor", "Mitarbeiter", "AStA", "Referatsleitung", "Fachschaftsvorstand"],
|
||||||
|
"order": 22},
|
||||||
|
{"field_id": "phone", "type": FieldType.PHONE, "name": "Telefonnummer", "order": 23},
|
||||||
|
|
||||||
|
# Project fields
|
||||||
|
{"field_id": "project_description", "type": FieldType.TEXT_LONG, "name": "Projektbeschreibung",
|
||||||
|
"required": True, "order": 30},
|
||||||
|
{"field_id": "project_start", "type": FieldType.DATE, "name": "Projektbeginn",
|
||||||
|
"required": True, "order": 31},
|
||||||
|
{"field_id": "project_end", "type": FieldType.DATE, "name": "Projektende", "order": 32},
|
||||||
|
{"field_id": "participants", "type": FieldType.NUMBER, "name": "Anzahl Teilnehmer", "order": 33},
|
||||||
|
|
||||||
|
# Participation
|
||||||
|
{"field_id": "faculty_inf", "type": FieldType.CHECKBOX, "name": "Fakultät INF", "order": 40},
|
||||||
|
{"field_id": "faculty_esb", "type": FieldType.CHECKBOX, "name": "Fakultät ESB", "order": 41},
|
||||||
|
{"field_id": "faculty_ls", "type": FieldType.CHECKBOX, "name": "Fakultät LS", "order": 42},
|
||||||
|
{"field_id": "faculty_tec", "type": FieldType.CHECKBOX, "name": "Fakultät TEC", "order": 43},
|
||||||
|
{"field_id": "faculty_tex", "type": FieldType.CHECKBOX, "name": "Fakultät TEX", "order": 44},
|
||||||
|
{"field_id": "faculty_nxt", "type": FieldType.CHECKBOX, "name": "Fakultät NxT", "order": 45},
|
||||||
|
{"field_id": "faculty_open", "type": FieldType.CHECKBOX, "name": "Fakultätsübergreifend", "order": 46},
|
||||||
|
|
||||||
|
# Financing
|
||||||
|
{"field_id": "qsm_code", "type": FieldType.SELECT, "name": "QSM-Code",
|
||||||
|
"options": [
|
||||||
|
"vwv-3-2-1-1: Finanzierung zusätzlicher Lehr- und Seminarangebote",
|
||||||
|
"vwv-3-2-1-2: Fachspezifische Studienprojekte",
|
||||||
|
"vwv-3-2-1-3: Hochschuldidaktische Fort- und Weiterbildungsmaßnahmen",
|
||||||
|
"vwv-3-2-2-1: Verbesserung/Ausbau von Serviceeinrichtungen",
|
||||||
|
"vwv-3-2-2-2: Lehr- und Lernmaterialien",
|
||||||
|
"vwv-3-2-2-3: Durchführung von Exkursionen",
|
||||||
|
"vwv-3-2-2-4: Infrastrukturelle Begleit- und Anpassungsmaßnahmen",
|
||||||
|
"vwv-3-2-3-1: Verbesserung der Beratungsangebote",
|
||||||
|
"vwv-3-2-3-2: Studium Generale und fachübergreifende Lehrangebote",
|
||||||
|
"vwv-3-2-3-3: Sonstige Maßnahmen im Interesse der Studierendenschaft"
|
||||||
|
],
|
||||||
|
"required": True, "order": 50},
|
||||||
|
{"field_id": "qsm_stellenfinanzierungen", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Stellenfinanzierungen", "order": 51},
|
||||||
|
{"field_id": "qsm_studierende", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Für Studierende", "order": 52},
|
||||||
|
{"field_id": "qsm_individuell", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Individuelle Maßnahme", "order": 53},
|
||||||
|
{"field_id": "qsm_exkursion_genehmigt", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Exkursion genehmigt", "order": 54},
|
||||||
|
{"field_id": "qsm_exkursion_bezuschusst", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Exkursion bezuschusst", "order": 55},
|
||||||
|
|
||||||
|
# Attachments
|
||||||
|
{"field_id": "comparative_offers", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Vergleichsangebote vorhanden", "order": 60},
|
||||||
|
{"field_id": "fakultaet_attachment", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Fakultätsbeschluss angehängt", "order": 61},
|
||||||
|
]
|
||||||
|
|
||||||
|
for field_def in qsm_fields:
|
||||||
|
field = ApplicationField(
|
||||||
|
application_type_id=qsm_type.id,
|
||||||
|
field_id=field_def["field_id"],
|
||||||
|
field_type=field_def["type"],
|
||||||
|
name=field_def["name"],
|
||||||
|
field_order=field_def.get("order", 0),
|
||||||
|
is_required=field_def.get("required", False),
|
||||||
|
options=field_def.get("options"),
|
||||||
|
validation_rules=field_def.get("validation", {})
|
||||||
|
)
|
||||||
|
session.add(field)
|
||||||
|
|
||||||
|
return qsm_type
|
||||||
|
|
||||||
|
|
||||||
|
def create_vsm_application_type(session) -> ApplicationType:
|
||||||
|
"""Create VSM application type with all fields"""
|
||||||
|
logger.info("Creating VSM application type...")
|
||||||
|
|
||||||
|
vsm_type = ApplicationType(
|
||||||
|
type_id="vsm",
|
||||||
|
name="VSM - Verfasste Studierendenschaft",
|
||||||
|
description="Antrag für Mittel der Verfassten Studierendenschaft",
|
||||||
|
is_active=True,
|
||||||
|
is_public=True,
|
||||||
|
max_cost_positions=100,
|
||||||
|
max_comparison_offers=100
|
||||||
|
)
|
||||||
|
session.add(vsm_type)
|
||||||
|
session.flush()
|
||||||
|
|
||||||
|
# Define VSM fields (similar to QSM but with VSM-specific financing)
|
||||||
|
vsm_fields = [
|
||||||
|
# Institution fields
|
||||||
|
{"field_id": "institution_type", "type": FieldType.SELECT, "name": "Art der Institution",
|
||||||
|
"options": ["Fachschaft", "STUPA-Referat", "Studentische Hochschulgruppe"],
|
||||||
|
"required": True, "order": 10},
|
||||||
|
{"field_id": "institution_name", "type": FieldType.TEXT_SHORT, "name": "Name der Institution",
|
||||||
|
"required": True, "order": 11},
|
||||||
|
|
||||||
|
# Applicant fields (same as QSM)
|
||||||
|
{"field_id": "applicant_type", "type": FieldType.SELECT, "name": "Antragsteller",
|
||||||
|
"options": ["Person", "Institution"], "required": True, "order": 20},
|
||||||
|
{"field_id": "course", "type": FieldType.SELECT, "name": "Studiengang",
|
||||||
|
"options": ["INF", "ESB", "LS", "TEC", "TEX", "NXT"], "order": 21},
|
||||||
|
{"field_id": "role", "type": FieldType.SELECT, "name": "Rolle",
|
||||||
|
"options": ["Student", "AStA", "Referatsleitung", "Fachschaftsvorstand"],
|
||||||
|
"order": 22},
|
||||||
|
{"field_id": "phone", "type": FieldType.PHONE, "name": "Telefonnummer", "order": 23},
|
||||||
|
|
||||||
|
# Project fields (same as QSM)
|
||||||
|
{"field_id": "project_description", "type": FieldType.TEXT_LONG, "name": "Projektbeschreibung",
|
||||||
|
"required": True, "order": 30},
|
||||||
|
{"field_id": "project_start", "type": FieldType.DATE, "name": "Projektbeginn",
|
||||||
|
"required": True, "order": 31},
|
||||||
|
{"field_id": "project_end", "type": FieldType.DATE, "name": "Projektende", "order": 32},
|
||||||
|
{"field_id": "participants", "type": FieldType.NUMBER, "name": "Anzahl Teilnehmer", "order": 33},
|
||||||
|
|
||||||
|
# VSM-specific financing
|
||||||
|
{"field_id": "vsm_code", "type": FieldType.SELECT, "name": "VSM-Code",
|
||||||
|
"options": [
|
||||||
|
"lhg-01: Hochschulpolitische, fachliche, soziale, wirtschaftliche und kulturelle Belange",
|
||||||
|
"lhg-02: Mitwirkung an den Aufgaben der Hochschulen",
|
||||||
|
"lhg-03: Politische Bildung",
|
||||||
|
"lhg-04: Förderung der Chancengleichheit",
|
||||||
|
"lhg-05: Förderung der Integration ausländischer Studierender",
|
||||||
|
"lhg-06: Förderung der sportlichen Aktivitäten",
|
||||||
|
"lhg-07: Pflege der überregionalen Studierendenbeziehungen"
|
||||||
|
],
|
||||||
|
"required": True, "order": 50},
|
||||||
|
{"field_id": "vsm_aufgaben", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Aufgaben der Studierendenschaft", "order": 51},
|
||||||
|
{"field_id": "vsm_individuell", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Individuelle Maßnahme", "order": 52},
|
||||||
|
|
||||||
|
# Attachments
|
||||||
|
{"field_id": "comparative_offers", "type": FieldType.CHECKBOX,
|
||||||
|
"name": "Vergleichsangebote vorhanden", "order": 60},
|
||||||
|
]
|
||||||
|
|
||||||
|
for field_def in vsm_fields:
|
||||||
|
field = ApplicationField(
|
||||||
|
application_type_id=vsm_type.id,
|
||||||
|
field_id=field_def["field_id"],
|
||||||
|
field_type=field_def["type"],
|
||||||
|
name=field_def["name"],
|
||||||
|
field_order=field_def.get("order", 0),
|
||||||
|
is_required=field_def.get("required", False),
|
||||||
|
options=field_def.get("options"),
|
||||||
|
validation_rules=field_def.get("validation", {})
|
||||||
|
)
|
||||||
|
session.add(field)
|
||||||
|
|
||||||
|
return vsm_type
|
||||||
|
|
||||||
|
|
||||||
|
def create_statuses_and_transitions(session, app_type: ApplicationType):
|
||||||
|
"""Create standard statuses and transitions for an application type"""
|
||||||
|
logger.info(f"Creating statuses and transitions for {app_type.name}...")
|
||||||
|
|
||||||
|
# Define standard statuses
|
||||||
|
statuses = [
|
||||||
|
{"id": "draft", "name": "Entwurf", "editable": True, "color": "#6B7280",
|
||||||
|
"initial": True, "final": False},
|
||||||
|
{"id": "submitted", "name": "Beantragt", "editable": False, "color": "#3B82F6",
|
||||||
|
"initial": False, "final": False, "notification": True},
|
||||||
|
{"id": "processing_locked", "name": "Bearbeitung gesperrt", "editable": False,
|
||||||
|
"color": "#F59E0B", "initial": False, "final": False},
|
||||||
|
{"id": "under_review", "name": "Zu prüfen", "editable": False, "color": "#8B5CF6",
|
||||||
|
"initial": False, "final": False},
|
||||||
|
{"id": "voting", "name": "Zur Abstimmung", "editable": False, "color": "#EC4899",
|
||||||
|
"initial": False, "final": False},
|
||||||
|
{"id": "approved", "name": "Genehmigt", "editable": False, "color": "#10B981",
|
||||||
|
"initial": False, "final": True, "notification": True},
|
||||||
|
{"id": "rejected", "name": "Abgelehnt", "editable": False, "color": "#EF4444",
|
||||||
|
"initial": False, "final": True, "notification": True},
|
||||||
|
{"id": "cancelled", "name": "Zurückgezogen", "editable": False, "color": "#9CA3AF",
|
||||||
|
"initial": False, "final": True, "cancelled": True},
|
||||||
|
]
|
||||||
|
|
||||||
|
status_objects = {}
|
||||||
|
for i, status_def in enumerate(statuses):
|
||||||
|
status = ApplicationTypeStatus(
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
status_id=status_def["id"],
|
||||||
|
name=status_def["name"],
|
||||||
|
is_editable=status_def["editable"],
|
||||||
|
color=status_def["color"],
|
||||||
|
display_order=i * 10,
|
||||||
|
is_initial=status_def.get("initial", False),
|
||||||
|
is_final=status_def.get("final", False),
|
||||||
|
is_cancelled=status_def.get("cancelled", False),
|
||||||
|
send_notification=status_def.get("notification", False)
|
||||||
|
)
|
||||||
|
session.add(status)
|
||||||
|
session.flush()
|
||||||
|
status_objects[status_def["id"]] = status
|
||||||
|
|
||||||
|
# Define transitions
|
||||||
|
transitions = [
|
||||||
|
# From Draft
|
||||||
|
{"from": "draft", "to": "submitted", "name": "Antrag einreichen",
|
||||||
|
"trigger": TransitionTriggerType.APPLICANT_ACTION},
|
||||||
|
|
||||||
|
# From Submitted
|
||||||
|
{"from": "submitted", "to": "processing_locked", "name": "Bearbeitung sperren",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "admin"},
|
||||||
|
{"from": "submitted", "to": "under_review", "name": "Zur Prüfung freigeben",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "admin"},
|
||||||
|
{"from": "submitted", "to": "cancelled", "name": "Zurückziehen",
|
||||||
|
"trigger": TransitionTriggerType.APPLICANT_ACTION},
|
||||||
|
|
||||||
|
# From Processing Locked
|
||||||
|
{"from": "processing_locked", "to": "under_review", "name": "Bearbeitung entsperren",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "admin"},
|
||||||
|
|
||||||
|
# From Under Review
|
||||||
|
{"from": "under_review", "to": "voting", "name": "Zur Abstimmung freigeben",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "budget_reviewer"},
|
||||||
|
{"from": "under_review", "to": "rejected", "name": "Ablehnen",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "budget_reviewer"},
|
||||||
|
|
||||||
|
# From Voting
|
||||||
|
{"from": "voting", "to": "approved", "name": "Genehmigen",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "asta_member",
|
||||||
|
"required": 3}, # Requires 3 AStA members to approve
|
||||||
|
{"from": "voting", "to": "rejected", "name": "Ablehnen",
|
||||||
|
"trigger": TransitionTriggerType.USER_APPROVAL, "role": "asta_member",
|
||||||
|
"required": 3}, # Requires 3 AStA members to reject
|
||||||
|
]
|
||||||
|
|
||||||
|
for trans_def in transitions:
|
||||||
|
config = {"role": trans_def.get("role", "admin")}
|
||||||
|
if "required" in trans_def:
|
||||||
|
config["required_approvals"] = trans_def["required"]
|
||||||
|
|
||||||
|
transition = StatusTransition(
|
||||||
|
from_status_id=status_objects[trans_def["from"]].id,
|
||||||
|
to_status_id=status_objects[trans_def["to"]].id,
|
||||||
|
name=trans_def["name"],
|
||||||
|
trigger_type=trans_def["trigger"],
|
||||||
|
trigger_config=config,
|
||||||
|
is_active=True
|
||||||
|
)
|
||||||
|
session.add(transition)
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_old_application(session, old_app: Dict[str, Any], app_type: ApplicationType) -> DynamicApplication:
|
||||||
|
"""Migrate an old application to the new dynamic format"""
|
||||||
|
|
||||||
|
# Extract data from old format
|
||||||
|
payload = old_app.get("payload", {})
|
||||||
|
pa = payload.get("pa", {})
|
||||||
|
applicant = pa.get("applicant", {})
|
||||||
|
project = pa.get("project", {})
|
||||||
|
|
||||||
|
# Map old status to new status
|
||||||
|
status_map = {
|
||||||
|
"DRAFT": "draft",
|
||||||
|
"BEANTRAGT": "submitted",
|
||||||
|
"BEARBEITUNG_GESPERRT": "processing_locked",
|
||||||
|
"ZU_PRUEFEN": "under_review",
|
||||||
|
"ZUR_ABSTIMMUNG": "voting",
|
||||||
|
"GENEHMIGT": "approved",
|
||||||
|
"ABGELEHNT": "rejected",
|
||||||
|
"CANCELLED": "cancelled"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Build field data
|
||||||
|
field_data = {}
|
||||||
|
|
||||||
|
# Institution fields
|
||||||
|
institution = applicant.get("institution", {})
|
||||||
|
field_data["institution_type"] = institution.get("type", "")
|
||||||
|
field_data["institution_name"] = institution.get("name", "")
|
||||||
|
|
||||||
|
# Applicant fields
|
||||||
|
field_data["applicant_type"] = applicant.get("type", "person")
|
||||||
|
name = applicant.get("name", {})
|
||||||
|
contact = applicant.get("contact", {})
|
||||||
|
field_data["course"] = applicant.get("course", "")
|
||||||
|
field_data["role"] = applicant.get("role", "")
|
||||||
|
field_data["phone"] = contact.get("phone", "")
|
||||||
|
|
||||||
|
# Project fields
|
||||||
|
field_data["project_description"] = project.get("description", "")
|
||||||
|
dates = project.get("dates", {})
|
||||||
|
field_data["project_start"] = dates.get("start", "")
|
||||||
|
field_data["project_end"] = dates.get("end", "")
|
||||||
|
field_data["participants"] = project.get("participants", 0)
|
||||||
|
|
||||||
|
# Participation
|
||||||
|
participation = project.get("participation", {})
|
||||||
|
faculties = participation.get("faculties", {})
|
||||||
|
field_data["faculty_inf"] = faculties.get("inf", False)
|
||||||
|
field_data["faculty_esb"] = faculties.get("esb", False)
|
||||||
|
field_data["faculty_ls"] = faculties.get("ls", False)
|
||||||
|
field_data["faculty_tec"] = faculties.get("tec", False)
|
||||||
|
field_data["faculty_tex"] = faculties.get("tex", False)
|
||||||
|
field_data["faculty_nxt"] = faculties.get("nxt", False)
|
||||||
|
field_data["faculty_open"] = faculties.get("open", False)
|
||||||
|
|
||||||
|
# Financing
|
||||||
|
financing = project.get("financing", {})
|
||||||
|
if app_type.type_id == "qsm":
|
||||||
|
qsm = financing.get("qsm", {})
|
||||||
|
field_data["qsm_code"] = qsm.get("code", "")
|
||||||
|
flags = qsm.get("flags", {})
|
||||||
|
field_data["qsm_stellenfinanzierungen"] = flags.get("stellenfinanzierungen", False)
|
||||||
|
field_data["qsm_studierende"] = flags.get("studierende", False)
|
||||||
|
field_data["qsm_individuell"] = flags.get("individuell", False)
|
||||||
|
field_data["qsm_exkursion_genehmigt"] = flags.get("exkursionGenehmigt", False)
|
||||||
|
field_data["qsm_exkursion_bezuschusst"] = flags.get("exkursionBezuschusst", False)
|
||||||
|
else: # VSM
|
||||||
|
vsm = financing.get("vsm", {})
|
||||||
|
field_data["vsm_code"] = vsm.get("code", "")
|
||||||
|
flags = vsm.get("flags", {})
|
||||||
|
field_data["vsm_aufgaben"] = flags.get("aufgaben", False)
|
||||||
|
field_data["vsm_individuell"] = flags.get("individuell", False)
|
||||||
|
|
||||||
|
# Attachments
|
||||||
|
attachments = pa.get("attachments", {})
|
||||||
|
field_data["comparative_offers"] = attachments.get("comparativeOffers", False)
|
||||||
|
if app_type.type_id == "qsm":
|
||||||
|
field_data["fakultaet_attachment"] = attachments.get("fakultaet", False)
|
||||||
|
|
||||||
|
# Cost positions
|
||||||
|
costs = project.get("costs", [])
|
||||||
|
cost_positions = []
|
||||||
|
for cost in costs:
|
||||||
|
cost_positions.append({
|
||||||
|
"description": cost.get("name", ""),
|
||||||
|
"amount": cost.get("amountEur", 0),
|
||||||
|
"category": "",
|
||||||
|
"notes": ""
|
||||||
|
})
|
||||||
|
|
||||||
|
# Create new dynamic application
|
||||||
|
new_app = DynamicApplication(
|
||||||
|
application_id=old_app["pa_id"],
|
||||||
|
application_key=old_app["pa_key"],
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
user_id=old_app.get("user_id"),
|
||||||
|
email=contact.get("email", ""),
|
||||||
|
status_id=status_map.get(old_app.get("status", "DRAFT"), "draft"),
|
||||||
|
title=project.get("name", ""),
|
||||||
|
first_name=name.get("first", ""),
|
||||||
|
last_name=name.get("last", ""),
|
||||||
|
field_data=field_data,
|
||||||
|
cost_positions=cost_positions,
|
||||||
|
total_amount=project.get("totals", {}).get("requestedAmountEur", 0),
|
||||||
|
submitted_at=old_app.get("submitted_at"),
|
||||||
|
created_at=old_app.get("created_at", datetime.utcnow()),
|
||||||
|
updated_at=old_app.get("updated_at", datetime.utcnow())
|
||||||
|
)
|
||||||
|
|
||||||
|
return new_app
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Main migration function"""
|
||||||
|
logger.info("Starting migration to dynamic application system...")
|
||||||
|
|
||||||
|
# Create database connection
|
||||||
|
engine = create_engine(get_database_url())
|
||||||
|
Session = sessionmaker(bind=engine)
|
||||||
|
session = Session()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Step 1: Create application types
|
||||||
|
qsm_type = create_qsm_application_type(session)
|
||||||
|
vsm_type = create_vsm_application_type(session)
|
||||||
|
session.commit()
|
||||||
|
logger.info("Application types created successfully")
|
||||||
|
|
||||||
|
# Step 2: Create statuses and transitions
|
||||||
|
create_statuses_and_transitions(session, qsm_type)
|
||||||
|
create_statuses_and_transitions(session, vsm_type)
|
||||||
|
session.commit()
|
||||||
|
logger.info("Statuses and transitions created successfully")
|
||||||
|
|
||||||
|
# Step 3: Migrate existing applications
|
||||||
|
logger.info("Migrating existing applications...")
|
||||||
|
|
||||||
|
# Query old applications (if table exists)
|
||||||
|
try:
|
||||||
|
result = session.execute(text("SELECT * FROM applications"))
|
||||||
|
old_applications = result.fetchall()
|
||||||
|
|
||||||
|
migrated_count = 0
|
||||||
|
for old_app_row in old_applications:
|
||||||
|
old_app = dict(old_app_row._mapping)
|
||||||
|
|
||||||
|
# Determine type based on variant
|
||||||
|
variant = old_app.get("variant", "QSM")
|
||||||
|
app_type = qsm_type if variant == "QSM" else vsm_type
|
||||||
|
|
||||||
|
# Migrate application
|
||||||
|
new_app = migrate_old_application(session, old_app, app_type)
|
||||||
|
session.add(new_app)
|
||||||
|
|
||||||
|
# Create history entry
|
||||||
|
history = ApplicationHistory(
|
||||||
|
application_id=new_app.id,
|
||||||
|
action="migrated",
|
||||||
|
comment=f"Migrated from old {variant} application",
|
||||||
|
created_at=datetime.utcnow()
|
||||||
|
)
|
||||||
|
session.add(history)
|
||||||
|
|
||||||
|
migrated_count += 1
|
||||||
|
|
||||||
|
if migrated_count % 100 == 0:
|
||||||
|
session.commit()
|
||||||
|
logger.info(f"Migrated {migrated_count} applications...")
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
logger.info(f"Successfully migrated {migrated_count} applications")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not migrate old applications: {e}")
|
||||||
|
logger.info("This is normal if running on a fresh database")
|
||||||
|
|
||||||
|
logger.info("Migration completed successfully!")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Migration failed: {e}")
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
611
backend/src/api/application_types.py
Normal file
611
backend/src/api/application_types.py
Normal file
@ -0,0 +1,611 @@
|
|||||||
|
"""
|
||||||
|
API routes for dynamic application type management
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile, Form
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
import json
|
||||||
|
|
||||||
|
from ..config.database import get_db
|
||||||
|
from ..models.application_type import (
|
||||||
|
ApplicationType, ApplicationField, ApplicationTypeStatus,
|
||||||
|
StatusTransition, FieldType, TransitionTriggerType
|
||||||
|
)
|
||||||
|
from ..models.user import User
|
||||||
|
from ..services.auth_service import get_current_user, require_admin
|
||||||
|
from ..utils.pdf_utils import validate_pdf_template, extract_pdf_fields
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/application-types", tags=["Application Types"])
|
||||||
|
|
||||||
|
|
||||||
|
# Pydantic models
|
||||||
|
class FieldDefinition(BaseModel):
|
||||||
|
field_id: str
|
||||||
|
field_type: str
|
||||||
|
name: str
|
||||||
|
label: Optional[str] = None
|
||||||
|
description: Optional[str] = None
|
||||||
|
field_order: int = 0
|
||||||
|
is_required: bool = False
|
||||||
|
is_readonly: bool = False
|
||||||
|
is_hidden: bool = False
|
||||||
|
options: Optional[List[str]] = None
|
||||||
|
default_value: Optional[str] = None
|
||||||
|
validation_rules: Optional[Dict[str, Any]] = None
|
||||||
|
display_conditions: Optional[Dict[str, Any]] = None
|
||||||
|
placeholder: Optional[str] = None
|
||||||
|
section: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class StatusDefinition(BaseModel):
|
||||||
|
status_id: str
|
||||||
|
name: str
|
||||||
|
description: Optional[str] = None
|
||||||
|
is_editable: bool = True
|
||||||
|
color: Optional[str] = None
|
||||||
|
icon: Optional[str] = None
|
||||||
|
display_order: int = 0
|
||||||
|
is_initial: bool = False
|
||||||
|
is_final: bool = False
|
||||||
|
is_cancelled: bool = False
|
||||||
|
send_notification: bool = False
|
||||||
|
notification_template: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class TransitionDefinition(BaseModel):
|
||||||
|
from_status_id: str
|
||||||
|
to_status_id: str
|
||||||
|
name: str
|
||||||
|
trigger_type: str
|
||||||
|
    trigger_config: Dict[str, Any] = Field(default_factory=dict)
    conditions: Optional[Dict[str, Any]] = None
    actions: Optional[List[Dict[str, Any]]] = None
    priority: int = 0
    is_active: bool = True


class ApplicationTypeCreate(BaseModel):
    type_id: str
    name: str
    description: Optional[str] = None
    fields: List[FieldDefinition]
    statuses: List[StatusDefinition]
    transitions: List[TransitionDefinition]
    pdf_field_mapping: Dict[str, str] = Field(default_factory=dict)
    is_active: bool = True
    is_public: bool = True
    allowed_roles: Optional[List[str]] = None
    max_cost_positions: int = 100
    max_comparison_offers: int = 100


class ApplicationTypeUpdate(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None
    is_active: Optional[bool] = None
    is_public: Optional[bool] = None
    allowed_roles: Optional[List[str]] = None
    max_cost_positions: Optional[int] = None
    max_comparison_offers: Optional[int] = None


class ApplicationTypeResponse(BaseModel):
    id: int
    type_id: str
    name: str
    description: Optional[str]
    is_active: bool
    is_public: bool
    allowed_roles: List[str]
    max_cost_positions: int
    max_comparison_offers: int
    version: str
    usage_count: int
    pdf_template_filename: Optional[str]
    fields: List[FieldDefinition]
    statuses: List[StatusDefinition]
    transitions: List[TransitionDefinition]
    created_at: str
    updated_at: str

    class Config:
        from_attributes = True


@router.get("/", response_model=List[ApplicationTypeResponse])
async def get_application_types(
    include_inactive: bool = False,
    current_user: Optional[User] = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get all application types"""
    query = db.query(ApplicationType)

    if not include_inactive:
        query = query.filter(ApplicationType.is_active == True)

    # Non-admin users only see public types or types they have access to
    if current_user and not current_user.has_role("admin"):
        query = query.filter(
            (ApplicationType.is_public == True) |
            (ApplicationType.allowed_roles.contains([role.name for role in current_user.roles]))
        )
    elif not current_user:
        # Anonymous users only see public types
        query = query.filter(ApplicationType.is_public == True)

    types = query.all()

    result = []
    for app_type in types:
        type_dict = {
            "id": app_type.id,
            "type_id": app_type.type_id,
            "name": app_type.name,
            "description": app_type.description,
            "is_active": app_type.is_active,
            "is_public": app_type.is_public,
            "allowed_roles": app_type.allowed_roles or [],
            "max_cost_positions": app_type.max_cost_positions,
            "max_comparison_offers": app_type.max_comparison_offers,
            "version": app_type.version,
            "usage_count": app_type.usage_count,
            "pdf_template_filename": app_type.pdf_template_filename,
            "created_at": app_type.created_at.isoformat(),
            "updated_at": app_type.updated_at.isoformat(),
            "fields": [],
            "statuses": [],
            "transitions": []
        }

        # Add fields
        for field in app_type.fields:
            type_dict["fields"].append({
                "field_id": field.field_id,
                "field_type": field.field_type.value,
                "name": field.name,
                "label": field.label,
                "description": field.description,
                "field_order": field.field_order,
                "is_required": field.is_required,
                "is_readonly": field.is_readonly,
                "is_hidden": field.is_hidden,
                "options": field.options,
                "default_value": field.default_value,
                "validation_rules": field.validation_rules,
                "display_conditions": field.display_conditions,
                "placeholder": field.placeholder,
                "section": field.section
            })

        # Add statuses and transitions
        status_map = {}
        for status in app_type.statuses:
            status_dict = {
                "status_id": status.status_id,
                "name": status.name,
                "description": status.description,
                "is_editable": status.is_editable,
                "color": status.color,
                "icon": status.icon,
                "display_order": status.display_order,
                "is_initial": status.is_initial,
                "is_final": status.is_final,
                "is_cancelled": status.is_cancelled,
                "send_notification": status.send_notification,
                "notification_template": status.notification_template
            }
            type_dict["statuses"].append(status_dict)
            status_map[status.id] = status.status_id

        # Add transitions
        for status in app_type.statuses:
            for transition in status.transitions_from:
                type_dict["transitions"].append({
                    "from_status_id": status_map.get(transition.from_status_id),
                    "to_status_id": status_map.get(transition.to_status_id),
                    "name": transition.name,
                    "trigger_type": transition.trigger_type.value,
                    "trigger_config": transition.trigger_config,
                    "conditions": transition.conditions,
                    "actions": transition.actions,
                    "priority": transition.priority,
                    "is_active": transition.is_active
                })

        result.append(ApplicationTypeResponse(**type_dict))

    return result

@router.get("/{type_id}", response_model=ApplicationTypeResponse)
|
||||||
|
async def get_application_type(
|
||||||
|
type_id: str,
|
||||||
|
current_user: Optional[User] = Depends(get_current_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get a specific application type"""
|
||||||
|
app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()
|
||||||
|
|
||||||
|
if not app_type:
|
||||||
|
raise HTTPException(status_code=404, detail="Application type not found")
|
||||||
|
|
||||||
|
# Check access
|
||||||
|
if not app_type.is_public:
|
||||||
|
if not current_user:
|
||||||
|
raise HTTPException(status_code=403, detail="Access denied")
|
||||||
|
if not current_user.has_role("admin"):
|
||||||
|
if app_type.allowed_roles and not current_user.has_any_role(app_type.allowed_roles):
|
||||||
|
raise HTTPException(status_code=403, detail="Access denied")
|
||||||
|
|
||||||
|
# Build response
|
||||||
|
type_dict = {
|
||||||
|
"id": app_type.id,
|
||||||
|
"type_id": app_type.type_id,
|
||||||
|
"name": app_type.name,
|
||||||
|
"description": app_type.description,
|
||||||
|
"is_active": app_type.is_active,
|
||||||
|
"is_public": app_type.is_public,
|
||||||
|
"allowed_roles": app_type.allowed_roles or [],
|
||||||
|
"max_cost_positions": app_type.max_cost_positions,
|
||||||
|
"max_comparison_offers": app_type.max_comparison_offers,
|
||||||
|
"version": app_type.version,
|
||||||
|
"usage_count": app_type.usage_count,
|
||||||
|
"pdf_template_filename": app_type.pdf_template_filename,
|
||||||
|
"created_at": app_type.created_at.isoformat(),
|
||||||
|
"updated_at": app_type.updated_at.isoformat(),
|
||||||
|
"fields": [],
|
||||||
|
"statuses": [],
|
||||||
|
"transitions": []
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add fields
|
||||||
|
for field in app_type.fields:
|
||||||
|
type_dict["fields"].append({
|
||||||
|
"field_id": field.field_id,
|
||||||
|
"field_type": field.field_type.value,
|
||||||
|
"name": field.name,
|
||||||
|
"label": field.label,
|
||||||
|
"description": field.description,
|
||||||
|
"field_order": field.field_order,
|
||||||
|
"is_required": field.is_required,
|
||||||
|
"is_readonly": field.is_readonly,
|
||||||
|
"is_hidden": field.is_hidden,
|
||||||
|
"options": field.options,
|
||||||
|
"default_value": field.default_value,
|
||||||
|
"validation_rules": field.validation_rules,
|
||||||
|
"display_conditions": field.display_conditions,
|
||||||
|
"placeholder": field.placeholder,
|
||||||
|
"section": field.section
|
||||||
|
})
|
||||||
|
|
||||||
|
# Add statuses and transitions
|
||||||
|
status_map = {}
|
||||||
|
for status in app_type.statuses:
|
||||||
|
status_dict = {
|
||||||
|
"status_id": status.status_id,
|
||||||
|
"name": status.name,
|
||||||
|
"description": status.description,
|
||||||
|
"is_editable": status.is_editable,
|
||||||
|
"color": status.color,
|
||||||
|
"icon": status.icon,
|
||||||
|
"display_order": status.display_order,
|
||||||
|
"is_initial": status.is_initial,
|
||||||
|
"is_final": status.is_final,
|
||||||
|
"is_cancelled": status.is_cancelled,
|
||||||
|
"send_notification": status.send_notification,
|
||||||
|
"notification_template": status.notification_template
|
||||||
|
}
|
||||||
|
type_dict["statuses"].append(status_dict)
|
||||||
|
status_map[status.id] = status.status_id
|
||||||
|
|
||||||
|
# Add transitions
|
||||||
|
for status in app_type.statuses:
|
||||||
|
for transition in status.transitions_from:
|
||||||
|
type_dict["transitions"].append({
|
||||||
|
"from_status_id": status_map.get(transition.from_status_id),
|
||||||
|
"to_status_id": status_map.get(transition.to_status_id),
|
||||||
|
"name": transition.name,
|
||||||
|
"trigger_type": transition.trigger_type.value,
|
||||||
|
"trigger_config": transition.trigger_config,
|
||||||
|
"conditions": transition.conditions,
|
||||||
|
"actions": transition.actions,
|
||||||
|
"priority": transition.priority,
|
||||||
|
"is_active": transition.is_active
|
||||||
|
})
|
||||||
|
|
||||||
|
return ApplicationTypeResponse(**type_dict)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/", response_model=ApplicationTypeResponse)
|
||||||
|
async def create_application_type(
|
||||||
|
type_data: str = Form(...),
|
||||||
|
pdf_template: Optional[UploadFile] = File(None),
|
||||||
|
current_user: User = Depends(require_admin),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Create a new application type (admin only)"""
|
||||||
|
try:
|
||||||
|
data = json.loads(type_data)
|
||||||
|
type_create = ApplicationTypeCreate(**data)
|
||||||
|
except (json.JSONDecodeError, ValueError) as e:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Invalid data: {str(e)}")
|
||||||
|
|
||||||
|
# Check if type_id already exists
|
||||||
|
existing = db.query(ApplicationType).filter(ApplicationType.type_id == type_create.type_id).first()
|
||||||
|
if existing:
|
||||||
|
raise HTTPException(status_code=400, detail="Application type with this ID already exists")
|
||||||
|
|
||||||
|
# Create application type
|
||||||
|
app_type = ApplicationType(
|
||||||
|
type_id=type_create.type_id,
|
||||||
|
name=type_create.name,
|
||||||
|
description=type_create.description,
|
||||||
|
pdf_field_mapping=type_create.pdf_field_mapping,
|
||||||
|
is_active=type_create.is_active,
|
||||||
|
is_public=type_create.is_public,
|
||||||
|
allowed_roles=type_create.allowed_roles,
|
||||||
|
max_cost_positions=type_create.max_cost_positions,
|
||||||
|
max_comparison_offers=type_create.max_comparison_offers
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handle PDF template upload
|
||||||
|
if pdf_template:
|
||||||
|
pdf_content = await pdf_template.read()
|
||||||
|
app_type.pdf_template = pdf_content
|
||||||
|
app_type.pdf_template_filename = pdf_template.filename
|
||||||
|
|
||||||
|
# Extract and validate PDF fields
|
||||||
|
try:
|
||||||
|
pdf_fields = extract_pdf_fields(pdf_content)
|
||||||
|
# Validate mapping
|
||||||
|
for pdf_field in type_create.pdf_field_mapping.keys():
|
||||||
|
if pdf_field not in pdf_fields:
|
||||||
|
raise ValueError(f"PDF field '{pdf_field}' not found in template")
|
||||||
|
except Exception as e:
|
||||||
|
raise HTTPException(status_code=400, detail=f"PDF validation failed: {str(e)}")
|
||||||
|
|
||||||
|
db.add(app_type)
|
||||||
|
db.flush()
|
||||||
|
|
||||||
|
# Create fields
|
||||||
|
for field_def in type_create.fields:
|
||||||
|
field = ApplicationField(
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
field_id=field_def.field_id,
|
||||||
|
field_type=FieldType(field_def.field_type),
|
||||||
|
name=field_def.name,
|
||||||
|
label=field_def.label,
|
||||||
|
description=field_def.description,
|
||||||
|
field_order=field_def.field_order,
|
||||||
|
is_required=field_def.is_required,
|
||||||
|
is_readonly=field_def.is_readonly,
|
||||||
|
is_hidden=field_def.is_hidden,
|
||||||
|
options=field_def.options,
|
||||||
|
default_value=field_def.default_value,
|
||||||
|
validation_rules=field_def.validation_rules,
|
||||||
|
display_conditions=field_def.display_conditions,
|
||||||
|
placeholder=field_def.placeholder,
|
||||||
|
section=field_def.section
|
||||||
|
)
|
||||||
|
db.add(field)
|
||||||
|
|
||||||
|
# Create statuses
|
||||||
|
status_map = {}
|
||||||
|
for status_def in type_create.statuses:
|
||||||
|
status = ApplicationTypeStatus(
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
status_id=status_def.status_id,
|
||||||
|
name=status_def.name,
|
||||||
|
description=status_def.description,
|
||||||
|
is_editable=status_def.is_editable,
|
||||||
|
color=status_def.color,
|
||||||
|
icon=status_def.icon,
|
||||||
|
display_order=status_def.display_order,
|
||||||
|
is_initial=status_def.is_initial,
|
||||||
|
is_final=status_def.is_final,
|
||||||
|
is_cancelled=status_def.is_cancelled,
|
||||||
|
send_notification=status_def.send_notification,
|
||||||
|
notification_template=status_def.notification_template
|
||||||
|
)
|
||||||
|
db.add(status)
|
||||||
|
db.flush()
|
||||||
|
status_map[status_def.status_id] = status
|
||||||
|
|
||||||
|
# Create transitions
|
||||||
|
for trans_def in type_create.transitions:
|
||||||
|
from_status = status_map.get(trans_def.from_status_id)
|
||||||
|
to_status = status_map.get(trans_def.to_status_id)
|
||||||
|
|
||||||
|
if not from_status or not to_status:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Invalid status in transition: {trans_def.from_status_id} -> {trans_def.to_status_id}")
|
||||||
|
|
||||||
|
transition = StatusTransition(
|
||||||
|
from_status_id=from_status.id,
|
||||||
|
to_status_id=to_status.id,
|
||||||
|
name=trans_def.name,
|
||||||
|
trigger_type=TransitionTriggerType(trans_def.trigger_type),
|
||||||
|
trigger_config=trans_def.trigger_config,
|
||||||
|
conditions=trans_def.conditions,
|
||||||
|
actions=trans_def.actions,
|
||||||
|
priority=trans_def.priority,
|
||||||
|
is_active=trans_def.is_active
|
||||||
|
)
|
||||||
|
db.add(transition)
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
db.refresh(app_type)
|
||||||
|
|
||||||
|
# Return created type
|
||||||
|
return await get_application_type(app_type.type_id, current_user, db)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{type_id}", response_model=ApplicationTypeResponse)
async def update_application_type(
    type_id: str,
    update_data: ApplicationTypeUpdate,
    current_user: User = Depends(require_admin),
    db: Session = Depends(get_db)
):
    """Update an application type (admin only)"""
    app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()

    if not app_type:
        raise HTTPException(status_code=404, detail="Application type not found")

    # Update fields
    if update_data.name is not None:
        app_type.name = update_data.name
    if update_data.description is not None:
        app_type.description = update_data.description
    if update_data.is_active is not None:
        app_type.is_active = update_data.is_active
    if update_data.is_public is not None:
        app_type.is_public = update_data.is_public
    if update_data.allowed_roles is not None:
        app_type.allowed_roles = update_data.allowed_roles
    if update_data.max_cost_positions is not None:
        app_type.max_cost_positions = update_data.max_cost_positions
    if update_data.max_comparison_offers is not None:
        app_type.max_comparison_offers = update_data.max_comparison_offers

    db.commit()
    db.refresh(app_type)

    return await get_application_type(app_type.type_id, current_user, db)


@router.post("/{type_id}/pdf-template")
async def upload_pdf_template(
    type_id: str,
    pdf_template: UploadFile = File(...),
    current_user: User = Depends(require_admin),
    db: Session = Depends(get_db)
):
    """Upload or update PDF template for an application type"""
    app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()

    if not app_type:
        raise HTTPException(status_code=404, detail="Application type not found")

    # Read and validate PDF
    pdf_content = await pdf_template.read()

    try:
        pdf_fields = extract_pdf_fields(pdf_content)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Invalid PDF template: {str(e)}")

    # Update template
    app_type.pdf_template = pdf_content
    app_type.pdf_template_filename = pdf_template.filename

    db.commit()

    return {
        "message": "PDF template uploaded successfully",
        "filename": pdf_template.filename,
        "fields": pdf_fields
    }


@router.delete("/{type_id}")
async def delete_application_type(
    type_id: str,
    current_user: User = Depends(require_admin),
    db: Session = Depends(get_db)
):
    """Delete an application type (admin only)"""
    app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()

    if not app_type:
        raise HTTPException(status_code=404, detail="Application type not found")

    # Check if type has been used
    if app_type.usage_count > 0:
        # Instead of deleting, deactivate it
        app_type.is_active = False
        db.commit()
        return {"message": "Application type deactivated (has existing applications)"}

    db.delete(app_type)
    db.commit()

    return {"message": "Application type deleted successfully"}


@router.post("/{type_id}/fields")
async def add_field_to_type(
    type_id: str,
    field: FieldDefinition,
    current_user: User = Depends(require_admin),
    db: Session = Depends(get_db)
):
    """Add a field to an application type"""
    app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()

    if not app_type:
        raise HTTPException(status_code=404, detail="Application type not found")

    # Check if field_id already exists
    existing = db.query(ApplicationField).filter(
        ApplicationField.application_type_id == app_type.id,
        ApplicationField.field_id == field.field_id
    ).first()

    if existing:
        raise HTTPException(status_code=400, detail="Field with this ID already exists")

    new_field = ApplicationField(
        application_type_id=app_type.id,
        field_id=field.field_id,
        field_type=FieldType(field.field_type),
        name=field.name,
        label=field.label,
        description=field.description,
        field_order=field.field_order,
        is_required=field.is_required,
        is_readonly=field.is_readonly,
        is_hidden=field.is_hidden,
        options=field.options,
        default_value=field.default_value,
        validation_rules=field.validation_rules,
        display_conditions=field.display_conditions,
        placeholder=field.placeholder,
        section=field.section
    )

    db.add(new_field)
    db.commit()

    return {"message": "Field added successfully"}


@router.delete("/{type_id}/fields/{field_id}")
async def remove_field_from_type(
    type_id: str,
    field_id: str,
    current_user: User = Depends(require_admin),
    db: Session = Depends(get_db)
):
    """Remove a field from an application type"""
    app_type = db.query(ApplicationType).filter(ApplicationType.type_id == type_id).first()

    if not app_type:
        raise HTTPException(status_code=404, detail="Application type not found")

    field = db.query(ApplicationField).filter(
        ApplicationField.application_type_id == app_type.id,
        ApplicationField.field_id == field_id
    ).first()

    if not field:
        raise HTTPException(status_code=404, detail="Field not found")

    db.delete(field)
    db.commit()

    return {"message": "Field removed successfully"}
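
A minimal sketch of how an admin client might call the type-creation endpoint in this file, which takes the `ApplicationTypeCreate` payload as a JSON string in the `type_data` form field (plus an optional PDF template upload). The base URL, the example field/status/transition values, the `"applicant_action"` trigger value, and the use of `requests` are illustrative assumptions; authentication headers for the `require_admin` dependency are omitted, and the exact required keys follow the `FieldDefinition`, `StatusDefinition`, and `TransitionDefinition` models defined earlier in this file.

```python
import json
import requests  # assumed HTTP client, not part of this commit

type_payload = {
    "type_id": "travel_reimbursement",   # illustrative example values
    "name": "Travel Reimbursement",
    "fields": [
        {"field_id": "purpose", "field_type": "text", "name": "purpose",
         "label": "Purpose of travel", "is_required": True}
    ],
    "statuses": [
        {"status_id": "draft", "name": "Draft", "is_initial": True, "is_editable": True},
        {"status_id": "submitted", "name": "Submitted"}
    ],
    "transitions": [
        {"from_status_id": "draft", "to_status_id": "submitted",
         "name": "Submit", "trigger_type": "applicant_action"}  # assumed enum value
    ]
}

# The endpoint expects the JSON payload in the `type_data` form field.
resp = requests.post(
    "http://localhost:8000/api/application-types/",  # assumed deployment URL
    data={"type_data": json.dumps(type_payload)},
)
resp.raise_for_status()
```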
831
backend/src/api/dynamic_applications.py
Normal file
@ -0,0 +1,831 @@
"""
|
||||||
|
API routes for dynamic application management
|
||||||
|
"""
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status, Query, BackgroundTasks
|
||||||
|
from sqlalchemy.orm import Session, joinedload
|
||||||
|
from sqlalchemy import or_, and_, desc
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
from pydantic import BaseModel, Field, validator
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
import secrets
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from ..config.database import get_db
|
||||||
|
from ..models.application_type import (
|
||||||
|
ApplicationType, ApplicationField, ApplicationTypeStatus,
|
||||||
|
DynamicApplication, ApplicationHistory, ApplicationAttachment,
|
||||||
|
ApplicationTransitionLog, ApplicationApproval, TransitionTriggerType
|
||||||
|
)
|
||||||
|
from ..models.user import User
|
||||||
|
from ..services.auth_service import get_current_user, get_optional_user
|
||||||
|
from ..services.notification_service import send_notification
|
||||||
|
from ..services.pdf_service import generate_pdf_for_application
|
||||||
|
from ..utils.validators import validate_field_value
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/applications", tags=["Dynamic Applications"])
|
||||||
|
|
||||||
|
|
||||||
|
# Pydantic models
class CostPosition(BaseModel):
    description: str
    amount: float
    category: Optional[str] = None
    notes: Optional[str] = None


class ComparisonOffer(BaseModel):
    vendor: str
    description: str
    amount: float
    selected: bool = False
    notes: Optional[str] = None


class ApplicationCreate(BaseModel):
    application_type_id: str
    title: str
    field_data: Dict[str, Any] = Field(default_factory=dict)
    cost_positions: Optional[List[CostPosition]] = None
    comparison_offers: Optional[List[ComparisonOffer]] = None

    @validator('cost_positions')
    def validate_cost_positions(cls, v, values):
        if v and len(v) > 100:
            raise ValueError("Maximum 100 cost positions allowed")
        return v

    @validator('comparison_offers')
    def validate_comparison_offers(cls, v, values):
        if v and len(v) > 100:
            raise ValueError("Maximum 100 comparison offers allowed")
        return v


class ApplicationUpdate(BaseModel):
    title: Optional[str] = None
    field_data: Optional[Dict[str, Any]] = None
    cost_positions: Optional[List[CostPosition]] = None
    comparison_offers: Optional[List[ComparisonOffer]] = None


class ApplicationResponse(BaseModel):
    id: int
    application_id: str
    application_type_id: int
    type_name: str
    email: str
    status_id: str
    status_name: str
    title: str
    first_name: Optional[str]
    last_name: Optional[str]
    total_amount: float
    field_data: Dict[str, Any]
    cost_positions: List[Dict[str, Any]]
    comparison_offers: List[Dict[str, Any]]
    submitted_at: Optional[datetime]
    status_changed_at: Optional[datetime]
    created_at: datetime
    updated_at: datetime
    can_edit: bool
    available_actions: List[str]

    class Config:
        from_attributes = True


class ApplicationListResponse(BaseModel):
    id: int
    application_id: str
    type_name: str
    title: str
    email: str
    status_id: str
    status_name: str
    total_amount: float
    submitted_at: Optional[datetime]
    created_at: datetime

    class Config:
        from_attributes = True


class StatusTransitionRequest(BaseModel):
    new_status_id: str
    comment: Optional[str] = None
    trigger_data: Optional[Dict[str, Any]] = None


class ApprovalRequest(BaseModel):
    decision: str  # approve, reject, abstain
    comment: Optional[str] = None

@router.get("/", response_model=List[ApplicationListResponse])
async def list_applications(
    type_id: Optional[str] = None,
    status_id: Optional[str] = None,
    email: Optional[str] = None,
    search: Optional[str] = None,
    submitted_after: Optional[datetime] = None,
    submitted_before: Optional[datetime] = None,
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    current_user: Optional[User] = Depends(get_optional_user),
    db: Session = Depends(get_db)
):
    """List applications with filtering"""
    query = db.query(DynamicApplication).join(ApplicationType)

    # Filter by type
    if type_id:
        query = query.filter(ApplicationType.type_id == type_id)

    # Filter by status
    if status_id:
        query = query.filter(DynamicApplication.status_id == status_id)

    # Filter by email (for users to see their own applications)
    if email:
        query = query.filter(DynamicApplication.email == email)
    elif current_user and not current_user.has_role("admin"):
        # Non-admin users only see their own applications
        query = query.filter(DynamicApplication.email == current_user.email)

    # Search
    if search:
        search_term = f"%{search}%"
        query = query.filter(
            or_(
                DynamicApplication.title.ilike(search_term),
                DynamicApplication.email.ilike(search_term),
                DynamicApplication.search_text.ilike(search_term)
            )
        )

    # Date filters
    if submitted_after:
        query = query.filter(DynamicApplication.submitted_at >= submitted_after)
    if submitted_before:
        query = query.filter(DynamicApplication.submitted_at <= submitted_before)

    # Order and paginate
    query = query.order_by(desc(DynamicApplication.created_at))
    applications = query.offset(offset).limit(limit).all()

    # Build response
    result = []
    for app in applications:
        # Get status name
        status = db.query(ApplicationTypeStatus).filter(
            ApplicationTypeStatus.application_type_id == app.application_type_id,
            ApplicationTypeStatus.status_id == app.status_id
        ).first()

        result.append({
            "id": app.id,
            "application_id": app.application_id,
            "type_name": app.application_type.name,
            "title": app.title,
            "email": app.email,
            "status_id": app.status_id,
            "status_name": status.name if status else app.status_id,
            "total_amount": app.total_amount,
            "submitted_at": app.submitted_at,
            "created_at": app.created_at
        })

    return result

@router.get("/{application_id}", response_model=ApplicationResponse)
async def get_application(
    application_id: str,
    access_key: Optional[str] = None,
    current_user: Optional[User] = Depends(get_optional_user),
    db: Session = Depends(get_db)
):
    """Get application details"""
    app = db.query(DynamicApplication).filter(
        DynamicApplication.application_id == application_id
    ).first()

    if not app:
        raise HTTPException(status_code=404, detail="Application not found")

    # Check access
    has_access = False
    if current_user:
        if current_user.has_role("admin"):
            has_access = True
        elif app.email == current_user.email:
            has_access = True
    elif access_key:
        # Verify access key
        key_hash = hashlib.sha256(access_key.encode()).hexdigest()
        if app.application_key == key_hash:
            has_access = True

    if not has_access:
        raise HTTPException(status_code=403, detail="Access denied")

    # Get status details
    status = db.query(ApplicationTypeStatus).filter(
        ApplicationTypeStatus.application_type_id == app.application_type_id,
        ApplicationTypeStatus.status_id == app.status_id
    ).first()

    # Determine if editable
    can_edit = False
    if status and status.is_editable:
        if current_user and (current_user.has_role("admin") or app.email == current_user.email):
            can_edit = True
        elif access_key:
            can_edit = True

    # Get available actions
    available_actions = []
    if current_user and status:  # guard against a missing status row
        # Check for available transitions
        transitions = db.query(StatusTransition).filter(
            StatusTransition.from_status_id == status.id,
            StatusTransition.is_active == True
        ).all()

        for trans in transitions:
            if trans.trigger_type == TransitionTriggerType.APPLICANT_ACTION:
                if app.email == current_user.email:
                    available_actions.append(trans.name)
            elif trans.trigger_type == TransitionTriggerType.USER_APPROVAL:
                config = trans.trigger_config or {}
                required_role = config.get("role")
                if required_role and current_user.has_role(required_role):
                    available_actions.append(trans.name)

    return ApplicationResponse(
        id=app.id,
        application_id=app.application_id,
        application_type_id=app.application_type_id,
        type_name=app.application_type.name,
        email=app.email,
        status_id=app.status_id,
        status_name=status.name if status else app.status_id,
        title=app.title,
        first_name=app.first_name,
        last_name=app.last_name,
        total_amount=app.total_amount,
        field_data=app.field_data or {},
        cost_positions=app.cost_positions or [],
        comparison_offers=app.comparison_offers or [],
        submitted_at=app.submitted_at,
        status_changed_at=app.status_changed_at,
        created_at=app.created_at,
        updated_at=app.updated_at,
        can_edit=can_edit,
        available_actions=available_actions
    )

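
A brief sketch of the anonymous access-key flow used by the endpoint above and by the create endpoint that follows: the server stores only the SHA-256 hash of the key, so a client has to keep the plaintext `access_key` returned at creation time and send it back as a query parameter. The base URL, the example type and field values, and the use of `requests` are illustrative assumptions, not part of this commit.

```python
import requests  # assumed HTTP client, for illustration only

BASE = "http://localhost:8000/api/applications"  # assumed deployment URL

# Create an application anonymously; the response includes the one-time plaintext key.
created = requests.post(BASE + "/", json={
    "application_type_id": "travel_reimbursement",  # illustrative type_id
    "title": "Conference trip",
    "field_data": {"email": "applicant@example.org"},
}).json()

# Later reads must present the key; the server compares sha256(key) to the stored hash.
detail = requests.get(
    f"{BASE}/{created['application_id']}",
    params={"access_key": created["access_key"]},
).json()
print(detail["status_id"], detail["can_edit"])
```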
@router.post("/", response_model=Dict[str, Any])
|
||||||
|
async def create_application(
|
||||||
|
application_data: ApplicationCreate,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
current_user: Optional[User] = Depends(get_optional_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Create a new application"""
|
||||||
|
# Get application type
|
||||||
|
app_type = db.query(ApplicationType).filter(
|
||||||
|
ApplicationType.type_id == application_data.application_type_id,
|
||||||
|
ApplicationType.is_active == True
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not app_type:
|
||||||
|
raise HTTPException(status_code=404, detail="Application type not found or inactive")
|
||||||
|
|
||||||
|
# Check access to type
|
||||||
|
if not app_type.is_public:
|
||||||
|
if not current_user:
|
||||||
|
raise HTTPException(status_code=403, detail="Authentication required")
|
||||||
|
if app_type.allowed_roles and not current_user.has_any_role(app_type.allowed_roles):
|
||||||
|
raise HTTPException(status_code=403, detail="Not authorized for this application type")
|
||||||
|
|
||||||
|
# Validate fields
|
||||||
|
for field in app_type.fields:
|
||||||
|
if field.is_required and field.field_id not in application_data.field_data:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Required field missing: {field.name}")
|
||||||
|
|
||||||
|
if field.field_id in application_data.field_data:
|
||||||
|
value = application_data.field_data[field.field_id]
|
||||||
|
try:
|
||||||
|
validate_field_value(value, field)
|
||||||
|
except ValueError as e:
|
||||||
|
raise HTTPException(status_code=400, detail=str(e))
|
||||||
|
|
||||||
|
# Get initial status
|
||||||
|
initial_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app_type.id,
|
||||||
|
ApplicationTypeStatus.is_initial == True
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not initial_status:
|
||||||
|
# Fallback to first status
|
||||||
|
initial_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app_type.id
|
||||||
|
).order_by(ApplicationTypeStatus.display_order).first()
|
||||||
|
|
||||||
|
if not initial_status:
|
||||||
|
raise HTTPException(status_code=500, detail="No status defined for application type")
|
||||||
|
|
||||||
|
# Generate application ID and access key
|
||||||
|
app_id = secrets.token_urlsafe(16)
|
||||||
|
access_key = secrets.token_urlsafe(32)
|
||||||
|
key_hash = hashlib.sha256(access_key.encode()).hexdigest()
|
||||||
|
|
||||||
|
# Create application
|
||||||
|
application = DynamicApplication(
|
||||||
|
application_id=app_id,
|
||||||
|
application_key=key_hash,
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
user_id=current_user.id if current_user else None,
|
||||||
|
email=current_user.email if current_user else application_data.field_data.get("email", ""),
|
||||||
|
status_id=initial_status.status_id,
|
||||||
|
title=application_data.title,
|
||||||
|
first_name=current_user.given_name if current_user else application_data.field_data.get("first_name"),
|
||||||
|
last_name=current_user.family_name if current_user else application_data.field_data.get("last_name"),
|
||||||
|
field_data=application_data.field_data,
|
||||||
|
cost_positions=[cp.dict() for cp in application_data.cost_positions] if application_data.cost_positions else [],
|
||||||
|
comparison_offers=[co.dict() for co in application_data.comparison_offers] if application_data.comparison_offers else []
|
||||||
|
)
|
||||||
|
|
||||||
|
# Calculate total amount
|
||||||
|
application.calculate_total_amount()
|
||||||
|
|
||||||
|
# Update search text
|
||||||
|
application.update_search_text()
|
||||||
|
|
||||||
|
db.add(application)
|
||||||
|
db.flush()
|
||||||
|
|
||||||
|
# Create history entry
|
||||||
|
history = ApplicationHistory(
|
||||||
|
application_id=application.id,
|
||||||
|
user_id=current_user.id if current_user else None,
|
||||||
|
action="created",
|
||||||
|
comment="Application created"
|
||||||
|
)
|
||||||
|
db.add(history)
|
||||||
|
|
||||||
|
# Update usage count
|
||||||
|
app_type.usage_count += 1
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
db.refresh(application)
|
||||||
|
|
||||||
|
# Send notification
|
||||||
|
if initial_status.send_notification:
|
||||||
|
background_tasks.add_task(
|
||||||
|
send_notification,
|
||||||
|
application.email,
|
||||||
|
"Application Created",
|
||||||
|
initial_status.notification_template or f"Your application {app_id} has been created."
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"application_id": app_id,
|
||||||
|
"access_key": access_key,
|
||||||
|
"access_url": f"/applications/{app_id}?key={access_key}",
|
||||||
|
"status": initial_status.status_id
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{application_id}", response_model=ApplicationResponse)
|
||||||
|
async def update_application(
|
||||||
|
application_id: str,
|
||||||
|
update_data: ApplicationUpdate,
|
||||||
|
access_key: Optional[str] = None,
|
||||||
|
current_user: Optional[User] = Depends(get_optional_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Update an application"""
|
||||||
|
app = db.query(DynamicApplication).filter(
|
||||||
|
DynamicApplication.application_id == application_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not app:
|
||||||
|
raise HTTPException(status_code=404, detail="Application not found")
|
||||||
|
|
||||||
|
# Check access and editability
|
||||||
|
has_access = False
|
||||||
|
if current_user:
|
||||||
|
if current_user.has_role("admin"):
|
||||||
|
has_access = True
|
||||||
|
elif app.email == current_user.email:
|
||||||
|
has_access = True
|
||||||
|
elif access_key:
|
||||||
|
key_hash = hashlib.sha256(access_key.encode()).hexdigest()
|
||||||
|
if app.application_key == key_hash:
|
||||||
|
has_access = True
|
||||||
|
|
||||||
|
if not has_access:
|
||||||
|
raise HTTPException(status_code=403, detail="Access denied")
|
||||||
|
|
||||||
|
# Check if status allows editing
|
||||||
|
status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app.application_type_id,
|
||||||
|
ApplicationTypeStatus.status_id == app.status_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not status or not status.is_editable:
|
||||||
|
raise HTTPException(status_code=400, detail="Application cannot be edited in current status")
|
||||||
|
|
||||||
|
# Track changes
|
||||||
|
changes = {}
|
||||||
|
|
||||||
|
# Update fields
|
||||||
|
if update_data.title is not None:
|
||||||
|
changes["title"] = {"old": app.title, "new": update_data.title}
|
||||||
|
app.title = update_data.title
|
||||||
|
|
||||||
|
if update_data.field_data is not None:
|
||||||
|
# Validate new field data
|
||||||
|
app_type = app.application_type
|
||||||
|
for field in app_type.fields:
|
||||||
|
if field.is_required and field.field_id not in update_data.field_data:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Required field missing: {field.name}")
|
||||||
|
|
||||||
|
if field.field_id in update_data.field_data:
|
||||||
|
value = update_data.field_data[field.field_id]
|
||||||
|
try:
|
||||||
|
validate_field_value(value, field)
|
||||||
|
except ValueError as e:
|
||||||
|
raise HTTPException(status_code=400, detail=str(e))
|
||||||
|
|
||||||
|
changes["field_data"] = {"old": app.field_data, "new": update_data.field_data}
|
||||||
|
app.field_data = update_data.field_data
|
||||||
|
|
||||||
|
if update_data.cost_positions is not None:
|
||||||
|
cost_data = [cp.dict() for cp in update_data.cost_positions]
|
||||||
|
changes["cost_positions"] = {"old": app.cost_positions, "new": cost_data}
|
||||||
|
app.cost_positions = cost_data
|
||||||
|
app.calculate_total_amount()
|
||||||
|
|
||||||
|
if update_data.comparison_offers is not None:
|
||||||
|
offer_data = [co.dict() for co in update_data.comparison_offers]
|
||||||
|
changes["comparison_offers"] = {"old": app.comparison_offers, "new": offer_data}
|
||||||
|
app.comparison_offers = offer_data
|
||||||
|
|
||||||
|
# Update search text
|
||||||
|
app.update_search_text()
|
||||||
|
|
||||||
|
# Create history entry
|
||||||
|
if changes:
|
||||||
|
history = ApplicationHistory(
|
||||||
|
application_id=app.id,
|
||||||
|
user_id=current_user.id if current_user else None,
|
||||||
|
action="updated",
|
||||||
|
field_changes=changes,
|
||||||
|
comment="Application updated"
|
||||||
|
)
|
||||||
|
db.add(history)
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
db.refresh(app)
|
||||||
|
|
||||||
|
return await get_application(application_id, access_key, current_user, db)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{application_id}/submit")
|
||||||
|
async def submit_application(
|
||||||
|
application_id: str,
|
||||||
|
access_key: Optional[str] = None,
|
||||||
|
background_tasks: BackgroundTasks = None,
|
||||||
|
current_user: Optional[User] = Depends(get_optional_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Submit an application for review"""
|
||||||
|
app = db.query(DynamicApplication).filter(
|
||||||
|
DynamicApplication.application_id == application_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not app:
|
||||||
|
raise HTTPException(status_code=404, detail="Application not found")
|
||||||
|
|
||||||
|
# Check access
|
||||||
|
has_access = False
|
||||||
|
if current_user and app.email == current_user.email:
|
||||||
|
has_access = True
|
||||||
|
elif access_key:
|
||||||
|
key_hash = hashlib.sha256(access_key.encode()).hexdigest()
|
||||||
|
if app.application_key == key_hash:
|
||||||
|
has_access = True
|
||||||
|
|
||||||
|
if not has_access:
|
||||||
|
raise HTTPException(status_code=403, detail="Access denied")
|
||||||
|
|
||||||
|
# Check if already submitted
|
||||||
|
if app.submitted_at:
|
||||||
|
raise HTTPException(status_code=400, detail="Application already submitted")
|
||||||
|
|
||||||
|
# Find submit transition
|
||||||
|
current_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app.application_type_id,
|
||||||
|
ApplicationTypeStatus.status_id == app.status_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not current_status:
|
||||||
|
raise HTTPException(status_code=500, detail="Current status not found")
|
||||||
|
|
||||||
|
# Find transition for submit action
|
||||||
|
from ..models.application_type import StatusTransition
|
||||||
|
transition = db.query(StatusTransition).filter(
|
||||||
|
StatusTransition.from_status_id == current_status.id,
|
||||||
|
StatusTransition.trigger_type == TransitionTriggerType.APPLICANT_ACTION,
|
||||||
|
StatusTransition.is_active == True
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not transition:
|
||||||
|
raise HTTPException(status_code=400, detail="Submit action not available in current status")
|
||||||
|
|
||||||
|
# Get target status
|
||||||
|
target_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.id == transition.to_status_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not target_status:
|
||||||
|
raise HTTPException(status_code=500, detail="Target status not found")
|
||||||
|
|
||||||
|
# Update application
|
||||||
|
app.submitted_at = datetime.utcnow()
|
||||||
|
app.status_id = target_status.status_id
|
||||||
|
app.status_changed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
# Log transition
|
||||||
|
transition_log = ApplicationTransitionLog(
|
||||||
|
application_id=app.id,
|
||||||
|
from_status=current_status.status_id,
|
||||||
|
to_status=target_status.status_id,
|
||||||
|
transition_name=transition.name,
|
||||||
|
trigger_type=TransitionTriggerType.APPLICANT_ACTION.value,
|
||||||
|
triggered_by=current_user.id if current_user else None
|
||||||
|
)
|
||||||
|
db.add(transition_log)
|
||||||
|
|
||||||
|
# Create history entry
|
||||||
|
history = ApplicationHistory(
|
||||||
|
application_id=app.id,
|
||||||
|
user_id=current_user.id if current_user else None,
|
||||||
|
action="submitted",
|
||||||
|
comment="Application submitted for review"
|
||||||
|
)
|
||||||
|
db.add(history)
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
# Send notification
|
||||||
|
if target_status.send_notification and background_tasks:
|
||||||
|
background_tasks.add_task(
|
||||||
|
send_notification,
|
||||||
|
app.email,
|
||||||
|
"Application Submitted",
|
||||||
|
target_status.notification_template or f"Your application {app.application_id} has been submitted."
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"message": "Application submitted successfully",
|
||||||
|
"new_status": target_status.status_id
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{application_id}/transition")
|
||||||
|
async def transition_application_status(
|
||||||
|
application_id: str,
|
||||||
|
transition_request: StatusTransitionRequest,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
current_user: User = Depends(get_current_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Transition application to a new status"""
|
||||||
|
app = db.query(DynamicApplication).filter(
|
||||||
|
DynamicApplication.application_id == application_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not app:
|
||||||
|
raise HTTPException(status_code=404, detail="Application not found")
|
||||||
|
|
||||||
|
# Check admin access
|
||||||
|
if not current_user.has_role("admin"):
|
||||||
|
raise HTTPException(status_code=403, detail="Admin access required")
|
||||||
|
|
||||||
|
# Get current and target status
|
||||||
|
current_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app.application_type_id,
|
||||||
|
ApplicationTypeStatus.status_id == app.status_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
target_status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app.application_type_id,
|
||||||
|
ApplicationTypeStatus.status_id == transition_request.new_status_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not current_status or not target_status:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid status")
|
||||||
|
|
||||||
|
# Check if transition is valid
|
||||||
|
from ..models.application_type import StatusTransition
|
||||||
|
transition = db.query(StatusTransition).filter(
|
||||||
|
StatusTransition.from_status_id == current_status.id,
|
||||||
|
StatusTransition.to_status_id == target_status.id,
|
||||||
|
StatusTransition.is_active == True
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if not transition:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid status transition")
|
||||||
|
|
||||||
|
# Update application
|
||||||
|
app.status_id = target_status.status_id
|
||||||
|
app.status_changed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
if target_status.is_final:
|
||||||
|
app.completed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
# Log transition
|
||||||
|
transition_log = ApplicationTransitionLog(
|
||||||
|
application_id=app.id,
|
||||||
|
from_status=current_status.status_id,
|
||||||
|
to_status=target_status.status_id,
|
||||||
|
transition_name=transition.name,
|
||||||
|
trigger_type=transition.trigger_type.value,
|
||||||
|
triggered_by=current_user.id,
|
||||||
|
trigger_data=transition_request.trigger_data
|
||||||
|
)
|
||||||
|
db.add(transition_log)
|
||||||
|
|
||||||
|
# Create history entry
|
||||||
|
history = ApplicationHistory(
|
||||||
|
application_id=app.id,
|
||||||
|
user_id=current_user.id,
|
||||||
|
action="status_changed",
|
||||||
|
comment=transition_request.comment or f"Status changed from {current_status.name} to {target_status.name}"
|
||||||
|
)
|
||||||
|
db.add(history)
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
# Send notification
|
||||||
|
if target_status.send_notification:
|
||||||
|
background_tasks.add_task(
|
||||||
|
send_notification,
|
||||||
|
app.email,
|
||||||
|
f"Application Status Changed: {target_status.name}",
|
||||||
|
target_status.notification_template or f"Your application {app.application_id} status has been updated to {target_status.name}."
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"message": "Status changed successfully",
|
||||||
|
"new_status": target_status.status_id,
|
||||||
|
"new_status_name": target_status.name
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{application_id}/approve")
async def approve_application(
    application_id: str,
    approval: ApprovalRequest,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Approve or reject an application"""
    app = db.query(DynamicApplication).filter(
        DynamicApplication.application_id == application_id
    ).first()

    if not app:
        raise HTTPException(status_code=404, detail="Application not found")

    # Determine user's role for this approval
    approval_role = None
    if current_user.has_role("budget_reviewer"):
        approval_role = "budget_reviewer"
    elif current_user.has_role("finance_reviewer"):
        approval_role = "finance_reviewer"
    elif current_user.has_role("asta_member"):
        approval_role = "asta_member"
    else:
        raise HTTPException(status_code=403, detail="No approval permission")

    # Check if already approved by this user
    existing = db.query(ApplicationApproval).filter(
        ApplicationApproval.application_id == app.id,
        ApplicationApproval.user_id == current_user.id,
        ApplicationApproval.role == approval_role
    ).first()

    if existing:
        # Update existing approval
        existing.decision = approval.decision
        existing.comment = approval.comment
        existing.updated_at = datetime.utcnow()
    else:
        # Create new approval
        new_approval = ApplicationApproval(
            application_id=app.id,
            user_id=current_user.id,
            role=approval_role,
            decision=approval.decision,
            comment=approval.comment,
            status_at_approval=app.status_id
        )
        db.add(new_approval)

    # Create history entry
    history = ApplicationHistory(
        application_id=app.id,
        user_id=current_user.id,
        action=f"{approval_role}_{approval.decision}",
        comment=approval.comment or f"{approval_role} {approval.decision}"
    )
    db.add(history)

    # Check if this triggers a status transition
    # (This would be implemented based on transition rules)

    db.commit()

    return {
        "message": f"Approval recorded: {approval.decision}",
        "role": approval_role,
        "decision": approval.decision
    }


@router.get("/{application_id}/history")
async def get_application_history(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get application history"""
    app = db.query(DynamicApplication).filter(
        DynamicApplication.application_id == application_id
    ).first()

    if not app:
        raise HTTPException(status_code=404, detail="Application not found")

    # Check access
    if not current_user.has_role("admin") and app.email != current_user.email:
        raise HTTPException(status_code=403, detail="Access denied")

    history = db.query(ApplicationHistory).filter(
        ApplicationHistory.application_id == app.id
    ).order_by(desc(ApplicationHistory.created_at)).all()

    result = []
    for entry in history:
        result.append({
            "id": entry.id,
            "action": entry.action,
            "comment": entry.comment,
            "field_changes": entry.field_changes,
            "user_id": entry.user_id,
            "created_at": entry.created_at
        })

    return result


@router.post("/{application_id}/generate-pdf")
async def generate_application_pdf(
    application_id: str,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Generate PDF for an application"""
    app = db.query(DynamicApplication).filter(
        DynamicApplication.application_id == application_id
    ).first()

    if not app:
        raise HTTPException(status_code=404, detail="Application not found")

    # Check access
    if not current_user.has_role("admin") and app.email != current_user.email:
        raise HTTPException(status_code=403, detail="Access denied")

    # Generate PDF
    try:
        pdf_path = generate_pdf_for_application(app, db)
        app.pdf_generated = True
        app.pdf_generated_at = datetime.utcnow()
        app.pdf_file_path = pdf_path
        db.commit()

        return {
            "message": "PDF generated successfully",
            "pdf_path": pdf_path
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"PDF generation failed: {str(e)}")
11
backend/src/api/middleware/__init__.py
Normal file
@ -0,0 +1,11 @@
"""API middleware modules."""
|
||||||
|
|
||||||
|
from .error_handler import ErrorHandlerMiddleware
|
||||||
|
from .logging import LoggingMiddleware
|
||||||
|
from .rate_limit import RateLimitMiddleware
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ErrorHandlerMiddleware",
|
||||||
|
"LoggingMiddleware",
|
||||||
|
"RateLimitMiddleware",
|
||||||
|
]
|
||||||
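
For context, a minimal sketch of how these middleware classes would typically be attached to the FastAPI app. Only the class names come from this package; the import path and the application factory shown here are assumptions, and `RateLimitMiddleware` is omitted because its constructor arguments are not shown in this commit.

```python
from fastapi import FastAPI

from src.api.middleware import (  # assumed import path for this backend
    ErrorHandlerMiddleware,
    LoggingMiddleware,
)

app = FastAPI()

# Starlette applies middleware in reverse registration order, so the error
# handler registered last wraps the logging middleware and the routes,
# catching exceptions raised anywhere inside.
app.add_middleware(LoggingMiddleware)
app.add_middleware(ErrorHandlerMiddleware)
```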
220
backend/src/api/middleware/error_handler.py
Normal file
@ -0,0 +1,220 @@
"""Error handling middleware for API exceptions."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import traceback
|
||||||
|
from typing import Optional
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
from starlette.requests import Request
|
||||||
|
from starlette.responses import JSONResponse
|
||||||
|
from fastapi import status
|
||||||
|
from fastapi.exceptions import RequestValidationError, HTTPException
|
||||||
|
from pydantic import ValidationError
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorHandlerMiddleware(BaseHTTPMiddleware):
|
||||||
|
"""Middleware for handling API errors and exceptions."""
|
||||||
|
|
||||||
|
def __init__(self, app):
|
||||||
|
"""
|
||||||
|
Initialize error handler middleware.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
app: The FastAPI application
|
||||||
|
"""
|
||||||
|
super().__init__(app)
|
||||||
|
|
||||||
|
def _get_request_id(self, request: Request) -> str:
|
||||||
|
"""
|
||||||
|
Get request ID from request state.
|
||||||
|
|
||||||
|
Args:
|
||||||
            request: The incoming request

        Returns:
            Request ID or 'unknown'
        """
        return getattr(request.state, 'request_id', 'unknown')

    def _format_error_response(
        self,
        request: Request,
        status_code: int,
        error_type: str,
        message: str,
        details: Optional[dict] = None
    ) -> JSONResponse:
        """
        Format error response.

        Args:
            request: The incoming request
            status_code: HTTP status code
            error_type: Type of error
            message: Error message
            details: Additional error details

        Returns:
            JSONResponse with error information
        """
        error_response = {
            "error": {
                "type": error_type,
                "message": message,
                "path": request.url.path,
                "method": request.method,
                "request_id": self._get_request_id(request)
            }
        }

        if details:
            error_response["error"]["details"] = details

        return JSONResponse(
            status_code=status_code,
            content=error_response,
            headers={
                "X-Request-ID": self._get_request_id(request),
                "X-Error-Type": error_type
            }
        )

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and handle any errors.

        Args:
            request: The incoming request
            call_next: The next middleware or endpoint

        Returns:
            The response
        """
        try:
            response = await call_next(request)
            return response

        except HTTPException as e:
            # Handle FastAPI HTTP exceptions
            logger.warning(
                f"HTTP exception: {e.status_code} - {e.detail}",
                extra={
                    "request_id": self._get_request_id(request),
                    "status_code": e.status_code,
                    "path": request.url.path
                }
            )
            return self._format_error_response(
                request=request,
                status_code=e.status_code,
                error_type="http_error",
                message=str(e.detail),
                details={"status_code": e.status_code}
            )

        except RequestValidationError as e:
            # Handle validation errors
            logger.warning(
                f"Validation error: {str(e)}",
                extra={
                    "request_id": self._get_request_id(request),
                    "path": request.url.path,
                    "errors": e.errors()
                }
            )
            return self._format_error_response(
                request=request,
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                error_type="validation_error",
                message="Request validation failed",
                details={"validation_errors": e.errors()}
            )

        except ValidationError as e:
            # Handle Pydantic validation errors
            logger.warning(
                f"Pydantic validation error: {str(e)}",
                extra={
                    "request_id": self._get_request_id(request),
                    "path": request.url.path,
                    "errors": e.errors()
                }
            )
            return self._format_error_response(
                request=request,
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                error_type="validation_error",
                message="Data validation failed",
                details={"validation_errors": e.errors()}
            )

        except SQLAlchemyError as e:
            # Handle database errors
            logger.error(
                f"Database error: {str(e)}",
                extra={
                    "request_id": self._get_request_id(request),
                    "path": request.url.path,
                    "error": str(e)
                },
                exc_info=True
            )
            return self._format_error_response(
                request=request,
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                error_type="database_error",
                message="A database error occurred",
                details={"error": "Database operation failed"}
            )

        except ValueError as e:
            # Handle value errors
            logger.error(
                f"Value error: {str(e)}",
                extra={
                    "request_id": self._get_request_id(request),
                    "path": request.url.path,
                    "error": str(e)
                }
            )
            return self._format_error_response(
                request=request,
                status_code=status.HTTP_400_BAD_REQUEST,
                error_type="value_error",
                message=str(e)
            )

        except Exception as e:
            # Handle all other exceptions
            error_id = self._get_request_id(request)
            logger.error(
                f"Unexpected error: {str(e)}",
                extra={
                    "request_id": error_id,
                    "path": request.url.path,
                    "error": str(e),
                    "traceback": traceback.format_exc()
                },
                exc_info=True
            )

            # Determine if we should show detailed error (dev mode)
            show_details = False  # Set to True in development

            error_message = "An unexpected error occurred"
            error_details = {"error_id": error_id}

            if show_details:
                error_message = str(e)
                error_details["exception"] = type(e).__name__
                error_details["traceback"] = traceback.format_exc().split('\n')

            return self._format_error_response(
                request=request,
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                error_type="internal_error",
                message=error_message,
                details=error_details
            )
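How these middleware classes get attached is shown further down in the main.py hunk; as a quick orientation, a minimal wiring sketch (the registration order below is an assumption, not taken from this diff):

# Sketch only: with Starlette, the middleware added last runs outermost, so adding
# ErrorHandlerMiddleware last lets it catch exceptions raised by the inner layers.
from fastapi import FastAPI

app = FastAPI()
app.add_middleware(RateLimitMiddleware)
app.add_middleware(LoggingMiddleware)
app.add_middleware(ErrorHandlerMiddleware)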
206 backend/src/api/middleware/logging.py Normal file
@ -0,0 +1,206 @@

"""Logging middleware for API request/response tracking."""

import time
import json
import logging
from typing import Optional
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import Response
import uuid

logger = logging.getLogger(__name__)


class LoggingMiddleware(BaseHTTPMiddleware):
    """Middleware for logging API requests and responses."""

    def __init__(self, app):
        """
        Initialize logging middleware.

        Args:
            app: The FastAPI application
        """
        super().__init__(app)
        self.skip_paths = {
            "/health",
            "/ready",
            "/docs",
            "/redoc",
            "/openapi.json",
            "/favicon.ico"
        }

    def _get_client_info(self, request: Request) -> dict:
        """
        Extract client information from request.

        Args:
            request: The incoming request

        Returns:
            Dictionary containing client information
        """
        client_info = {}

        # Get client IP
        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            client_info["ip"] = forwarded_for.split(",")[0].strip()
        elif real_ip := request.headers.get("X-Real-IP"):
            client_info["ip"] = real_ip
        elif request.client:
            client_info["ip"] = request.client.host
        else:
            client_info["ip"] = "unknown"

        # Get user agent
        client_info["user_agent"] = request.headers.get("User-Agent", "unknown")

        return client_info

    def _should_log_body(self, content_type: Optional[str]) -> bool:
        """
        Determine if request/response body should be logged.

        Args:
            content_type: The content type header value

        Returns:
            Boolean indicating if body should be logged
        """
        if not content_type:
            return False

        loggable_types = [
            "application/json",
            "application/x-www-form-urlencoded",
            "text/plain",
            "text/html",
            "text/xml",
            "application/xml"
        ]

        return any(t in content_type.lower() for t in loggable_types)

    async def _get_request_body(self, request: Request) -> Optional[str]:
        """
        Safely get request body for logging.

        Args:
            request: The incoming request

        Returns:
            Request body as string or None
        """
        try:
            # Check content type
            content_type = request.headers.get("Content-Type", "")

            if not self._should_log_body(content_type):
                return None

            # Don't log large bodies
            content_length = request.headers.get("Content-Length")
            if content_length and int(content_length) > 10000:  # 10KB limit
                return "[Body too large to log]"

            # Get body
            body = await request.body()
            if body:
                if "application/json" in content_type:
                    # Try to parse as JSON for better formatting
                    try:
                        return json.dumps(json.loads(body), indent=2)
                    except Exception:
                        return body.decode("utf-8", errors="ignore")
                else:
                    return body.decode("utf-8", errors="ignore")

            return None
        except Exception as e:
            logger.debug(f"Could not get request body: {e}")
            return None

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and log details.

        Args:
            request: The incoming request
            call_next: The next middleware or endpoint

        Returns:
            The response
        """
        # Skip logging for certain paths
        if request.url.path in self.skip_paths:
            return await call_next(request)

        # Generate request ID
        request_id = str(uuid.uuid4())[:8]

        # Start timing
        start_time = time.time()

        # Get client info
        client_info = self._get_client_info(request)

        # Log request
        logger.info(
            f"[{request_id}] Request: {request.method} {request.url.path}",
            extra={
                "request_id": request_id,
                "method": request.method,
                "path": request.url.path,
                "query_params": dict(request.query_params),
                "client": client_info
            }
        )

        # Store request ID in request state for use in endpoints
        request.state.request_id = request_id

        # Process request
        try:
            response = await call_next(request)
        except Exception as e:
            # Log exception
            process_time = time.time() - start_time
            logger.error(
                f"[{request_id}] Request failed after {process_time:.3f}s: {str(e)}",
                extra={
                    "request_id": request_id,
                    "process_time": process_time,
                    "error": str(e)
                },
                exc_info=True
            )
            raise

        # Calculate process time
        process_time = time.time() - start_time

        # Add request ID to response headers
        response.headers["X-Request-ID"] = request_id
        response.headers["X-Process-Time"] = f"{process_time:.3f}"

        # Log response
        log_level = logging.INFO
        if response.status_code >= 500:
            log_level = logging.ERROR
        elif response.status_code >= 400:
            log_level = logging.WARNING

        logger.log(
            log_level,
            f"[{request_id}] Response: {response.status_code} in {process_time:.3f}s",
            extra={
                "request_id": request_id,
                "status_code": response.status_code,
                "process_time": process_time
            }
        )

        return response
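A small smoke test of the headers this middleware adds; the Starlette test app below is an assumption used only for illustration:

from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.routing import Route
from starlette.testclient import TestClient

async def ping(request):
    return JSONResponse({"ok": True})

test_app = Starlette(routes=[Route("/ping", ping)])
test_app.add_middleware(LoggingMiddleware)

client = TestClient(test_app)
resp = client.get("/ping")
assert "X-Request-ID" in resp.headers      # 8-character id, reused in the log lines
assert "X-Process-Time" in resp.headers    # e.g. "0.001"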
165 backend/src/api/middleware/rate_limit.py Normal file
@ -0,0 +1,165 @@

"""Rate limiting middleware for API endpoints."""

import time
from typing import Dict, Optional
from collections import defaultdict
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse
from fastapi import status


class RateLimitMiddleware(BaseHTTPMiddleware):
    """Middleware for rate limiting API requests."""

    def __init__(self, app, settings=None):
        """
        Initialize rate limit middleware.

        Args:
            app: The FastAPI application
            settings: Rate limit settings object
        """
        super().__init__(app)
        self.settings = settings

        # Store request counts per IP
        self.request_counts: Dict[str, Dict[str, float]] = defaultdict(dict)

        # Default settings if not provided
        self.requests_per_minute = 60
        self.requests_per_hour = 1000

        if settings:
            self.requests_per_minute = getattr(settings, 'requests_per_minute', 60)
            self.requests_per_hour = getattr(settings, 'requests_per_hour', 1000)

    def _get_client_ip(self, request: Request) -> str:
        """
        Get the client IP address from the request.

        Args:
            request: The incoming request

        Returns:
            The client IP address
        """
        # Try to get real IP from headers (for proxy scenarios)
        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            return forwarded_for.split(",")[0].strip()

        real_ip = request.headers.get("X-Real-IP")
        if real_ip:
            return real_ip

        # Fallback to client host
        if request.client:
            return request.client.host

        return "unknown"

    def _is_rate_limited(self, client_ip: str) -> tuple[bool, Optional[str]]:
        """
        Check if the client has exceeded rate limits.

        Args:
            client_ip: The client IP address

        Returns:
            Tuple of (is_limited, reason)
        """
        current_time = time.time()
        client_data = self.request_counts[client_ip]

        # Clean up old entries
        minute_ago = current_time - 60
        hour_ago = current_time - 3600

        # Remove entries older than an hour
        client_data = {
            timestamp: count
            for timestamp, count in client_data.items()
            if float(timestamp) > hour_ago
        }

        # Count requests in the last minute
        minute_requests = sum(
            count for timestamp, count in client_data.items()
            if float(timestamp) > minute_ago
        )

        # Count requests in the last hour
        hour_requests = sum(client_data.values())

        # Check minute limit
        if minute_requests >= self.requests_per_minute:
            return True, f"Rate limit exceeded: {self.requests_per_minute} requests per minute"

        # Check hour limit
        if hour_requests >= self.requests_per_hour:
            return True, f"Rate limit exceeded: {self.requests_per_hour} requests per hour"

        # Update request count
        timestamp_key = str(current_time)
        client_data[timestamp_key] = client_data.get(timestamp_key, 0) + 1
        self.request_counts[client_ip] = client_data

        return False, None

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and apply rate limiting.

        Args:
            request: The incoming request
            call_next: The next middleware or endpoint

        Returns:
            The response
        """
        # Skip rate limiting for health check endpoints
        if request.url.path in ["/health", "/ready", "/docs", "/redoc", "/openapi.json"]:
            return await call_next(request)

        # Get client IP
        client_ip = self._get_client_ip(request)

        # Check rate limit
        is_limited, reason = self._is_rate_limited(client_ip)

        if is_limited:
            return JSONResponse(
                status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                content={
                    "detail": reason,
                    "type": "rate_limit_exceeded"
                },
                headers={
                    "Retry-After": "60",  # Suggest retry after 60 seconds
                    "X-RateLimit-Limit": str(self.requests_per_minute),
                    "X-RateLimit-Remaining": "0",
                    "X-RateLimit-Reset": str(int(time.time()) + 60)
                }
            )

        # Process the request
        response = await call_next(request)

        # Add rate limit headers to successful responses
        if hasattr(self, 'request_counts') and client_ip in self.request_counts:
            current_time = time.time()
            minute_ago = current_time - 60

            minute_requests = sum(
                count for timestamp, count in self.request_counts[client_ip].items()
                if float(timestamp) > minute_ago
            )

            remaining = max(0, self.requests_per_minute - minute_requests)

            response.headers["X-RateLimit-Limit"] = str(self.requests_per_minute)
            response.headers["X-RateLimit-Remaining"] = str(remaining)
            response.headers["X-RateLimit-Reset"] = str(int(current_time) + 60)

        return response
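The counters live in process memory and are keyed by client IP, so the limits apply per worker process and reset on restart. A behavior sketch against the internal check (the `object()` app argument is a stand-in, and calling the private method is for illustration only):

limiter = RateLimitMiddleware(object())
limiter.requests_per_minute = 3

for i in range(5):
    limited, reason = limiter._is_rate_limited("203.0.113.7")
    print(i, limited, reason)
# Calls 0-2 are allowed; calls 3 and 4 return
# (True, "Rate limit exceeded: 3 requests per minute")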
80 backend/src/api/routes/__init__.py Normal file
@ -0,0 +1,80 @@

"""
API Routes Module

This module exports all available API routers.
"""

from fastapi import APIRouter

# Create placeholder routers for now
application_router = APIRouter(tags=["applications"])
attachment_router = APIRouter(tags=["attachments"])
pdf_router = APIRouter(tags=["pdf"])
auth_router = APIRouter(tags=["authentication"])
health_router = APIRouter(tags=["health"])

# Import actual routes when available
try:
    from ..v1.auth import router as auth_v1_router
    auth_router = auth_v1_router
except ImportError:
    pass

try:
    from .applications import router as app_router
    application_router = app_router
except ImportError:
    pass

try:
    from .attachments import router as attach_router
    attachment_router = attach_router
except ImportError:
    pass

try:
    from .pdf import router as pdf_route
    pdf_router = pdf_route
except ImportError:
    pass

try:
    from .health import router as health_route
    health_router = health_route
except ImportError:
    pass

# Health check endpoints
@health_router.get("/")
async def health_check():
    """Health check endpoint"""
    return {"status": "healthy", "service": "api"}

@health_router.get("/ready")
async def readiness_check():
    """Readiness check endpoint"""
    return {"status": "ready", "service": "api"}

# Placeholder endpoints for missing routes
@application_router.get("/")
async def list_applications():
    """List applications"""
    return {"applications": []}

@attachment_router.get("/")
async def list_attachments():
    """List attachments"""
    return {"attachments": []}

@pdf_router.get("/")
async def pdf_info():
    """PDF service info"""
    return {"service": "pdf", "version": "1.0.0"}

__all__ = [
    "application_router",
    "attachment_router",
    "pdf_router",
    "auth_router",
    "health_router"
]
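The try/except fallbacks let main.py include these routers unconditionally. Note that the placeholder endpoints above are registered on whichever router the name is bound to at that point; a variant that only adds the placeholder when the real module is missing would look like this (the `items` module is hypothetical):

from fastapi import APIRouter

items_router = APIRouter(tags=["items"])

try:
    from .items import router as items_router  # real routes win when the module exists
except ImportError:
    @items_router.get("/")
    async def list_items():
        return {"items": []}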
245 backend/src/config/database.py Normal file
@ -0,0 +1,245 @@

"""
Database Configuration Module

This module provides database configuration and connection utilities.
"""

import os
from typing import Optional, Generator
from urllib.parse import quote_plus

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.pool import QueuePool

from .settings import get_settings


def get_database_url() -> str:
    """
    Get the database connection URL.

    Returns:
        Database connection URL string
    """
    # Always prefer environment variables for database connection
    # This ensures Docker environment settings take precedence
    db_type = os.getenv("DB_TYPE", "mysql")

    if db_type == "sqlite":
        db_path = os.getenv("SQLITE_PATH", "./app.db")
        return f"sqlite:///{db_path}"

    # MySQL/MariaDB connection
    host = os.getenv("MYSQL_HOST", "localhost")
    port = os.getenv("MYSQL_PORT", "3306")
    database = os.getenv("MYSQL_DB", "stupa")
    user = os.getenv("MYSQL_USER", "stupa")
    password = os.getenv("MYSQL_PASSWORD", "secret")

    # URL encode the password to handle special characters
    password_encoded = quote_plus(password)

    return f"mysql+pymysql://{user}:{password_encoded}@{host}:{port}/{database}?charset=utf8mb4"


def get_engine(url: Optional[str] = None):
    """
    Create and return a SQLAlchemy engine.

    Args:
        url: Optional database URL. If not provided, will use get_database_url()

    Returns:
        SQLAlchemy Engine instance
    """
    if url is None:
        url = get_database_url()

    # Engine configuration
    engine_config = {
        "pool_size": int(os.getenv("DB_POOL_SIZE", "10")),
        "max_overflow": int(os.getenv("DB_MAX_OVERFLOW", "20")),
        "pool_pre_ping": os.getenv("DB_POOL_PRE_PING", "true").lower() == "true",
        "pool_recycle": int(os.getenv("DB_POOL_RECYCLE", "3600")),
        "echo": os.getenv("DB_ECHO", "false").lower() == "true",
    }

    # Create engine with connection pooling
    engine = create_engine(
        url,
        poolclass=QueuePool,
        **engine_config
    )

    return engine


# Global engine and session factory
_engine = None
_session_factory = None


def get_session_factory() -> sessionmaker:
    """
    Get or create a session factory.

    Returns:
        SQLAlchemy sessionmaker instance
    """
    global _session_factory, _engine

    if _session_factory is None:
        if _engine is None:
            _engine = get_engine()

        _session_factory = sessionmaker(
            bind=_engine,
            autocommit=False,
            autoflush=False,
            expire_on_commit=False
        )

    return _session_factory


def get_db() -> Generator[Session, None, None]:
    """
    Dependency injection for FastAPI to get database session.

    Yields:
        Database session
    """
    session_factory = get_session_factory()
    db = session_factory()
    try:
        yield db
    finally:
        db.close()


def init_database():
    """
    Initialize the database (create tables).
    This should be called on application startup.
    """
    from models.base import Base
    from models.application_type import (
        ApplicationType,
        ApplicationField,
        ApplicationTypeStatus,
        StatusTransition,
        DynamicApplication,
        ApplicationHistory,
        ApplicationAttachment,
        ApplicationTransitionLog,
        ApplicationApproval
    )
    from models.user import User, Role, user_roles

    engine = get_engine()

    # Create all tables
    Base.metadata.create_all(bind=engine)

    # Initialize default data if needed
    init_default_data(engine)


def init_default_data(engine):
    """
    Initialize default data in the database.

    Args:
        engine: SQLAlchemy engine instance
    """
    from sqlalchemy.orm import Session
    from models.user import Role

    with Session(engine) as session:
        # Check if default roles exist
        admin_role = session.query(Role).filter_by(name="admin").first()

        if not admin_role:
            # Create default roles
            default_roles = [
                Role(
                    name="admin",
                    display_name="Administrator",
                    description="Full system access",
                    is_admin=True,
                    is_system=True,
                    permissions=["*"]
                ),
                Role(
                    name="budget_reviewer",
                    display_name="Haushaltsbeauftragte",
                    description="Budget review permissions",
                    can_review_budget=True,
                    is_system=True,
                    permissions=["applications.review", "applications.view"]
                ),
                Role(
                    name="finance_reviewer",
                    display_name="Finanzreferent",
                    description="Finance review permissions",
                    can_review_finance=True,
                    is_system=True,
                    permissions=["applications.review", "applications.view", "applications.finance"]
                ),
                Role(
                    name="asta_member",
                    display_name="AStA Member",
                    description="AStA voting member",
                    can_vote=True,
                    is_system=True,
                    permissions=["applications.vote", "applications.view"]
                ),
                Role(
                    name="applicant",
                    display_name="Applicant",
                    description="Can create and manage own applications",
                    is_system=True,
                    permissions=["applications.create", "applications.own.view", "applications.own.edit"]
                )
            ]

            for role in default_roles:
                session.add(role)

            session.commit()
            print("Default roles created successfully")


def drop_database():
    """
    Drop all database tables.
    WARNING: This will delete all data!
    """
    from ..models.base import Base

    engine = get_engine()
    Base.metadata.drop_all(bind=engine)
    print("All database tables dropped")


def reset_database():
    """
    Reset the database (drop and recreate all tables).
    WARNING: This will delete all data!
    """
    drop_database()
    init_database()
    print("Database reset complete")


# Export for backwards compatibility
__all__ = [
    "get_database_url",
    "get_engine",
    "get_session_factory",
    "get_db",
    "init_database",
    "drop_database",
    "reset_database"
]
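A typical way the get_db generator is consumed as a FastAPI dependency; the endpoint and the Role import path are assumptions for illustration, mirroring init_database() above:

from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from models.user import Role   # assumed import path

router = APIRouter()

@router.get("/roles")
def list_roles(db: Session = Depends(get_db)):
    # The session is created per request and closed in get_db()'s finally block.
    return [{"name": role.name} for role in db.query(Role).all()]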
@ -26,6 +26,18 @@ class DatabaseSettings(BaseSettings):
     pool_pre_ping: bool = Field(default=True, env="DB_POOL_PRE_PING")
     echo: bool = Field(default=False, env="DB_ECHO")
 
+    def __init__(self, **kwargs):
+        """Initialize DatabaseSettings and log environment variables"""
+        import os
+        import logging
+        logger = logging.getLogger(__name__)
+        logger.info(f"DatabaseSettings init - MYSQL_HOST from env: {os.getenv('MYSQL_HOST', 'NOT SET')}")
+        logger.info(f"DatabaseSettings init - MYSQL_PORT from env: {os.getenv('MYSQL_PORT', 'NOT SET')}")
+        logger.info(f"DatabaseSettings init - MYSQL_DB from env: {os.getenv('MYSQL_DB', 'NOT SET')}")
+        logger.info(f"DatabaseSettings init - MYSQL_USER from env: {os.getenv('MYSQL_USER', 'NOT SET')}")
+        super().__init__(**kwargs)
+        logger.info(f"DatabaseSettings after init - host: {self.host}, port: {self.port}, database: {self.database}")
+
     @property
     def dsn(self) -> str:
         """Generate database connection string"""
@ -239,6 +251,19 @@ class Settings(BaseSettings):
     workflow: WorkflowSettings = Field(default_factory=WorkflowSettings)
     app: ApplicationSettings = Field(default_factory=ApplicationSettings)
 
+    def __init__(self, **kwargs):
+        """Initialize Settings with proper environment variable loading for nested models"""
+        super().__init__(**kwargs)
+        # Reinitialize nested settings to ensure they load environment variables
+        self.database = DatabaseSettings()
+        self.security = SecuritySettings()
+        self.oidc = OIDCSettings()
+        self.email = EmailSettings()
+        self.rate_limit = RateLimitSettings()
+        self.storage = StorageSettings()
+        self.workflow = WorkflowSettings()
+        self.app = ApplicationSettings()
+
     # Dynamic configuration support
     config_file: Optional[Path] = Field(default=None, env="CONFIG_FILE")
     config_overrides: Dict[str, Any] = Field(default_factory=dict)
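The effect of re-reading the environment in these __init__ overrides can be checked directly; the values below are illustrative:

import os

os.environ["MYSQL_HOST"] = "db"    # e.g. the docker-compose service name
os.environ["MYSQL_DB"] = "stupa"

settings = Settings()
print(settings.database.host)      # "db", picked up when the nested model is re-initialized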
@ -368,34 +368,30 @@ def set_container(container: Container):
     _container = container
 
 
-@lru_cache()
 def create_container(settings: Optional[Settings] = None) -> Container:
     """Create and configure a new container instance"""
     container = Container(settings)
 
-    # Register default repositories
-    from ..repositories.application import ApplicationRepository
-    from ..repositories.attachment import AttachmentRepository
-
-    container.register_repository("application_repository", ApplicationRepository)
-    container.register_repository("attachment_repository", AttachmentRepository)
-
-    # Register default services
-    from ..services.application import ApplicationService
-    from ..services.pdf import PDFService
-    from ..services.auth import AuthService
-
-    container.register_service(
-        "application_service",
-        ApplicationService,
-        dependencies={
-            "repository": "application_repository",
-            "pdf_service": "pdf_service"
-        },
-        singleton=True
-    )
-
-    container.register_service("pdf_service", PDFService, singleton=True)
-
-    container.register_service("auth_service", AuthService, singleton=True)
+    # Note: Repositories and services will be registered as needed
+    # The dynamic system doesn't require pre-registered repositories
+
+    # Register core services that might still be needed
+    try:
+        from ..services.pdf_service import PDFService
+        container.register_service("pdf_service", PDFService, singleton=True)
+    except ImportError:
+        pass
+
+    try:
+        from ..services.auth_service import AuthService
+        container.register_service("auth_service", AuthService, singleton=True)
+    except ImportError:
+        pass
+
+    try:
+        from ..services.notification_service import NotificationService
+        container.register_service("notification_service", NotificationService, singleton=True)
+    except ImportError:
+        pass
 
     return container
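Because services are now registered opportunistically, a consumer would resolve them defensively; how Container.get_service reports a missing name (exception vs. None) is an assumption here:

container = create_container()
try:
    pdf_service = container.get_service("pdf_service")
except Exception:
    pdf_service = None   # PDF generation is simply unavailable in this deployment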
@ -5,7 +5,7 @@ This module provides database initialization, connection management,
 and migration support for the application.
 """
 
-from typing import Optional, Generator, Any
+from typing import Optional, Generator, Any, Dict
 from contextlib import contextmanager
 import logging
 
@ -16,15 +16,20 @@ from sqlalchemy.pool import QueuePool
 from sqlalchemy.exc import SQLAlchemyError
 
 from ..config.settings import Settings, get_settings
+from ..config.database import get_database_url
 from ..models.base import Base
-from ..models.application import (
-    Application,
+from ..models.application_type import (
+    ApplicationType,
+    ApplicationField,
+    ApplicationTypeStatus,
+    StatusTransition,
+    DynamicApplication,
+    ApplicationHistory,
     ApplicationAttachment,
-    Attachment,
-    ComparisonOffer,
-    CostPositionJustification,
-    Counter
+    ApplicationTransitionLog,
+    ApplicationApproval
 )
+from ..models.user import User, Role, Session as UserSession
 
 logger = logging.getLogger(__name__)
 
@ -73,7 +78,7 @@ class DatabaseManager:
     def _create_engine(self) -> Engine:
         """Create SQLAlchemy engine with configuration"""
         engine = create_engine(
-            self.settings.database.dsn,
+            get_database_url(),
             poolclass=QueuePool,
             pool_size=self.settings.database.pool_size,
             max_overflow=self.settings.database.max_overflow,
@ -167,31 +172,47 @@ class DatabaseManager:
     def _init_default_data(self):
         """Initialize default data in the database"""
         with self.session_scope() as session:
-            # Initialize counters
-            counters = [
-                {
-                    "key": "application_id",
-                    "value": 0,
-                    "prefix": "PA",
-                    "format_string": "{prefix}{value:06d}"
-                },
-                {
-                    "key": "attachment_id",
-                    "value": 0,
-                    "prefix": "ATT",
-                    "format_string": "{prefix}{value:08d}"
-                }
-            ]
+            # Initialize default roles if not present
+            default_roles = [
+                {
+                    "name": "admin",
+                    "display_name": "Administrator",
+                    "description": "Full system access",
+                    "is_admin": True,
+                    "is_system": True
+                },
+                {
+                    "name": "budget_reviewer",
+                    "display_name": "Haushaltsbeauftragte",
+                    "description": "Budget review permissions",
+                    "can_review_budget": True,
+                    "is_system": True
+                },
+                {
+                    "name": "finance_reviewer",
+                    "display_name": "Finanzreferent",
+                    "description": "Finance review permissions",
+                    "can_review_finance": True,
+                    "is_system": True
+                },
+                {
+                    "name": "asta_member",
+                    "display_name": "AStA Member",
+                    "description": "AStA voting member",
+                    "can_vote": True,
+                    "is_system": True
+                }
+            ]
 
-            for counter_data in counters:
-                existing = session.query(Counter).filter_by(
-                    key=counter_data["key"]
-                ).first()
+            for role_data in default_roles:
+                existing = session.query(Role).filter_by(
+                    name=role_data["name"]
+                ).first()
 
                 if not existing:
-                    counter = Counter(**counter_data)
-                    session.add(counter)
-                    logger.info(f"Created counter: {counter_data['key']}")
+                    role = Role(**role_data)
+                    session.add(role)
+                    logger.info(f"Created role: {role_data['name']}")
 
     def verify_connection(self) -> bool:
         """
@ -375,12 +396,18 @@ class DatabaseHealthCheck:
         manager = get_db_manager()
 
         required_tables = [
-            "applications",
-            "attachments",
-            "application_attachments",
-            "comparison_offers",
-            "cost_position_justifications",
-            "counters"
+            "application_types",
+            "application_fields",
+            "application_type_statuses",
+            "status_transitions",
+            "dynamic_applications",
+            "application_history_v2",
+            "application_attachments_v2",
+            "application_transition_logs",
+            "application_approvals",
+            "users",
+            "roles",
+            "user_roles"
         ]
 
         with manager.session_scope() as session:
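A quick way to verify the new schema outside the health check class, using plain SQLAlchemy inspection (nothing here is specific to this codebase):

from sqlalchemy import inspect

def missing_tables(engine, required):
    present = set(inspect(engine).get_table_names())
    return [name for name in required if name not in present]

# e.g. missing_tables(get_engine(), ["application_types", "dynamic_applications", "roles"])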
@ -21,15 +21,13 @@ from .api.middleware.rate_limit import RateLimitMiddleware
 from .api.middleware.logging import LoggingMiddleware
 from .api.middleware.error_handler import ErrorHandlerMiddleware
 from .api.routes import (
-    application_router,
     attachment_router,
     pdf_router,
     auth_router,
     health_router
 )
-from .providers.pdf_qsm import QSMProvider
-from .providers.pdf_vsm import VSMProvider
-from .services.pdf import PDFService
+from .api.application_types import router as application_types_router
+from .api.dynamic_applications import router as dynamic_applications_router
 
 # Configure logging
 logging.basicConfig(
@ -57,10 +55,8 @@ async def lifespan(app: FastAPI):
     # Initialize dependency injection container
     container = create_container(settings)
 
-    # Register PDF providers
-    pdf_service = container.get_service("pdf_service")
-    pdf_service.register_provider(QSMProvider(settings))
-    pdf_service.register_provider(VSMProvider(settings))
+    # PDF service will be initialized if needed
+    # Dynamic system doesn't require pre-registered providers
 
     # Store container in app state
     app.state.container = container
@ -123,6 +119,7 @@ def create_app() -> FastAPI:
     )
 
     # Include routers
+    # Note: nginx strips /api/ prefix when proxying, so we don't add it here
     app.include_router(
         health_router,
         prefix="/health",
@ -131,28 +128,32 @@ def create_app() -> FastAPI:
 
     app.include_router(
         auth_router,
-        prefix=f"{settings.app.api_prefix}/auth",
+        prefix="/auth",
         tags=["authentication"]
     )
 
-    app.include_router(
-        application_router,
-        prefix=f"{settings.app.api_prefix}/applications",
-        tags=["applications"]
-    )
-
     app.include_router(
         attachment_router,
-        prefix=f"{settings.app.api_prefix}/attachments",
+        prefix="/attachments",
         tags=["attachments"]
     )
 
     app.include_router(
         pdf_router,
-        prefix=f"{settings.app.api_prefix}/pdf",
+        prefix="/pdf",
         tags=["pdf"]
     )
 
+    app.include_router(
+        application_types_router,
+        tags=["application-types"]
+    )
+
+    app.include_router(
+        dynamic_applications_router,
+        tags=["dynamic-applications"]
+    )
+
     # Root endpoint
     @app.get("/", tags=["root"])
     async def root() -> Dict[str, Any]:
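Given the nginx assumption noted in the hunk above, the externally visible paths would map roughly as follows (illustrative, not taken from the nginx config):

#   client request            nginx forwards as      handled by
#   /api/health/              /health/               health_router
#   /api/auth/...             /auth/...              auth_router
#   /api/application-types    /application-types     application_types_router (prefix defined inside the router)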
248
backend/src/migrations/002_add_dynamic_application_system.py
Normal file
248
backend/src/migrations/002_add_dynamic_application_system.py
Normal file
@ -0,0 +1,248 @@
|
|||||||
|
"""
|
||||||
|
Add dynamic application system tables
|
||||||
|
|
||||||
|
This migration creates all tables needed for the fully dynamic application system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import mysql
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Revision identifiers
|
||||||
|
revision = 'add_dynamic_application_system'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
"""Create tables for dynamic application system"""
|
||||||
|
|
||||||
|
# Create application_types table
|
||||||
|
op.create_table(
|
||||||
|
'application_types',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('type_id', sa.String(100), nullable=False, comment='Unique identifier for application type'),
|
||||||
|
sa.Column('name', sa.String(255), nullable=False, comment='Display name'),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True, comment='Markdown description'),
|
||||||
|
sa.Column('pdf_template', sa.LargeBinary(), nullable=True, comment='PDF template blob'),
|
||||||
|
sa.Column('pdf_template_filename', sa.String(255), nullable=True, comment='Original PDF template filename'),
|
||||||
|
sa.Column('pdf_field_mapping', sa.JSON(), nullable=False, default={}, comment='Mapping from PDF field names to field IDs'),
|
||||||
|
sa.Column('is_active', sa.Boolean(), default=True, nullable=False, comment='Whether this type is currently active'),
|
||||||
|
sa.Column('is_public', sa.Boolean(), default=True, nullable=False, comment='Whether this type is publicly available'),
|
||||||
|
sa.Column('allowed_roles', sa.JSON(), nullable=True, comment='List of roles allowed to create this type'),
|
||||||
|
sa.Column('max_cost_positions', sa.Integer(), default=100, nullable=False, comment='Maximum number of cost positions'),
|
||||||
|
sa.Column('max_comparison_offers', sa.Integer(), default=100, nullable=False, comment='Maximum number of comparison offers'),
|
||||||
|
sa.Column('version', sa.String(20), default='1.0.0', nullable=False, comment='Version number'),
|
||||||
|
sa.Column('parent_type_id', sa.Integer(), sa.ForeignKey('application_types.id'), nullable=True, comment='Parent type for versioning'),
|
||||||
|
sa.Column('usage_count', sa.Integer(), default=0, nullable=False, comment='Number of applications created with this type'),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('type_id'),
|
||||||
|
sa.Index('idx_apptype_active_public', 'is_active', 'is_public')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create application_fields table
|
||||||
|
op.create_table(
|
||||||
|
'application_fields',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('application_type_id', sa.Integer(), sa.ForeignKey('application_types.id', ondelete='CASCADE'), nullable=False),
|
||||||
|
sa.Column('field_id', sa.String(100), nullable=False, comment='Unique field identifier within type'),
|
||||||
|
sa.Column('field_type', sa.Enum(
|
||||||
|
'TEXT_SHORT', 'TEXT_LONG', 'OPTIONS', 'YESNO', 'MAIL', 'DATE', 'DATETIME',
|
||||||
|
'AMOUNT', 'CURRENCY_EUR', 'NUMBER', 'FILE', 'SIGNATURE', 'PHONE', 'URL',
|
||||||
|
'CHECKBOX', 'RADIO', 'SELECT', 'MULTISELECT',
|
||||||
|
name='fieldtype'
|
||||||
|
), nullable=False, comment='Field data type'),
|
||||||
|
sa.Column('name', sa.String(255), nullable=False, comment='Field display name'),
|
||||||
|
sa.Column('label', sa.String(500), nullable=True, comment='Field label for forms'),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True, comment='Field help text'),
|
||||||
|
sa.Column('field_order', sa.Integer(), default=0, nullable=False, comment='Display order'),
|
||||||
|
sa.Column('is_required', sa.Boolean(), default=False, nullable=False, comment='Whether field is required'),
|
||||||
|
sa.Column('is_readonly', sa.Boolean(), default=False, nullable=False, comment='Whether field is read-only'),
|
||||||
|
sa.Column('is_hidden', sa.Boolean(), default=False, nullable=False, comment='Whether field is hidden'),
|
||||||
|
sa.Column('options', sa.JSON(), nullable=True, comment='List of options for selection fields'),
|
||||||
|
sa.Column('default_value', sa.Text(), nullable=True, comment='Default field value'),
|
||||||
|
sa.Column('validation_rules', sa.JSON(), nullable=True, comment='Validation rules (min, max, pattern, etc.)'),
|
||||||
|
sa.Column('display_conditions', sa.JSON(), nullable=True, comment='Conditions for displaying field'),
|
||||||
|
sa.Column('placeholder', sa.String(500), nullable=True, comment='Input placeholder text'),
|
||||||
|
sa.Column('section', sa.String(100), nullable=True, comment='Section identifier for grouping'),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('application_type_id', 'field_id', name='uq_type_field'),
|
||||||
|
sa.Index('idx_field_type_order', 'application_type_id', 'field_order')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create application_type_statuses table
|
||||||
|
op.create_table(
|
||||||
|
'application_type_statuses',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('application_type_id', sa.Integer(), sa.ForeignKey('application_types.id', ondelete='CASCADE'), nullable=False),
|
||||||
|
sa.Column('status_id', sa.String(50), nullable=False, comment='Status identifier'),
|
||||||
|
sa.Column('name', sa.String(100), nullable=False, comment='Status display name'),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True, comment='Status description'),
|
||||||
|
sa.Column('is_editable', sa.Boolean(), default=True, nullable=False, comment='Whether application is editable in this status'),
|
||||||
|
sa.Column('color', sa.String(7), nullable=True, comment='RGB color code (e.g., #FF5733)'),
|
||||||
|
sa.Column('icon', sa.String(50), nullable=True, comment='Icon identifier'),
|
||||||
|
sa.Column('display_order', sa.Integer(), default=0, nullable=False, comment='Display order'),
|
||||||
|
sa.Column('is_initial', sa.Boolean(), default=False, nullable=False, comment='Whether this is the initial status'),
|
||||||
|
sa.Column('is_final', sa.Boolean(), default=False, nullable=False, comment='Whether this is a final status'),
|
||||||
|
sa.Column('is_cancelled', sa.Boolean(), default=False, nullable=False, comment='Whether this represents a cancelled state'),
|
||||||
|
sa.Column('send_notification', sa.Boolean(), default=False, nullable=False, comment='Send notification when entering this status'),
|
||||||
|
sa.Column('notification_template', sa.Text(), nullable=True, comment='Notification template'),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('application_type_id', 'status_id', name='uq_type_status'),
|
||||||
|
sa.Index('idx_status_type_order', 'application_type_id', 'display_order')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create status_transitions table
|
||||||
|
op.create_table(
|
||||||
|
'status_transitions',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('from_status_id', sa.Integer(), sa.ForeignKey('application_type_statuses.id', ondelete='CASCADE'), nullable=False),
|
||||||
|
sa.Column('to_status_id', sa.Integer(), sa.ForeignKey('application_type_statuses.id', ondelete='CASCADE'), nullable=False),
|
||||||
|
sa.Column('name', sa.String(100), nullable=False, comment='Transition name'),
|
||||||
|
sa.Column('trigger_type', sa.Enum(
|
||||||
|
'USER_APPROVAL', 'APPLICANT_ACTION', 'DEADLINE_EXPIRED',
|
||||||
|
'TIME_ELAPSED', 'CONDITION_MET', 'AUTOMATIC',
|
||||||
|
name='transitiontriggertype'
|
||||||
|
), nullable=False, comment='Type of trigger'),
|
||||||
|
sa.Column('trigger_config', sa.JSON(), nullable=False, default={}, comment='Trigger-specific configuration'),
|
||||||
|
sa.Column('conditions', sa.JSON(), nullable=True, comment='Additional conditions for transition'),
|
||||||
|
sa.Column('actions', sa.JSON(), nullable=True, comment='Actions to execute on transition'),
|
||||||
|
sa.Column('priority', sa.Integer(), default=0, nullable=False, comment='Priority (higher = executed first)'),
|
||||||
|
sa.Column('is_active', sa.Boolean(), default=True, nullable=False, comment='Whether transition is active'),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('from_status_id', 'to_status_id', 'name', name='uq_transition'),
|
||||||
|
sa.Index('idx_transition_from_to', 'from_status_id', 'to_status_id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create dynamic_applications table
|
||||||
|
op.create_table(
|
||||||
|
'dynamic_applications',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('application_id', sa.String(64), nullable=False, comment='Public application ID'),
|
||||||
|
sa.Column('application_key', sa.String(255), nullable=False, comment='Application access key (hashed)'),
|
||||||
|
sa.Column('application_type_id', sa.Integer(), sa.ForeignKey('application_types.id'), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
|
||||||
|
sa.Column('email', sa.String(255), nullable=False, comment='Applicant email'),
|
||||||
|
sa.Column('status_id', sa.String(50), nullable=False, comment='Current status ID'),
|
||||||
|
sa.Column('title', sa.String(500), nullable=False, comment='Application title'),
|
||||||
|
sa.Column('first_name', sa.String(100), nullable=True),
|
||||||
|
sa.Column('last_name', sa.String(100), nullable=True),
|
||||||
|
sa.Column('status_changed_at', sa.DateTime(), nullable=True, comment='When status was last changed'),
|
||||||
|
sa.Column('submitted_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('field_data', sa.JSON(), nullable=False, default={}, comment='Dynamic field values'),
|
||||||
|
sa.Column('cost_positions', sa.JSON(), nullable=True, comment='List of cost positions (up to 100)'),
|
||||||
|
sa.Column('comparison_offers', sa.JSON(), nullable=True, comment='List of comparison offers (up to 100)'),
|
||||||
|
sa.Column('total_amount', sa.Float(), default=0.0, nullable=False, comment='Calculated total amount'),
|
||||||
|
sa.Column('pdf_generated', sa.Boolean(), default=False, nullable=False),
|
||||||
|
sa.Column('pdf_generated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('pdf_file_path', sa.String(500), nullable=True),
|
||||||
|
sa.Column('metadata', sa.JSON(), nullable=True, comment='Additional metadata'),
|
||||||
|
sa.Column('search_text', sa.Text(), nullable=True, comment='Concatenated searchable text'),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('application_id'),
|
||||||
|
sa.Index('idx_dynapp_type_status', 'application_type_id', 'status_id'),
|
||||||
|
sa.Index('idx_dynapp_email_type', 'email', 'application_type_id'),
|
||||||
|
sa.Index('idx_dynapp_submitted', 'submitted_at', 'status_id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create application_history_v2 table
|
||||||
|
op.create_table(
|
||||||
|
'application_history_v2',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
|
||||||
|
sa.Column('application_id', sa.Integer(), sa.ForeignKey('dynamic_applications.id', ondelete='CASCADE'), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
|
||||||
|
sa.Column('action', sa.String(100), nullable=False, comment='Action performed'),
|
||||||
|
sa.Column('field_changes', sa.JSON(), nullable=True, comment='Changed fields with old/new values'),
|
||||||
|
sa.Column('comment', sa.Text(), nullable=True),
|
||||||
|
sa.Column('ip_address', sa.String(45), nullable=True),
|
||||||
|
sa.Column('user_agent', sa.String(500), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
        sa.Index('idx_history_app', 'application_id')
    )

    # Create application_attachments_v2 table
    op.create_table(
        'application_attachments_v2',
        sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('application_id', sa.Integer(), sa.ForeignKey('dynamic_applications.id', ondelete='CASCADE'), nullable=False),
        sa.Column('field_id', sa.String(100), nullable=True, comment='Associated field ID'),
        sa.Column('file_name', sa.String(255), nullable=False),
        sa.Column('file_path', sa.String(500), nullable=False),
        sa.Column('file_size', sa.Integer(), nullable=False),
        sa.Column('file_type', sa.String(100), nullable=True),
        sa.Column('file_hash', sa.String(64), nullable=True),
        sa.Column('uploaded_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
        sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
        sa.PrimaryKeyConstraint('id'),
        sa.Index('idx_attachment_app', 'application_id')
    )

    # Create application_transition_logs table
    op.create_table(
        'application_transition_logs',
        sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('application_id', sa.Integer(), sa.ForeignKey('dynamic_applications.id', ondelete='CASCADE'), nullable=False),
        sa.Column('from_status', sa.String(50), nullable=True),
        sa.Column('to_status', sa.String(50), nullable=False),
        sa.Column('transition_name', sa.String(100), nullable=True),
        sa.Column('trigger_type', sa.String(50), nullable=True),
        sa.Column('triggered_by', sa.Integer(), sa.ForeignKey('users.id'), nullable=True),
        sa.Column('trigger_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
        sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
        sa.PrimaryKeyConstraint('id'),
        sa.Index('idx_translog_app', 'application_id')
    )

    # Create application_approvals table
    op.create_table(
        'application_approvals',
        sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('application_id', sa.Integer(), sa.ForeignKey('dynamic_applications.id', ondelete='CASCADE'), nullable=False),
        sa.Column('user_id', sa.Integer(), sa.ForeignKey('users.id'), nullable=False),
        sa.Column('role', sa.String(50), nullable=False, comment='Role of approver'),
        sa.Column('decision', sa.String(20), nullable=False, comment='approve, reject, abstain'),
        sa.Column('comment', sa.Text(), nullable=True),
        sa.Column('status_at_approval', sa.String(50), nullable=True, comment='Status when approval was given'),
        sa.Column('created_at', sa.DateTime(), nullable=False, default=datetime.utcnow),
        sa.Column('updated_at', sa.DateTime(), nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('application_id', 'user_id', 'role', name='uq_app_user_role_approval'),
        sa.Index('idx_approval_app', 'application_id'),
        sa.Index('idx_approval_user', 'user_id')
    )


def downgrade():
    """Drop all dynamic application system tables"""

    # Drop tables in reverse order due to foreign key constraints
    op.drop_table('application_approvals')
    op.drop_table('application_transition_logs')
    op.drop_table('application_attachments_v2')
    op.drop_table('application_history_v2')
    op.drop_table('dynamic_applications')
    op.drop_table('status_transitions')
    op.drop_table('application_type_statuses')
    op.drop_table('application_fields')
    op.drop_table('application_types')

    # Drop enums
    op.execute('DROP TYPE IF EXISTS fieldtype')
    op.execute('DROP TYPE IF EXISTS transitiontriggertype')
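
# Illustrative sketch, not part of this revision file: applying or reverting the
# migration above through Alembic's Python API. The "alembic.ini" location is an
# assumption; the equivalent CLI calls would be `alembic upgrade head` and
# `alembic downgrade -1`.
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")   # hypothetical path to the project's Alembic config
command.upgrade(alembic_cfg, "head")  # runs upgrade(), creating the tables above
command.downgrade(alembic_cfg, "-1")  # runs downgrade(), dropping them again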
@@ -1,620 +0,0 @@
"""
|
|
||||||
Application Database Models
|
|
||||||
|
|
||||||
This module defines the database models for the application system.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from sqlalchemy import (
|
|
||||||
Column, Integer, String, Text, DateTime, JSON, Boolean,
|
|
||||||
ForeignKey, UniqueConstraint, Index, Float, Enum as SQLEnum
|
|
||||||
)
|
|
||||||
from sqlalchemy.orm import relationship, backref
|
|
||||||
from sqlalchemy.dialects.mysql import LONGTEXT
|
|
||||||
import enum
|
|
||||||
from typing import Optional, Dict, Any
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from .base import ExtendedBaseModel, BaseModel, TimestampMixin
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationStatus(enum.Enum):
|
|
||||||
"""Application status enumeration"""
|
|
||||||
DRAFT = "draft"
|
|
||||||
BEANTRAGT = "beantragt" # Submitted
|
|
||||||
BEARBEITUNG_GESPERRT = "bearbeitung_gesperrt" # Processing locked
|
|
||||||
ZU_PRUEFEN = "zu_pruefen" # To be reviewed
|
|
||||||
ZUR_ABSTIMMUNG = "zur_abstimmung" # For voting
|
|
||||||
GENEHMIGT = "genehmigt" # Approved
|
|
||||||
ABGELEHNT = "abgelehnt" # Rejected
|
|
||||||
CANCELLED = "cancelled"
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationType(enum.Enum):
|
|
||||||
"""Application type enumeration"""
|
|
||||||
QSM = "QSM"
|
|
||||||
VSM = "VSM"
|
|
||||||
|
|
||||||
|
|
||||||
class InstitutionType(enum.Enum):
|
|
||||||
"""Institution type enumeration"""
|
|
||||||
STUDENT_FS = "stud-fs"
|
|
||||||
STUDENT_RF = "stud-rf"
|
|
||||||
STUDENT_HG = "stud-hg"
|
|
||||||
FACULTY = "faculty"
|
|
||||||
HS_INSTITUTION = "hs-institution"
|
|
||||||
|
|
||||||
|
|
||||||
class Application(ExtendedBaseModel):
|
|
||||||
"""Main application model"""
|
|
||||||
|
|
||||||
__tablename__ = "applications"
|
|
||||||
|
|
||||||
# Core fields
|
|
||||||
pa_id = Column(
|
|
||||||
String(64),
|
|
||||||
unique=True,
|
|
||||||
nullable=False,
|
|
||||||
index=True,
|
|
||||||
comment="Public application ID"
|
|
||||||
)
|
|
||||||
pa_key = Column(
|
|
||||||
String(255),
|
|
||||||
nullable=False,
|
|
||||||
comment="Application access key (hashed)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# User relationship
|
|
||||||
user_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('users.id'),
|
|
||||||
nullable=True,
|
|
||||||
index=True,
|
|
||||||
comment="User who created the application"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Template relationship
|
|
||||||
template_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('form_templates.id'),
|
|
||||||
nullable=True,
|
|
||||||
index=True,
|
|
||||||
comment="Form template used"
|
|
||||||
)
|
|
||||||
|
|
||||||
variant = Column(
|
|
||||||
SQLEnum(ApplicationType),
|
|
||||||
nullable=False,
|
|
||||||
default=ApplicationType.QSM,
|
|
||||||
comment="Application variant (QSM or VSM)"
|
|
||||||
)
|
|
||||||
status = Column(
|
|
||||||
SQLEnum(ApplicationStatus),
|
|
||||||
nullable=False,
|
|
||||||
default=ApplicationStatus.DRAFT,
|
|
||||||
index=True,
|
|
||||||
comment="Application status"
|
|
||||||
)
|
|
||||||
|
|
||||||
# JSON payload containing all form data
|
|
||||||
payload = Column(
|
|
||||||
JSON,
|
|
||||||
nullable=False,
|
|
||||||
default=dict,
|
|
||||||
comment="Complete application payload"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Searchable extracted fields for quick queries
|
|
||||||
institution_name = Column(String(255), index=True)
|
|
||||||
institution_type = Column(SQLEnum(InstitutionType), index=True)
|
|
||||||
applicant_first_name = Column(String(255), index=True)
|
|
||||||
applicant_last_name = Column(String(255), index=True)
|
|
||||||
applicant_email = Column(String(255), index=True)
|
|
||||||
project_name = Column(String(500), index=True)
|
|
||||||
project_start_date = Column(String(50))
|
|
||||||
project_end_date = Column(String(50))
|
|
||||||
total_amount = Column(Float, default=0.0)
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationVote(ExtendedBaseModel):
|
|
||||||
"""Application voting model"""
|
|
||||||
|
|
||||||
__tablename__ = "application_votes"
|
|
||||||
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('applications.id', ondelete='CASCADE'),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
user_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('users.id', ondelete='CASCADE'),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
vote = Column(
|
|
||||||
String(20),
|
|
||||||
nullable=False,
|
|
||||||
comment="Vote: for, against, abstain"
|
|
||||||
)
|
|
||||||
|
|
||||||
comment = Column(
|
|
||||||
Text,
|
|
||||||
nullable=True,
|
|
||||||
comment="Optional vote comment"
|
|
||||||
)
|
|
||||||
|
|
||||||
voted_at = Column(
|
|
||||||
DateTime,
|
|
||||||
nullable=False,
|
|
||||||
default=datetime.utcnow
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="votes")
|
|
||||||
user = relationship("User")
|
|
||||||
|
|
||||||
# Unique constraint
|
|
||||||
__table_args__ = (
|
|
||||||
UniqueConstraint('application_id', 'user_id', name='uq_application_user_vote'),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationHistory(ExtendedBaseModel):
|
|
||||||
"""Application history tracking"""
|
|
||||||
|
|
||||||
__tablename__ = "application_history"
|
|
||||||
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('applications.id', ondelete='CASCADE'),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
user_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('users.id'),
|
|
||||||
nullable=True,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
action = Column(
|
|
||||||
String(100),
|
|
||||||
nullable=False,
|
|
||||||
comment="Action performed"
|
|
||||||
)
|
|
||||||
|
|
||||||
old_status = Column(
|
|
||||||
String(50),
|
|
||||||
nullable=True,
|
|
||||||
comment="Previous status"
|
|
||||||
)
|
|
||||||
|
|
||||||
new_status = Column(
|
|
||||||
String(50),
|
|
||||||
nullable=True,
|
|
||||||
comment="New status"
|
|
||||||
)
|
|
||||||
|
|
||||||
changes = Column(
|
|
||||||
JSON,
|
|
||||||
nullable=True,
|
|
||||||
default=dict,
|
|
||||||
comment="Field changes"
|
|
||||||
)
|
|
||||||
|
|
||||||
comment = Column(
|
|
||||||
Text,
|
|
||||||
nullable=True,
|
|
||||||
comment="History comment"
|
|
||||||
)
|
|
||||||
|
|
||||||
timestamp = Column(
|
|
||||||
DateTime,
|
|
||||||
nullable=False,
|
|
||||||
default=datetime.utcnow,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="history")
|
|
||||||
user = relationship("User")
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationAttachment(ExtendedBaseModel):
|
|
||||||
"""Application attachment model"""
|
|
||||||
|
|
||||||
__tablename__ = "application_attachments"
|
|
||||||
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('applications.id', ondelete='CASCADE'),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
file_name = Column(
|
|
||||||
String(255),
|
|
||||||
nullable=False,
|
|
||||||
comment="Original file name"
|
|
||||||
)
|
|
||||||
|
|
||||||
file_path = Column(
|
|
||||||
String(500),
|
|
||||||
nullable=False,
|
|
||||||
comment="Storage path"
|
|
||||||
)
|
|
||||||
|
|
||||||
file_size = Column(
|
|
||||||
Integer,
|
|
||||||
nullable=False,
|
|
||||||
comment="File size in bytes"
|
|
||||||
)
|
|
||||||
|
|
||||||
file_type = Column(
|
|
||||||
String(100),
|
|
||||||
nullable=True,
|
|
||||||
comment="MIME type"
|
|
||||||
)
|
|
||||||
|
|
||||||
file_hash = Column(
|
|
||||||
String(64),
|
|
||||||
nullable=True,
|
|
||||||
comment="File SHA256 hash"
|
|
||||||
)
|
|
||||||
|
|
||||||
uploaded_by = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('users.id'),
|
|
||||||
nullable=True
|
|
||||||
)
|
|
||||||
|
|
||||||
uploaded_at = Column(
|
|
||||||
DateTime,
|
|
||||||
nullable=False,
|
|
||||||
default=datetime.utcnow
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="attachments")
|
|
||||||
uploader = relationship("User")
|
|
||||||
|
|
||||||
# Metadata
|
|
||||||
submitted_at = Column(DateTime, nullable=True)
|
|
||||||
reviewed_at = Column(DateTime, nullable=True)
|
|
||||||
completed_at = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Workflow fields
|
|
||||||
locked_at = Column(DateTime, nullable=True, comment="When processing was locked")
|
|
||||||
locked_by = Column(Integer, ForeignKey('users.id'), nullable=True, comment="Who locked the processing")
|
|
||||||
|
|
||||||
# Budget review
|
|
||||||
budget_reviewed_by = Column(Integer, ForeignKey('users.id'), nullable=True, comment="Haushaltsbeauftragte")
|
|
||||||
budget_reviewed_at = Column(DateTime, nullable=True)
|
|
||||||
budget_review_status = Column(String(50), nullable=True) # approved, rejected, pending
|
|
||||||
budget_review_comment = Column(Text, nullable=True)
|
|
||||||
|
|
||||||
# Finance review
|
|
||||||
finance_reviewed_by = Column(Integer, ForeignKey('users.id'), nullable=True, comment="Finanzreferent")
|
|
||||||
finance_reviewed_at = Column(DateTime, nullable=True)
|
|
||||||
finance_review_status = Column(String(50), nullable=True) # approved, rejected, pending
|
|
||||||
finance_review_comment = Column(Text, nullable=True)
|
|
||||||
|
|
||||||
# Voting
|
|
||||||
voting_opened_at = Column(DateTime, nullable=True)
|
|
||||||
voting_closed_at = Column(DateTime, nullable=True)
|
|
||||||
voting_result = Column(String(50), nullable=True) # approved, rejected
|
|
||||||
votes_for = Column(Integer, default=0)
|
|
||||||
votes_against = Column(Integer, default=0)
|
|
||||||
votes_abstain = Column(Integer, default=0)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
user = relationship("User", foreign_keys=[user_id], back_populates="applications")
|
|
||||||
template = relationship("FormTemplate", back_populates="applications")
|
|
||||||
locker = relationship("User", foreign_keys=[locked_by])
|
|
||||||
budget_reviewer = relationship("User", foreign_keys=[budget_reviewed_by])
|
|
||||||
finance_reviewer = relationship("User", foreign_keys=[finance_reviewed_by])
|
|
||||||
votes = relationship("ApplicationVote", back_populates="application", cascade="all, delete-orphan")
|
|
||||||
attachments = relationship("ApplicationAttachment", back_populates="application", cascade="all, delete-orphan")
|
|
||||||
history = relationship("ApplicationHistory", back_populates="application", cascade="all, delete-orphan")
|
|
||||||
reviewed_by = Column(String(255), nullable=True)
|
|
||||||
|
|
||||||
# PDF storage
|
|
||||||
pdf_data = Column(
|
|
||||||
LONGTEXT,
|
|
||||||
nullable=True,
|
|
||||||
comment="Base64 encoded PDF data"
|
|
||||||
)
|
|
||||||
pdf_generated_at = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
attachments = relationship(
|
|
||||||
"ApplicationAttachment",
|
|
||||||
back_populates="application",
|
|
||||||
cascade="all, delete-orphan",
|
|
||||||
lazy="dynamic"
|
|
||||||
)
|
|
||||||
comparison_offers = relationship(
|
|
||||||
"ComparisonOffer",
|
|
||||||
back_populates="application",
|
|
||||||
cascade="all, delete-orphan",
|
|
||||||
lazy="dynamic"
|
|
||||||
)
|
|
||||||
cost_justifications = relationship(
|
|
||||||
"CostPositionJustification",
|
|
||||||
back_populates="application",
|
|
||||||
cascade="all, delete-orphan",
|
|
||||||
lazy="dynamic"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index("idx_app_status_created", "status", "created_at"),
|
|
||||||
Index("idx_app_email_status", "applicant_email", "status"),
|
|
||||||
Index("idx_app_institution", "institution_type", "institution_name"),
|
|
||||||
Index("idx_app_dates", "project_start_date", "project_end_date"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def update_from_payload(self):
|
|
||||||
"""Update searchable fields from payload"""
|
|
||||||
if not self.payload:
|
|
||||||
return
|
|
||||||
|
|
||||||
pa = self.payload.get("pa", {})
|
|
||||||
|
|
||||||
# Extract applicant info
|
|
||||||
applicant = pa.get("applicant", {})
|
|
||||||
self.applicant_first_name = applicant.get("name", {}).get("first")
|
|
||||||
self.applicant_last_name = applicant.get("name", {}).get("last")
|
|
||||||
self.applicant_email = applicant.get("contact", {}).get("email")
|
|
||||||
|
|
||||||
# Extract institution info
|
|
||||||
institution = applicant.get("institution", {})
|
|
||||||
self.institution_name = institution.get("name")
|
|
||||||
inst_type = institution.get("type")
|
|
||||||
if inst_type and inst_type != "-":
|
|
||||||
try:
|
|
||||||
self.institution_type = InstitutionType(inst_type)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Extract project info
|
|
||||||
project = pa.get("project", {})
|
|
||||||
self.project_name = project.get("name")
|
|
||||||
|
|
||||||
dates = project.get("dates", {})
|
|
||||||
self.project_start_date = dates.get("start")
|
|
||||||
self.project_end_date = dates.get("end")
|
|
||||||
|
|
||||||
# Calculate total amount
|
|
||||||
costs = project.get("costs", [])
|
|
||||||
total = 0.0
|
|
||||||
for cost in costs:
|
|
||||||
amount = cost.get("amountEur", 0)
|
|
||||||
if amount:
|
|
||||||
total += float(amount)
|
|
||||||
self.total_amount = total
|
|
||||||
|
|
||||||
def to_dict(self, exclude: Optional[set] = None, include_pdf: bool = False) -> Dict[str, Any]:
|
|
||||||
"""Convert to dictionary with optional PDF exclusion"""
|
|
||||||
exclude = exclude or set()
|
|
||||||
if not include_pdf:
|
|
||||||
exclude.add("pdf_data")
|
|
||||||
|
|
||||||
data = super().to_dict(exclude=exclude)
|
|
||||||
|
|
||||||
# Convert enums to strings
|
|
||||||
if "status" in data and data["status"]:
|
|
||||||
data["status"] = data["status"].value if hasattr(data["status"], "value") else data["status"]
|
|
||||||
if "variant" in data and data["variant"]:
|
|
||||||
data["variant"] = data["variant"].value if hasattr(data["variant"], "value") else data["variant"]
|
|
||||||
if "institution_type" in data and data["institution_type"]:
|
|
||||||
data["institution_type"] = data["institution_type"].value if hasattr(data["institution_type"], "value") else data["institution_type"]
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class Attachment(BaseModel, TimestampMixin):
|
|
||||||
"""Base attachment model"""
|
|
||||||
|
|
||||||
__tablename__ = "attachments"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
filename = Column(String(255), nullable=False)
|
|
||||||
content_type = Column(String(100), nullable=False)
|
|
||||||
size = Column(Integer, nullable=False)
|
|
||||||
checksum = Column(String(64), nullable=True)
|
|
||||||
storage_type = Column(
|
|
||||||
String(50),
|
|
||||||
default="database",
|
|
||||||
comment="Storage type: database or filesystem"
|
|
||||||
)
|
|
||||||
storage_path = Column(
|
|
||||||
String(500),
|
|
||||||
nullable=True,
|
|
||||||
comment="Path if stored in filesystem"
|
|
||||||
)
|
|
||||||
data = Column(
|
|
||||||
LONGTEXT,
|
|
||||||
nullable=True,
|
|
||||||
comment="Base64 encoded data if stored in database"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index("idx_attachment_checksum", "checksum"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationAttachment(BaseModel):
|
|
||||||
"""Junction table for application attachments with additional metadata"""
|
|
||||||
|
|
||||||
__tablename__ = "application_attachments"
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey("applications.id", ondelete="CASCADE"),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
attachment_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey("attachments.id", ondelete="CASCADE"),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
category = Column(
|
|
||||||
String(50),
|
|
||||||
nullable=True,
|
|
||||||
comment="Attachment category (e.g., invoice, receipt, etc.)"
|
|
||||||
)
|
|
||||||
description = Column(Text, nullable=True)
|
|
||||||
uploaded_at = Column(DateTime, default=datetime.utcnow)
|
|
||||||
uploaded_by = Column(String(255), nullable=True)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="attachments")
|
|
||||||
attachment = relationship("Attachment", backref="applications", cascade="all, delete")
|
|
||||||
|
|
||||||
# Constraints
|
|
||||||
__table_args__ = (
|
|
||||||
UniqueConstraint("application_id", "attachment_id", name="uq_app_attachment"),
|
|
||||||
Index("idx_app_attach_category", "application_id", "category"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ComparisonOffer(ExtendedBaseModel):
|
|
||||||
"""Comparison offers for cost positions"""
|
|
||||||
|
|
||||||
__tablename__ = "comparison_offers"
|
|
||||||
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey("applications.id", ondelete="CASCADE"),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
cost_position_idx = Column(
|
|
||||||
Integer,
|
|
||||||
nullable=False,
|
|
||||||
comment="Index of cost position in application"
|
|
||||||
)
|
|
||||||
supplier_name = Column(String(255), nullable=False)
|
|
||||||
amount_eur = Column(Float, nullable=False)
|
|
||||||
description = Column(Text, nullable=True)
|
|
||||||
is_preferred = Column(
|
|
||||||
Boolean,
|
|
||||||
default=False,
|
|
||||||
comment="Whether this is the preferred offer"
|
|
||||||
)
|
|
||||||
attachment_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey("attachments.id", ondelete="SET NULL"),
|
|
||||||
nullable=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="comparison_offers")
|
|
||||||
attachment = relationship("Attachment", backref="comparison_offers")
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index("idx_comp_offer_app_pos", "application_id", "cost_position_idx"),
|
|
||||||
Index("idx_comp_offer_preferred", "application_id", "is_preferred"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def to_dict(self, exclude: Optional[set] = None) -> Dict[str, Any]:
|
|
||||||
"""Convert to dictionary"""
|
|
||||||
data = super().to_dict(exclude=exclude)
|
|
||||||
|
|
||||||
# Include attachment info if available
|
|
||||||
if self.attachment:
|
|
||||||
data["attachment_info"] = {
|
|
||||||
"id": self.attachment.id,
|
|
||||||
"filename": self.attachment.filename,
|
|
||||||
"size": self.attachment.size,
|
|
||||||
"content_type": self.attachment.content_type
|
|
||||||
}
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class CostPositionJustification(ExtendedBaseModel):
|
|
||||||
"""Justifications for cost positions"""
|
|
||||||
|
|
||||||
__tablename__ = "cost_position_justifications"
|
|
||||||
|
|
||||||
application_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey("applications.id", ondelete="CASCADE"),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
cost_position_idx = Column(
|
|
||||||
Integer,
|
|
||||||
nullable=False,
|
|
||||||
comment="Index of cost position in application"
|
|
||||||
)
|
|
||||||
justification = Column(
|
|
||||||
Text,
|
|
||||||
nullable=False,
|
|
||||||
comment="Justification text"
|
|
||||||
)
|
|
||||||
justification_type = Column(
|
|
||||||
String(50),
|
|
||||||
default="standard",
|
|
||||||
comment="Type of justification"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
application = relationship("Application", back_populates="cost_justifications")
|
|
||||||
|
|
||||||
# Constraints
|
|
||||||
__table_args__ = (
|
|
||||||
UniqueConstraint(
|
|
||||||
"application_id", "cost_position_idx",
|
|
||||||
name="uq_app_cost_justification"
|
|
||||||
),
|
|
||||||
Index("idx_cost_just_type", "justification_type"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Counter(BaseModel):
    """Counter for generating sequential IDs"""

    __tablename__ = "counters"

    key = Column(String(50), unique=True, nullable=False)
    value = Column(Integer, default=0, nullable=False)
    prefix = Column(String(20), nullable=True)
    suffix = Column(String(20), nullable=True)
    format_string = Column(
        String(100),
        default="{prefix}{value:06d}{suffix}",
        comment="Python format string for ID generation"
    )

    @classmethod
    def get_next_value(cls, session, key: str, increment: int = 1) -> int:
        """Get next counter value with atomic increment"""
        counter = session.query(cls).filter_by(key=key).with_for_update().first()
        if not counter:
            counter = cls(key=key, value=0)
            session.add(counter)

        counter.value += increment
        session.flush()
        return counter.value

    def format_id(self, value: Optional[int] = None) -> str:
        """Format counter value as ID string"""
        val = value if value is not None else self.value
        return self.format_string.format(
            prefix=self.prefix or "",
            suffix=self.suffix or "",
            value=val
        )
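
# Illustrative sketch, not part of the original file: using the Counter helper above to
# generate a sequential public ID. Engine/session setup and the "pa_id" counter key are
# assumptions for the example only; the declarative base and metadata live elsewhere.
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")          # hypothetical database URL
# BaseModel.metadata.create_all(engine) would be required first (base class not shown here)
with Session(engine) as session:
    next_value = Counter.get_next_value(session, key="pa_id")
    counter = session.query(Counter).filter_by(key="pa_id").one()
    print(counter.format_id(next_value))              # "000001" with the default format string
    session.commit()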

955 backend/src/models/application_type.py (new file)
@@ -0,0 +1,955 @@
"""
|
||||||
|
Dynamic Application Type Models
|
||||||
|
|
||||||
|
This module defines the database models for fully dynamic application types.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
Column, Integer, String, Text, DateTime, JSON, Boolean,
|
||||||
|
ForeignKey, UniqueConstraint, Index, Float, LargeBinary,
|
||||||
|
Enum as SQLEnum
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import relationship, backref
|
||||||
|
from sqlalchemy.dialects.mysql import LONGTEXT
|
||||||
|
import enum
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from .base import ExtendedBaseModel, BaseModel, TimestampMixin
|
||||||
|
|
||||||
|
|
||||||
|
class FieldType(enum.Enum):
|
||||||
|
"""Field type enumeration"""
|
||||||
|
TEXT_SHORT = "text_short"
|
||||||
|
TEXT_LONG = "text_long"
|
||||||
|
OPTIONS = "options"
|
||||||
|
YESNO = "yesno"
|
||||||
|
MAIL = "mail"
|
||||||
|
DATE = "date"
|
||||||
|
DATETIME = "datetime"
|
||||||
|
AMOUNT = "amount"
|
||||||
|
CURRENCY_EUR = "currency_eur"
|
||||||
|
NUMBER = "number"
|
||||||
|
FILE = "file"
|
||||||
|
SIGNATURE = "signature"
|
||||||
|
PHONE = "phone"
|
||||||
|
URL = "url"
|
||||||
|
CHECKBOX = "checkbox"
|
||||||
|
RADIO = "radio"
|
||||||
|
SELECT = "select"
|
||||||
|
MULTISELECT = "multiselect"
|
||||||
|
|
||||||
|
|
||||||
|
class TransitionTriggerType(enum.Enum):
|
||||||
|
"""Transition trigger type"""
|
||||||
|
USER_APPROVAL = "user_approval" # N users with role X approve/reject
|
||||||
|
APPLICANT_ACTION = "applicant_action" # Button clicked by applicant
|
||||||
|
DEADLINE_EXPIRED = "deadline_expired" # Date deadline passed
|
||||||
|
TIME_ELAPSED = "time_elapsed" # Timespan elapsed
|
||||||
|
CONDITION_MET = "condition_met" # Field condition met
|
||||||
|
AUTOMATIC = "automatic" # Automatic transition
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationType(ExtendedBaseModel):
    """Dynamic application type definition"""

    __tablename__ = "application_types"

    # Core fields
    type_id = Column(
        String(100),
        unique=True,
        nullable=False,
        index=True,
        comment="Unique identifier for application type"
    )

    name = Column(
        String(255),
        nullable=False,
        comment="Display name"
    )

    description = Column(
        Text,
        nullable=True,
        comment="Markdown description"
    )

    # PDF Template
    pdf_template = Column(
        LargeBinary,
        nullable=True,
        comment="PDF template blob"
    )

    pdf_template_filename = Column(
        String(255),
        nullable=True,
        comment="Original PDF template filename"
    )

    # Field mapping (PDF field name -> field ID)
    pdf_field_mapping = Column(
        JSON,
        nullable=False,
        default=dict,
        comment="Mapping from PDF field names to field IDs"
    )

    # Configuration
    is_active = Column(
        Boolean,
        default=True,
        index=True,
        comment="Whether this type is currently active"
    )

    is_public = Column(
        Boolean,
        default=True,
        comment="Whether this type is publicly available"
    )

    # Access control
    allowed_roles = Column(
        JSON,
        nullable=True,
        default=list,
        comment="List of roles allowed to create this type"
    )

    # Cost configuration
    max_cost_positions = Column(
        Integer,
        default=100,
        comment="Maximum number of cost positions"
    )

    max_comparison_offers = Column(
        Integer,
        default=100,
        comment="Maximum number of comparison offers"
    )

    # Versioning
    version = Column(
        String(20),
        default="1.0.0",
        comment="Version number"
    )

    parent_type_id = Column(
        Integer,
        ForeignKey('application_types.id'),
        nullable=True,
        comment="Parent type for versioning"
    )

    # Statistics
    usage_count = Column(
        Integer,
        default=0,
        comment="Number of applications created with this type"
    )

    # Relationships
    fields = relationship(
        "ApplicationField",
        back_populates="application_type",
        cascade="all, delete-orphan",
        order_by="ApplicationField.field_order"
    )

    statuses = relationship(
        "ApplicationTypeStatus",
        back_populates="application_type",
        cascade="all, delete-orphan"
    )

    applications = relationship(
        "DynamicApplication",
        back_populates="application_type"
    )

    parent_type = relationship(
        "ApplicationType",
        remote_side="ApplicationType.id",
        backref=backref("versions", lazy="dynamic")
    )

    # Indexes
    __table_args__ = (
        Index('idx_apptype_active_public', 'is_active', 'is_public'),
    )


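# Illustrative sketch, not part of the original file: listing the active, publicly
# available application types, which is the lookup the idx_apptype_active_public
# index above is meant to serve. The session object is assumed to be provided by
# the application's database layer (not shown here).
def list_public_types(session):
    """Return active, public application types ordered by name (example helper)."""
    return (
        session.query(ApplicationType)
        .filter(ApplicationType.is_active.is_(True), ApplicationType.is_public.is_(True))
        .order_by(ApplicationType.name)
        .all()
    )

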
class ApplicationField(ExtendedBaseModel):
    """Field definition for application types"""

    __tablename__ = "application_fields"

    application_type_id = Column(
        Integer,
        ForeignKey('application_types.id', ondelete='CASCADE'),
        nullable=False,
        index=True
    )

    field_id = Column(
        String(100),
        nullable=False,
        comment="Unique field identifier within type"
    )

    field_type = Column(
        SQLEnum(FieldType),
        nullable=False,
        comment="Field data type"
    )

    name = Column(
        String(255),
        nullable=False,
        comment="Field display name"
    )

    label = Column(
        String(500),
        nullable=True,
        comment="Field label for forms"
    )

    description = Column(
        Text,
        nullable=True,
        comment="Field help text"
    )

    field_order = Column(
        Integer,
        default=0,
        comment="Display order"
    )

    # Field configuration
    is_required = Column(
        Boolean,
        default=False,
        comment="Whether field is required"
    )

    is_readonly = Column(
        Boolean,
        default=False,
        comment="Whether field is read-only"
    )

    is_hidden = Column(
        Boolean,
        default=False,
        comment="Whether field is hidden"
    )

    # Options for select/radio/checkbox fields
    options = Column(
        JSON,
        nullable=True,
        default=list,
        comment="List of options for selection fields"
    )

    # Default value
    default_value = Column(
        Text,
        nullable=True,
        comment="Default field value"
    )

    # Validation rules
    validation_rules = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Validation rules (min, max, pattern, etc.)"
    )

    # Display conditions
    display_conditions = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Conditions for displaying field"
    )

    # Placeholder
    placeholder = Column(
        String(500),
        nullable=True,
        comment="Input placeholder text"
    )

    # Section grouping
    section = Column(
        String(100),
        nullable=True,
        comment="Section identifier for grouping"
    )

    # Relationships
    application_type = relationship(
        "ApplicationType",
        back_populates="fields"
    )

    # Unique constraint
    __table_args__ = (
        UniqueConstraint('application_type_id', 'field_id', name='uq_type_field'),
        Index('idx_field_type_order', 'application_type_id', 'field_order'),
    )


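# Illustrative sketch, not part of the original file: assembling an ApplicationType with
# two ApplicationField definitions. The keys inside validation_rules and
# display_conditions ("min", "max", "field", "equals") are hypothetical shapes chosen for
# the example; the models only constrain these columns to hold JSON.
example_type = ApplicationType(
    type_id="travel_grant",            # hypothetical type identifier
    name="Travel Grant",
    fields=[
        ApplicationField(
            field_id="project_name",
            field_type=FieldType.TEXT_SHORT,
            name="Project name",
            is_required=True,
            field_order=1,
            validation_rules={"min": 3, "max": 200},      # assumed rule shape
        ),
        ApplicationField(
            field_id="travel_budget",
            field_type=FieldType.CURRENCY_EUR,
            name="Travel budget",
            field_order=2,
            display_conditions={"field": "needs_travel", "equals": True},  # assumed shape
        ),
    ],
)
# session.add(example_type); session.commit() would persist the type and its fields.

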
class ApplicationTypeStatus(ExtendedBaseModel):
    """Status definition for application types"""

    __tablename__ = "application_type_statuses"

    application_type_id = Column(
        Integer,
        ForeignKey('application_types.id', ondelete='CASCADE'),
        nullable=False,
        index=True
    )

    status_id = Column(
        String(50),
        nullable=False,
        comment="Status identifier"
    )

    name = Column(
        String(100),
        nullable=False,
        comment="Status display name"
    )

    description = Column(
        Text,
        nullable=True,
        comment="Status description"
    )

    # Configuration
    is_editable = Column(
        Boolean,
        default=True,
        comment="Whether application is editable in this status"
    )

    color = Column(
        String(7),
        nullable=True,
        comment="RGB color code (e.g., #FF5733)"
    )

    icon = Column(
        String(50),
        nullable=True,
        comment="Icon identifier"
    )

    # Order for display
    display_order = Column(
        Integer,
        default=0,
        comment="Display order"
    )

    # Status flags
    is_initial = Column(
        Boolean,
        default=False,
        comment="Whether this is the initial status"
    )

    is_final = Column(
        Boolean,
        default=False,
        comment="Whether this is a final status"
    )

    is_cancelled = Column(
        Boolean,
        default=False,
        comment="Whether this represents a cancelled state"
    )

    # Notification configuration
    send_notification = Column(
        Boolean,
        default=False,
        comment="Send notification when entering this status"
    )

    notification_template = Column(
        Text,
        nullable=True,
        comment="Notification template"
    )

    # Relationships
    application_type = relationship(
        "ApplicationType",
        back_populates="statuses"
    )

    transitions_from = relationship(
        "StatusTransition",
        foreign_keys="StatusTransition.from_status_id",
        back_populates="from_status",
        cascade="all, delete-orphan"
    )

    transitions_to = relationship(
        "StatusTransition",
        foreign_keys="StatusTransition.to_status_id",
        back_populates="to_status"
    )

    # Unique constraint
    __table_args__ = (
        UniqueConstraint('application_type_id', 'status_id', name='uq_type_status'),
        Index('idx_status_type_order', 'application_type_id', 'display_order'),
    )


class StatusTransition(ExtendedBaseModel):
    """Status transition rules"""

    __tablename__ = "status_transitions"

    from_status_id = Column(
        Integer,
        ForeignKey('application_type_statuses.id', ondelete='CASCADE'),
        nullable=False,
        index=True
    )

    to_status_id = Column(
        Integer,
        ForeignKey('application_type_statuses.id', ondelete='CASCADE'),
        nullable=False,
        index=True
    )

    name = Column(
        String(100),
        nullable=False,
        comment="Transition name"
    )

    trigger_type = Column(
        SQLEnum(TransitionTriggerType),
        nullable=False,
        comment="Type of trigger"
    )

    # Trigger configuration
    trigger_config = Column(
        JSON,
        nullable=False,
        default=dict,
        comment="Trigger-specific configuration"
    )

    # Conditions
    conditions = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Additional conditions for transition"
    )

    # Actions
    actions = Column(
        JSON,
        nullable=True,
        default=list,
        comment="Actions to execute on transition"
    )

    # Priority for multiple possible transitions
    priority = Column(
        Integer,
        default=0,
        comment="Priority (higher = executed first)"
    )

    is_active = Column(
        Boolean,
        default=True,
        comment="Whether transition is active"
    )

    # Relationships
    from_status = relationship(
        "ApplicationTypeStatus",
        foreign_keys=[from_status_id],
        back_populates="transitions_from"
    )

    to_status = relationship(
        "ApplicationTypeStatus",
        foreign_keys=[to_status_id],
        back_populates="transitions_to"
    )

    # Unique constraint
    __table_args__ = (
        UniqueConstraint('from_status_id', 'to_status_id', 'name', name='uq_transition'),
        Index('idx_transition_from_to', 'from_status_id', 'to_status_id'),
    )


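# Illustrative sketch, not part of the original file: a transition that moves an
# application out of review once two users with a given role approve it. The keys inside
# trigger_config ("required_role", "required_approvals") are hypothetical; the model only
# stores opaque JSON and the service interpreting it is not part of this file.
example_transition = StatusTransition(
    name="approve_after_review",
    trigger_type=TransitionTriggerType.USER_APPROVAL,
    trigger_config={"required_role": "finance", "required_approvals": 2},  # assumed shape
    priority=10,
    # from_status / to_status would reference ApplicationTypeStatus rows of the same type
)

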
class DynamicApplication(ExtendedBaseModel):
|
||||||
|
"""Dynamic application instance"""
|
||||||
|
|
||||||
|
__tablename__ = "dynamic_applications"
|
||||||
|
|
||||||
|
# Identification
|
||||||
|
application_id = Column(
|
||||||
|
String(64),
|
||||||
|
unique=True,
|
||||||
|
nullable=False,
|
||||||
|
index=True,
|
||||||
|
comment="Public application ID"
|
||||||
|
)
|
||||||
|
|
||||||
|
application_key = Column(
|
||||||
|
String(255),
|
||||||
|
nullable=False,
|
||||||
|
comment="Application access key (hashed)"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Type reference
|
||||||
|
application_type_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('application_types.id'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# User reference
|
||||||
|
user_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('users.id'),
|
||||||
|
nullable=True,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Common fields (always present)
|
||||||
|
email = Column(
|
||||||
|
String(255),
|
||||||
|
nullable=False,
|
||||||
|
index=True,
|
||||||
|
comment="Applicant email"
|
||||||
|
)
|
||||||
|
|
||||||
|
status_id = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=False,
|
||||||
|
index=True,
|
||||||
|
comment="Current status ID"
|
||||||
|
)
|
||||||
|
|
||||||
|
title = Column(
|
||||||
|
String(500),
|
||||||
|
nullable=False,
|
||||||
|
comment="Application title"
|
||||||
|
)
|
||||||
|
|
||||||
|
first_name = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=True,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
last_name = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=True,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Timestamps
|
||||||
|
status_changed_at = Column(
|
||||||
|
DateTime,
|
||||||
|
nullable=True,
|
||||||
|
comment="When status was last changed"
|
||||||
|
)
|
||||||
|
|
||||||
|
submitted_at = Column(
|
||||||
|
DateTime,
|
||||||
|
nullable=True,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
completed_at = Column(
|
||||||
|
DateTime,
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Dynamic field data
|
||||||
|
field_data = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=False,
|
||||||
|
default=dict,
|
||||||
|
comment="Dynamic field values"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Cost positions (extended)
|
||||||
|
cost_positions = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=True,
|
||||||
|
default=list,
|
||||||
|
comment="List of cost positions (up to 100)"
|
||||||
|
)
|
||||||
|
|
||||||
|
comparison_offers = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=True,
|
||||||
|
default=list,
|
||||||
|
comment="List of comparison offers (up to 100)"
|
||||||
|
)
|
||||||
|
|
||||||
|
total_amount = Column(
|
||||||
|
Float,
|
||||||
|
default=0.0,
|
||||||
|
index=True,
|
||||||
|
comment="Calculated total amount"
|
||||||
|
)
|
||||||
|
|
||||||
|
# PDF generation
|
||||||
|
pdf_generated = Column(
|
||||||
|
Boolean,
|
||||||
|
default=False
|
||||||
|
)
|
||||||
|
|
||||||
|
pdf_generated_at = Column(
|
||||||
|
DateTime,
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
pdf_file_path = Column(
|
||||||
|
String(500),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Metadata
|
||||||
|
application_metadata = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=True,
|
||||||
|
default=dict,
|
||||||
|
comment="Additional metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Search optimization
|
||||||
|
search_text = Column(
|
||||||
|
Text,
|
||||||
|
nullable=True,
|
||||||
|
comment="Concatenated searchable text"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Relationships
|
||||||
|
application_type = relationship(
|
||||||
|
"ApplicationType",
|
||||||
|
back_populates="applications"
|
||||||
|
)
|
||||||
|
|
||||||
|
user = relationship(
|
||||||
|
"User",
|
||||||
|
back_populates="dynamic_applications"
|
||||||
|
)
|
||||||
|
|
||||||
|
history = relationship(
|
||||||
|
"ApplicationHistory",
|
||||||
|
back_populates="application",
|
||||||
|
cascade="all, delete-orphan"
|
||||||
|
)
|
||||||
|
|
||||||
|
attachments = relationship(
|
||||||
|
"ApplicationAttachment",
|
||||||
|
back_populates="application",
|
||||||
|
cascade="all, delete-orphan"
|
||||||
|
)
|
||||||
|
|
||||||
|
transitions = relationship(
|
||||||
|
"ApplicationTransitionLog",
|
||||||
|
back_populates="application",
|
||||||
|
cascade="all, delete-orphan"
|
||||||
|
)
|
||||||
|
|
||||||
|
approvals = relationship(
|
||||||
|
"ApplicationApproval",
|
||||||
|
back_populates="application",
|
||||||
|
cascade="all, delete-orphan"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Indexes
|
||||||
|
__table_args__ = (
|
||||||
|
Index('idx_dynapp_type_status', 'application_type_id', 'status_id'),
|
||||||
|
Index('idx_dynapp_email_type', 'email', 'application_type_id'),
|
||||||
|
Index('idx_dynapp_submitted', 'submitted_at', 'status_id'),
|
||||||
|
)
|
||||||
|
|
||||||
|
    def update_search_text(self):
        """Update searchable text from field data"""
        parts = [
            self.title or '',
            self.email or '',
            self.first_name or '',
            self.last_name or '',
        ]

        # Add field data
        if self.field_data:
            for key, value in self.field_data.items():
                if isinstance(value, str):
                    parts.append(value)
                elif isinstance(value, (list, dict)):
                    parts.append(str(value))

        self.search_text = ' '.join(filter(None, parts))

    def calculate_total_amount(self):
        """Calculate total from cost positions"""
        total = 0.0
        if self.cost_positions:
            for pos in self.cost_positions:
                if isinstance(pos, dict) and 'amount' in pos:
                    try:
                        total += float(pos['amount'])
                    except (ValueError, TypeError):
                        pass
        self.total_amount = total

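    # Illustrative sketch, not part of the original file: how the two helpers above are
    # expected to be used when an application is saved. The cost position dictionaries
    # only need an "amount" key for the total; the other keys are examples.
    # app = DynamicApplication(title="Lab equipment", email="applicant@example.org", ...)
    # app.cost_positions = [
    #     {"description": "Microscope", "amount": "1200.50", "category": "equipment"},
    #     {"description": "Shipping", "amount": 80},
    # ]
    # app.calculate_total_amount()   # -> app.total_amount == 1280.5
    # app.update_search_text()       # refreshes the concatenated search_text column
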
class ApplicationHistory(ExtendedBaseModel):
|
||||||
|
"""Application history tracking"""
|
||||||
|
|
||||||
|
__tablename__ = "application_history_v2"
|
||||||
|
|
||||||
|
application_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('dynamic_applications.id', ondelete='CASCADE'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
user_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('users.id'),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
action = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=False,
|
||||||
|
comment="Action performed"
|
||||||
|
)
|
||||||
|
|
||||||
|
field_changes = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=True,
|
||||||
|
default=dict,
|
||||||
|
comment="Changed fields with old/new values"
|
||||||
|
)
|
||||||
|
|
||||||
|
comment = Column(
|
||||||
|
Text,
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
ip_address = Column(
|
||||||
|
String(45),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
user_agent = Column(
|
||||||
|
String(500),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Relationships
|
||||||
|
application = relationship(
|
||||||
|
"DynamicApplication",
|
||||||
|
back_populates="history"
|
||||||
|
)
|
||||||
|
|
||||||
|
user = relationship("User")
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationAttachment(ExtendedBaseModel):
|
||||||
|
"""Application attachments"""
|
||||||
|
|
||||||
|
__tablename__ = "application_attachments_v2"
|
||||||
|
|
||||||
|
application_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('dynamic_applications.id', ondelete='CASCADE'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
field_id = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=True,
|
||||||
|
comment="Associated field ID"
|
||||||
|
)
|
||||||
|
|
||||||
|
file_name = Column(
|
||||||
|
String(255),
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
|
||||||
|
file_path = Column(
|
||||||
|
String(500),
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
|
||||||
|
file_size = Column(
|
||||||
|
Integer,
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
|
||||||
|
file_type = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
file_hash = Column(
|
||||||
|
String(64),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
uploaded_by = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('users.id'),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Relationships
|
||||||
|
application = relationship(
|
||||||
|
"DynamicApplication",
|
||||||
|
back_populates="attachments"
|
||||||
|
)
|
||||||
|
|
||||||
|
uploader = relationship("User")
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationTransitionLog(ExtendedBaseModel):
|
||||||
|
"""Log of status transitions"""
|
||||||
|
|
||||||
|
__tablename__ = "application_transition_logs"
|
||||||
|
|
||||||
|
application_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('dynamic_applications.id', ondelete='CASCADE'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
from_status = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
to_status = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=False
|
||||||
|
)
|
||||||
|
|
||||||
|
transition_name = Column(
|
||||||
|
String(100),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
trigger_type = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
triggered_by = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('users.id'),
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
trigger_data = Column(
|
||||||
|
JSON,
|
||||||
|
nullable=True,
|
||||||
|
default=dict
|
||||||
|
)
|
||||||
|
|
||||||
|
# Relationships
|
||||||
|
application = relationship(
|
||||||
|
"DynamicApplication",
|
||||||
|
back_populates="transitions"
|
||||||
|
)
|
||||||
|
|
||||||
|
user = relationship("User")
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationApproval(ExtendedBaseModel):
|
||||||
|
"""Approval tracking for applications"""
|
||||||
|
|
||||||
|
__tablename__ = "application_approvals"
|
||||||
|
|
||||||
|
application_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('dynamic_applications.id', ondelete='CASCADE'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
user_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey('users.id'),
|
||||||
|
nullable=False,
|
||||||
|
index=True
|
||||||
|
)
|
||||||
|
|
||||||
|
role = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=False,
|
||||||
|
comment="Role of approver"
|
||||||
|
)
|
||||||
|
|
||||||
|
decision = Column(
|
||||||
|
String(20),
|
||||||
|
nullable=False,
|
||||||
|
comment="approve, reject, abstain"
|
||||||
|
)
|
||||||
|
|
||||||
|
comment = Column(
|
||||||
|
Text,
|
||||||
|
nullable=True
|
||||||
|
)
|
||||||
|
|
||||||
|
status_at_approval = Column(
|
||||||
|
String(50),
|
||||||
|
nullable=True,
|
||||||
|
comment="Status when approval was given"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Relationships
|
||||||
|
application = relationship(
|
||||||
|
"DynamicApplication",
|
||||||
|
back_populates="approvals"
|
||||||
|
)
|
||||||
|
|
||||||
|
user = relationship("User")
|
||||||
|
|
||||||
|
# Unique constraint
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint('application_id', 'user_id', 'role', name='uq_app_user_role_approval'),
|
||||||
|
)
|
||||||
@@ -1,458 +0,0 @@
"""
|
|
||||||
Form Template Database Models
|
|
||||||
|
|
||||||
This module defines the database models for form templates and PDF field mappings.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from sqlalchemy import (
|
|
||||||
Column, Integer, String, Text, DateTime, JSON, Boolean,
|
|
||||||
ForeignKey, UniqueConstraint, Index, Enum as SQLEnum
|
|
||||||
)
|
|
||||||
from sqlalchemy.orm import relationship, backref
|
|
||||||
import enum
|
|
||||||
from typing import Optional, Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from .base import ExtendedBaseModel, BaseModel, TimestampMixin
|
|
||||||
|
|
||||||
|
|
||||||
class FormType(enum.Enum):
|
|
||||||
"""Form type enumeration"""
|
|
||||||
QSM = "QSM"
|
|
||||||
VSM = "VSM"
|
|
||||||
CUSTOM = "CUSTOM"
|
|
||||||
|
|
||||||
|
|
||||||
class FieldType(enum.Enum):
|
|
||||||
"""Field type enumeration"""
|
|
||||||
TEXT = "text"
|
|
||||||
NUMBER = "number"
|
|
||||||
DATE = "date"
|
|
||||||
EMAIL = "email"
|
|
||||||
PHONE = "phone"
|
|
||||||
CHECKBOX = "checkbox"
|
|
||||||
RADIO = "radio"
|
|
||||||
SELECT = "select"
|
|
||||||
TEXTAREA = "textarea"
|
|
||||||
FILE = "file"
|
|
||||||
SIGNATURE = "signature"
|
|
||||||
CURRENCY = "currency"
|
|
||||||
|
|
||||||
|
|
||||||
class FormTemplate(ExtendedBaseModel):
|
|
||||||
"""Form template model for configurable PDF forms"""
|
|
||||||
|
|
||||||
__tablename__ = "form_templates"
|
|
||||||
|
|
||||||
# Core fields
|
|
||||||
name = Column(
|
|
||||||
String(255),
|
|
||||||
unique=True,
|
|
||||||
nullable=False,
|
|
||||||
index=True,
|
|
||||||
comment="Template name"
|
|
||||||
)
|
|
||||||
|
|
||||||
display_name = Column(
|
|
||||||
String(255),
|
|
||||||
nullable=False,
|
|
||||||
comment="Display name for UI"
|
|
||||||
)
|
|
||||||
|
|
||||||
description = Column(
|
|
||||||
Text,
|
|
||||||
nullable=True,
|
|
||||||
comment="Template description"
|
|
||||||
)
|
|
||||||
|
|
||||||
form_type = Column(
|
|
||||||
SQLEnum(FormType),
|
|
||||||
nullable=False,
|
|
||||||
default=FormType.CUSTOM,
|
|
||||||
index=True,
|
|
||||||
comment="Form type"
|
|
||||||
)
|
|
||||||
|
|
||||||
# PDF template file
|
|
||||||
pdf_file_path = Column(
|
|
||||||
String(500),
|
|
||||||
nullable=True,
|
|
||||||
comment="Path to uploaded PDF template file"
|
|
||||||
)
|
|
||||||
|
|
||||||
pdf_file_name = Column(
|
|
||||||
String(255),
|
|
||||||
nullable=True,
|
|
||||||
comment="Original PDF file name"
|
|
||||||
)
|
|
||||||
|
|
||||||
pdf_file_size = Column(
|
|
||||||
Integer,
|
|
||||||
nullable=True,
|
|
||||||
comment="PDF file size in bytes"
|
|
||||||
)
|
|
||||||
|
|
||||||
pdf_file_hash = Column(
|
|
||||||
String(64),
|
|
||||||
nullable=True,
|
|
||||||
comment="PDF file SHA256 hash"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Form configuration
|
|
||||||
is_active = Column(
|
|
||||||
Boolean,
|
|
||||||
default=True,
|
|
||||||
index=True,
|
|
||||||
comment="Whether template is active"
|
|
||||||
)
|
|
||||||
|
|
||||||
is_public = Column(
|
|
||||||
Boolean,
|
|
||||||
default=True,
|
|
||||||
comment="Whether template is publicly accessible"
|
|
||||||
)
|
|
||||||
|
|
||||||
requires_verification = Column(
|
|
||||||
Boolean,
|
|
||||||
default=True,
|
|
||||||
comment="Whether user verification is required"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Access control
|
|
||||||
allowed_roles = Column(
|
|
||||||
JSON,
|
|
||||||
nullable=True,
|
|
||||||
default=list,
|
|
||||||
comment="List of role names allowed to use this template"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Form designer configuration
|
|
||||||
form_design = Column(
|
|
||||||
JSON,
|
|
||||||
nullable=True,
|
|
||||||
default=dict,
|
|
||||||
comment="Visual form designer configuration"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Workflow configuration
|
|
||||||
workflow_config = Column(
|
|
||||||
JSON,
|
|
||||||
nullable=True,
|
|
||||||
default=dict,
|
|
||||||
comment="Workflow configuration for approval process"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Statistics
|
|
||||||
usage_count = Column(
|
|
||||||
Integer,
|
|
||||||
default=0,
|
|
||||||
comment="Number of times template has been used"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Version management
|
|
||||||
version = Column(
|
|
||||||
String(20),
|
|
||||||
default="1.0.0",
|
|
||||||
comment="Template version"
|
|
||||||
)
|
|
||||||
|
|
||||||
parent_template_id = Column(
|
|
||||||
Integer,
|
|
||||||
ForeignKey('form_templates.id'),
|
|
||||||
nullable=True,
|
|
||||||
comment="Parent template for versioning"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Relationships
|
|
||||||
field_mappings = relationship(
|
|
||||||
"FieldMapping",
|
|
||||||
back_populates="template",
|
|
||||||
cascade="all, delete-orphan",
|
|
||||||
order_by="FieldMapping.field_order"
|
|
||||||
)
|
|
||||||
|
|
||||||
applications = relationship(
|
|
||||||
"Application",
|
|
||||||
back_populates="template"
|
|
||||||
)
|
|
||||||
|
|
||||||
parent_template = relationship(
|
|
||||||
"FormTemplate",
|
|
||||||
remote_side="FormTemplate.id",
|
|
||||||
backref=backref("versions", lazy="dynamic")
|
|
||||||
)
|
|
||||||
|
|
||||||
# Indexes
|
|
||||||
__table_args__ = (
|
|
||||||
Index('idx_template_active_public', 'is_active', 'is_public'),
|
|
||||||
Index('idx_template_type_active', 'form_type', 'is_active'),
|
|
||||||
)
|
|
||||||
|
|
||||||
def to_dict(self, include_mappings: bool = False) -> Dict[str, Any]:
|
|
||||||
"""Convert to dictionary representation"""
|
|
||||||
data = {
|
|
||||||
"id": self.id,
|
|
||||||
"name": self.name,
|
|
||||||
"display_name": self.display_name,
|
|
||||||
"description": self.description,
|
|
||||||
"form_type": self.form_type.value if self.form_type else None,
|
|
||||||
"is_active": self.is_active,
|
|
||||||
"is_public": self.is_public,
|
|
||||||
"requires_verification": self.requires_verification,
|
|
||||||
"allowed_roles": self.allowed_roles or [],
|
|
||||||
"version": self.version,
|
|
||||||
"usage_count": self.usage_count,
|
|
||||||
"pdf_file_name": self.pdf_file_name,
|
|
||||||
"pdf_file_size": self.pdf_file_size,
|
|
||||||
"created_at": self.created_at.isoformat() if self.created_at else None,
|
|
||||||
"updated_at": self.updated_at.isoformat() if self.updated_at else None,
|
|
||||||
}
|
|
||||||
|
|
||||||
if include_mappings:
|
|
||||||
data["field_mappings"] = [
|
|
||||||
mapping.to_dict() for mapping in self.field_mappings
|
|
||||||
]
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class FieldMapping(ExtendedBaseModel):
    """Field mapping model for PDF form fields"""

    __tablename__ = "field_mappings"

    template_id = Column(
        Integer,
        ForeignKey('form_templates.id', ondelete='CASCADE'),
        nullable=False,
        index=True
    )

    # PDF field information
    pdf_field_name = Column(
        String(255),
        nullable=False,
        comment="Field name in PDF"
    )

    pdf_field_type = Column(
        String(50),
        nullable=True,
        comment="Original PDF field type"
    )

    # Mapping configuration
    field_key = Column(
        String(255),
        nullable=False,
        comment="Internal field key for data storage"
    )

    field_label = Column(
        String(255),
        nullable=False,
        comment="Display label for field"
    )

    field_type = Column(
        SQLEnum(FieldType),
        nullable=False,
        default=FieldType.TEXT,
        comment="Mapped field type"
    )

    field_order = Column(
        Integer,
        default=0,
        comment="Field display order"
    )

    # Field configuration
    is_required = Column(
        Boolean,
        default=False,
        comment="Whether field is required"
    )

    is_readonly = Column(
        Boolean,
        default=False,
        comment="Whether field is read-only"
    )

    is_hidden = Column(
        Boolean,
        default=False,
        comment="Whether field is hidden"
    )

    # Special field flags
    is_email_field = Column(
        Boolean,
        default=False,
        comment="Whether this is the email field (auto-filled from user)"
    )

    is_name_field = Column(
        Boolean,
        default=False,
        comment="Whether this is a name field (auto-filled from OIDC)"
    )

    # Validation rules
    validation_rules = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Field validation rules"
    )

    # Options for select/radio fields
    field_options = Column(
        JSON,
        nullable=True,
        default=list,
        comment="Options for select/radio/checkbox fields"
    )

    # Default value
    default_value = Column(
        Text,
        nullable=True,
        comment="Default field value"
    )

    # Placeholder and help text
    placeholder = Column(
        String(500),
        nullable=True,
        comment="Field placeholder text"
    )

    help_text = Column(
        Text,
        nullable=True,
        comment="Field help text"
    )

    # Conditional display
    display_conditions = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Conditions for displaying field"
    )

    # Data transformation
    transform_rules = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Rules for transforming field data"
    )

    # Relationships
    template = relationship(
        "FormTemplate",
        back_populates="field_mappings"
    )

    # Indexes
    __table_args__ = (
        UniqueConstraint('template_id', 'pdf_field_name', name='uq_template_pdf_field'),
        UniqueConstraint('template_id', 'field_key', name='uq_template_field_key'),
        Index('idx_field_template_order', 'template_id', 'field_order'),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary representation"""
        return {
            "id": self.id,
            "pdf_field_name": self.pdf_field_name,
            "pdf_field_type": self.pdf_field_type,
            "field_key": self.field_key,
            "field_label": self.field_label,
            "field_type": self.field_type.value if self.field_type else None,
            "field_order": self.field_order,
            "is_required": self.is_required,
            "is_readonly": self.is_readonly,
            "is_hidden": self.is_hidden,
            "is_email_field": self.is_email_field,
            "is_name_field": self.is_name_field,
            "validation_rules": self.validation_rules or {},
            "field_options": self.field_options or [],
            "default_value": self.default_value,
            "placeholder": self.placeholder,
            "help_text": self.help_text,
            "display_conditions": self.display_conditions or {},
            "transform_rules": self.transform_rules or {},
        }

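A sketch of how the JSON columns above might be filled when a mapping is created; the rule and condition keys shown are assumptions, since this commit does not pin down their schema:

amount_mapping = FieldMapping(
    template_id=1,
    pdf_field_name="pa-cost-1-amount-euro",
    field_key="costs.0.amountEur",
    field_label="Betrag (EUR)",
    field_type=FieldType.TEXT,
    field_order=10,
    is_required=True,
    # Hypothetical rule/condition shapes; adjust to whatever the validator expects.
    validation_rules={"pattern": r"^\d{1,3}(\.\d{3})*(,\d{2})?$"},
    display_conditions={"field": "has-costs", "equals": True},
)
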
class FormDesign(ExtendedBaseModel):
    """Visual form designer configuration"""

    __tablename__ = "form_designs"

    template_id = Column(
        Integer,
        ForeignKey('form_templates.id', ondelete='CASCADE'),
        unique=True,
        nullable=False,
        index=True
    )

    # Designer layout
    layout_type = Column(
        String(50),
        default="single-column",
        comment="Layout type (single-column, two-column, custom)"
    )

    # Sections configuration
    sections = Column(
        JSON,
        nullable=False,
        default=list,
        comment="Form sections configuration"
    )

    # Styling
    theme = Column(
        JSON,
        nullable=True,
        default=dict,
        comment="Theme configuration"
    )

    custom_css = Column(
        Text,
        nullable=True,
        comment="Custom CSS styles"
    )

    # Components configuration
    components = Column(
        JSON,
        nullable=True,
        default=list,
        comment="Custom components configuration"
    )

    # Relationships
    template = relationship(
        "FormTemplate",
        backref=backref("design", uselist=False, cascade="all, delete-orphan")
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary representation"""
        return {
            "id": self.id,
            "template_id": self.template_id,
            "layout_type": self.layout_type,
            "sections": self.sections or [],
            "theme": self.theme or {},
            "custom_css": self.custom_css,
            "components": self.components or [],
        }

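`sections`, `theme`, and `components` are free-form JSON; one plausible shape, offered purely as an assumption, is:

design = FormDesign(
    template_id=1,
    layout_type="two-column",
    sections=[
        {"key": "applicant", "title": "Antragsteller", "fields": ["pa-first-name", "pa-last-name"]},
        {"key": "project", "title": "Projekt", "fields": ["pa-project-name", "pa-start-date"]},
    ],
    theme={"primary_color": "#0066cc"},
)
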
@@ -11,7 +11,7 @@ from sqlalchemy import (
 from sqlalchemy.orm import relationship, backref
 import enum
 from typing import Optional, Dict, Any, List
-from datetime import datetime
+from datetime import datetime, timedelta

 from .base import ExtendedBaseModel, BaseModel, TimestampMixin

@@ -123,8 +123,8 @@ class User(ExtendedBaseModel):
         lazy="joined"
     )

-    applications = relationship(
-        "Application",
+    dynamic_applications = relationship(
+        "DynamicApplication",
         back_populates="user",
         cascade="all, delete-orphan"
     )
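With the relationship renamed as in the hunk above, callers switch from `user.applications` to `user.dynamic_applications`; a trivial sketch:

def application_count(user: "User") -> int:
    # The collection now yields DynamicApplication rows; the old Application model is gone.
    return len(user.dynamic_applications)
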
@@ -1,400 +0,0 @@
"""
|
|
||||||
QSM PDF Variant Provider
|
|
||||||
|
|
||||||
This module provides the PDF variant provider for QSM (Qualitätssicherungsmittel) forms.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from pathlib import Path
|
|
||||||
import re
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from ..services.pdf import PDFVariantProvider
|
|
||||||
from ..config.settings import Settings, get_settings
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class QSMProvider(PDFVariantProvider):
|
|
||||||
"""Provider for QSM PDF variant"""
|
|
||||||
|
|
||||||
def __init__(self, settings: Optional[Settings] = None):
|
|
||||||
"""Initialize QSM provider"""
|
|
||||||
self.settings = settings or get_settings()
|
|
||||||
self._field_mapping = self._initialize_field_mapping()
|
|
||||||
|
|
||||||
def get_variant_name(self) -> str:
|
|
||||||
"""Get the name of this variant"""
|
|
||||||
return "QSM"
|
|
||||||
|
|
||||||
def get_template_path(self) -> Path:
|
|
||||||
"""Get the path to the PDF template for this variant"""
|
|
||||||
return self.settings.pdf.qsm_template
|
|
||||||
|
|
||||||
def get_variant_indicators(self) -> List[str]:
|
|
||||||
"""Get list of field names that indicate this variant"""
|
|
||||||
return [
|
|
||||||
"pa-qsm-financing",
|
|
||||||
"pa-qsm-vwv-3-2-1-1",
|
|
||||||
"pa-qsm-vwv-3-2-1-2"
|
|
||||||
]
|
|
||||||
|
|
||||||
def _initialize_field_mapping(self) -> Dict[str, Any]:
|
|
||||||
"""Initialize field mapping configuration"""
|
|
||||||
return {
|
|
||||||
# Meta fields
|
|
||||||
"pa-id": {
|
|
||||||
"target": "pa.meta.id",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-key": {
|
|
||||||
"target": "pa.meta.key",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
|
|
||||||
# Applicant fields
|
|
||||||
"pa-institution-type": {
|
|
||||||
"target": "pa.applicant.institution.type",
|
|
||||||
"type": "enum",
|
|
||||||
"values": {
|
|
||||||
"stud-fs": "Fachschaft",
|
|
||||||
"stud-rf": "STUPA-Referat",
|
|
||||||
"stud-hg": "Studentische Hochschulgruppe",
|
|
||||||
"faculty": "Fakultät",
|
|
||||||
"hs-institution": "Hochschuleinrichtung"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pa-institution": {
|
|
||||||
"target": "pa.applicant.institution.name",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-first-name": {
|
|
||||||
"target": "pa.applicant.name.first",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-last-name": {
|
|
||||||
"target": "pa.applicant.name.last",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-email": {
|
|
||||||
"target": "pa.applicant.contact.email",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-phone": {
|
|
||||||
"target": "pa.applicant.contact.phone",
|
|
||||||
"type": "str"
|
|
||||||
},
|
|
||||||
"pa-course": {
|
|
||||||
"target": "pa.applicant.course",
|
|
||||||
"type": "enum",
|
|
||||||
"values": ["INF", "ESB", "LS", "TEC", "TEX", "NXT"]
|
|
||||||
},
|
|
||||||
"pa-role": {
|
|
||||||
"target": "pa.applicant.role",
|
|
||||||
"type": "enum",
|
|
||||||
"values": [
|
|
||||||
"Student", "Professor", "Mitarbeiter",
|
|
||||||
"ASTA", "Referatsleitung", "Fachschaftsvorstand"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
|
|
||||||
# Project fields
|
|
||||||
"pa-project-name": {
|
|
||||||
"target": "pa.project.name",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-project-description": {
|
|
||||||
"target": "pa.project.description",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-start-date": {
|
|
||||||
"target": "pa.project.dates.start",
|
|
||||||
"type": "str",
|
|
||||||
"required": True
|
|
||||||
},
|
|
||||||
"pa-end-date": {
|
|
||||||
"target": "pa.project.dates.end",
|
|
||||||
"type": "str"
|
|
||||||
},
|
|
||||||
"pa-participants": {
|
|
||||||
"target": "pa.project.participants",
|
|
||||||
"type": "int"
|
|
||||||
},
|
|
||||||
|
|
||||||
# QSM-specific fields
|
|
||||||
"pa-qsm-financing": {
|
|
||||||
"target": "pa.project.financing.qsm.code",
|
|
||||||
"type": "enum",
|
|
||||||
"values": {
|
|
||||||
"vwv-3-2-1-1": "Finanzierung zusätzlicher Lehr- und Seminarangebote",
|
|
||||||
"vwv-3-2-1-2": "Fachspezifische Studienprojekte",
|
|
||||||
"vwv-3-2-1-3": "Hochschuldidaktische Fort- und Weiterbildungsmaßnahmen",
|
|
||||||
"vwv-3-2-1-4": "Studentische Tutorien und Arbeitsgruppen",
|
|
||||||
"vwv-3-2-1-5": "Exkursionen",
|
|
||||||
"vwv-3-2-1-6": "Sonstige Maßnahmen"
|
|
||||||
},
|
|
||||||
"required": True
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
def get_field_mapping(self) -> Dict[str, Any]:
|
|
||||||
"""Get the field mapping configuration for this variant"""
|
|
||||||
return self._field_mapping
|
|
||||||
|
|
||||||
def parse_pdf_fields(self, pdf_fields: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Parse PDF form fields into a structured payload"""
|
|
||||||
payload = {
|
|
||||||
"pa": {
|
|
||||||
"meta": {},
|
|
||||||
"applicant": {
|
|
||||||
"name": {},
|
|
||||||
"contact": {},
|
|
||||||
"institution": {}
|
|
||||||
},
|
|
||||||
"project": {
|
|
||||||
"dates": {},
|
|
||||||
"costs": [],
|
|
||||||
"participation": {"faculties": {}},
|
|
||||||
"financing": {"qsm": {}}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Process each field according to mapping
|
|
||||||
for field_name, field_value in pdf_fields.items():
|
|
||||||
if field_value is None or field_value == "":
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Handle cost fields with wildcards
|
|
||||||
cost_match = re.match(r"pa-cost-(\d+)-(name|amount-euro)", field_name)
|
|
||||||
if cost_match:
|
|
||||||
idx = int(cost_match.group(1)) - 1
|
|
||||||
field_type = cost_match.group(2)
|
|
||||||
|
|
||||||
# Ensure costs array is large enough
|
|
||||||
while len(payload["pa"]["project"]["costs"]) <= idx:
|
|
||||||
payload["pa"]["project"]["costs"].append({})
|
|
||||||
|
|
||||||
if field_type == "name":
|
|
||||||
payload["pa"]["project"]["costs"][idx]["name"] = field_value
|
|
||||||
elif field_type == "amount-euro":
|
|
||||||
payload["pa"]["project"]["costs"][idx]["amountEur"] = self._parse_amount(field_value)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Handle participation checkboxes
|
|
||||||
if field_name.startswith("pa-participating-faculties-"):
|
|
||||||
faculty = field_name.replace("pa-participating-faculties-", "")
|
|
||||||
payload["pa"]["project"]["participation"]["faculties"][faculty] = self._parse_bool(field_value)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Handle QSM-specific checkboxes
|
|
||||||
if field_name.startswith("pa-qsm-vwv-"):
|
|
||||||
key = field_name.replace("pa-qsm-", "")
|
|
||||||
if "flags" not in payload["pa"]["project"]["financing"]["qsm"]:
|
|
||||||
payload["pa"]["project"]["financing"]["qsm"]["flags"] = {}
|
|
||||||
payload["pa"]["project"]["financing"]["qsm"]["flags"][key] = self._parse_bool(field_value)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Process regular mapped fields
|
|
||||||
if field_name in self._field_mapping:
|
|
||||||
mapping = self._field_mapping[field_name]
|
|
||||||
target_path = mapping["target"]
|
|
||||||
field_type = mapping.get("type", "str")
|
|
||||||
|
|
||||||
# Transform value based on type
|
|
||||||
transformed_value = self.transform_value(field_value, field_type)
|
|
||||||
|
|
||||||
# Handle enum values
|
|
||||||
if field_type == "enum" and "values" in mapping:
|
|
||||||
if isinstance(mapping["values"], dict):
|
|
||||||
# Map to display value if available
|
|
||||||
transformed_value = mapping["values"].get(field_value, field_value)
|
|
||||||
|
|
||||||
# Set value in payload using target path
|
|
||||||
self._set_nested_value(payload, target_path, transformed_value)
|
|
||||||
|
|
||||||
# Clean up empty costs
|
|
||||||
if payload["pa"]["project"]["costs"]:
|
|
||||||
payload["pa"]["project"]["costs"] = [
|
|
||||||
cost for cost in payload["pa"]["project"]["costs"]
|
|
||||||
if cost.get("name") or cost.get("amountEur")
|
|
||||||
]
|
|
||||||
|
|
||||||
return payload
|
|
||||||
|
|
||||||
def map_payload_to_fields(self, payload: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Map a structured payload to PDF form fields"""
|
|
||||||
pdf_fields = {}
|
|
||||||
|
|
||||||
# Process regular fields
|
|
||||||
for field_name, mapping in self._field_mapping.items():
|
|
||||||
target_path = mapping["target"]
|
|
||||||
value = self._get_nested_value(payload, target_path)
|
|
||||||
|
|
||||||
if value is not None:
|
|
||||||
field_type = mapping.get("type", "str")
|
|
||||||
|
|
||||||
# Handle enum reverse mapping
|
|
||||||
if field_type == "enum" and "values" in mapping and isinstance(mapping["values"], dict):
|
|
||||||
# Find key by value
|
|
||||||
for key, display_value in mapping["values"].items():
|
|
||||||
if display_value == value or key == value:
|
|
||||||
value = key
|
|
||||||
break
|
|
||||||
|
|
||||||
# Format value for PDF
|
|
||||||
if field_type == "float":
|
|
||||||
value = self._format_amount(value)
|
|
||||||
elif field_type == "bool":
|
|
||||||
value = "Ja" if value else "Nein"
|
|
||||||
|
|
||||||
pdf_fields[field_name] = str(value) if value is not None else ""
|
|
||||||
|
|
||||||
# Process costs array
|
|
||||||
pa = payload.get("pa", {})
|
|
||||||
project = pa.get("project", {})
|
|
||||||
costs = project.get("costs", [])
|
|
||||||
|
|
||||||
for i, cost in enumerate(costs[:24], 1): # Limit to 24 cost positions
|
|
||||||
if "name" in cost:
|
|
||||||
pdf_fields[f"pa-cost-{i}-name"] = cost["name"]
|
|
||||||
if "amountEur" in cost:
|
|
||||||
pdf_fields[f"pa-cost-{i}-amount-euro"] = self._format_amount(cost["amountEur"])
|
|
||||||
|
|
||||||
# Process participation faculties
|
|
||||||
participation = project.get("participation", {})
|
|
||||||
faculties = participation.get("faculties", {})
|
|
||||||
|
|
||||||
for faculty, is_participating in faculties.items():
|
|
||||||
pdf_fields[f"pa-participating-faculties-{faculty}"] = "Ja" if is_participating else "Nein"
|
|
||||||
|
|
||||||
# Process QSM flags
|
|
||||||
financing = project.get("financing", {})
|
|
||||||
qsm = financing.get("qsm", {})
|
|
||||||
flags = qsm.get("flags", {})
|
|
||||||
|
|
||||||
for flag_key, flag_value in flags.items():
|
|
||||||
pdf_fields[f"pa-qsm-{flag_key}"] = "Ja" if flag_value else "Nein"
|
|
||||||
|
|
||||||
return pdf_fields
|
|
||||||
|
|
||||||
def validate_payload(self, payload: Dict[str, Any]) -> List[str]:
|
|
||||||
"""Validate a payload for this variant"""
|
|
||||||
errors = []
|
|
||||||
|
|
||||||
# Check required fields
|
|
||||||
for field_name, mapping in self._field_mapping.items():
|
|
||||||
if mapping.get("required", False):
|
|
||||||
target_path = mapping["target"]
|
|
||||||
value = self._get_nested_value(payload, target_path)
|
|
||||||
|
|
||||||
if value is None or (isinstance(value, str) and not value.strip()):
|
|
||||||
errors.append(f"Required field missing: {target_path}")
|
|
||||||
|
|
||||||
# Validate QSM-specific requirements
|
|
||||||
pa = payload.get("pa", {})
|
|
||||||
project = pa.get("project", {})
|
|
||||||
|
|
||||||
# Check if financing code is valid
|
|
||||||
financing = project.get("financing", {})
|
|
||||||
qsm = financing.get("qsm", {})
|
|
||||||
financing_code = qsm.get("code")
|
|
||||||
|
|
||||||
if not financing_code:
|
|
||||||
errors.append("QSM financing code is required")
|
|
||||||
elif financing_code not in self._field_mapping["pa-qsm-financing"]["values"]:
|
|
||||||
errors.append(f"Invalid QSM financing code: {financing_code}")
|
|
||||||
|
|
||||||
# Validate costs
|
|
||||||
costs = project.get("costs", [])
|
|
||||||
if not costs:
|
|
||||||
errors.append("At least one cost position is required")
|
|
||||||
else:
|
|
||||||
for i, cost in enumerate(costs):
|
|
||||||
if not cost.get("name"):
|
|
||||||
errors.append(f"Cost position {i+1}: name is required")
|
|
||||||
if cost.get("amountEur") is not None:
|
|
||||||
try:
|
|
||||||
amount = float(cost["amountEur"])
|
|
||||||
if amount < 0:
|
|
||||||
errors.append(f"Cost position {i+1}: amount cannot be negative")
|
|
||||||
except (TypeError, ValueError):
|
|
||||||
errors.append(f"Cost position {i+1}: invalid amount")
|
|
||||||
|
|
||||||
return errors
|
|
||||||
|
|
||||||
def detect_variant(self, pdf_fields: Dict[str, Any]) -> bool:
|
|
||||||
"""Check if the given PDF fields match this variant"""
|
|
||||||
# Check for QSM-specific fields
|
|
||||||
qsm_indicators = ["pa-qsm-financing"]
|
|
||||||
|
|
||||||
for indicator in qsm_indicators:
|
|
||||||
if indicator in pdf_fields and pdf_fields[indicator]:
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check for QSM flags
|
|
||||||
for field_name in pdf_fields:
|
|
||||||
if field_name.startswith("pa-qsm-vwv-"):
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Helper methods
|
|
||||||
|
|
||||||
def _set_nested_value(self, obj: Dict[str, Any], path: str, value: Any):
|
|
||||||
"""Set a value in a nested dictionary using dot notation"""
|
|
||||||
keys = path.split(".")
|
|
||||||
current = obj
|
|
||||||
|
|
||||||
for key in keys[:-1]:
|
|
||||||
if key not in current:
|
|
||||||
current[key] = {}
|
|
||||||
current = current[key]
|
|
||||||
|
|
||||||
current[keys[-1]] = value
|
|
||||||
|
|
||||||
def _get_nested_value(self, obj: Dict[str, Any], path: str) -> Any:
|
|
||||||
"""Get a value from a nested dictionary using dot notation"""
|
|
||||||
keys = path.split(".")
|
|
||||||
current = obj
|
|
||||||
|
|
||||||
for key in keys:
|
|
||||||
if isinstance(current, dict) and key in current:
|
|
||||||
current = current[key]
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return current
|
|
||||||
|
|
||||||
def _parse_amount(self, value: str) -> float:
|
|
||||||
"""Parse amount from German format (1.234,56) to float"""
|
|
||||||
if not value:
|
|
||||||
return 0.0
|
|
||||||
|
|
||||||
# Remove thousands separator and replace comma with dot
|
|
||||||
cleaned = value.replace(".", "").replace(",", ".")
|
|
||||||
|
|
||||||
try:
|
|
||||||
return float(cleaned)
|
|
||||||
except (TypeError, ValueError):
|
|
||||||
return 0.0
|
|
||||||
|
|
||||||
def _format_amount(self, value: float) -> str:
|
|
||||||
"""Format amount to German format (1.234,56)"""
|
|
||||||
return f"{value:,.2f}".replace(",", "X").replace(".", ",").replace("X", ".")
|
|
||||||
|
|
||||||
def _parse_bool(self, value: Any) -> bool:
|
|
||||||
"""Parse boolean value from various formats"""
|
|
||||||
if isinstance(value, bool):
|
|
||||||
return value
|
|
||||||
if isinstance(value, str):
|
|
||||||
return value.lower() in ["true", "yes", "1", "on", "ja", "/Yes"]
|
|
||||||
return bool(value)
|
|
||||||
245  backend/src/repositories/role.py  Normal file
@@ -0,0 +1,245 @@
"""
Role Repository

This module provides data access methods for role management.
"""

from typing import Optional, List, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy import func

from ..models.user import Role
from .base import BaseRepository


class RoleRepository(BaseRepository[Role]):
    """Repository for role data access"""

    def __init__(self, db: Session):
        super().__init__(Role, db)

    def get_by_name(self, name: str) -> Optional[Role]:
        """Get role by name"""
        return self.db.query(Role).filter(
            func.lower(Role.name) == func.lower(name)
        ).first()

    def get_by_oidc_claim(self, claim: str) -> Optional[Role]:
        """Get role by OIDC claim value"""
        return self.db.query(Role).filter(
            Role.oidc_role_claim == claim
        ).first()

    def get_system_roles(self) -> List[Role]:
        """Get all system roles"""
        return self.db.query(Role).filter(
            Role.is_system == True
        ).all()

    def get_admin_roles(self) -> List[Role]:
        """Get all admin roles"""
        return self.db.query(Role).filter(
            Role.is_admin == True
        ).all()

    def get_reviewer_roles(self) -> List[Role]:
        """Get all reviewer roles"""
        return self.db.query(Role).filter(
            (Role.can_review_budget == True) |
            (Role.can_review_finance == True)
        ).all()

    def get_voting_roles(self) -> List[Role]:
        """Get all roles that can vote"""
        return self.db.query(Role).filter(
            Role.can_vote == True
        ).all()

    def get_oidc_role_mappings(self) -> Dict[str, Role]:
        """Get mapping of OIDC claims to roles"""
        roles = self.db.query(Role).filter(
            Role.oidc_role_claim.isnot(None)
        ).all()

        return {
            role.oidc_role_claim: role
            for role in roles
        }

    def search_roles(
        self,
        query: Optional[str] = None,
        is_system: Optional[bool] = None,
        is_admin: Optional[bool] = None,
        can_review: Optional[bool] = None,
        can_vote: Optional[bool] = None,
        limit: int = 100,
        offset: int = 0
    ) -> List[Role]:
        """Search roles with filters"""
        q = self.db.query(Role)

        if query:
            search_term = f"%{query}%"
            q = q.filter(
                (Role.name.ilike(search_term)) |
                (Role.display_name.ilike(search_term)) |
                (Role.description.ilike(search_term))
            )

        if is_system is not None:
            q = q.filter(Role.is_system == is_system)

        if is_admin is not None:
            q = q.filter(Role.is_admin == is_admin)

        if can_review is not None:
            q = q.filter(
                (Role.can_review_budget == can_review) |
                (Role.can_review_finance == can_review)
            )

        if can_vote is not None:
            q = q.filter(Role.can_vote == can_vote)

        return q.order_by(Role.priority.desc(), Role.name).limit(limit).offset(offset).all()

    def create_role(
        self,
        name: str,
        display_name: str,
        description: Optional[str] = None,
        permissions: Optional[List[str]] = None,
        is_system: bool = False,
        is_admin: bool = False,
        can_review_budget: bool = False,
        can_review_finance: bool = False,
        can_vote: bool = False,
        oidc_role_claim: Optional[str] = None,
        priority: int = 0
    ) -> Role:
        """Create a new role"""
        role = Role(
            name=name,
            display_name=display_name,
            description=description,
            permissions=permissions or [],
            is_system=is_system,
            is_admin=is_admin,
            can_review_budget=can_review_budget,
            can_review_finance=can_review_finance,
            can_vote=can_vote,
            oidc_role_claim=oidc_role_claim,
            priority=priority
        )

        self.db.add(role)
        self.db.commit()
        self.db.refresh(role)
        return role

    def update_permissions(self, role_id: int, permissions: List[str]) -> Optional[Role]:
        """Update role permissions"""
        role = self.get_by_id(role_id)
        if role and not role.is_system:
            role.permissions = permissions
            self.db.commit()
            self.db.refresh(role)
        return role

    def update_oidc_mapping(self, role_id: int, oidc_claim: Optional[str]) -> Optional[Role]:
        """Update OIDC claim mapping for a role"""
        role = self.get_by_id(role_id)
        if role:
            role.oidc_role_claim = oidc_claim
            self.db.commit()
            self.db.refresh(role)
        return role

    def get_role_by_priority(self, roles: List[str]) -> Optional[Role]:
        """Get the highest priority role from a list of role names"""
        if not roles:
            return None

        return self.db.query(Role).filter(
            Role.name.in_(roles)
        ).order_by(Role.priority.desc()).first()

    def count_users_by_role(self) -> Dict[str, int]:
        """Get count of users per role"""
        from sqlalchemy import select, func
        from ..models.user import User, user_roles

        result = self.db.execute(
            select(
                Role.name,
                func.count(user_roles.c.user_id).label('user_count')
            )
            .select_from(Role)
            .outerjoin(user_roles, Role.id == user_roles.c.role_id)
            .group_by(Role.id, Role.name)
        ).all()

        return {row.name: row.user_count for row in result}

    def has_permission(self, role_id: int, permission: str) -> bool:
        """Check if a role has a specific permission"""
        role = self.get_by_id(role_id)
        if not role:
            return False

        return role.has_permission(permission)

    def get_default_user_role(self) -> Optional[Role]:
        """Get the default role for new users"""
        return self.get_by_name("user")

    def get_or_create_default_roles(self) -> Dict[str, Role]:
        """Get or create default system roles"""
        default_roles = {
            "admin": {
                "display_name": "Administrator",
                "description": "Full system access",
                "is_system": True,
                "is_admin": True,
                "permissions": ["*"],
                "priority": 100
            },
            "user": {
                "display_name": "User",
                "description": "Basic user access",
                "is_system": True,
                "permissions": ["read:own", "write:own", "submit:application"],
                "priority": 0
            },
            "haushaltsbeauftragte": {
                "display_name": "Haushaltsbeauftragte(r)",
                "description": "Budget reviewer",
                "can_review_budget": True,
                "permissions": ["review:budget", "read:applications", "comment:applications"],
                "priority": 50
            },
            "finanzreferent": {
                "display_name": "Finanzreferent",
                "description": "Finance reviewer",
                "can_review_finance": True,
                "permissions": ["review:finance", "read:applications", "comment:applications"],
                "priority": 50
            },
            "asta": {
                "display_name": "AStA Member",
                "description": "Can vote on applications",
                "can_vote": True,
                "permissions": ["vote:applications", "read:applications", "comment:applications"],
                "priority": 40
            }
        }

        created_roles = {}
        for name, config in default_roles.items():
            role = self.get_by_name(name)
            if not role:
                role = self.create_role(name=name, **config)
            created_roles[name] = role

        return created_roles

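A sketch of how this repository might be used at startup and during OIDC login; the session handling and claim format are assumptions, not part of the commit:

from typing import List
from sqlalchemy.orm import Session

def seed_roles(db: Session) -> None:
    """Make sure the default system roles exist before serving requests."""
    RoleRepository(db).get_or_create_default_roles()

def roles_from_claims(db: Session, claim_values: List[str]) -> List[Role]:
    """Translate OIDC role claims into Role rows via the stored mappings."""
    repo = RoleRepository(db)
    mappings = repo.get_oidc_role_mappings()
    matched = [mappings[claim] for claim in claim_values if claim in mappings]
    # Fall back to the default "user" role when no claim matches.
    return matched or [repo.get_default_user_role()]
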
216  backend/src/repositories/user.py  Normal file
@@ -0,0 +1,216 @@
"""
User Repository

This module provides data access methods for user management.
"""

from typing import Optional, List, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy import or_, and_, func

from ..models.user import User, Role, AuthProvider, VerificationStatus
from .base import BaseRepository


class UserRepository(BaseRepository[User]):
    """Repository for user data access"""

    def __init__(self, db: Session):
        super().__init__(User, db)

    def get_by_email(self, email: str) -> Optional[User]:
        """Get user by email address"""
        return self.db.query(User).filter(
            func.lower(User.email) == func.lower(email)
        ).first()

    def get_by_oidc_sub(self, sub: str, issuer: str) -> Optional[User]:
        """Get user by OIDC subject identifier"""
        return self.db.query(User).filter(
            User.oidc_sub == sub,
            User.oidc_issuer == issuer
        ).first()

    def get_by_verification_token(self, token_hash: str) -> Optional[User]:
        """Get user by email verification token"""
        return self.db.query(User).filter(
            User.email_verification_token == token_hash
        ).first()

    def get_by_auth_provider(self, provider: AuthProvider) -> List[User]:
        """Get all users using a specific auth provider"""
        return self.db.query(User).filter(
            User.auth_provider == provider
        ).all()

    def search_users(
        self,
        query: Optional[str] = None,
        verification_status: Optional[VerificationStatus] = None,
        auth_provider: Optional[AuthProvider] = None,
        has_role: Optional[str] = None,
        limit: int = 100,
        offset: int = 0
    ) -> List[User]:
        """Search users with filters"""
        q = self.db.query(User)

        if query:
            search_term = f"%{query}%"
            q = q.filter(
                or_(
                    User.email.ilike(search_term),
                    User.given_name.ilike(search_term),
                    User.family_name.ilike(search_term),
                    User.display_name.ilike(search_term),
                    User.preferred_username.ilike(search_term)
                )
            )

        if verification_status:
            q = q.filter(User.verification_status == verification_status)

        if auth_provider:
            q = q.filter(User.auth_provider == auth_provider)

        if has_role:
            q = q.join(User.roles).filter(Role.name == has_role)

        return q.limit(limit).offset(offset).all()

    def count_by_verification_status(self) -> Dict[str, int]:
        """Get count of users by verification status"""
        counts = self.db.query(
            User.verification_status,
            func.count(User.id)
        ).group_by(User.verification_status).all()

        return {
            status.value if status else 'unknown': count
            for status, count in counts
        }

    def get_users_with_role(self, role_name: str) -> List[User]:
        """Get all users with a specific role"""
        return self.db.query(User).join(User.roles).filter(
            Role.name == role_name
        ).all()

    def get_admin_users(self) -> List[User]:
        """Get all admin users"""
        return self.db.query(User).join(User.roles).filter(
            Role.is_admin == True
        ).all()

    def get_reviewers(self, review_type: str) -> List[User]:
        """Get users who can review applications"""
        if review_type == "budget":
            return self.db.query(User).join(User.roles).filter(
                Role.can_review_budget == True
            ).all()
        elif review_type == "finance":
            return self.db.query(User).join(User.roles).filter(
                Role.can_review_finance == True
            ).all()
        else:
            return []

    def get_voters(self) -> List[User]:
        """Get users who can vote on applications"""
        return self.db.query(User).join(User.roles).filter(
            Role.can_vote == True
        ).all()

    def update_last_login(self, user_id: int) -> Optional[User]:
        """Update user's last login timestamp"""
        from datetime import datetime

        user = self.get_by_id(user_id)
        if user:
            user.last_login_at = datetime.utcnow()
            self.db.commit()
            self.db.refresh(user)
        return user

    def update_last_activity(self, user_id: int) -> Optional[User]:
        """Update user's last activity timestamp"""
        from datetime import datetime

        user = self.get_by_id(user_id)
        if user:
            user.last_activity_at = datetime.utcnow()
            self.db.commit()
            self.db.refresh(user)
        return user

    def verify_email(self, user_id: int) -> Optional[User]:
        """Mark user's email as verified"""
        from datetime import datetime

        user = self.get_by_id(user_id)
        if user:
            user.email_verified = True
            user.email_verified_at = datetime.utcnow()
            user.email_verification_token = None

            # Update verification status
            if user.auth_provider == AuthProvider.EMAIL:
                user.verification_status = VerificationStatus.EMAIL_VERIFIED
            elif user.auth_provider == AuthProvider.OIDC:
                user.verification_status = VerificationStatus.FULLY_VERIFIED

            self.db.commit()
            self.db.refresh(user)
        return user

    def add_role(self, user_id: int, role: Role) -> Optional[User]:
        """Add a role to a user"""
        user = self.get_by_id(user_id)
        if user and role not in user.roles:
            user.roles.append(role)
            self.db.commit()
            self.db.refresh(user)
        return user

    def remove_role(self, user_id: int, role: Role) -> Optional[User]:
        """Remove a role from a user"""
        user = self.get_by_id(user_id)
        if user and role in user.roles:
            user.roles.remove(role)
            self.db.commit()
            self.db.refresh(user)
        return user

    def set_roles(self, user_id: int, roles: List[Role]) -> Optional[User]:
        """Set user's roles (replaces existing)"""
        user = self.get_by_id(user_id)
        if user:
            user.roles = roles
            self.db.commit()
            self.db.refresh(user)
        return user

    def get_inactive_users(self, days: int = 30) -> List[User]:
        """Get users who haven't been active for specified days"""
        from datetime import datetime, timedelta

        cutoff_date = datetime.utcnow() - timedelta(days=days)
        return self.db.query(User).filter(
            or_(
                User.last_activity_at < cutoff_date,
                and_(
                    User.last_activity_at.is_(None),
                    User.last_login_at < cutoff_date
                )
            )
        ).all()

    def get_unverified_users(self, days_old: int = 7) -> List[User]:
        """Get unverified users older than specified days"""
        from datetime import datetime, timedelta

        cutoff_date = datetime.utcnow() - timedelta(days=days_old)
        return self.db.query(User).filter(
            User.verification_status == VerificationStatus.UNVERIFIED,
            User.created_at < cutoff_date
        ).all()

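A sketch of an email confirmation flow built on the repository above; hashing the raw token with SHA-256 is an assumption about how `email_verification_token` is stored:

import hashlib
from typing import Optional
from sqlalchemy.orm import Session

def confirm_email(db: Session, raw_token: str) -> Optional[User]:
    """Resolve a verification link: hash the token, look up the user, mark verified."""
    token_hash = hashlib.sha256(raw_token.encode()).hexdigest()
    repo = UserRepository(db)
    user = repo.get_by_verification_token(token_hash)
    if user is None:
        return None  # unknown or already consumed token
    # verify_email() also promotes verification_status based on auth_provider.
    return repo.verify_email(user.id)
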
259  backend/src/services/auth_service.py  Normal file
@@ -0,0 +1,259 @@
"""
|
||||||
|
Authentication service with dependency injections for FastAPI
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
from fastapi import Depends, HTTPException, status
|
||||||
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from jose import JWTError, jwt
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ..config.database import get_db
|
||||||
|
from ..models.user import User
|
||||||
|
|
||||||
|
# Security
|
||||||
|
security = HTTPBearer(auto_error=False)
|
||||||
|
|
||||||
|
# JWT Configuration
|
||||||
|
SECRET_KEY = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-in-production")
|
||||||
|
ALGORITHM = "HS256"
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
|
||||||
|
|
||||||
|
|
||||||
|
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
|
||||||
|
"""
|
||||||
|
Create a JWT access token
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: Data to encode in the token
|
||||||
|
expires_delta: Optional custom expiration time
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Encoded JWT token
|
||||||
|
"""
|
||||||
|
to_encode = data.copy()
|
||||||
|
if expires_delta:
|
||||||
|
expire = datetime.utcnow() + expires_delta
|
||||||
|
else:
|
||||||
|
expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
|
||||||
|
|
||||||
|
to_encode.update({"exp": expire})
|
||||||
|
encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
||||||
|
return encoded_jwt
|
||||||
|
|
||||||
|
|
||||||
|
def decode_access_token(token: str) -> dict:
|
||||||
|
"""
|
||||||
|
Decode and verify a JWT access token
|
||||||
|
|
||||||
|
Args:
|
||||||
|
token: JWT token to decode
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Decoded token payload
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
HTTPException: If token is invalid or expired
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||||
|
return payload
|
||||||
|
except JWTError:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Could not validate credentials",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_current_user(
|
||||||
|
credentials: HTTPAuthorizationCredentials = Depends(security),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
) -> User:
|
||||||
|
"""
|
||||||
|
Get the current authenticated user from JWT token
|
||||||
|
|
||||||
|
Args:
|
||||||
|
credentials: HTTP Bearer token credentials
|
||||||
|
db: Database session
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Current authenticated user
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
HTTPException: If authentication fails
|
||||||
|
"""
|
||||||
|
if not credentials:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Not authenticated",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
token = credentials.credentials
|
||||||
|
|
||||||
|
try:
|
||||||
|
payload = decode_access_token(token)
|
||||||
|
user_id: int = payload.get("sub")
|
||||||
|
|
||||||
|
if user_id is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Could not validate credentials",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="Could not validate credentials",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
user = db.query(User).filter(User.id == user_id).first()
|
||||||
|
if user is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
detail="User not found",
|
||||||
|
headers={"WWW-Authenticate": "Bearer"},
|
||||||
|
)
|
||||||
|
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
async def get_optional_user(
|
||||||
|
credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
) -> Optional[User]:
|
||||||
|
"""
|
||||||
|
Get the current user if authenticated, otherwise return None
|
||||||
|
|
||||||
|
Args:
|
||||||
|
credentials: Optional HTTP Bearer token credentials
|
||||||
|
db: Database session
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Current authenticated user or None
|
||||||
|
"""
|
||||||
|
if not credentials:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
return await get_current_user(credentials, db)
|
||||||
|
except HTTPException:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
async def require_admin(
|
||||||
|
current_user: User = Depends(get_current_user)
|
||||||
|
) -> User:
|
||||||
|
"""
|
||||||
|
Require the current user to have admin role
|
||||||
|
|
||||||
|
Args:
|
||||||
|
current_user: Current authenticated user
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Current user if admin
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
HTTPException: If user is not admin
|
||||||
|
"""
|
||||||
|
if not current_user.has_role("admin"):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="Admin access required"
|
||||||
|
)
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
|
||||||
|
async def require_roles(roles: list):
|
||||||
|
"""
|
||||||
|
Create a dependency that requires specific roles
|
||||||
|
|
||||||
|
Args:
|
||||||
|
roles: List of role names that are allowed
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dependency function
|
||||||
|
"""
|
||||||
|
async def role_checker(current_user: User = Depends(get_current_user)) -> User:
|
||||||
|
if not current_user.has_any_role(roles):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail=f"One of these roles required: {', '.join(roles)}"
|
||||||
|
)
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
return role_checker
|
||||||
|
|
||||||
|
|
||||||
|
def verify_password(plain_password: str, hashed_password: str) -> bool:
|
||||||
|
"""
|
||||||
|
Verify a plain password against a hashed password
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plain_password: Plain text password
|
||||||
|
hashed_password: Hashed password to compare against
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if password matches
|
||||||
|
"""
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
return pwd_context.verify(plain_password, hashed_password)
|
||||||
|
|
||||||
|
|
||||||
|
def get_password_hash(password: str) -> str:
|
||||||
|
"""
|
||||||
|
Hash a password for storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
password: Plain text password
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Hashed password
|
||||||
|
"""
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
return pwd_context.hash(password)
|
||||||
|
|
||||||
|
|
||||||
|
# Helper functions for specific role checks
|
||||||
|
async def require_budget_reviewer(
|
||||||
|
current_user: User = Depends(get_current_user)
|
||||||
|
) -> User:
|
||||||
|
"""Require budget reviewer role"""
|
||||||
|
if not current_user.has_role("budget_reviewer") and not current_user.has_role("admin"):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="Budget reviewer access required"
|
||||||
|
)
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
|
||||||
|
async def require_finance_reviewer(
|
||||||
|
current_user: User = Depends(get_current_user)
|
||||||
|
) -> User:
|
||||||
|
"""Require finance reviewer role"""
|
||||||
|
if not current_user.has_role("finance_reviewer") and not current_user.has_role("admin"):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="Finance reviewer access required"
|
||||||
|
)
|
||||||
|
return current_user
|
||||||
|
|
||||||
|
|
||||||
|
async def require_asta_member(
|
||||||
|
current_user: User = Depends(get_current_user)
|
||||||
|
) -> User:
|
||||||
|
"""Require AStA member role"""
|
||||||
|
if not current_user.has_role("asta_member") and not current_user.has_role("admin"):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
detail="AStA member access required"
|
||||||
|
)
|
||||||
|
return current_user
|
||||||
409  backend/src/services/notification_service.py  Normal file
@@ -0,0 +1,409 @@
"""
|
||||||
|
Notification service for sending emails and notifications
|
||||||
|
"""
|
||||||
|
|
||||||
|
import smtplib
|
||||||
|
import os
|
||||||
|
from email.mime.text import MIMEText
|
||||||
|
from email.mime.multipart import MIMEMultipart
|
||||||
|
from email.mime.base import MIMEBase
|
||||||
|
from email import encoders
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
import logging
|
||||||
|
from jinja2 import Template
|
||||||
|
from datetime import datetime
|
||||||
|
import asyncio
|
||||||
|
import aiosmtplib
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationService:
|
||||||
|
"""Service for sending notifications"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.smtp_host = os.getenv("SMTP_HOST", "localhost")
|
||||||
|
self.smtp_port = int(os.getenv("SMTP_PORT", "587"))
|
||||||
|
self.smtp_user = os.getenv("SMTP_USER", "")
|
||||||
|
self.smtp_password = os.getenv("SMTP_PASSWORD", "")
|
||||||
|
self.smtp_use_tls = os.getenv("SMTP_USE_TLS", "true").lower() == "true"
|
||||||
|
self.smtp_use_ssl = os.getenv("SMTP_USE_SSL", "false").lower() == "true"
|
||||||
|
self.from_email = os.getenv("FROM_EMAIL", "noreply@example.com")
|
||||||
|
self.from_name = os.getenv("FROM_NAME", "Application System")
|
||||||
|
self.reply_to_email = os.getenv("REPLY_TO_EMAIL", "")
|
||||||
|
self.base_url = os.getenv("BASE_URL", "http://localhost:3000")
|
||||||
|
|
||||||
|
def _render_template(self, template: str, context: Dict[str, Any]) -> str:
|
||||||
|
"""
|
||||||
|
Render a template with context
|
||||||
|
|
||||||
|
Args:
|
||||||
|
template: Template string (can include Jinja2 syntax)
|
||||||
|
context: Context dictionary for template rendering
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Rendered template string
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
jinja_template = Template(template)
|
||||||
|
return jinja_template.render(**context)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to render template: {str(e)}")
|
||||||
|
return template
|
||||||
|
|
||||||
|
def _create_message(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
body: str,
|
||||||
|
html_body: Optional[str] = None,
|
||||||
|
attachments: Optional[List[tuple]] = None,
|
||||||
|
cc: Optional[List[str]] = None,
|
||||||
|
bcc: Optional[List[str]] = None
|
||||||
|
) -> MIMEMultipart:
|
||||||
|
"""
|
||||||
|
Create an email message
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email address
|
||||||
|
subject: Email subject
|
||||||
|
body: Plain text body
|
||||||
|
html_body: Optional HTML body
|
||||||
|
attachments: Optional list of (filename, content) tuples
|
||||||
|
cc: Optional list of CC recipients
|
||||||
|
bcc: Optional list of BCC recipients
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
MIMEMultipart message object
|
||||||
|
"""
|
||||||
|
msg = MIMEMultipart('alternative')
|
||||||
|
msg['From'] = f"{self.from_name} <{self.from_email}>"
|
||||||
|
msg['To'] = to_email
|
||||||
|
msg['Subject'] = subject
|
||||||
|
|
||||||
|
if self.reply_to_email:
|
||||||
|
msg['Reply-To'] = self.reply_to_email
|
||||||
|
|
||||||
|
if cc:
|
||||||
|
msg['Cc'] = ', '.join(cc)
|
||||||
|
|
||||||
|
if bcc:
|
||||||
|
msg['Bcc'] = ', '.join(bcc)
|
||||||
|
|
||||||
|
# Add plain text part
|
||||||
|
msg.attach(MIMEText(body, 'plain'))
|
||||||
|
|
||||||
|
# Add HTML part if provided
|
||||||
|
if html_body:
|
||||||
|
msg.attach(MIMEText(html_body, 'html'))
|
||||||
|
|
||||||
|
# Add attachments
|
||||||
|
if attachments:
|
||||||
|
for filename, content in attachments:
|
||||||
|
part = MIMEBase('application', 'octet-stream')
|
||||||
|
part.set_payload(content)
|
||||||
|
encoders.encode_base64(part)
|
||||||
|
part.add_header(
|
||||||
|
'Content-Disposition',
|
||||||
|
f'attachment; filename= {filename}'
|
||||||
|
)
|
||||||
|
msg.attach(part)
|
||||||
|
|
||||||
|
return msg
|
||||||
|
|
||||||
|
def send_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
body: str,
|
||||||
|
html_body: Optional[str] = None,
|
||||||
|
attachments: Optional[List[tuple]] = None,
|
||||||
|
cc: Optional[List[str]] = None,
|
||||||
|
bcc: Optional[List[str]] = None,
|
||||||
|
context: Optional[Dict[str, Any]] = None
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Send an email synchronously
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email address
|
||||||
|
subject: Email subject
|
||||||
|
body: Plain text body
|
||||||
|
html_body: Optional HTML body
|
||||||
|
attachments: Optional list of (filename, content) tuples
|
||||||
|
cc: Optional list of CC recipients
|
||||||
|
bcc: Optional list of BCC recipients
|
||||||
|
context: Optional context for template rendering
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if email was sent successfully
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Render templates if context is provided
|
||||||
|
if context:
|
||||||
|
subject = self._render_template(subject, context)
|
||||||
|
body = self._render_template(body, context)
|
||||||
|
if html_body:
|
||||||
|
html_body = self._render_template(html_body, context)
|
||||||
|
|
||||||
|
msg = self._create_message(
|
||||||
|
to_email, subject, body, html_body,
|
||||||
|
attachments, cc, bcc
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send email
|
||||||
|
if self.smtp_use_ssl:
|
||||||
|
server = smtplib.SMTP_SSL(self.smtp_host, self.smtp_port)
|
||||||
|
else:
|
||||||
|
server = smtplib.SMTP(self.smtp_host, self.smtp_port)
|
||||||
|
if self.smtp_use_tls:
|
||||||
|
server.starttls()
|
||||||
|
|
||||||
|
if self.smtp_user and self.smtp_password:
|
||||||
|
server.login(self.smtp_user, self.smtp_password)
|
||||||
|
|
||||||
|
all_recipients = [to_email]
|
||||||
|
if cc:
|
||||||
|
all_recipients.extend(cc)
|
||||||
|
if bcc:
|
||||||
|
all_recipients.extend(bcc)
|
||||||
|
|
||||||
|
server.send_message(msg, self.from_email, all_recipients)
|
||||||
|
server.quit()
|
||||||
|
|
||||||
|
logger.info(f"Email sent successfully to {to_email}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send email to {to_email}: {str(e)}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def send_email_async(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
body: str,
|
||||||
|
html_body: Optional[str] = None,
|
||||||
|
attachments: Optional[List[tuple]] = None,
|
||||||
|
cc: Optional[List[str]] = None,
|
||||||
|
bcc: Optional[List[str]] = None,
|
||||||
|
context: Optional[Dict[str, Any]] = None
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Send an email asynchronously
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email address
|
||||||
|
subject: Email subject
|
||||||
|
body: Plain text body
|
||||||
|
html_body: Optional HTML body
|
||||||
|
attachments: Optional list of (filename, content) tuples
|
||||||
|
cc: Optional list of CC recipients
|
||||||
|
bcc: Optional list of BCC recipients
|
||||||
|
context: Optional context for template rendering
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if email was sent successfully
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Render templates if context is provided
|
||||||
|
if context:
|
||||||
|
subject = self._render_template(subject, context)
|
||||||
|
body = self._render_template(body, context)
|
||||||
|
if html_body:
|
||||||
|
html_body = self._render_template(html_body, context)
|
||||||
|
|
||||||
|
msg = self._create_message(
|
||||||
|
to_email, subject, body, html_body,
|
||||||
|
attachments, cc, bcc
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send email asynchronously
|
||||||
|
await aiosmtplib.send(
|
||||||
|
msg,
|
||||||
|
hostname=self.smtp_host,
|
||||||
|
port=self.smtp_port,
|
||||||
|
username=self.smtp_user if self.smtp_user else None,
|
||||||
|
password=self.smtp_password if self.smtp_password else None,
|
||||||
|
use_tls=self.smtp_use_tls,
|
||||||
|
start_tls=self.smtp_use_tls and not self.smtp_use_ssl
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Email sent successfully to {to_email}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send email to {to_email}: {str(e)}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def send_application_created(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
application_id: str,
|
||||||
|
access_key: str,
|
||||||
|
application_title: str,
|
||||||
|
status: str
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Send application created notification
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email
|
||||||
|
application_id: Application ID
|
||||||
|
access_key: Access key for the application
|
||||||
|
application_title: Application title
|
||||||
|
status: Current status
|
||||||
|
"""
|
||||||
|
subject = "Application Created Successfully"
|
||||||
|
|
||||||
|
body = f"""
|
||||||
|
Dear Applicant,
|
||||||
|
|
||||||
|
Your application "{application_title}" has been created successfully.
|
||||||
|
|
||||||
|
Application ID: {application_id}
|
||||||
|
Status: {status}
|
||||||
|
|
||||||
|
You can access your application at:
|
||||||
|
{self.base_url}/applications/{application_id}?key={access_key}
|
||||||
|
|
||||||
|
Please save this link for future reference. You will need it to access and update your application.
|
||||||
|
|
||||||
|
Best regards,
|
||||||
|
{self.from_name}
|
||||||
|
"""
|
||||||
|
|
||||||
|
html_body = f"""
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<style>
|
||||||
|
body {{ font-family: Arial, sans-serif; line-height: 1.6; }}
|
||||||
|
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
||||||
|
.header {{ background-color: #f8f9fa; padding: 20px; border-radius: 5px; margin-bottom: 20px; }}
|
||||||
|
.info-box {{ background-color: #e7f3ff; padding: 15px; border-left: 4px solid #0066cc; margin: 20px 0; }}
|
||||||
|
.button {{ display: inline-block; padding: 10px 20px; background-color: #0066cc; color: white; text-decoration: none; border-radius: 5px; }}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<div class="header">
|
||||||
|
<h2>Application Created Successfully</h2>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p>Dear Applicant,</p>
|
||||||
|
|
||||||
|
<p>Your application "<strong>{application_title}</strong>" has been created successfully.</p>
|
||||||
|
|
||||||
|
<div class="info-box">
|
||||||
|
<p><strong>Application ID:</strong> {application_id}</p>
|
||||||
|
<p><strong>Status:</strong> {status}</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p>You can access your application at:</p>
|
||||||
|
<p style="text-align: center;">
|
||||||
|
<a href="{self.base_url}/applications/{application_id}?key={access_key}" class="button">
|
||||||
|
View Application
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p><strong>Important:</strong> Please save this link for future reference. You will need it to access and update your application.</p>
|
||||||
|
|
||||||
|
<p>Best regards,<br>{self.from_name}</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.send_email(to_email, subject, body, html_body)
|
||||||
|
|
||||||
|
def send_status_change(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
application_id: str,
|
||||||
|
application_title: str,
|
||||||
|
old_status: str,
|
||||||
|
new_status: str,
|
||||||
|
comment: Optional[str] = None
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Send status change notification
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email
|
||||||
|
application_id: Application ID
|
||||||
|
application_title: Application title
|
||||||
|
old_status: Previous status
|
||||||
|
new_status: New status
|
||||||
|
comment: Optional comment
|
||||||
|
"""
|
||||||
|
subject = f"Application Status Changed: {new_status}"
|
||||||
|
|
||||||
|
body = f"""
|
||||||
|
Dear Applicant,
|
||||||
|
|
||||||
|
The status of your application "{application_title}" has been changed.
|
||||||
|
|
||||||
|
Application ID: {application_id}
|
||||||
|
Previous Status: {old_status}
|
||||||
|
New Status: {new_status}
|
||||||
|
{f'Comment: {comment}' if comment else ''}
|
||||||
|
|
||||||
|
You can view your application at:
|
||||||
|
{self.base_url}/applications/{application_id}
|
||||||
|
|
||||||
|
Best regards,
|
||||||
|
{self.from_name}
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.send_email(to_email, subject, body)
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton instance
|
||||||
|
notification_service = NotificationService()
|
||||||
|
|
||||||
|
|
||||||
|
# Convenience functions for background tasks
|
||||||
|
async def send_notification(
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
body: str,
|
||||||
|
html_body: Optional[str] = None,
|
||||||
|
attachments: Optional[List[tuple]] = None
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Send a notification email (for use in background tasks)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email
|
||||||
|
subject: Email subject
|
||||||
|
body: Plain text body
|
||||||
|
html_body: Optional HTML body
|
||||||
|
attachments: Optional attachments
|
||||||
|
"""
|
||||||
|
await notification_service.send_email_async(
|
||||||
|
to_email, subject, body, html_body, attachments
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def send_notification_sync(
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
body: str,
|
||||||
|
html_body: Optional[str] = None,
|
||||||
|
attachments: Optional[List[tuple]] = None
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Send a notification email synchronously
|
||||||
|
|
||||||
|
Args:
|
||||||
|
to_email: Recipient email
|
||||||
|
subject: Email subject
|
||||||
|
body: Plain text body
|
||||||
|
html_body: Optional HTML body
|
||||||
|
attachments: Optional attachments
|
||||||
|
"""
|
||||||
|
notification_service.send_email(
|
||||||
|
to_email, subject, body, html_body, attachments
|
||||||
|
)
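
As an illustration of how the convenience helpers above are meant to be used, here is a minimal sketch that queues `send_notification` from a FastAPI background task. The route path, payload, and import path are assumptions; only `send_notification` itself comes from this module.

```python
# Illustrative only: an assumed FastAPI route that queues send_notification as
# a background task. The endpoint, payload, and import path are hypothetical.
from fastapi import BackgroundTasks, FastAPI

from services.notification_service import send_notification  # assumed module path

app = FastAPI()


@app.post("/api/demo/notify")
async def notify(email: str, background_tasks: BackgroundTasks) -> dict:
    # Starlette runs async callables added here after the response is sent.
    background_tasks.add_task(
        send_notification,
        to_email=email,
        subject="Test notification",
        body="This is a plain-text test message.",
    )
    return {"queued": True}
```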
backend/src/services/pdf_service.py (new file, 392 lines)
@@ -0,0 +1,392 @@
"""
|
||||||
|
PDF generation service for applications
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import io
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
import hashlib
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from ..models.application_type import (
|
||||||
|
DynamicApplication, ApplicationType, ApplicationField,
|
||||||
|
ApplicationTypeStatus, FieldType
|
||||||
|
)
|
||||||
|
from ..utils.pdf_utils import (
|
||||||
|
fill_pdf_template, create_pdf_from_data,
|
||||||
|
add_watermark_to_pdf, merge_pdfs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PDFService:
|
||||||
|
"""Service for generating PDFs from applications"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.output_dir = Path(os.getenv("PDF_OUTPUT_DIR", "./uploads/pdfs"))
|
||||||
|
self.output_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
def generate_pdf_for_application(
|
||||||
|
self,
|
||||||
|
application: DynamicApplication,
|
||||||
|
db: Session,
|
||||||
|
include_watermark: bool = False,
|
||||||
|
watermark_text: Optional[str] = None
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Generate a PDF for an application
|
||||||
|
|
||||||
|
Args:
|
||||||
|
application: The application to generate PDF for
|
||||||
|
db: Database session
|
||||||
|
include_watermark: Whether to include a watermark
|
||||||
|
watermark_text: Custom watermark text
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the generated PDF file
|
||||||
|
"""
|
||||||
|
# Get application type
|
||||||
|
app_type = application.application_type
|
||||||
|
|
||||||
|
# Prepare data for PDF
|
||||||
|
pdf_data = self._prepare_pdf_data(application, app_type, db)
|
||||||
|
|
||||||
|
# Generate PDF
|
||||||
|
if app_type.pdf_template:
|
||||||
|
# Use template if available
|
||||||
|
pdf_content = self._generate_from_template(
|
||||||
|
app_type.pdf_template,
|
||||||
|
app_type.pdf_field_mapping,
|
||||||
|
pdf_data
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Generate from scratch
|
||||||
|
pdf_content = self._generate_from_scratch(
|
||||||
|
pdf_data,
|
||||||
|
app_type.name,
|
||||||
|
application.title
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add watermark if requested
|
||||||
|
if include_watermark:
|
||||||
|
if not watermark_text:
|
||||||
|
watermark_text = self._get_default_watermark(application, db)
|
||||||
|
pdf_content = add_watermark_to_pdf(pdf_content, watermark_text)
|
||||||
|
|
||||||
|
# Save to file
|
||||||
|
filename = self._generate_filename(application)
|
||||||
|
filepath = self.output_dir / filename
|
||||||
|
|
||||||
|
with open(filepath, 'wb') as f:
|
||||||
|
f.write(pdf_content)
|
||||||
|
|
||||||
|
return str(filepath)
|
||||||
|
|
||||||
|
def _prepare_pdf_data(
|
||||||
|
self,
|
||||||
|
application: DynamicApplication,
|
||||||
|
app_type: ApplicationType,
|
||||||
|
db: Session
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Prepare data for PDF generation
|
||||||
|
|
||||||
|
Args:
|
||||||
|
application: The application
|
||||||
|
app_type: Application type
|
||||||
|
db: Database session
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary of data for PDF
|
||||||
|
"""
|
||||||
|
# Start with common fields
|
||||||
|
data = {
|
||||||
|
"application_id": application.application_id,
|
||||||
|
"type": app_type.name,
|
||||||
|
"title": application.title,
|
||||||
|
"email": application.email,
|
||||||
|
"first_name": application.first_name or "",
|
||||||
|
"last_name": application.last_name or "",
|
||||||
|
"submitted_at": application.submitted_at.strftime("%d.%m.%Y %H:%M") if application.submitted_at else "",
|
||||||
|
"created_at": application.created_at.strftime("%d.%m.%Y %H:%M") if application.created_at else "",
|
||||||
|
"status": self._get_status_name(application, app_type, db),
|
||||||
|
"total_amount": f"{application.total_amount:.2f} €"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add field data with proper formatting
|
||||||
|
for field in app_type.fields:
|
||||||
|
field_id = field.field_id
|
||||||
|
value = application.field_data.get(field_id)
|
||||||
|
|
||||||
|
if value is not None:
|
||||||
|
formatted_value = self._format_field_value(value, field)
|
||||||
|
data[field_id] = formatted_value
|
||||||
|
# Also add with field name as key for better template compatibility
|
||||||
|
data[field.name.lower().replace(' ', '_')] = formatted_value
|
||||||
|
|
||||||
|
# Add cost positions
|
||||||
|
if application.cost_positions:
|
||||||
|
data["cost_positions"] = self._format_cost_positions(application.cost_positions)
|
||||||
|
data["cost_positions_count"] = len(application.cost_positions)
|
||||||
|
|
||||||
|
# Add comparison offers
|
||||||
|
if application.comparison_offers:
|
||||||
|
data["comparison_offers"] = self._format_comparison_offers(application.comparison_offers)
|
||||||
|
data["comparison_offers_count"] = len(application.comparison_offers)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
def _format_field_value(self, value: Any, field: ApplicationField) -> str:
|
||||||
|
"""
|
||||||
|
Format a field value for PDF display
|
||||||
|
|
||||||
|
Args:
|
||||||
|
value: The raw value
|
||||||
|
field: Field definition
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Formatted string value
|
||||||
|
"""
|
||||||
|
if value is None or value == "":
|
||||||
|
return ""
|
||||||
|
|
||||||
|
if field.field_type == FieldType.YESNO:
|
||||||
|
return "Ja" if value else "Nein"
|
||||||
|
|
||||||
|
elif field.field_type == FieldType.DATE:
|
||||||
|
try:
|
||||||
|
# Parse and format date
|
||||||
|
from datetime import datetime
|
||||||
|
if isinstance(value, str):
|
||||||
|
# Try different formats
|
||||||
|
for fmt in ["%Y-%m-%d", "%d.%m.%Y", "%d/%m/%Y"]:
|
||||||
|
try:
|
||||||
|
dt = datetime.strptime(value, fmt)
|
||||||
|
return dt.strftime("%d.%m.%Y")
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
return str(value)
|
||||||
|
except Exception:
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
elif field.field_type == FieldType.DATETIME:
|
||||||
|
try:
|
||||||
|
if isinstance(value, str):
|
||||||
|
dt = datetime.fromisoformat(value)
|
||||||
|
return dt.strftime("%d.%m.%Y %H:%M")
|
||||||
|
return str(value)
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
elif field.field_type in [FieldType.AMOUNT, FieldType.CURRENCY_EUR]:
|
||||||
|
try:
|
||||||
|
amount = float(value)
|
||||||
|
return f"{amount:.2f} €"
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
elif field.field_type == FieldType.MULTISELECT:
|
||||||
|
if isinstance(value, list):
|
||||||
|
return ", ".join(str(v) for v in value)
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
def _format_cost_positions(self, positions: List[Dict]) -> List[Dict]:
|
||||||
|
"""Format cost positions for PDF"""
|
||||||
|
formatted = []
|
||||||
|
for i, pos in enumerate(positions, 1):
|
||||||
|
formatted.append({
|
||||||
|
"number": i,
|
||||||
|
"description": pos.get("description", ""),
|
||||||
|
"amount": f"{float(pos.get('amount', 0)):.2f} €",
|
||||||
|
"category": pos.get("category", ""),
|
||||||
|
"notes": pos.get("notes", "")
|
||||||
|
})
|
||||||
|
return formatted
|
||||||
|
|
||||||
|
def _format_comparison_offers(self, offers: List[Dict]) -> List[Dict]:
|
||||||
|
"""Format comparison offers for PDF"""
|
||||||
|
formatted = []
|
||||||
|
for i, offer in enumerate(offers, 1):
|
||||||
|
formatted.append({
|
||||||
|
"number": i,
|
||||||
|
"vendor": offer.get("vendor", ""),
|
||||||
|
"description": offer.get("description", ""),
|
||||||
|
"amount": f"{float(offer.get('amount', 0)):.2f} €",
|
||||||
|
"selected": "✓" if offer.get("selected") else "",
|
||||||
|
"notes": offer.get("notes", "")
|
||||||
|
})
|
||||||
|
return formatted
|
||||||
|
|
||||||
|
def _get_status_name(
|
||||||
|
self,
|
||||||
|
application: DynamicApplication,
|
||||||
|
app_type: ApplicationType,
|
||||||
|
db: Session
|
||||||
|
) -> str:
|
||||||
|
"""Get the display name for the current status"""
|
||||||
|
status = db.query(ApplicationTypeStatus).filter(
|
||||||
|
ApplicationTypeStatus.application_type_id == app_type.id,
|
||||||
|
ApplicationTypeStatus.status_id == application.status_id
|
||||||
|
).first()
|
||||||
|
return status.name if status else application.status_id
|
||||||
|
|
||||||
|
def _generate_from_template(
|
||||||
|
self,
|
||||||
|
template_content: bytes,
|
||||||
|
field_mapping: Dict[str, str],
|
||||||
|
data: Dict[str, Any]
|
||||||
|
) -> bytes:
|
||||||
|
"""Generate PDF from template"""
|
||||||
|
return fill_pdf_template(template_content, field_mapping, data)
|
||||||
|
|
||||||
|
def _generate_from_scratch(
|
||||||
|
self,
|
||||||
|
data: Dict[str, Any],
|
||||||
|
type_name: str,
|
||||||
|
title: str
|
||||||
|
) -> bytes:
|
||||||
|
"""Generate PDF from scratch"""
|
||||||
|
# Prepare formatted data
|
||||||
|
formatted_data = {
|
||||||
|
"Application Type": type_name,
|
||||||
|
"Title": title,
|
||||||
|
"Application ID": data.get("application_id", ""),
|
||||||
|
"Status": data.get("status", ""),
|
||||||
|
"Submitted": data.get("submitted_at", ""),
|
||||||
|
"Email": data.get("email", ""),
|
||||||
|
"Name": f"{data.get('first_name', '')} {data.get('last_name', '')}".strip(),
|
||||||
|
"Total Amount": data.get("total_amount", "")
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add other fields
|
||||||
|
for key, value in data.items():
|
||||||
|
if key not in ["application_id", "type", "title", "email", "first_name",
|
||||||
|
"last_name", "submitted_at", "created_at", "status",
|
||||||
|
"total_amount", "cost_positions", "comparison_offers",
|
||||||
|
"cost_positions_count", "comparison_offers_count"]:
|
||||||
|
# Format key for display
|
||||||
|
display_key = key.replace('_', ' ').title()
|
||||||
|
formatted_data[display_key] = value
|
||||||
|
|
||||||
|
# Add cost positions if present
|
||||||
|
if "cost_positions" in data and data["cost_positions"]:
|
||||||
|
formatted_data["Cost Positions"] = data["cost_positions"]
|
||||||
|
|
||||||
|
# Add comparison offers if present
|
||||||
|
if "comparison_offers" in data and data["comparison_offers"]:
|
||||||
|
formatted_data["Comparison Offers"] = data["comparison_offers"]
|
||||||
|
|
||||||
|
return create_pdf_from_data(formatted_data, title)
|
||||||
|
|
||||||
|
def _get_default_watermark(
|
||||||
|
self,
|
||||||
|
application: DynamicApplication,
|
||||||
|
db: Session
|
||||||
|
) -> str:
|
||||||
|
"""Get default watermark text"""
|
||||||
|
status = self._get_status_name(application, application.application_type, db)
|
||||||
|
|
||||||
|
if "draft" in status.lower():
|
||||||
|
return "ENTWURF"
|
||||||
|
elif "approved" in status.lower() or "genehmigt" in status.lower():
|
||||||
|
return "GENEHMIGT"
|
||||||
|
elif "rejected" in status.lower() or "abgelehnt" in status.lower():
|
||||||
|
return "ABGELEHNT"
|
||||||
|
else:
|
||||||
|
return status.upper()
|
||||||
|
|
||||||
|
def _generate_filename(self, application: DynamicApplication) -> str:
|
||||||
|
"""Generate a unique filename for the PDF"""
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||||
|
safe_title = "".join(c for c in application.title if c.isalnum() or c in (' ', '-', '_'))[:50]
|
||||||
|
safe_title = safe_title.replace(' ', '_')
|
||||||
|
|
||||||
|
return f"{application.application_id}_{safe_title}_{timestamp}.pdf"
|
||||||
|
|
||||||
|
def generate_batch_pdfs(
|
||||||
|
self,
|
||||||
|
application_ids: List[str],
|
||||||
|
db: Session,
|
||||||
|
merge: bool = False
|
||||||
|
) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Generate PDFs for multiple applications
|
||||||
|
|
||||||
|
Args:
|
||||||
|
application_ids: List of application IDs
|
||||||
|
db: Database session
|
||||||
|
merge: Whether to merge into a single PDF
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the generated file(s)
|
||||||
|
"""
|
||||||
|
pdf_contents = []
|
||||||
|
pdf_paths = []
|
||||||
|
|
||||||
|
for app_id in application_ids:
|
||||||
|
application = db.query(DynamicApplication).filter(
|
||||||
|
DynamicApplication.application_id == app_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if application:
|
||||||
|
path = self.generate_pdf_for_application(application, db)
|
||||||
|
pdf_paths.append(path)
|
||||||
|
|
||||||
|
if merge:
|
||||||
|
with open(path, 'rb') as f:
|
||||||
|
pdf_contents.append(f.read())
|
||||||
|
|
||||||
|
if merge and pdf_contents:
|
||||||
|
# Merge all PDFs
|
||||||
|
merged_content = merge_pdfs(pdf_contents)
|
||||||
|
|
||||||
|
# Save merged PDF
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||||
|
merged_filename = f"batch_{timestamp}.pdf"
|
||||||
|
merged_path = self.output_dir / merged_filename
|
||||||
|
|
||||||
|
with open(merged_path, 'wb') as f:
|
||||||
|
f.write(merged_content)
|
||||||
|
|
||||||
|
# Delete individual PDFs
|
||||||
|
for path in pdf_paths:
|
||||||
|
try:
|
||||||
|
os.remove(path)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return str(merged_path)
|
||||||
|
|
||||||
|
return pdf_paths[0] if pdf_paths else None
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton instance
|
||||||
|
pdf_service = PDFService()
|
||||||
|
|
||||||
|
|
||||||
|
# Convenience function for use in routes
|
||||||
|
def generate_pdf_for_application(
|
||||||
|
application: DynamicApplication,
|
||||||
|
db: Session,
|
||||||
|
include_watermark: bool = False,
|
||||||
|
watermark_text: Optional[str] = None
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Generate a PDF for an application
|
||||||
|
|
||||||
|
Args:
|
||||||
|
application: The application
|
||||||
|
db: Database session
|
||||||
|
include_watermark: Whether to include watermark
|
||||||
|
watermark_text: Custom watermark text
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to generated PDF
|
||||||
|
"""
|
||||||
|
return pdf_service.generate_pdf_for_application(
|
||||||
|
application, db, include_watermark, watermark_text
|
||||||
|
)
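
A hedged sketch of calling the convenience wrapper above from a download handler; `FileResponse`, the session argument, and the import paths are assumptions, while `generate_pdf_for_application` and `DynamicApplication` come from this codebase.

```python
# Illustrative only: look up an application and return the generated PDF.
# FileResponse and the session handling are assumptions; the service call is real.
from fastapi.responses import FileResponse
from sqlalchemy.orm import Session

from models.application_type import DynamicApplication  # assumed import path
from services.pdf_service import generate_pdf_for_application  # assumed import path


def download_application_pdf(application_id: str, db: Session) -> FileResponse:
    application = db.query(DynamicApplication).filter(
        DynamicApplication.application_id == application_id
    ).first()
    if application is None:
        raise LookupError(f"Unknown application: {application_id}")

    # Watermark text defaults to the current status (e.g. ENTWURF / GENEHMIGT).
    pdf_path = generate_pdf_for_application(application, db, include_watermark=True)
    return FileResponse(pdf_path, media_type="application/pdf")
```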
backend/src/startup.py (new file, 260 lines)
@@ -0,0 +1,260 @@
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Application Startup Script
|
||||||
|
|
||||||
|
This script initializes the database and creates default data for the dynamic application system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Add src directory to path
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
|
from config.database import init_database, get_engine
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from models.application_type import (
|
||||||
|
ApplicationType, ApplicationField, ApplicationTypeStatus,
|
||||||
|
StatusTransition, FieldType, TransitionTriggerType
|
||||||
|
)
|
||||||
|
from models.user import User, Role
|
||||||
|
|
||||||
|
# Configure logging
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||||
|
)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_application_types():
|
||||||
|
"""Create default QSM and VSM application types if they don't exist"""
|
||||||
|
engine = get_engine()
|
||||||
|
|
||||||
|
with Session(engine) as session:
|
||||||
|
# Check if types already exist
|
||||||
|
existing_qsm = session.query(ApplicationType).filter_by(type_id="qsm").first()
|
||||||
|
existing_vsm = session.query(ApplicationType).filter_by(type_id="vsm").first()
|
||||||
|
|
||||||
|
if existing_qsm and existing_vsm:
|
||||||
|
logger.info("Default application types already exist")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Create QSM type if not exists
|
||||||
|
if not existing_qsm:
|
||||||
|
logger.info("Creating QSM application type...")
|
||||||
|
qsm_type = ApplicationType(
|
||||||
|
type_id="qsm",
|
||||||
|
name="QSM - Qualitätssicherungsmittel",
|
||||||
|
description="Antrag für Qualitätssicherungsmittel zur Verbesserung der Lehre",
|
||||||
|
is_active=True,
|
||||||
|
is_public=True,
|
||||||
|
max_cost_positions=100,
|
||||||
|
max_comparison_offers=100
|
||||||
|
)
|
||||||
|
session.add(qsm_type)
|
||||||
|
session.flush()
|
||||||
|
|
||||||
|
# Create default statuses for QSM
|
||||||
|
create_default_statuses(session, qsm_type)
|
||||||
|
logger.info("QSM application type created successfully")
|
||||||
|
|
||||||
|
# Create VSM type if not exists
|
||||||
|
if not existing_vsm:
|
||||||
|
logger.info("Creating VSM application type...")
|
||||||
|
vsm_type = ApplicationType(
|
||||||
|
type_id="vsm",
|
||||||
|
name="VSM - Verfasste Studierendenschaft",
|
||||||
|
description="Antrag für Mittel der Verfassten Studierendenschaft",
|
||||||
|
is_active=True,
|
||||||
|
is_public=True,
|
||||||
|
max_cost_positions=100,
|
||||||
|
max_comparison_offers=100
|
||||||
|
)
|
||||||
|
session.add(vsm_type)
|
||||||
|
session.flush()
|
||||||
|
|
||||||
|
# Create default statuses for VSM
|
||||||
|
create_default_statuses(session, vsm_type)
|
||||||
|
logger.info("VSM application type created successfully")
|
||||||
|
|
||||||
|
session.commit()
|
||||||
|
logger.info("Default application types initialization complete")
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_statuses(session, app_type: ApplicationType):
|
||||||
|
"""Create default statuses and transitions for an application type"""
|
||||||
|
|
||||||
|
# Define default statuses
|
||||||
|
statuses = [
|
||||||
|
{
|
||||||
|
"status_id": "draft",
|
||||||
|
"name": "Entwurf",
|
||||||
|
"is_editable": True,
|
||||||
|
"color": "#6B7280",
|
||||||
|
"is_initial": True,
|
||||||
|
"display_order": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "submitted",
|
||||||
|
"name": "Eingereicht",
|
||||||
|
"is_editable": False,
|
||||||
|
"color": "#3B82F6",
|
||||||
|
"send_notification": True,
|
||||||
|
"display_order": 10
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "under_review",
|
||||||
|
"name": "In Prüfung",
|
||||||
|
"is_editable": False,
|
||||||
|
"color": "#8B5CF6",
|
||||||
|
"display_order": 20
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "approved",
|
||||||
|
"name": "Genehmigt",
|
||||||
|
"is_editable": False,
|
||||||
|
"color": "#10B981",
|
||||||
|
"is_final": True,
|
||||||
|
"send_notification": True,
|
||||||
|
"display_order": 30
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"status_id": "rejected",
|
||||||
|
"name": "Abgelehnt",
|
||||||
|
"is_editable": False,
|
||||||
|
"color": "#EF4444",
|
||||||
|
"is_final": True,
|
||||||
|
"send_notification": True,
|
||||||
|
"display_order": 40
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
status_objects = {}
|
||||||
|
|
||||||
|
for status_data in statuses:
|
||||||
|
status = ApplicationTypeStatus(
|
||||||
|
application_type_id=app_type.id,
|
||||||
|
**status_data
|
||||||
|
)
|
||||||
|
session.add(status)
|
||||||
|
session.flush()
|
||||||
|
status_objects[status_data["status_id"]] = status
|
||||||
|
|
||||||
|
# Create transitions
|
||||||
|
transitions = [
|
||||||
|
{
|
||||||
|
"from": "draft",
|
||||||
|
"to": "submitted",
|
||||||
|
"name": "Einreichen",
|
||||||
|
"trigger_type": TransitionTriggerType.APPLICANT_ACTION
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from": "submitted",
|
||||||
|
"to": "under_review",
|
||||||
|
"name": "Prüfung starten",
|
||||||
|
"trigger_type": TransitionTriggerType.USER_APPROVAL,
|
||||||
|
"config": {"role": "admin"}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from": "under_review",
|
||||||
|
"to": "approved",
|
||||||
|
"name": "Genehmigen",
|
||||||
|
"trigger_type": TransitionTriggerType.USER_APPROVAL,
|
||||||
|
"config": {"role": "admin"}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from": "under_review",
|
||||||
|
"to": "rejected",
|
||||||
|
"name": "Ablehnen",
|
||||||
|
"trigger_type": TransitionTriggerType.USER_APPROVAL,
|
||||||
|
"config": {"role": "admin"}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
for trans in transitions:
|
||||||
|
transition = StatusTransition(
|
||||||
|
from_status_id=status_objects[trans["from"]].id,
|
||||||
|
to_status_id=status_objects[trans["to"]].id,
|
||||||
|
name=trans["name"],
|
||||||
|
trigger_type=trans["trigger_type"],
|
||||||
|
trigger_config=trans.get("config", {}),
|
||||||
|
is_active=True
|
||||||
|
)
|
||||||
|
session.add(transition)
|
||||||
|
|
||||||
|
|
||||||
|
def create_admin_user():
|
||||||
|
"""Create a default admin user if none exists"""
|
||||||
|
engine = get_engine()
|
||||||
|
|
||||||
|
with Session(engine) as session:
|
||||||
|
# Check if any admin user exists
|
||||||
|
admin_role = session.query(Role).filter_by(name="admin").first()
|
||||||
|
if not admin_role:
|
||||||
|
logger.warning("Admin role not found, skipping admin user creation")
|
||||||
|
return
|
||||||
|
|
||||||
|
admin_users = session.query(User).join(User.roles).filter(Role.name == "admin").all()
|
||||||
|
|
||||||
|
if admin_users:
|
||||||
|
logger.info("Admin user(s) already exist")
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Creating default admin user...")
|
||||||
|
|
||||||
|
# Create admin user
|
||||||
|
from services.auth_service import get_password_hash
|
||||||
|
|
||||||
|
admin = User(
|
||||||
|
email="admin@example.com",
|
||||||
|
given_name="System",
|
||||||
|
family_name="Administrator",
|
||||||
|
display_name="System Admin",
|
||||||
|
auth_provider="local",
|
||||||
|
verification_status="fully_verified",
|
||||||
|
email_verified=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Set password (you should change this!)
|
||||||
|
# For production, this should be set via environment variable
|
||||||
|
default_password = os.getenv("ADMIN_PASSWORD", "changeme123")
|
||||||
|
|
||||||
|
# Note: You'll need to implement password storage separately
|
||||||
|
# This is just a placeholder
|
||||||
|
|
||||||
|
session.add(admin)
|
||||||
|
admin.roles.append(admin_role)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
logger.info(f"Default admin user created: admin@example.com")
|
||||||
|
logger.warning(f"IMPORTANT: Change the default admin password immediately!")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Main startup function"""
|
||||||
|
logger.info("Starting application initialization...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Initialize database schema
|
||||||
|
logger.info("Initializing database schema...")
|
||||||
|
init_database()
|
||||||
|
logger.info("Database schema initialized")
|
||||||
|
|
||||||
|
# Create default application types
|
||||||
|
create_default_application_types()
|
||||||
|
|
||||||
|
# Create admin user
|
||||||
|
create_admin_user()
|
||||||
|
|
||||||
|
logger.info("Application initialization complete!")
|
||||||
|
logger.info("You can now start the application server.")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Initialization failed: {e}", exc_info=True)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
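
If the same initialization should also run when the API server boots, rather than only via this script, a minimal sketch under the assumption of a FastAPI app object could look like this; the startup hook and import paths are assumptions, the called functions are the ones defined above.

```python
# Sketch only: reuse the initialization steps at API startup instead of running
# this script manually. The FastAPI app object and import paths are assumptions.
from fastapi import FastAPI

from config.database import init_database
from startup import create_admin_user, create_default_application_types

app = FastAPI()


@app.on_event("startup")
def initialize_defaults() -> None:
    init_database()
    create_default_application_types()
    create_admin_user()
```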
backend/src/utils/crypto.py (new file, 139 lines)
@@ -0,0 +1,139 @@
"""
|
||||||
|
Cryptography Utilities
|
||||||
|
|
||||||
|
This module provides encryption and decryption utilities for sensitive data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import secrets
|
||||||
|
import hashlib
import hmac
|
||||||
|
from typing import Optional
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||||
|
|
||||||
|
|
||||||
|
def generate_key(password: str, salt: Optional[bytes] = None) -> bytes:
|
||||||
|
"""Generate an encryption key from a password"""
|
||||||
|
if salt is None:
|
||||||
|
salt = secrets.token_bytes(16)
|
||||||
|
|
||||||
|
kdf = PBKDF2HMAC(
|
||||||
|
algorithm=hashes.SHA256(),
|
||||||
|
length=32,
|
||||||
|
salt=salt,
|
||||||
|
iterations=100000,
|
||||||
|
)
|
||||||
|
key = base64.urlsafe_b64encode(kdf.derive(password.encode()))
|
||||||
|
return key
|
||||||
|
|
||||||
|
|
||||||
|
def encrypt_token(token: str, key: str) -> str:
|
||||||
|
"""Encrypt a token using Fernet encryption"""
|
||||||
|
if not token or not key:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
# Ensure key is properly formatted
|
||||||
|
if len(key) < 32:
|
||||||
|
# Pad the key if too short
|
||||||
|
key = key.ljust(32, '0')
|
||||||
|
|
||||||
|
# Create Fernet key from the provided key
|
||||||
|
fernet_key = base64.urlsafe_b64encode(key[:32].encode())
|
||||||
|
f = Fernet(fernet_key)
|
||||||
|
|
||||||
|
# Encrypt the token
|
||||||
|
encrypted = f.encrypt(token.encode())
|
||||||
|
return base64.urlsafe_b64encode(encrypted).decode()
|
||||||
|
|
||||||
|
|
||||||
|
def decrypt_token(encrypted_token: str, key: str) -> Optional[str]:
|
||||||
|
"""Decrypt a token using Fernet encryption"""
|
||||||
|
if not encrypted_token or not key:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Ensure key is properly formatted
|
||||||
|
if len(key) < 32:
|
||||||
|
# Pad the key if too short
|
||||||
|
key = key.ljust(32, '0')
|
||||||
|
|
||||||
|
# Create Fernet key from the provided key
|
||||||
|
fernet_key = base64.urlsafe_b64encode(key[:32].encode())
|
||||||
|
f = Fernet(fernet_key)
|
||||||
|
|
||||||
|
# Decode and decrypt the token
|
||||||
|
encrypted_bytes = base64.urlsafe_b64decode(encrypted_token.encode())
|
||||||
|
decrypted = f.decrypt(encrypted_bytes)
|
||||||
|
return decrypted.decode()
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def hash_password(password: str, salt: Optional[str] = None) -> tuple[str, str]:
|
||||||
|
"""Hash a password with salt"""
|
||||||
|
if salt is None:
|
||||||
|
salt = secrets.token_hex(16)
|
||||||
|
|
||||||
|
# Create hash
|
||||||
|
password_hash = hashlib.pbkdf2_hmac(
|
||||||
|
'sha256',
|
||||||
|
password.encode(),
|
||||||
|
salt.encode(),
|
||||||
|
100000
|
||||||
|
)
|
||||||
|
|
||||||
|
return base64.b64encode(password_hash).decode(), salt
|
||||||
|
|
||||||
|
|
||||||
|
def verify_password(password: str, password_hash: str, salt: str) -> bool:
|
||||||
|
"""Verify a password against its hash"""
|
||||||
|
try:
|
||||||
|
# Recreate hash with provided password and salt
|
||||||
|
new_hash = hashlib.pbkdf2_hmac(
|
||||||
|
'sha256',
|
||||||
|
password.encode(),
|
||||||
|
salt.encode(),
|
||||||
|
100000
|
||||||
|
)
|
||||||
|
new_hash_str = base64.b64encode(new_hash).decode()
|
||||||
|
|
||||||
|
# Compare hashes
|
||||||
|
return hmac.compare_digest(new_hash_str, password_hash)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def generate_secure_token(length: int = 32) -> str:
|
||||||
|
"""Generate a cryptographically secure random token"""
|
||||||
|
return secrets.token_urlsafe(length)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_api_key() -> str:
|
||||||
|
"""Generate a secure API key"""
|
||||||
|
return secrets.token_urlsafe(32)
|
||||||
|
|
||||||
|
|
||||||
|
def hash_api_key(api_key: str) -> str:
|
||||||
|
"""Hash an API key for storage"""
|
||||||
|
return hashlib.sha256(api_key.encode()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def verify_api_key(api_key: str, hashed_key: str) -> bool:
|
||||||
|
"""Verify an API key against its hash"""
|
||||||
|
return hash_api_key(api_key) == hashed_key
|
||||||
|
|
||||||
|
|
||||||
|
def generate_session_id() -> str:
|
||||||
|
"""Generate a secure session ID"""
|
||||||
|
return secrets.token_hex(32)
|
||||||
|
|
||||||
|
|
||||||
|
def encode_base64(data: bytes) -> str:
|
||||||
|
"""Encode bytes to base64 string"""
|
||||||
|
return base64.urlsafe_b64encode(data).decode()
|
||||||
|
|
||||||
|
|
||||||
|
def decode_base64(data: str) -> bytes:
|
||||||
|
"""Decode base64 string to bytes"""
|
||||||
|
return base64.urlsafe_b64decode(data.encode())
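
A round-trip sketch for the helpers above; the key, password, and import path are arbitrary examples.

```python
# Round-trip check for the helpers above; key and password are arbitrary and
# the import path is an assumption.
from utils.crypto import (
    decrypt_token,
    encrypt_token,
    generate_secure_token,
    hash_password,
    verify_password,
)

secret_key = "an-application-secret-of-at-least-32-characters"
token = generate_secure_token()

ciphertext = encrypt_token(token, secret_key)
assert decrypt_token(ciphertext, secret_key) == token

digest, salt = hash_password("changeme123")
assert verify_password("changeme123", digest, salt)
assert not verify_password("wrong-password", digest, salt)
```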
backend/src/utils/email.py (new file, 359 lines)
@@ -0,0 +1,359 @@
"""
|
||||||
|
Email Service Utilities
|
||||||
|
|
||||||
|
This module provides email sending functionality for the application.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import smtplib
|
||||||
|
import ssl
|
||||||
|
from email.mime.text import MIMEText
|
||||||
|
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
|
||||||
|
from typing import Optional, List, Dict, Any
|
||||||
|
from pathlib import Path
|
||||||
|
import logging
|
||||||
|
from jinja2 import Template, Environment, FileSystemLoader
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class EmailService:
|
||||||
|
"""Service for sending emails"""
|
||||||
|
|
||||||
|
def __init__(self, settings):
|
||||||
|
self.settings = settings
|
||||||
|
self.smtp_host = settings.email.smtp_host
|
||||||
|
self.smtp_port = settings.email.smtp_port
|
||||||
|
self.smtp_tls = settings.email.smtp_tls
|
||||||
|
self.smtp_ssl = settings.email.smtp_ssl
|
||||||
|
self.smtp_username = settings.email.smtp_username
|
||||||
|
self.smtp_password = settings.email.smtp_password
|
||||||
|
self.from_email = settings.email.from_email
|
||||||
|
self.from_name = settings.email.from_name
|
||||||
|
|
||||||
|
# Setup Jinja2 for email templates
|
||||||
|
template_dir = Path(__file__).parent.parent / "templates" / "emails"
|
||||||
|
if template_dir.exists():
|
||||||
|
self.jinja_env = Environment(loader=FileSystemLoader(str(template_dir)))
|
||||||
|
else:
|
||||||
|
self.jinja_env = Environment()
|
||||||
|
|
||||||
|
async def send_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
subject: str,
|
||||||
|
html_content: str,
|
||||||
|
text_content: Optional[str] = None,
|
||||||
|
cc: Optional[List[str]] = None,
|
||||||
|
bcc: Optional[List[str]] = None,
|
||||||
|
attachments: Optional[List[Dict[str, Any]]] = None
|
||||||
|
) -> bool:
|
||||||
|
"""Send an email"""
|
||||||
|
try:
|
||||||
|
# Create message
|
||||||
|
message = MIMEMultipart("alternative")
|
||||||
|
message["Subject"] = subject
|
||||||
|
message["From"] = f"{self.from_name} <{self.from_email}>"
|
||||||
|
message["To"] = to_email
|
||||||
|
|
||||||
|
if cc:
|
||||||
|
message["Cc"] = ", ".join(cc)
|
||||||
|
if bcc:
|
||||||
|
message["Bcc"] = ", ".join(bcc)
|
||||||
|
|
||||||
|
# Add text and HTML parts
|
||||||
|
if text_content:
|
||||||
|
text_part = MIMEText(text_content, "plain")
|
||||||
|
message.attach(text_part)
|
||||||
|
|
||||||
|
html_part = MIMEText(html_content, "html")
|
||||||
|
message.attach(html_part)
|
||||||
|
|
||||||
|
# Add attachments if provided
|
||||||
|
if attachments:
|
||||||
|
for attachment in attachments:
|
||||||
|
# attachment should have 'content', 'filename', and optional 'content_type'
|
||||||
|
part = MIMEApplication(attachment["content"], Name=attachment["filename"])
part["Content-Disposition"] = f'attachment; filename="{attachment["filename"]}"'
message.attach(part)
|
||||||
|
|
||||||
|
# Send email
|
||||||
|
if self.smtp_ssl:
|
||||||
|
# SSL connection
|
||||||
|
context = ssl.create_default_context()
|
||||||
|
with smtplib.SMTP_SSL(self.smtp_host, self.smtp_port, context=context) as server:
|
||||||
|
if self.smtp_username and self.smtp_password:
|
||||||
|
server.login(self.smtp_username, self.smtp_password)
|
||||||
|
recipients = [to_email]
|
||||||
|
if cc:
|
||||||
|
recipients.extend(cc)
|
||||||
|
if bcc:
|
||||||
|
recipients.extend(bcc)
|
||||||
|
server.sendmail(self.from_email, recipients, message.as_string())
|
||||||
|
else:
|
||||||
|
# TLS connection
|
||||||
|
with smtplib.SMTP(self.smtp_host, self.smtp_port) as server:
|
||||||
|
if self.smtp_tls:
|
||||||
|
server.starttls()
|
||||||
|
if self.smtp_username and self.smtp_password:
|
||||||
|
server.login(self.smtp_username, self.smtp_password)
|
||||||
|
recipients = [to_email]
|
||||||
|
if cc:
|
||||||
|
recipients.extend(cc)
|
||||||
|
if bcc:
|
||||||
|
recipients.extend(bcc)
|
||||||
|
server.sendmail(self.from_email, recipients, message.as_string())
|
||||||
|
|
||||||
|
logger.info(f"Email sent successfully to {to_email}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send email to {to_email}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def send_verification_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
user_name: str,
|
||||||
|
verification_url: str
|
||||||
|
) -> bool:
|
||||||
|
"""Send email verification link"""
|
||||||
|
subject = self.settings.email.verification_subject
|
||||||
|
|
||||||
|
# HTML content
|
||||||
|
html_content = f"""
|
||||||
|
<html>
|
||||||
|
<body style="font-family: Arial, sans-serif; line-height: 1.6; color: #333;">
|
||||||
|
<div style="max-width: 600px; margin: 0 auto; padding: 20px;">
|
||||||
|
<h2 style="color: #2c3e50;">E-Mail-Verifizierung</h2>
|
||||||
|
<p>Hallo {user_name},</p>
|
||||||
|
<p>Vielen Dank für Ihre Registrierung. Bitte verifizieren Sie Ihre E-Mail-Adresse, indem Sie auf den folgenden Link klicken:</p>
|
||||||
|
<div style="text-align: center; margin: 30px 0;">
|
||||||
|
<a href="{verification_url}"
|
||||||
|
style="display: inline-block; padding: 12px 24px; background-color: #3498db; color: white; text-decoration: none; border-radius: 5px;">
|
||||||
|
E-Mail verifizieren
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
<p>Oder kopieren Sie diesen Link in Ihren Browser:</p>
|
||||||
|
<p style="word-break: break-all; background: #f4f4f4; padding: 10px; border-radius: 3px;">
|
||||||
|
{verification_url}
|
||||||
|
</p>
|
||||||
|
<p>Dieser Link ist 24 Stunden gültig.</p>
|
||||||
|
<hr style="margin: 30px 0; border: none; border-top: 1px solid #ddd;">
|
||||||
|
<p style="font-size: 12px; color: #666;">
|
||||||
|
Falls Sie diese E-Mail nicht angefordert haben, können Sie sie ignorieren.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Text content
|
||||||
|
text_content = f"""
|
||||||
|
Hallo {user_name},
|
||||||
|
|
||||||
|
Vielen Dank für Ihre Registrierung. Bitte verifizieren Sie Ihre E-Mail-Adresse, indem Sie auf den folgenden Link klicken:
|
||||||
|
|
||||||
|
{verification_url}
|
||||||
|
|
||||||
|
Dieser Link ist 24 Stunden gültig.
|
||||||
|
|
||||||
|
Falls Sie diese E-Mail nicht angefordert haben, können Sie sie ignorieren.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return await self.send_email(to_email, subject, html_content, text_content)
|
||||||
|
|
||||||
|
async def send_magic_link_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
user_name: str,
|
||||||
|
login_url: str
|
||||||
|
) -> bool:
|
||||||
|
"""Send magic link for login"""
|
||||||
|
subject = self.settings.email.magic_link_subject
|
||||||
|
|
||||||
|
# HTML content
|
||||||
|
html_content = f"""
|
||||||
|
<html>
|
||||||
|
<body style="font-family: Arial, sans-serif; line-height: 1.6; color: #333;">
|
||||||
|
<div style="max-width: 600px; margin: 0 auto; padding: 20px;">
|
||||||
|
<h2 style="color: #2c3e50;">Anmeldung bei STUPA</h2>
|
||||||
|
<p>Hallo {user_name},</p>
|
||||||
|
<p>Sie haben eine Anmeldung angefordert. Klicken Sie auf den folgenden Link, um sich anzumelden:</p>
|
||||||
|
<div style="text-align: center; margin: 30px 0;">
|
||||||
|
<a href="{login_url}"
|
||||||
|
style="display: inline-block; padding: 12px 24px; background-color: #27ae60; color: white; text-decoration: none; border-radius: 5px;">
|
||||||
|
Jetzt anmelden
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
<p>Oder kopieren Sie diesen Link in Ihren Browser:</p>
|
||||||
|
<p style="word-break: break-all; background: #f4f4f4; padding: 10px; border-radius: 3px;">
|
||||||
|
{login_url}
|
||||||
|
</p>
|
||||||
|
<p>Dieser Link ist 15 Minuten gültig.</p>
|
||||||
|
<hr style="margin: 30px 0; border: none; border-top: 1px solid #ddd;">
|
||||||
|
<p style="font-size: 12px; color: #666;">
|
||||||
|
Falls Sie diese Anmeldung nicht angefordert haben, ignorieren Sie bitte diese E-Mail.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Text content
|
||||||
|
text_content = f"""
|
||||||
|
Hallo {user_name},
|
||||||
|
|
||||||
|
Sie haben eine Anmeldung angefordert. Klicken Sie auf den folgenden Link, um sich anzumelden:
|
||||||
|
|
||||||
|
{login_url}
|
||||||
|
|
||||||
|
Dieser Link ist 15 Minuten gültig.
|
||||||
|
|
||||||
|
Falls Sie diese Anmeldung nicht angefordert haben, ignorieren Sie bitte diese E-Mail.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return await self.send_email(to_email, subject, html_content, text_content)
|
||||||
|
|
||||||
|
async def send_application_status_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
user_name: str,
|
||||||
|
application_id: str,
|
||||||
|
old_status: str,
|
||||||
|
new_status: str,
|
||||||
|
comment: Optional[str] = None,
|
||||||
|
application_url: Optional[str] = None
|
||||||
|
) -> bool:
|
||||||
|
"""Send application status update email"""
|
||||||
|
subject = f"{self.settings.email.application_notification_subject} - Antrag {application_id}"
|
||||||
|
|
||||||
|
status_translations = {
|
||||||
|
"beantragt": "Beantragt",
|
||||||
|
"bearbeitung_gesperrt": "In Bearbeitung",
|
||||||
|
"zu_pruefen": "Wird geprüft",
|
||||||
|
"zur_abstimmung": "Zur Abstimmung",
|
||||||
|
"genehmigt": "Genehmigt",
|
||||||
|
"abgelehnt": "Abgelehnt"
|
||||||
|
}
|
||||||
|
|
||||||
|
old_status_display = status_translations.get(old_status, old_status)
|
||||||
|
new_status_display = status_translations.get(new_status, new_status)
|
||||||
|
|
||||||
|
# HTML content
|
||||||
|
html_content = f"""
|
||||||
|
<html>
|
||||||
|
<body style="font-family: Arial, sans-serif; line-height: 1.6; color: #333;">
|
||||||
|
<div style="max-width: 600px; margin: 0 auto; padding: 20px;">
|
||||||
|
<h2 style="color: #2c3e50;">Status-Update zu Ihrem Antrag</h2>
|
||||||
|
<p>Hallo {user_name},</p>
|
||||||
|
<p>Der Status Ihres Antrags <strong>{application_id}</strong> wurde aktualisiert:</p>
|
||||||
|
<div style="background: #f8f9fa; padding: 15px; border-radius: 5px; margin: 20px 0;">
|
||||||
|
<p><strong>Vorheriger Status:</strong> {old_status_display}</p>
|
||||||
|
<p><strong>Neuer Status:</strong> <span style="color: #27ae60; font-weight: bold;">{new_status_display}</span></p>
|
||||||
|
</div>
|
||||||
|
"""
|
||||||
|
|
||||||
|
if comment:
|
||||||
|
html_content += f"""
|
||||||
|
<div style="background: #fff3cd; padding: 15px; border-radius: 5px; margin: 20px 0;">
|
||||||
|
<p><strong>Kommentar:</strong></p>
|
||||||
|
<p>{comment}</p>
|
||||||
|
</div>
|
||||||
|
"""
|
||||||
|
|
||||||
|
if application_url:
|
||||||
|
html_content += f"""
|
||||||
|
<div style="text-align: center; margin: 30px 0;">
|
||||||
|
<a href="{application_url}"
|
||||||
|
style="display: inline-block; padding: 12px 24px; background-color: #3498db; color: white; text-decoration: none; border-radius: 5px;">
|
||||||
|
Antrag anzeigen
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
"""
|
||||||
|
|
||||||
|
html_content += """
|
||||||
|
<hr style="margin: 30px 0; border: none; border-top: 1px solid #ddd;">
|
||||||
|
<p style="font-size: 12px; color: #666;">
|
||||||
|
Diese E-Mail wurde automatisch generiert. Bitte antworten Sie nicht auf diese E-Mail.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Text content
|
||||||
|
text_content = f"""
|
||||||
|
Hallo {user_name},
|
||||||
|
|
||||||
|
Der Status Ihres Antrags {application_id} wurde aktualisiert:
|
||||||
|
|
||||||
|
Vorheriger Status: {old_status_display}
|
||||||
|
Neuer Status: {new_status_display}
|
||||||
|
"""
|
||||||
|
|
||||||
|
if comment:
|
||||||
|
text_content += f"\n\nKommentar:\n{comment}"
|
||||||
|
|
||||||
|
if application_url:
|
||||||
|
text_content += f"\n\nAntrag anzeigen: {application_url}"
|
||||||
|
|
||||||
|
text_content += "\n\nDiese E-Mail wurde automatisch generiert. Bitte antworten Sie nicht auf diese E-Mail."
|
||||||
|
|
||||||
|
return await self.send_email(to_email, subject, html_content, text_content)
|
||||||
|
|
||||||
|
async def send_review_request_email(
|
||||||
|
self,
|
||||||
|
to_email: str,
|
||||||
|
reviewer_name: str,
|
||||||
|
application_id: str,
|
||||||
|
applicant_name: str,
|
||||||
|
review_type: str,
|
||||||
|
review_url: str
|
||||||
|
) -> bool:
|
||||||
|
"""Send review request to reviewers"""
|
||||||
|
review_type_display = {
|
||||||
|
"budget": "Haushaltsbeauftragte(r)",
|
||||||
|
"finance": "Finanzreferent"
|
||||||
|
}.get(review_type, review_type)
|
||||||
|
|
||||||
|
subject = f"Prüfauftrag: Antrag {application_id} - {review_type_display}"
|
||||||
|
|
||||||
|
# HTML content
|
||||||
|
html_content = f"""
|
||||||
|
<html>
|
||||||
|
<body style="font-family: Arial, sans-serif; line-height: 1.6; color: #333;">
|
||||||
|
<div style="max-width: 600px; margin: 0 auto; padding: 20px;">
|
||||||
|
<h2 style="color: #2c3e50;">Prüfauftrag</h2>
|
||||||
|
<p>Hallo {reviewer_name},</p>
|
||||||
|
<p>Es liegt ein neuer Antrag zur Prüfung vor:</p>
|
||||||
|
<div style="background: #f8f9fa; padding: 15px; border-radius: 5px; margin: 20px 0;">
|
||||||
|
<p><strong>Antragsnummer:</strong> {application_id}</p>
|
||||||
|
<p><strong>Antragsteller:</strong> {applicant_name}</p>
|
||||||
|
<p><strong>Ihre Rolle:</strong> {review_type_display}</p>
|
||||||
|
</div>
|
||||||
|
<div style="text-align: center; margin: 30px 0;">
|
||||||
|
<a href="{review_url}"
|
||||||
|
style="display: inline-block; padding: 12px 24px; background-color: #e74c3c; color: white; text-decoration: none; border-radius: 5px;">
|
||||||
|
Antrag prüfen
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
<p>Bitte prüfen Sie den Antrag zeitnah.</p>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Text content
|
||||||
|
text_content = f"""
|
||||||
|
Hallo {reviewer_name},
|
||||||
|
|
||||||
|
Es liegt ein neuer Antrag zur Prüfung vor:
|
||||||
|
|
||||||
|
Antragsnummer: {application_id}
|
||||||
|
Antragsteller: {applicant_name}
|
||||||
|
Ihre Rolle: {review_type_display}
|
||||||
|
|
||||||
|
Antrag prüfen: {review_url}
|
||||||
|
|
||||||
|
Bitte prüfen Sie den Antrag zeitnah.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return await self.send_email(to_email, subject, html_content, text_content)
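
A minimal sketch of exercising `EmailService` against a local debugging SMTP server; the settings stub only mirrors the attributes read in `__init__` and is not the project's real configuration object.

```python
# Illustrative only: a throwaway settings stub with the attributes EmailService
# reads in __init__, pointed at a local debugging SMTP server
# (for example: python -m aiosmtpd -n -l localhost:1025).
import asyncio
from types import SimpleNamespace

from utils.email import EmailService  # assumed import path

settings = SimpleNamespace(
    email=SimpleNamespace(
        smtp_host="localhost",
        smtp_port=1025,
        smtp_tls=False,
        smtp_ssl=False,
        smtp_username=None,
        smtp_password=None,
        from_email="noreply@example.com",
        from_name="STUPA Portal",
        verification_subject="Bitte E-Mail-Adresse bestätigen",
    )
)

service = EmailService(settings)
asyncio.run(
    service.send_verification_email(
        to_email="user@example.com",
        user_name="Erika Musterfrau",
        verification_url="https://example.com/verify?token=abc123",
    )
)
```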
backend/src/utils/file_storage.py (new file, 348 lines)
@@ -0,0 +1,348 @@
"""
|
||||||
|
File Storage Service
|
||||||
|
|
||||||
|
This module provides file storage and management utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import hashlib
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, BinaryIO, Tuple
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
import uuid
|
||||||
|
import mimetypes
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class FileStorageService:
|
||||||
|
"""Service for managing file storage"""
|
||||||
|
|
||||||
|
def __init__(self, base_path: str):
|
||||||
|
"""Initialize file storage service
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_path: Base directory for file storage
|
||||||
|
"""
|
||||||
|
self.base_path = Path(base_path)
|
||||||
|
self.base_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
def save_file(
|
||||||
|
self,
|
||||||
|
file_content: bytes,
|
||||||
|
filename: str,
|
||||||
|
subdirectory: Optional[str] = None,
|
||||||
|
generate_unique_name: bool = True
|
||||||
|
) -> Tuple[str, str]:
|
||||||
|
"""Save a file to storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_content: File content as bytes
|
||||||
|
filename: Original filename
|
||||||
|
subdirectory: Optional subdirectory within base path
|
||||||
|
generate_unique_name: Whether to generate a unique filename
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (stored_path, file_hash)
|
||||||
|
"""
|
||||||
|
# Create subdirectory if specified
|
||||||
|
if subdirectory:
|
||||||
|
target_dir = self.base_path / subdirectory
|
||||||
|
else:
|
||||||
|
target_dir = self.base_path
|
||||||
|
target_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Generate unique filename if requested
|
||||||
|
if generate_unique_name:
|
||||||
|
file_ext = Path(filename).suffix
|
||||||
|
unique_id = uuid.uuid4().hex[:8]
|
||||||
|
timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
|
||||||
|
stored_filename = f"{timestamp}_{unique_id}{file_ext}"
|
||||||
|
else:
|
||||||
|
stored_filename = filename
|
||||||
|
|
||||||
|
# Full path for storage
|
||||||
|
file_path = target_dir / stored_filename
|
||||||
|
|
||||||
|
# Calculate file hash
|
||||||
|
file_hash = hashlib.sha256(file_content).hexdigest()
|
||||||
|
|
||||||
|
# Save file
|
||||||
|
try:
|
||||||
|
with open(file_path, 'wb') as f:
|
||||||
|
f.write(file_content)
|
||||||
|
|
||||||
|
logger.info(f"File saved: {file_path}")
|
||||||
|
return str(file_path.relative_to(self.base_path)), file_hash
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save file {filename}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def read_file(self, file_path: str) -> bytes:
|
||||||
|
"""Read a file from storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
File content as bytes
|
||||||
|
"""
|
||||||
|
full_path = self.base_path / file_path
|
||||||
|
|
||||||
|
if not full_path.exists():
|
||||||
|
raise FileNotFoundError(f"File not found: {file_path}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(full_path, 'rb') as f:
|
||||||
|
return f.read()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to read file {file_path}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def delete_file(self, file_path: str) -> bool:
|
||||||
|
"""Delete a file from storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if file was deleted, False if file didn't exist
|
||||||
|
"""
|
||||||
|
full_path = self.base_path / file_path
|
||||||
|
|
||||||
|
if not full_path.exists():
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.remove(full_path)
|
||||||
|
logger.info(f"File deleted: {full_path}")
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to delete file {file_path}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def move_file(self, source_path: str, dest_path: str) -> str:
|
||||||
|
"""Move a file within storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_path: Source path relative to base_path
|
||||||
|
dest_path: Destination path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
New file path
|
||||||
|
"""
|
||||||
|
source_full = self.base_path / source_path
|
||||||
|
dest_full = self.base_path / dest_path
|
||||||
|
|
||||||
|
if not source_full.exists():
|
||||||
|
raise FileNotFoundError(f"Source file not found: {source_path}")
|
||||||
|
|
||||||
|
# Create destination directory if needed
|
||||||
|
dest_full.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
shutil.move(str(source_full), str(dest_full))
|
||||||
|
logger.info(f"File moved from {source_path} to {dest_path}")
|
||||||
|
return str(dest_full.relative_to(self.base_path))
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to move file from {source_path} to {dest_path}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def copy_file(self, source_path: str, dest_path: str) -> str:
|
||||||
|
"""Copy a file within storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
source_path: Source path relative to base_path
|
||||||
|
dest_path: Destination path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
New file path
|
||||||
|
"""
|
||||||
|
source_full = self.base_path / source_path
|
||||||
|
dest_full = self.base_path / dest_path
|
||||||
|
|
||||||
|
if not source_full.exists():
|
||||||
|
raise FileNotFoundError(f"Source file not found: {source_path}")
|
||||||
|
|
||||||
|
# Create destination directory if needed
|
||||||
|
dest_full.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
shutil.copy2(str(source_full), str(dest_full))
|
||||||
|
logger.info(f"File copied from {source_path} to {dest_path}")
|
||||||
|
return str(dest_full.relative_to(self.base_path))
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to copy file from {source_path} to {dest_path}: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def file_exists(self, file_path: str) -> bool:
|
||||||
|
"""Check if a file exists
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if file exists, False otherwise
|
||||||
|
"""
|
||||||
|
full_path = self.base_path / file_path
|
||||||
|
return full_path.exists()
|
||||||
|
|
||||||
|
def get_file_info(self, file_path: str) -> dict:
|
||||||
|
"""Get information about a file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path relative to base_path
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with file information
|
||||||
|
"""
|
||||||
|
full_path = self.base_path / file_path
|
||||||
|
|
||||||
|
if not full_path.exists():
|
||||||
|
raise FileNotFoundError(f"File not found: {file_path}")
|
||||||
|
|
||||||
|
stat = full_path.stat()
|
||||||
|
|
||||||
|
# Get MIME type
|
||||||
|
mime_type, _ = mimetypes.guess_type(str(full_path))
|
||||||
|
|
||||||
|
return {
|
||||||
|
'path': file_path,
|
||||||
|
'filename': full_path.name,
|
||||||
|
'size': stat.st_size,
|
||||||
|
'mime_type': mime_type,
|
||||||
|
'created_at': datetime.fromtimestamp(stat.st_ctime),
|
||||||
|
'modified_at': datetime.fromtimestamp(stat.st_mtime),
|
||||||
|
'is_file': full_path.is_file(),
|
||||||
|
'is_dir': full_path.is_dir()
|
||||||
|
}
|
||||||
|
|
||||||
|
def list_files(self, subdirectory: Optional[str] = None, pattern: str = "*") -> list:
|
||||||
|
"""List files in storage
|
||||||
|
|
||||||
|
Args:
|
||||||
|
subdirectory: Optional subdirectory to list
|
||||||
|
pattern: Glob pattern for filtering files
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of file paths relative to base_path
|
||||||
|
"""
|
||||||
|
if subdirectory:
|
||||||
|
target_dir = self.base_path / subdirectory
|
||||||
|
else:
|
||||||
|
target_dir = self.base_path
|
||||||
|
|
||||||
|
if not target_dir.exists():
|
||||||
|
return []
|
||||||
|
|
||||||
|
files = []
|
||||||
|
for file_path in target_dir.glob(pattern):
|
||||||
|
if file_path.is_file():
|
||||||
|
relative_path = file_path.relative_to(self.base_path)
|
||||||
|
files.append(str(relative_path))
|
||||||
|
|
||||||
|
return sorted(files)
|
||||||
|
|
||||||
|
def get_directory_size(self, subdirectory: Optional[str] = None) -> int:
|
||||||
|
"""Get total size of files in a directory
|
||||||
|
|
||||||
|
Args:
|
||||||
|
subdirectory: Optional subdirectory
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Total size in bytes
|
||||||
|
"""
|
||||||
|
if subdirectory:
|
||||||
|
target_dir = self.base_path / subdirectory
|
||||||
|
else:
|
||||||
|
target_dir = self.base_path
|
||||||
|
|
||||||
|
if not target_dir.exists():
|
||||||
|
return 0
|
||||||
|
|
||||||
|
total_size = 0
|
||||||
|
for file_path in target_dir.rglob('*'):
|
||||||
|
if file_path.is_file():
|
||||||
|
total_size += file_path.stat().st_size
|
||||||
|
|
||||||
|
return total_size
|
||||||
|
|
||||||
|
def cleanup_old_files(self, days: int = 30, subdirectory: Optional[str] = None) -> int:
|
||||||
|
"""Delete files older than specified days
|
||||||
|
|
||||||
|
Args:
|
||||||
|
days: Age threshold in days
|
||||||
|
subdirectory: Optional subdirectory to clean
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of files deleted
|
||||||
|
"""
|
||||||
|
if subdirectory:
|
||||||
|
target_dir = self.base_path / subdirectory
|
||||||
|
else:
|
||||||
|
target_dir = self.base_path
|
||||||
|
|
||||||
|
if not target_dir.exists():
|
||||||
|
return 0
|
||||||
|
|
||||||
|
cutoff_time = datetime.utcnow().timestamp() - (days * 24 * 60 * 60)
|
||||||
|
deleted_count = 0
|
||||||
|
|
||||||
|
for file_path in target_dir.rglob('*'):
|
||||||
|
if file_path.is_file():
|
||||||
|
if file_path.stat().st_mtime < cutoff_time:
|
||||||
|
try:
|
||||||
|
os.remove(file_path)
|
||||||
|
deleted_count += 1
|
||||||
|
logger.info(f"Deleted old file: {file_path}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to delete old file {file_path}: {e}")
|
||||||
|
|
||||||
|
return deleted_count
|
||||||
|
|
||||||
|
def create_temp_file(self, content: bytes, suffix: str = "") -> str:
|
||||||
|
"""Create a temporary file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
content: File content
|
||||||
|
suffix: File suffix/extension
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to temporary file
|
||||||
|
"""
|
||||||
|
temp_dir = self.base_path / "temp"
|
||||||
|
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
temp_filename = f"tmp_{uuid.uuid4().hex}{suffix}"
|
||||||
|
temp_path = temp_dir / temp_filename
|
||||||
|
|
||||||
|
with open(temp_path, 'wb') as f:
|
||||||
|
f.write(content)
|
||||||
|
|
||||||
|
return str(temp_path.relative_to(self.base_path))
|
||||||
|
|
||||||
|
def get_file_hash(self, file_path: str, algorithm: str = 'sha256') -> str:
|
||||||
|
"""Calculate hash of a file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path relative to base_path
|
||||||
|
algorithm: Hash algorithm to use
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Hex digest of file hash
|
||||||
|
"""
|
||||||
|
full_path = self.base_path / file_path
|
||||||
|
|
||||||
|
if not full_path.exists():
|
||||||
|
raise FileNotFoundError(f"File not found: {file_path}")
|
||||||
|
|
||||||
|
hash_obj = hashlib.new(algorithm)
|
||||||
|
|
||||||
|
with open(full_path, 'rb') as f:
|
||||||
|
while chunk := f.read(8192):
|
||||||
|
hash_obj.update(chunk)
|
||||||
|
|
||||||
|
return hash_obj.hexdigest()
|
||||||
405
backend/src/utils/pdf_utils.py
Normal file
405
backend/src/utils/pdf_utils.py
Normal file
@ -0,0 +1,405 @@
|
|||||||
|
"""
|
||||||
|
PDF utilities for template handling and field extraction
|
||||||
|
"""
|
||||||
|
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
from typing import List, Dict, Any, Optional
|
||||||
|
from PyPDF2 import PdfReader, PdfWriter
|
||||||
|
from reportlab.pdfgen import canvas
|
||||||
|
from reportlab.lib.pagesizes import A4
|
||||||
|
from reportlab.lib.utils import ImageReader
|
||||||
|
from reportlab.pdfbase import pdfmetrics
|
||||||
|
from reportlab.pdfbase.ttfonts import TTFont
|
||||||
|
import fitz # PyMuPDF
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def extract_pdf_fields(pdf_content: bytes) -> List[str]:
|
||||||
|
"""
|
||||||
|
Extract form field names from a PDF template
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_content: PDF file content as bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of field names found in the PDF
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
|
||||||
|
fields = []
|
||||||
|
if '/AcroForm' in pdf_reader.trailer['/Root']:
|
||||||
|
form = pdf_reader.trailer['/Root']['/AcroForm']
|
||||||
|
if '/Fields' in form:
|
||||||
|
for field_ref in form['/Fields']:
|
||||||
|
field = field_ref.get_object()
|
||||||
|
if '/T' in field:
|
||||||
|
field_name = field['/T']
|
||||||
|
if isinstance(field_name, bytes):
|
||||||
|
field_name = field_name.decode('utf-8')
|
||||||
|
fields.append(str(field_name))
|
||||||
|
|
||||||
|
return fields
|
||||||
|
except Exception as e:
|
||||||
|
raise ValueError(f"Failed to extract PDF fields: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_pdf_template(pdf_content: bytes) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Validate a PDF template and extract metadata
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_content: PDF file content as bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with validation results and metadata
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"valid": True,
|
||||||
|
"page_count": len(pdf_reader.pages),
|
||||||
|
"has_form": False,
|
||||||
|
"fields": [],
|
||||||
|
"metadata": {}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Extract metadata
|
||||||
|
if pdf_reader.metadata:
|
||||||
|
result["metadata"] = {
|
||||||
|
"title": pdf_reader.metadata.get('/Title', ''),
|
||||||
|
"author": pdf_reader.metadata.get('/Author', ''),
|
||||||
|
"subject": pdf_reader.metadata.get('/Subject', ''),
|
||||||
|
"creator": pdf_reader.metadata.get('/Creator', ''),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check for form fields
|
||||||
|
if '/AcroForm' in pdf_reader.trailer['/Root']:
|
||||||
|
result["has_form"] = True
|
||||||
|
result["fields"] = extract_pdf_fields(pdf_content)
|
||||||
|
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
return {
|
||||||
|
"valid": False,
|
||||||
|
"error": str(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def fill_pdf_template(
|
||||||
|
template_content: bytes,
|
||||||
|
field_mapping: Dict[str, str],
|
||||||
|
field_data: Dict[str, Any],
|
||||||
|
output_path: Optional[str] = None
|
||||||
|
) -> bytes:
|
||||||
|
"""
|
||||||
|
Fill a PDF template with data
|
||||||
|
|
||||||
|
Args:
|
||||||
|
template_content: PDF template content as bytes
|
||||||
|
field_mapping: Mapping from PDF field names to data field IDs
|
||||||
|
field_data: Data to fill in the fields
|
||||||
|
output_path: Optional path to save the filled PDF
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Filled PDF content as bytes
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Read the template
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(template_content))
|
||||||
|
pdf_writer = PdfWriter()
|
||||||
|
|
||||||
|
# Copy all pages and fill form fields
|
||||||
|
for page_num in range(len(pdf_reader.pages)):
|
||||||
|
page = pdf_reader.pages[page_num]
|
||||||
|
pdf_writer.add_page(page)
|
||||||
|
|
||||||
|
# Fill form fields if they exist
|
||||||
|
if '/AcroForm' in pdf_reader.trailer['/Root']:
|
||||||
|
pdf_writer.update_page_form_field_values(
|
||||||
|
pdf_writer.pages[0],
|
||||||
|
{pdf_field: str(field_data.get(data_field, ''))
|
||||||
|
for pdf_field, data_field in field_mapping.items()
|
||||||
|
if data_field in field_data}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Write to bytes or file
|
||||||
|
output_buffer = io.BytesIO()
|
||||||
|
pdf_writer.write(output_buffer)
|
||||||
|
pdf_content = output_buffer.getvalue()
|
||||||
|
|
||||||
|
if output_path:
|
||||||
|
with open(output_path, 'wb') as f:
|
||||||
|
f.write(pdf_content)
|
||||||
|
|
||||||
|
return pdf_content
|
||||||
|
except Exception as e:
|
||||||
|
raise ValueError(f"Failed to fill PDF template: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
def create_pdf_from_data(
|
||||||
|
data: Dict[str, Any],
|
||||||
|
title: str = "Application",
|
||||||
|
output_path: Optional[str] = None
|
||||||
|
) -> bytes:
|
||||||
|
"""
|
||||||
|
Create a PDF document from application data (when no template is available)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: Application data
|
||||||
|
title: Document title
|
||||||
|
output_path: Optional path to save the PDF
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
PDF content as bytes
|
||||||
|
"""
|
||||||
|
buffer = io.BytesIO()
|
||||||
|
c = canvas.Canvas(buffer, pagesize=A4)
|
||||||
|
width, height = A4
|
||||||
|
|
||||||
|
# Set up fonts
|
||||||
|
try:
|
||||||
|
# Try to use a custom font if available
|
||||||
|
font_path = Path(__file__).parent.parent / "assets" / "fonts" / "Roboto-Regular.ttf"
|
||||||
|
if font_path.exists():
|
||||||
|
pdfmetrics.registerFont(TTFont('Roboto', str(font_path)))
|
||||||
|
c.setFont('Roboto', 12)
|
||||||
|
else:
|
||||||
|
c.setFont('Helvetica', 12)
|
||||||
|
except:
|
||||||
|
c.setFont('Helvetica', 12)
|
||||||
|
|
||||||
|
# Title
|
||||||
|
c.setFont('Helvetica-Bold', 16)
|
||||||
|
c.drawString(50, height - 50, title)
|
||||||
|
|
||||||
|
# Content
|
||||||
|
y_position = height - 100
|
||||||
|
c.setFont('Helvetica', 10)
|
||||||
|
|
||||||
|
for key, value in data.items():
|
||||||
|
if y_position < 100:
|
||||||
|
c.showPage()
|
||||||
|
y_position = height - 50
|
||||||
|
c.setFont('Helvetica', 10)
|
||||||
|
|
||||||
|
# Format key
|
||||||
|
display_key = key.replace('_', ' ').title()
|
||||||
|
|
||||||
|
# Handle different value types
|
||||||
|
if isinstance(value, (list, dict)):
|
||||||
|
c.setFont('Helvetica-Bold', 10)
|
||||||
|
c.drawString(50, y_position, f"{display_key}:")
|
||||||
|
y_position -= 15
|
||||||
|
c.setFont('Helvetica', 10)
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
for item in value:
|
||||||
|
if y_position < 100:
|
||||||
|
c.showPage()
|
||||||
|
y_position = height - 50
|
||||||
|
c.drawString(70, y_position, f"• {str(item)}")
|
||||||
|
y_position -= 15
|
||||||
|
else:
|
||||||
|
for sub_key, sub_value in value.items():
|
||||||
|
if y_position < 100:
|
||||||
|
c.showPage()
|
||||||
|
y_position = height - 50
|
||||||
|
c.drawString(70, y_position, f"{sub_key}: {str(sub_value)}")
|
||||||
|
y_position -= 15
|
||||||
|
else:
|
||||||
|
# Simple key-value pair
|
||||||
|
text = f"{display_key}: {str(value)}"
|
||||||
|
# Handle long text
|
||||||
|
if len(text) > 80:
|
||||||
|
lines = [text[i:i+80] for i in range(0, len(text), 80)]
|
||||||
|
for line in lines:
|
||||||
|
if y_position < 100:
|
||||||
|
c.showPage()
|
||||||
|
y_position = height - 50
|
||||||
|
c.drawString(50, y_position, line)
|
||||||
|
y_position -= 15
|
||||||
|
else:
|
||||||
|
c.drawString(50, y_position, text)
|
||||||
|
y_position -= 15
|
||||||
|
|
||||||
|
y_position -= 5 # Extra spacing between fields
|
||||||
|
|
||||||
|
# Footer
|
||||||
|
c.setFont('Helvetica-Oblique', 8)
|
||||||
|
c.drawString(50, 30, f"Generated on {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||||
|
|
||||||
|
c.save()
|
||||||
|
pdf_content = buffer.getvalue()
|
||||||
|
|
||||||
|
if output_path:
|
||||||
|
with open(output_path, 'wb') as f:
|
||||||
|
f.write(pdf_content)
|
||||||
|
|
||||||
|
return pdf_content
|
||||||
|
|
||||||
|
|
||||||
|
def merge_pdfs(pdf_contents: List[bytes], output_path: Optional[str] = None) -> bytes:
|
||||||
|
"""
|
||||||
|
Merge multiple PDF documents
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_contents: List of PDF contents as bytes
|
||||||
|
output_path: Optional path to save the merged PDF
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Merged PDF content as bytes
|
||||||
|
"""
|
||||||
|
pdf_writer = PdfWriter()
|
||||||
|
|
||||||
|
for pdf_content in pdf_contents:
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
for page in pdf_reader.pages:
|
||||||
|
pdf_writer.add_page(page)
|
||||||
|
|
||||||
|
output_buffer = io.BytesIO()
|
||||||
|
pdf_writer.write(output_buffer)
|
||||||
|
merged_content = output_buffer.getvalue()
|
||||||
|
|
||||||
|
if output_path:
|
||||||
|
with open(output_path, 'wb') as f:
|
||||||
|
f.write(merged_content)
|
||||||
|
|
||||||
|
return merged_content
|
||||||
|
|
||||||
|
|
||||||
|
def add_watermark_to_pdf(
|
||||||
|
pdf_content: bytes,
|
||||||
|
watermark_text: str,
|
||||||
|
output_path: Optional[str] = None
|
||||||
|
) -> bytes:
|
||||||
|
"""
|
||||||
|
Add a watermark to a PDF document
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_content: PDF content as bytes
|
||||||
|
watermark_text: Text to use as watermark
|
||||||
|
output_path: Optional path to save the watermarked PDF
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Watermarked PDF content as bytes
|
||||||
|
"""
|
||||||
|
# Create watermark PDF
|
||||||
|
watermark_buffer = io.BytesIO()
|
||||||
|
c = canvas.Canvas(watermark_buffer, pagesize=A4)
|
||||||
|
width, height = A4
|
||||||
|
|
||||||
|
c.setFont('Helvetica', 50)
|
||||||
|
c.setFillAlpha(0.3)
|
||||||
|
c.saveState()
|
||||||
|
c.translate(width/2, height/2)
|
||||||
|
c.rotate(45)
|
||||||
|
c.drawCentredString(0, 0, watermark_text)
|
||||||
|
c.restoreState()
|
||||||
|
c.save()
|
||||||
|
|
||||||
|
# Read original and watermark PDFs
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
watermark_reader = PdfReader(watermark_buffer)
|
||||||
|
watermark_page = watermark_reader.pages[0]
|
||||||
|
|
||||||
|
# Apply watermark to all pages
|
||||||
|
pdf_writer = PdfWriter()
|
||||||
|
for page in pdf_reader.pages:
|
||||||
|
page.merge_page(watermark_page)
|
||||||
|
pdf_writer.add_page(page)
|
||||||
|
|
||||||
|
# Write output
|
||||||
|
output_buffer = io.BytesIO()
|
||||||
|
pdf_writer.write(output_buffer)
|
||||||
|
watermarked_content = output_buffer.getvalue()
|
||||||
|
|
||||||
|
if output_path:
|
||||||
|
with open(output_path, 'wb') as f:
|
||||||
|
f.write(watermarked_content)
|
||||||
|
|
||||||
|
return watermarked_content
|
||||||
|
|
||||||
|
|
||||||
|
def extract_text_from_pdf(pdf_content: bytes) -> str:
|
||||||
|
"""
|
||||||
|
Extract text content from a PDF
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_content: PDF content as bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Extracted text as string
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pdf_document = fitz.open(stream=pdf_content, filetype="pdf")
|
||||||
|
text = ""
|
||||||
|
|
||||||
|
for page_num in range(pdf_document.page_count):
|
||||||
|
page = pdf_document[page_num]
|
||||||
|
text += page.get_text()
|
||||||
|
|
||||||
|
pdf_document.close()
|
||||||
|
return text
|
||||||
|
except Exception as e:
|
||||||
|
# Fallback to PyPDF2
|
||||||
|
try:
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
text = ""
|
||||||
|
for page in pdf_reader.pages:
|
||||||
|
text += page.extract_text()
|
||||||
|
return text
|
||||||
|
except:
|
||||||
|
raise ValueError(f"Failed to extract text from PDF: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_pdf_info(pdf_content: bytes) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get information about a PDF document
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pdf_content: PDF content as bytes
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with PDF information
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pdf_reader = PdfReader(io.BytesIO(pdf_content))
|
||||||
|
|
||||||
|
info = {
|
||||||
|
"page_count": len(pdf_reader.pages),
|
||||||
|
"has_forms": '/AcroForm' in pdf_reader.trailer['/Root'],
|
||||||
|
"is_encrypted": pdf_reader.is_encrypted,
|
||||||
|
"metadata": {}
|
||||||
|
}
|
||||||
|
|
||||||
|
if pdf_reader.metadata:
|
||||||
|
info["metadata"] = {
|
||||||
|
"title": pdf_reader.metadata.get('/Title', ''),
|
||||||
|
"author": pdf_reader.metadata.get('/Author', ''),
|
||||||
|
"subject": pdf_reader.metadata.get('/Subject', ''),
|
||||||
|
"creator": pdf_reader.metadata.get('/Creator', ''),
|
||||||
|
"producer": pdf_reader.metadata.get('/Producer', ''),
|
||||||
|
"creation_date": str(pdf_reader.metadata.get('/CreationDate', '')),
|
||||||
|
"modification_date": str(pdf_reader.metadata.get('/ModDate', '')),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get page sizes
|
||||||
|
page_sizes = []
|
||||||
|
for page in pdf_reader.pages:
|
||||||
|
mediabox = page.mediabox
|
||||||
|
page_sizes.append({
|
||||||
|
"width": float(mediabox.width),
|
||||||
|
"height": float(mediabox.height)
|
||||||
|
})
|
||||||
|
info["page_sizes"] = page_sizes
|
||||||
|
|
||||||
|
# Get form fields if present
|
||||||
|
if info["has_forms"]:
|
||||||
|
info["form_fields"] = extract_pdf_fields(pdf_content)
|
||||||
|
|
||||||
|
return info
|
||||||
|
except Exception as e:
|
||||||
|
raise ValueError(f"Failed to get PDF info: {str(e)}")
|
||||||
463
backend/src/utils/validators.py
Normal file
463
backend/src/utils/validators.py
Normal file
@ -0,0 +1,463 @@
|
|||||||
|
"""
|
||||||
|
Field validation utilities for dynamic applications
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Optional, Dict
|
||||||
|
from email_validator import validate_email, EmailNotValidError
|
||||||
|
|
||||||
|
from ..models.application_type import ApplicationField, FieldType
|
||||||
|
|
||||||
|
|
||||||
|
def validate_field_value(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""
|
||||||
|
Validate a field value against its definition and rules
|
||||||
|
|
||||||
|
Args:
|
||||||
|
value: The value to validate
|
||||||
|
field: The field definition
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if valid
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If validation fails
|
||||||
|
"""
|
||||||
|
# Check if required
|
||||||
|
if field.is_required and (value is None or value == ""):
|
||||||
|
raise ValueError(f"Field '{field.name}' is required")
|
||||||
|
|
||||||
|
# If not required and empty, that's okay
|
||||||
|
if value is None or value == "":
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Type-specific validation
|
||||||
|
if field.field_type == FieldType.TEXT_SHORT:
|
||||||
|
return validate_text_short(value, field)
|
||||||
|
elif field.field_type == FieldType.TEXT_LONG:
|
||||||
|
return validate_text_long(value, field)
|
||||||
|
elif field.field_type == FieldType.OPTIONS:
|
||||||
|
return validate_options(value, field)
|
||||||
|
elif field.field_type == FieldType.YESNO:
|
||||||
|
return validate_yesno(value, field)
|
||||||
|
elif field.field_type == FieldType.MAIL:
|
||||||
|
return validate_mail(value, field)
|
||||||
|
elif field.field_type == FieldType.DATE:
|
||||||
|
return validate_date(value, field)
|
||||||
|
elif field.field_type == FieldType.DATETIME:
|
||||||
|
return validate_datetime(value, field)
|
||||||
|
elif field.field_type == FieldType.AMOUNT:
|
||||||
|
return validate_amount(value, field)
|
||||||
|
elif field.field_type == FieldType.CURRENCY_EUR:
|
||||||
|
return validate_currency_eur(value, field)
|
||||||
|
elif field.field_type == FieldType.NUMBER:
|
||||||
|
return validate_number(value, field)
|
||||||
|
elif field.field_type == FieldType.PHONE:
|
||||||
|
return validate_phone(value, field)
|
||||||
|
elif field.field_type == FieldType.URL:
|
||||||
|
return validate_url(value, field)
|
||||||
|
elif field.field_type == FieldType.CHECKBOX:
|
||||||
|
return validate_checkbox(value, field)
|
||||||
|
elif field.field_type == FieldType.RADIO:
|
||||||
|
return validate_radio(value, field)
|
||||||
|
elif field.field_type == FieldType.SELECT:
|
||||||
|
return validate_select(value, field)
|
||||||
|
elif field.field_type == FieldType.MULTISELECT:
|
||||||
|
return validate_multiselect(value, field)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_text_short(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate short text field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a string")
|
||||||
|
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
|
||||||
|
# Check max length (default 255 for short text)
|
||||||
|
max_length = rules.get("max_length", 255)
|
||||||
|
if len(value) > max_length:
|
||||||
|
raise ValueError(f"Field '{field.name}' exceeds maximum length of {max_length}")
|
||||||
|
|
||||||
|
# Check min length
|
||||||
|
min_length = rules.get("min_length")
|
||||||
|
if min_length and len(value) < min_length:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be at least {min_length} characters")
|
||||||
|
|
||||||
|
# Check pattern
|
||||||
|
pattern = rules.get("pattern")
|
||||||
|
if pattern and not re.match(pattern, value):
|
||||||
|
raise ValueError(f"Field '{field.name}' does not match required pattern")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_text_long(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate long text field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a string")
|
||||||
|
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
|
||||||
|
# Check max length (default 10000 for long text)
|
||||||
|
max_length = rules.get("max_length", 10000)
|
||||||
|
if len(value) > max_length:
|
||||||
|
raise ValueError(f"Field '{field.name}' exceeds maximum length of {max_length}")
|
||||||
|
|
||||||
|
# Check min length
|
||||||
|
min_length = rules.get("min_length")
|
||||||
|
if min_length and len(value) < min_length:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be at least {min_length} characters")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_options(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate options field"""
|
||||||
|
if not field.options:
|
||||||
|
raise ValueError(f"Field '{field.name}' has no options defined")
|
||||||
|
|
||||||
|
if value not in field.options:
|
||||||
|
raise ValueError(f"Field '{field.name}' value must be one of: {', '.join(field.options)}")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_yesno(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate yes/no field"""
|
||||||
|
if not isinstance(value, bool):
|
||||||
|
# Also accept "true"/"false", "yes"/"no", 1/0
|
||||||
|
if isinstance(value, str):
|
||||||
|
if value.lower() in ["true", "yes", "1"]:
|
||||||
|
return True
|
||||||
|
elif value.lower() in ["false", "no", "0"]:
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be yes/no")
|
||||||
|
elif isinstance(value, int):
|
||||||
|
if value not in [0, 1]:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be yes/no")
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be yes/no")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_mail(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate email field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a string")
|
||||||
|
|
||||||
|
try:
|
||||||
|
validate_email(value)
|
||||||
|
except EmailNotValidError as e:
|
||||||
|
raise ValueError(f"Field '{field.name}' is not a valid email address: {str(e)}")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_date(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate date field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a date string")
|
||||||
|
|
||||||
|
# Accept various date formats
|
||||||
|
date_formats = [
|
||||||
|
"%Y-%m-%d",
|
||||||
|
"%d.%m.%Y",
|
||||||
|
"%d/%m/%Y",
|
||||||
|
"%Y/%m/%d"
|
||||||
|
]
|
||||||
|
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
custom_format = rules.get("date_format")
|
||||||
|
if custom_format:
|
||||||
|
date_formats = [custom_format] + date_formats
|
||||||
|
|
||||||
|
parsed_date = None
|
||||||
|
for fmt in date_formats:
|
||||||
|
try:
|
||||||
|
parsed_date = datetime.strptime(value, fmt)
|
||||||
|
break
|
||||||
|
except ValueError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not parsed_date:
|
||||||
|
raise ValueError(f"Field '{field.name}' is not a valid date")
|
||||||
|
|
||||||
|
# Check min/max dates
|
||||||
|
min_date = rules.get("min_date")
|
||||||
|
if min_date:
|
||||||
|
min_dt = datetime.strptime(min_date, "%Y-%m-%d")
|
||||||
|
if parsed_date.date() < min_dt.date():
|
||||||
|
raise ValueError(f"Field '{field.name}' must be after {min_date}")
|
||||||
|
|
||||||
|
max_date = rules.get("max_date")
|
||||||
|
if max_date:
|
||||||
|
max_dt = datetime.strptime(max_date, "%Y-%m-%d")
|
||||||
|
if parsed_date.date() > max_dt.date():
|
||||||
|
raise ValueError(f"Field '{field.name}' must be before {max_date}")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_datetime(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate datetime field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a datetime string")
|
||||||
|
|
||||||
|
# Accept ISO format primarily
|
||||||
|
try:
|
||||||
|
datetime.fromisoformat(value)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError(f"Field '{field.name}' is not a valid datetime (use ISO format)")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_amount(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate amount field"""
|
||||||
|
try:
|
||||||
|
amount = float(value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a number")
|
||||||
|
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
|
||||||
|
# Check min/max
|
||||||
|
min_amount = rules.get("min", 0)
|
||||||
|
if amount < min_amount:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be at least {min_amount}")
|
||||||
|
|
||||||
|
max_amount = rules.get("max")
|
||||||
|
if max_amount and amount > max_amount:
|
||||||
|
raise ValueError(f"Field '{field.name}' must not exceed {max_amount}")
|
||||||
|
|
||||||
|
# Check decimal places
|
||||||
|
decimal_places = rules.get("decimal_places", 2)
|
||||||
|
if decimal_places is not None:
|
||||||
|
decimal_str = str(amount).split('.')
|
||||||
|
if len(decimal_str) > 1 and len(decimal_str[1]) > decimal_places:
|
||||||
|
raise ValueError(f"Field '{field.name}' must have at most {decimal_places} decimal places")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_currency_eur(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate EUR currency field"""
|
||||||
|
# Same as amount but with EUR-specific validation
|
||||||
|
result = validate_amount(value, field)
|
||||||
|
|
||||||
|
# Additional EUR-specific checks if needed
|
||||||
|
try:
|
||||||
|
amount = float(value)
|
||||||
|
if amount < 0:
|
||||||
|
raise ValueError(f"Field '{field.name}' cannot be negative")
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
pass # Already handled in validate_amount
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def validate_number(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate number field"""
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
integer_only = rules.get("integer_only", False)
|
||||||
|
|
||||||
|
if integer_only:
|
||||||
|
try:
|
||||||
|
num = int(value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be an integer")
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
num = float(value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a number")
|
||||||
|
|
||||||
|
# Check min/max
|
||||||
|
min_val = rules.get("min")
|
||||||
|
if min_val is not None and num < min_val:
|
||||||
|
raise ValueError(f"Field '{field.name}' must be at least {min_val}")
|
||||||
|
|
||||||
|
max_val = rules.get("max")
|
||||||
|
if max_val is not None and num > max_val:
|
||||||
|
raise ValueError(f"Field '{field.name}' must not exceed {max_val}")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_phone(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate phone number field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a string")
|
||||||
|
|
||||||
|
# Remove common formatting characters
|
||||||
|
cleaned = re.sub(r'[\s\-\(\)\.]+', '', value)
|
||||||
|
|
||||||
|
# Check if it looks like a phone number
|
||||||
|
if not re.match(r'^\+?[0-9]{7,15}$', cleaned):
|
||||||
|
raise ValueError(f"Field '{field.name}' is not a valid phone number")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_url(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate URL field"""
|
||||||
|
if not isinstance(value, str):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a string")
|
||||||
|
|
||||||
|
# Basic URL validation
|
||||||
|
url_pattern = re.compile(
|
||||||
|
r'^https?://' # http:// or https://
|
||||||
|
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' # domain...
|
||||||
|
r'localhost|' # localhost...
|
||||||
|
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
|
||||||
|
r'(?::\d+)?' # optional port
|
||||||
|
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
|
||||||
|
|
||||||
|
if not url_pattern.match(value):
|
||||||
|
raise ValueError(f"Field '{field.name}' is not a valid URL")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_checkbox(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate checkbox field"""
|
||||||
|
return validate_yesno(value, field)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_radio(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate radio field"""
|
||||||
|
return validate_options(value, field)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_select(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate select field"""
|
||||||
|
return validate_options(value, field)
|
||||||
|
|
||||||
|
|
||||||
|
def validate_multiselect(value: Any, field: ApplicationField) -> bool:
|
||||||
|
"""Validate multiselect field"""
|
||||||
|
if not isinstance(value, list):
|
||||||
|
raise ValueError(f"Field '{field.name}' must be a list")
|
||||||
|
|
||||||
|
if not field.options:
|
||||||
|
raise ValueError(f"Field '{field.name}' has no options defined")
|
||||||
|
|
||||||
|
for item in value:
|
||||||
|
if item not in field.options:
|
||||||
|
raise ValueError(f"Field '{field.name}' contains invalid option: {item}")
|
||||||
|
|
||||||
|
rules = field.validation_rules or {}
|
||||||
|
|
||||||
|
# Check min/max selections
|
||||||
|
min_selections = rules.get("min_selections")
|
||||||
|
if min_selections and len(value) < min_selections:
|
||||||
|
raise ValueError(f"Field '{field.name}' requires at least {min_selections} selections")
|
||||||
|
|
||||||
|
max_selections = rules.get("max_selections")
|
||||||
|
if max_selections and len(value) > max_selections:
|
||||||
|
raise ValueError(f"Field '{field.name}' allows at most {max_selections} selections")
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_display_conditions(
|
||||||
|
field: ApplicationField,
|
||||||
|
form_data: Dict[str, Any]
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Check if a field should be displayed based on conditions
|
||||||
|
|
||||||
|
Args:
|
||||||
|
field: The field to check
|
||||||
|
form_data: Current form data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if field should be displayed
|
||||||
|
"""
|
||||||
|
if not field.display_conditions:
|
||||||
|
return True
|
||||||
|
|
||||||
|
conditions = field.display_conditions
|
||||||
|
|
||||||
|
# Support simple conditions like:
|
||||||
|
# {"field": "other_field", "operator": "equals", "value": "some_value"}
|
||||||
|
# or {"and": [...], "or": [...]} for complex conditions
|
||||||
|
|
||||||
|
return evaluate_condition(conditions, form_data)
|
||||||
|
|
||||||
|
|
||||||
|
def evaluate_condition(condition: Dict[str, Any], form_data: Dict[str, Any]) -> bool:
|
||||||
|
"""
|
||||||
|
Evaluate a display condition
|
||||||
|
|
||||||
|
Args:
|
||||||
|
condition: Condition definition
|
||||||
|
form_data: Current form data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if condition is met
|
||||||
|
"""
|
||||||
|
if "and" in condition:
|
||||||
|
# All conditions must be true
|
||||||
|
return all(evaluate_condition(c, form_data) for c in condition["and"])
|
||||||
|
|
||||||
|
if "or" in condition:
|
||||||
|
# At least one condition must be true
|
||||||
|
return any(evaluate_condition(c, form_data) for c in condition["or"])
|
||||||
|
|
||||||
|
if "not" in condition:
|
||||||
|
# Negate the condition
|
||||||
|
return not evaluate_condition(condition["not"], form_data)
|
||||||
|
|
||||||
|
# Simple condition
|
||||||
|
if "field" in condition:
|
||||||
|
field_id = condition["field"]
|
||||||
|
operator = condition.get("operator", "equals")
|
||||||
|
expected_value = condition.get("value")
|
||||||
|
|
||||||
|
actual_value = form_data.get(field_id)
|
||||||
|
|
||||||
|
if operator == "equals":
|
||||||
|
return actual_value == expected_value
|
||||||
|
elif operator == "not_equals":
|
||||||
|
return actual_value != expected_value
|
||||||
|
elif operator == "in":
|
||||||
|
return actual_value in expected_value
|
||||||
|
elif operator == "not_in":
|
||||||
|
return actual_value not in expected_value
|
||||||
|
elif operator == "contains":
|
||||||
|
return expected_value in str(actual_value)
|
||||||
|
elif operator == "not_contains":
|
||||||
|
return expected_value not in str(actual_value)
|
||||||
|
elif operator == "empty":
|
||||||
|
return not actual_value
|
||||||
|
elif operator == "not_empty":
|
||||||
|
return bool(actual_value)
|
||||||
|
elif operator == "greater_than":
|
||||||
|
try:
|
||||||
|
return float(actual_value) > float(expected_value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
elif operator == "less_than":
|
||||||
|
try:
|
||||||
|
return float(actual_value) < float(expected_value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
elif operator == "greater_or_equal":
|
||||||
|
try:
|
||||||
|
return float(actual_value) >= float(expected_value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
elif operator == "less_or_equal":
|
||||||
|
try:
|
||||||
|
return float(actual_value) <= float(expected_value)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Unknown condition format, default to true
|
||||||
|
return True
|
||||||
@ -25,7 +25,7 @@ services:
|
|||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 6
|
retries: 6
|
||||||
ports:
|
ports:
|
||||||
- "3306:3306"
|
- "3307:3306"
|
||||||
volumes:
|
volumes:
|
||||||
- db_data:/var/lib/mysql
|
- db_data:/var/lib/mysql
|
||||||
networks:
|
networks:
|
||||||
@ -188,7 +188,7 @@ services:
|
|||||||
ADMINER_DEFAULT_SERVER: db
|
ADMINER_DEFAULT_SERVER: db
|
||||||
ADMINER_DESIGN: pepa-linha-dark
|
ADMINER_DESIGN: pepa-linha-dark
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080"
|
- "8081:8080"
|
||||||
networks:
|
networks:
|
||||||
- stupa_network
|
- stupa_network
|
||||||
|
|
||||||
|
|||||||
478
frontend/src/api/dynamicClient.ts
Normal file
478
frontend/src/api/dynamicClient.ts
Normal file
@ -0,0 +1,478 @@
|
|||||||
|
// API client for dynamic application system
|
||||||
|
|
||||||
|
import axios, { AxiosInstance, AxiosError } from 'axios';
|
||||||
|
import {
|
||||||
|
ApplicationType,
|
||||||
|
DynamicApplication,
|
||||||
|
ApplicationListItem,
|
||||||
|
CreateApplicationRequest,
|
||||||
|
UpdateApplicationRequest,
|
||||||
|
CreateApplicationResponse,
|
||||||
|
StatusTransitionRequest,
|
||||||
|
ApplicationHistoryEntry,
|
||||||
|
ApplicationApproval,
|
||||||
|
ApplicationSearchParams,
|
||||||
|
ApplicationTypeCreateRequest,
|
||||||
|
ApplicationTypeUpdateRequest,
|
||||||
|
PDFTemplateUploadResponse,
|
||||||
|
ApiResponse,
|
||||||
|
ApiError,
|
||||||
|
User,
|
||||||
|
Session,
|
||||||
|
LoginRequest,
|
||||||
|
} from '../types/dynamic';
|
||||||
|
|
||||||
|
class DynamicApiClient {
|
||||||
|
private client: AxiosInstance;
|
||||||
|
private baseURL: string;
|
||||||
|
private accessToken: string | null = null;
|
||||||
|
|
||||||
|
constructor(baseURL: string = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8000') {
|
||||||
|
this.baseURL = baseURL;
|
||||||
|
this.client = axios.create({
|
||||||
|
baseURL: `${baseURL}/api`,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
withCredentials: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Request interceptor to add auth token
|
||||||
|
this.client.interceptors.request.use(
|
||||||
|
(config) => {
|
||||||
|
const token = this.getAccessToken();
|
||||||
|
if (token) {
|
||||||
|
config.headers.Authorization = `Bearer ${token}`;
|
||||||
|
}
|
||||||
|
return config;
|
||||||
|
},
|
||||||
|
(error) => Promise.reject(error)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Response interceptor for error handling
|
||||||
|
this.client.interceptors.response.use(
|
||||||
|
(response) => response,
|
||||||
|
async (error: AxiosError) => {
|
||||||
|
if (error.response?.status === 401) {
|
||||||
|
// Try to refresh token
|
||||||
|
const refreshed = await this.refreshToken();
|
||||||
|
if (refreshed) {
|
||||||
|
// Retry original request
|
||||||
|
const originalRequest = error.config;
|
||||||
|
if (originalRequest) {
|
||||||
|
return this.client(originalRequest);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Clear token and redirect to login
|
||||||
|
this.clearAuth();
|
||||||
|
window.location.href = '/login';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auth methods
|
||||||
|
private getAccessToken(): string | null {
|
||||||
|
if (this.accessToken) {
|
||||||
|
return this.accessToken;
|
||||||
|
}
|
||||||
|
const stored = localStorage.getItem('access_token');
|
||||||
|
if (stored) {
|
||||||
|
this.accessToken = stored;
|
||||||
|
}
|
||||||
|
return this.accessToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
private setAccessToken(token: string): void {
|
||||||
|
this.accessToken = token;
|
||||||
|
localStorage.setItem('access_token', token);
|
||||||
|
}
|
||||||
|
|
||||||
|
private clearAuth(): void {
|
||||||
|
this.accessToken = null;
|
||||||
|
localStorage.removeItem('access_token');
|
||||||
|
localStorage.removeItem('refresh_token');
|
||||||
|
localStorage.removeItem('user');
|
||||||
|
}
|
||||||
|
|
||||||
|
private async refreshToken(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const refreshToken = localStorage.getItem('refresh_token');
|
||||||
|
if (!refreshToken) return false;
|
||||||
|
|
||||||
|
const response = await axios.post(`${this.baseURL}/api/auth/refresh`, {
|
||||||
|
refresh_token: refreshToken,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.data.access_token) {
|
||||||
|
this.setAccessToken(response.data.access_token);
|
||||||
|
if (response.data.refresh_token) {
|
||||||
|
localStorage.setItem('refresh_token', response.data.refresh_token);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication
|
||||||
|
async login(request: LoginRequest): Promise<ApiResponse<Session>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.post<Session>('/auth/login', request);
|
||||||
|
const session = response.data;
|
||||||
|
|
||||||
|
this.setAccessToken(session.access_token);
|
||||||
|
if (session.refresh_token) {
|
||||||
|
localStorage.setItem('refresh_token', session.refresh_token);
|
||||||
|
}
|
||||||
|
localStorage.setItem('user', JSON.stringify(session.user));
|
||||||
|
|
||||||
|
return { success: true, data: session };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async logout(): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.client.post('/auth/logout');
|
||||||
|
} finally {
|
||||||
|
this.clearAuth();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCurrentUser(): Promise<ApiResponse<User>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.get<User>('/auth/me');
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Application Types
|
||||||
|
async getApplicationTypes(includeInactive = false): Promise<ApiResponse<ApplicationType[]>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.get<ApplicationType[]>('/application-types', {
|
||||||
|
params: { include_inactive: includeInactive },
|
||||||
|
});
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getApplicationType(typeId: string): Promise<ApiResponse<ApplicationType>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.get<ApplicationType>(`/application-types/${typeId}`);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createApplicationType(
|
||||||
|
data: ApplicationTypeCreateRequest,
|
||||||
|
pdfTemplate?: File
|
||||||
|
): Promise<ApiResponse<ApplicationType>> {
|
||||||
|
try {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('type_data', JSON.stringify(data));
|
||||||
|
if (pdfTemplate) {
|
||||||
|
formData.append('pdf_template', pdfTemplate);
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await this.client.post<ApplicationType>('/application-types', formData, {
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'multipart/form-data',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateApplicationType(
|
||||||
|
typeId: string,
|
||||||
|
data: ApplicationTypeUpdateRequest
|
||||||
|
): Promise<ApiResponse<ApplicationType>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.put<ApplicationType>(
|
||||||
|
`/application-types/${typeId}`,
|
||||||
|
data
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async uploadPdfTemplate(
|
||||||
|
typeId: string,
|
||||||
|
pdfFile: File
|
||||||
|
): Promise<ApiResponse<PDFTemplateUploadResponse>> {
|
||||||
|
try {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('pdf_template', pdfFile);
|
||||||
|
|
||||||
|
const response = await this.client.post<PDFTemplateUploadResponse>(
|
||||||
|
`/application-types/${typeId}/pdf-template`,
|
||||||
|
formData,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'multipart/form-data',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteApplicationType(typeId: string): Promise<ApiResponse<{ message: string }>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.delete<{ message: string }>(
|
||||||
|
`/application-types/${typeId}`
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Applications
|
||||||
|
async getApplications(params?: ApplicationSearchParams): Promise<ApiResponse<ApplicationListItem[]>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.get<ApplicationListItem[]>('/applications', { params });
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getApplication(
|
||||||
|
applicationId: string,
|
||||||
|
accessKey?: string
|
||||||
|
): Promise<ApiResponse<DynamicApplication>> {
|
||||||
|
try {
|
||||||
|
const params = accessKey ? { access_key: accessKey } : {};
|
||||||
|
const response = await this.client.get<DynamicApplication>(
|
||||||
|
`/applications/${applicationId}`,
|
||||||
|
{ params }
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createApplication(
|
||||||
|
data: CreateApplicationRequest
|
||||||
|
): Promise<ApiResponse<CreateApplicationResponse>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.post<CreateApplicationResponse>('/applications', data);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateApplication(
|
||||||
|
applicationId: string,
|
||||||
|
data: UpdateApplicationRequest,
|
||||||
|
accessKey?: string
|
||||||
|
): Promise<ApiResponse<DynamicApplication>> {
|
||||||
|
try {
|
||||||
|
const params = accessKey ? { access_key: accessKey } : {};
|
||||||
|
const response = await this.client.put<DynamicApplication>(
|
||||||
|
`/applications/${applicationId}`,
|
||||||
|
data,
|
||||||
|
{ params }
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async submitApplication(
|
||||||
|
applicationId: string,
|
||||||
|
accessKey?: string
|
||||||
|
): Promise<ApiResponse<{ message: string; new_status: string }>> {
|
||||||
|
try {
|
||||||
|
const params = accessKey ? { access_key: accessKey } : {};
|
||||||
|
const response = await this.client.post<{ message: string; new_status: string }>(
|
||||||
|
`/applications/${applicationId}/submit`,
|
||||||
|
{},
|
||||||
|
{ params }
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async transitionApplicationStatus(
|
||||||
|
applicationId: string,
|
||||||
|
data: StatusTransitionRequest
|
||||||
|
): Promise<ApiResponse<{ message: string; new_status: string; new_status_name: string }>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.post<{
|
||||||
|
message: string;
|
||||||
|
new_status: string;
|
||||||
|
new_status_name: string;
|
||||||
|
}>(`/applications/${applicationId}/transition`, data);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async approveApplication(
|
||||||
|
applicationId: string,
|
||||||
|
approval: ApplicationApproval
|
||||||
|
): Promise<ApiResponse<{ message: string; role: string; decision: string }>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.post<{
|
||||||
|
message: string;
|
||||||
|
role: string;
|
||||||
|
decision: string;
|
||||||
|
}>(`/applications/${applicationId}/approve`, approval);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getApplicationHistory(
|
||||||
|
applicationId: string
|
||||||
|
): Promise<ApiResponse<ApplicationHistoryEntry[]>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.get<ApplicationHistoryEntry[]>(
|
||||||
|
`/applications/${applicationId}/history`
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async generateApplicationPdf(
|
||||||
|
applicationId: string
|
||||||
|
): Promise<ApiResponse<{ message: string; pdf_path: string }>> {
|
||||||
|
try {
|
||||||
|
const response = await this.client.post<{ message: string; pdf_path: string }>(
|
||||||
|
`/applications/${applicationId}/generate-pdf`
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadApplicationPdf(applicationId: string): Promise<Blob> {
|
||||||
|
const response = await this.client.get(`/applications/${applicationId}/pdf`, {
|
||||||
|
responseType: 'blob',
|
||||||
|
});
|
||||||
|
return response.data;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error handling
|
||||||
|
private handleError(error: any): ApiResponse<any> {
|
||||||
|
if (axios.isAxiosError(error)) {
|
||||||
|
const axiosError = error as AxiosError<ApiError>;
|
||||||
|
if (axiosError.response?.data) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: axiosError.response.data,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: {
|
||||||
|
detail: axiosError.message || 'An unknown error occurred',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: {
|
||||||
|
detail: 'An unexpected error occurred',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Utility methods
|
||||||
|
async uploadFile(file: File, endpoint: string): Promise<ApiResponse<{ url: string }>> {
|
||||||
|
try {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('file', file);
|
||||||
|
|
||||||
|
const response = await this.client.post<{ url: string }>(endpoint, formData, {
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'multipart/form-data',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async exportApplications(
|
||||||
|
applicationIds: string[],
|
||||||
|
format: 'pdf' | 'json' | 'csv'
|
||||||
|
): Promise<Blob> {
|
||||||
|
const response = await this.client.post(
|
||||||
|
'/applications/export',
|
||||||
|
{ application_ids: applicationIds, format },
|
||||||
|
{ responseType: 'blob' }
|
||||||
|
);
|
||||||
|
return response.data;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Public access methods (no auth required)
|
||||||
|
async getPublicApplication(
|
||||||
|
applicationId: string,
|
||||||
|
accessKey: string
|
||||||
|
): Promise<ApiResponse<DynamicApplication>> {
|
||||||
|
try {
|
||||||
|
const response = await axios.get<DynamicApplication>(
|
||||||
|
`${this.baseURL}/api/public/applications/${applicationId}`,
|
||||||
|
{ params: { key: accessKey } }
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async updatePublicApplication(
|
||||||
|
applicationId: string,
|
||||||
|
accessKey: string,
|
||||||
|
data: UpdateApplicationRequest
|
||||||
|
): Promise<ApiResponse<DynamicApplication>> {
|
||||||
|
try {
|
||||||
|
const response = await axios.put<DynamicApplication>(
|
||||||
|
`${this.baseURL}/api/public/applications/${applicationId}`,
|
||||||
|
data,
|
||||||
|
{ params: { key: accessKey } }
|
||||||
|
);
|
||||||
|
return { success: true, data: response.data };
|
||||||
|
} catch (error) {
|
||||||
|
return this.handleError(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Export singleton instance
|
||||||
|
export const dynamicApiClient = new DynamicApiClient();
|
||||||
|
|
||||||
|
// Export class for testing or multiple instances
|
||||||
|
export default DynamicApiClient;
|
||||||
527
frontend/src/types/dynamic.ts
Normal file
527
frontend/src/types/dynamic.ts
Normal file
@ -0,0 +1,527 @@
|
|||||||
|
// Dynamic Application System Types
|
||||||
|
|
||||||
|
// Field Types
|
||||||
|
export type FieldType =
|
||||||
|
| "text_short"
|
||||||
|
| "text_long"
|
||||||
|
| "options"
|
||||||
|
| "yesno"
|
||||||
|
| "mail"
|
||||||
|
| "date"
|
||||||
|
| "datetime"
|
||||||
|
| "amount"
|
||||||
|
| "currency_eur"
|
||||||
|
| "number"
|
||||||
|
| "file"
|
||||||
|
| "signature"
|
||||||
|
| "phone"
|
||||||
|
| "url"
|
||||||
|
| "checkbox"
|
||||||
|
| "radio"
|
||||||
|
| "select"
|
||||||
|
| "multiselect";
|
||||||
|
|
||||||
|
// Transition Trigger Types
|
||||||
|
export type TransitionTriggerType =
|
||||||
|
| "user_approval"
|
||||||
|
| "applicant_action"
|
||||||
|
| "deadline_expired"
|
||||||
|
| "time_elapsed"
|
||||||
|
| "condition_met"
|
||||||
|
| "automatic";
|
||||||
|
|
||||||
|
// Field Definition
|
||||||
|
export interface FieldDefinition {
|
||||||
|
field_id: string;
|
||||||
|
field_type: FieldType;
|
||||||
|
name: string;
|
||||||
|
label?: string;
|
||||||
|
description?: string;
|
||||||
|
field_order: number;
|
||||||
|
is_required: boolean;
|
||||||
|
is_readonly: boolean;
|
||||||
|
is_hidden: boolean;
|
||||||
|
options?: string[];
|
||||||
|
default_value?: string;
|
||||||
|
validation_rules?: ValidationRules;
|
||||||
|
display_conditions?: DisplayCondition;
|
||||||
|
placeholder?: string;
|
||||||
|
section?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validation Rules
|
||||||
|
export interface ValidationRules {
|
||||||
|
min?: number;
|
||||||
|
max?: number;
|
||||||
|
min_length?: number;
|
||||||
|
max_length?: number;
|
||||||
|
pattern?: string;
|
||||||
|
date_format?: string;
|
||||||
|
min_date?: string;
|
||||||
|
max_date?: string;
|
||||||
|
decimal_places?: number;
|
||||||
|
integer_only?: boolean;
|
||||||
|
min_selections?: number;
|
||||||
|
max_selections?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display Conditions
|
||||||
|
export interface DisplayCondition {
|
||||||
|
field?: string;
|
||||||
|
operator?: ConditionOperator;
|
||||||
|
value?: any;
|
||||||
|
and?: DisplayCondition[];
|
||||||
|
or?: DisplayCondition[];
|
||||||
|
not?: DisplayCondition;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ConditionOperator =
|
||||||
|
| "equals"
|
||||||
|
| "not_equals"
|
||||||
|
| "in"
|
||||||
|
| "not_in"
|
||||||
|
| "contains"
|
||||||
|
| "not_contains"
|
||||||
|
| "empty"
|
||||||
|
| "not_empty"
|
||||||
|
| "greater_than"
|
||||||
|
| "less_than"
|
||||||
|
| "greater_or_equal"
|
||||||
|
| "less_or_equal";
|
||||||
|
|
||||||
|
// Status Definition
|
||||||
|
export interface StatusDefinition {
|
||||||
|
status_id: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
is_editable: boolean;
|
||||||
|
color?: string;
|
||||||
|
icon?: string;
|
||||||
|
display_order: number;
|
||||||
|
is_initial: boolean;
|
||||||
|
is_final: boolean;
|
||||||
|
is_cancelled: boolean;
|
||||||
|
send_notification: boolean;
|
||||||
|
notification_template?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transition Definition
|
||||||
|
export interface TransitionDefinition {
|
||||||
|
from_status_id: string;
|
||||||
|
to_status_id: string;
|
||||||
|
name: string;
|
||||||
|
trigger_type: TransitionTriggerType;
|
||||||
|
trigger_config: TriggerConfig;
|
||||||
|
conditions?: Record<string, any>;
|
||||||
|
actions?: TransitionAction[];
|
||||||
|
priority: number;
|
||||||
|
is_active: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trigger Configuration
|
||||||
|
export interface TriggerConfig {
|
||||||
|
role?: string;
|
||||||
|
required_approvals?: number;
|
||||||
|
deadline_field?: string;
|
||||||
|
time_span_hours?: number;
|
||||||
|
button_label?: string;
|
||||||
|
button_style?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transition Actions
|
||||||
|
export interface TransitionAction {
|
||||||
|
type: string;
|
||||||
|
config: Record<string, any>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Application Type
|
||||||
|
export interface ApplicationType {
|
||||||
|
id: number;
|
||||||
|
type_id: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
is_active: boolean;
|
||||||
|
is_public: boolean;
|
||||||
|
allowed_roles: string[];
|
||||||
|
max_cost_positions: number;
|
||||||
|
max_comparison_offers: number;
|
||||||
|
version: string;
|
||||||
|
usage_count: number;
|
||||||
|
pdf_template_filename?: string;
|
||||||
|
fields: FieldDefinition[];
|
||||||
|
statuses: StatusDefinition[];
|
||||||
|
transitions: TransitionDefinition[];
|
||||||
|
created_at: string;
|
||||||
|
updated_at: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cost Position
|
||||||
|
export interface CostPosition {
|
||||||
|
description: string;
|
||||||
|
amount: number;
|
||||||
|
category?: string;
|
||||||
|
notes?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Comparison Offer
|
||||||
|
export interface ComparisonOffer {
|
||||||
|
vendor: string;
|
||||||
|
description: string;
|
||||||
|
amount: number;
|
||||||
|
selected: boolean;
|
||||||
|
notes?: string;
|
||||||
|
}

// Dynamic Application
export interface DynamicApplication {
  id: number;
  application_id: string;
  application_type_id: number;
  type_name: string;
  email: string;
  status_id: string;
  status_name: string;
  title: string;
  first_name?: string;
  last_name?: string;
  total_amount: number;
  field_data: Record<string, any>;
  cost_positions: CostPosition[];
  comparison_offers: ComparisonOffer[];
  submitted_at?: string;
  status_changed_at?: string;
  created_at: string;
  updated_at: string;
  can_edit: boolean;
  available_actions: string[];
}

// Application List Item
export interface ApplicationListItem {
  id: number;
  application_id: string;
  type_name: string;
  title: string;
  email: string;
  status_id: string;
  status_name: string;
  total_amount: number;
  submitted_at?: string;
  created_at: string;
}

// Application History Entry
export interface ApplicationHistoryEntry {
  id: number;
  action: string;
  comment?: string;
  field_changes?: Record<string, { old: any; new: any }>;
  user_id?: number;
  created_at: string;
}

// Application Approval
export interface ApplicationApproval {
  decision: "approve" | "reject" | "abstain";
  comment?: string;
}

// Create Application Request
export interface CreateApplicationRequest {
  application_type_id: string;
  title: string;
  field_data: Record<string, any>;
  cost_positions?: CostPosition[];
  comparison_offers?: ComparisonOffer[];
}
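
// Illustrative payload for creating an application of a hypothetical
// "travel_reimbursement" type; the type_id, field keys, and values are
// placeholders chosen for the example only.
const exampleCreateRequest: CreateApplicationRequest = {
  application_type_id: "travel_reimbursement",
  title: "Conference travel, Q3",
  field_data: { destination: "Berlin", purpose: "Workshop attendance" },
  cost_positions: [{ description: "Train ticket", amount: 120.5 }],
};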

// Update Application Request
export interface UpdateApplicationRequest {
  title?: string;
  field_data?: Record<string, any>;
  cost_positions?: CostPosition[];
  comparison_offers?: ComparisonOffer[];
}

// Create Application Response
export interface CreateApplicationResponse {
  application_id: string;
  access_key: string;
  access_url: string;
  status: string;
}

// Status Transition Request
export interface StatusTransitionRequest {
  new_status_id: string;
  comment?: string;
  trigger_data?: Record<string, any>;
}

// Application Type Create Request
export interface ApplicationTypeCreateRequest {
  type_id: string;
  name: string;
  description?: string;
  fields: FieldDefinition[];
  statuses: StatusDefinition[];
  transitions: TransitionDefinition[];
  pdf_field_mapping?: Record<string, string>;
  is_active?: boolean;
  is_public?: boolean;
  allowed_roles?: string[];
  max_cost_positions?: number;
  max_comparison_offers?: number;
}

// Application Type Update Request
export interface ApplicationTypeUpdateRequest {
  name?: string;
  description?: string;
  is_active?: boolean;
  is_public?: boolean;
  allowed_roles?: string[];
  max_cost_positions?: number;
  max_comparison_offers?: number;
}

// Search Parameters
export interface ApplicationSearchParams {
  type_id?: string;
  status_id?: string;
  email?: string;
  search?: string;
  submitted_after?: string;
  submitted_before?: string;
  limit?: number;
  offset?: number;
}
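
// Sketch of turning ApplicationSearchParams into a query string for the list
// endpoint; URLSearchParams and the undefined-filtering shown here are a
// client-side assumption, not part of the backend contract.
function buildApplicationQuery(params: ApplicationSearchParams): string {
  const query = new URLSearchParams();
  for (const [key, value] of Object.entries(params)) {
    if (value !== undefined && value !== null) {
      query.set(key, String(value));
    }
  }
  return query.toString();
}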

// Form State for Dynamic Applications
export interface DynamicFormData {
  // Common fields
  email: string;
  title: string;
  first_name?: string;
  last_name?: string;

  // Dynamic fields
  fields: Record<string, any>;

  // Cost positions
  cost_positions: CostPosition[];

  // Comparison offers
  comparison_offers: ComparisonOffer[];
}

// Field Render Configuration
export interface FieldRenderConfig {
  field: FieldDefinition;
  value: any;
  onChange: (value: any) => void;
  error?: string;
  disabled?: boolean;
  visible?: boolean;
}

// Application Type List Response
export interface ApplicationTypeListResponse {
  types: ApplicationType[];
  total: number;
}

// PDF Template Upload Response
export interface PDFTemplateUploadResponse {
  message: string;
  filename: string;
  fields: string[];
}

// API Error Response
export interface ApiError {
  detail: string;
  field?: string;
  code?: string;
}

// API Response Wrapper
export type ApiResponse<T> =
  | { success: true; data: T }
  | { success: false; error: ApiError };
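
// Example of narrowing the ApiResponse discriminated union: `success` is the
// discriminant, so TypeScript infers `data` in one branch and `error` in the
// other. The helper name is illustrative.
function unwrapResponse<T>(response: ApiResponse<T>): T {
  if (response.success) {
    return response.data;
  }
  throw new Error(response.error.detail);
}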

// User Role
export interface UserRole {
  id: number;
  name: string;
  display_name: string;
  description?: string;
  is_admin: boolean;
  can_review_budget: boolean;
  can_review_finance: boolean;
  can_vote: boolean;
  permissions: string[];
}

// User
export interface User {
  id: number;
  email: string;
  given_name?: string;
  family_name?: string;
  display_name: string;
  picture_url?: string;
  verification_status: string;
  email_verified: boolean;
  roles: UserRole[];
  last_login_at?: string;
  created_at: string;
  updated_at: string;
}

// Session
export interface Session {
  access_token: string;
  refresh_token?: string;
  expires_at: string;
  user: User;
}

// Login Request
export interface LoginRequest {
  email: string;
  password?: string;
  oidc_token?: string;
}

// Field Value Formatter
export type FieldValueFormatter = (value: any, field: FieldDefinition) => string;

// Field Value Validator
export type FieldValueValidator = (value: any, field: FieldDefinition) => string | undefined;
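
// Illustrative FieldValueValidator: a simple required-value check. Real
// validators would also read the per-field rules (min_length, pattern, ...)
// from the field definition; this sketch only shows the return contract
// (error message on failure, undefined on success).
const requiredValidator: FieldValueValidator = (value) => {
  if (value === undefined || value === null || value === "") {
    return "This field is required";
  }
  return undefined;
};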

// Application State
export interface ApplicationState {
  currentApplication?: DynamicApplication;
  applicationTypes: ApplicationType[];
  selectedType?: ApplicationType;
  formData: DynamicFormData;
  validation: Record<string, string>;
  isDirty: boolean;
  isSubmitting: boolean;
  accessKey?: string;
}

// Notification
export interface Notification {
  id: string;
  type: "success" | "error" | "warning" | "info";
  title: string;
  message?: string;
  duration?: number;
  timestamp: Date;
}

// Field Component Props
export interface FieldComponentProps {
  field: FieldDefinition;
  value: any;
  onChange: (value: any) => void;
  error?: string;
  disabled?: boolean;
  formData?: Record<string, any>;
}

// Status Badge Props
export interface StatusBadgeProps {
  status: StatusDefinition;
  size?: "small" | "medium" | "large";
}

// Application Card Props
export interface ApplicationCardProps {
  application: ApplicationListItem;
  onClick?: (id: string) => void;
  onStatusChange?: (id: string, newStatus: string) => void;
}

// Field Group
export interface FieldGroup {
  section: string;
  title?: string;
  description?: string;
  fields: FieldDefinition[];
  collapsed?: boolean;
}

// Export Configuration
export interface ExportConfig {
  format: "pdf" | "json" | "csv";
  include_attachments?: boolean;
  include_history?: boolean;
  watermark?: string;
}
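
// Illustrative ExportConfig: a PDF export that bundles attachments and the
// audit history; the watermark text is a placeholder.
const exampleExportConfig: ExportConfig = {
  format: "pdf",
  include_attachments: true,
  include_history: true,
  watermark: "DRAFT",
};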

// Import Configuration
export interface ImportConfig {
  format: "json" | "csv";
  mapping?: Record<string, string>;
  validate?: boolean;
  dry_run?: boolean;
}

// Batch Operation
export interface BatchOperation {
  operation: "export" | "status_change" | "delete";
  application_ids: string[];
  params?: Record<string, any>;
}

// Dashboard Statistics
export interface DashboardStats {
  total_applications: number;
  pending_applications: number;
  approved_applications: number;
  rejected_applications: number;
  total_amount_requested: number;
  total_amount_approved: number;
  applications_by_type: Record<string, number>;
  applications_by_status: Record<string, number>;
  recent_applications: ApplicationListItem[];
}

// Activity Log
export interface ActivityLogEntry {
  id: number;
  user_id?: number;
  user_name?: string;
  action: string;
  entity_type: string;
  entity_id: string;
  changes?: Record<string, any>;
  ip_address?: string;
  user_agent?: string;
  timestamp: string;
}

// Help Text
export interface HelpText {
  field_id: string;
  title: string;
  content: string;
  examples?: string[];
  links?: { label: string; url: string }[];
}

// Application Template
export interface ApplicationTemplate {
  id: number;
  name: string;
  description?: string;
  type_id: string;
  field_defaults: Record<string, any>;
  is_public: boolean;
  created_by?: number;
  created_at: string;
  updated_at: string;
}