From 31e22f20e93a5d4b666c65de42b9ff0f21e0413e Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 19:06:12 +0200 Subject: [PATCH 01/14] DB Migrations Code for Flask-Migrate change. --- FLASK_MIGRATE_GUIDE.md | 220 ++++++++++++++ MIGRATION_QUICK_REFERENCE.md | 78 +++++ POST_BASELINE_MIGRATIONS.md | 179 +++++++++++ Pipfile | 1 + app.py | 4 + establish_baseline_4214e88.py | 279 ++++++++++++++++++ init_migrations.py | 56 ++++ manage_migrations.py | 136 +++++++++ migrate_to_alembic.py | 195 ++++++++++++ .../add_cascade_delete_note_links.sql | 0 .../add_cascade_delete_note_links_sqlite.sql | 0 .../add_folder_to_notes.sql | 0 .../add_note_folder_table.sql | 0 .../add_note_sharing.sql | 0 .../add_time_preferences.sql | 0 .../migration_list.txt | 0 .../old_migrations/00_migration_summary.py | 0 .../old_migrations/01_migrate_db.py | 0 .../02_migrate_sqlite_to_postgres.py | 0 .../02_migrate_sqlite_to_postgres_fixed.py | 0 .../03_add_dashboard_columns.py | 0 .../04_add_user_preferences_columns.py | 0 .../old_migrations/05_fix_task_status_enum.py | 0 .../old_migrations/06_add_archived_status.py | 0 .../07_fix_company_work_config_columns.py | 0 .../old_migrations/08_fix_work_region_enum.py | 0 .../09_add_germany_to_workregion.py | 0 .../10_add_company_settings_columns.py | 0 .../11_fix_company_work_config_usage.py | 0 .../12_fix_task_status_usage.py | 0 .../13_fix_work_region_usage.py | 0 .../old_migrations/14_fix_removed_fields.py | 0 .../old_migrations/15_repair_user_roles.py | 0 .../19_add_company_invitations.py | 0 .../20_add_company_updated_at.py | 0 .../old_migrations/run_all_db_migrations.py | 0 .../old_migrations/run_code_migrations.py | 0 .../postgres_only_migration.py | 0 .../remove_email_preferences.sql | 0 .../run_postgres_migrations.py | 0 requirements.txt | 1 + startup.sh | 59 ++-- startup_postgres.sh | 45 ++- 43 files changed, 1206 insertions(+), 47 deletions(-) create mode 100644 FLASK_MIGRATE_GUIDE.md create mode 100644 MIGRATION_QUICK_REFERENCE.md create mode 100644 POST_BASELINE_MIGRATIONS.md create mode 100755 establish_baseline_4214e88.py create mode 100755 init_migrations.py create mode 100755 manage_migrations.py create mode 100755 migrate_to_alembic.py rename {migrations => migrations_old}/add_cascade_delete_note_links.sql (100%) rename {migrations => migrations_old}/add_cascade_delete_note_links_sqlite.sql (100%) rename {migrations => migrations_old}/add_folder_to_notes.sql (100%) rename {migrations => migrations_old}/add_note_folder_table.sql (100%) rename {migrations => migrations_old}/add_note_sharing.sql (100%) rename {migrations => migrations_old}/add_time_preferences.sql (100%) rename {migrations => migrations_old}/migration_list.txt (100%) rename {migrations => migrations_old}/old_migrations/00_migration_summary.py (100%) rename {migrations => migrations_old}/old_migrations/01_migrate_db.py (100%) rename {migrations => migrations_old}/old_migrations/02_migrate_sqlite_to_postgres.py (100%) rename {migrations => migrations_old}/old_migrations/02_migrate_sqlite_to_postgres_fixed.py (100%) rename {migrations => migrations_old}/old_migrations/03_add_dashboard_columns.py (100%) rename {migrations => migrations_old}/old_migrations/04_add_user_preferences_columns.py (100%) rename {migrations => migrations_old}/old_migrations/05_fix_task_status_enum.py (100%) rename {migrations => migrations_old}/old_migrations/06_add_archived_status.py (100%) rename {migrations => migrations_old}/old_migrations/07_fix_company_work_config_columns.py (100%) rename {migrations => 
migrations_old}/old_migrations/08_fix_work_region_enum.py (100%) rename {migrations => migrations_old}/old_migrations/09_add_germany_to_workregion.py (100%) rename {migrations => migrations_old}/old_migrations/10_add_company_settings_columns.py (100%) rename {migrations => migrations_old}/old_migrations/11_fix_company_work_config_usage.py (100%) rename {migrations => migrations_old}/old_migrations/12_fix_task_status_usage.py (100%) rename {migrations => migrations_old}/old_migrations/13_fix_work_region_usage.py (100%) rename {migrations => migrations_old}/old_migrations/14_fix_removed_fields.py (100%) rename {migrations => migrations_old}/old_migrations/15_repair_user_roles.py (100%) rename {migrations => migrations_old}/old_migrations/19_add_company_invitations.py (100%) rename {migrations => migrations_old}/old_migrations/20_add_company_updated_at.py (100%) rename {migrations => migrations_old}/old_migrations/run_all_db_migrations.py (100%) rename {migrations => migrations_old}/old_migrations/run_code_migrations.py (100%) rename {migrations => migrations_old}/postgres_only_migration.py (100%) rename {migrations => migrations_old}/remove_email_preferences.sql (100%) rename {migrations => migrations_old}/run_postgres_migrations.py (100%) diff --git a/FLASK_MIGRATE_GUIDE.md b/FLASK_MIGRATE_GUIDE.md new file mode 100644 index 0000000..e570f4b --- /dev/null +++ b/FLASK_MIGRATE_GUIDE.md @@ -0,0 +1,220 @@ +# Flask-Migrate Migration Guide + +## Overview + +TimeTrack has been refactored to use Flask-Migrate (which wraps Alembic) for database migrations instead of manual SQL scripts. This provides automatic migration generation, version control, and rollback capabilities. + +**IMPORTANT**: The baseline for Flask-Migrate is set at git commit `4214e88d18fce7a9c75927753b8d4e9222771e14`. All schema changes after this commit need to be recreated as Flask-Migrate migrations. + +## Migration from Old System + +### For Existing Deployments + +If you have an existing database with the old migration system: + +```bash +# 1. Install new dependencies +pip install -r requirements.txt + +# 2. Establish baseline from commit 4214e88 +python establish_baseline_4214e88.py + +# 3. Mark your database as being at the baseline +flask db stamp head + +# 4. Apply any post-baseline migrations +# Review migrations_old/postgres_only_migration.py for changes after 4214e88 +# Create new migrations for each feature: +flask db migrate -m "Add company updated_at column" +flask db migrate -m "Add user 2FA columns" +flask db migrate -m "Add company invitation table" +# etc... +``` + +### For New Deployments + +```bash +# 1. Install dependencies +pip install -r requirements.txt + +# 2. Initialize and create database +python manage_migrations.py init +python manage_migrations.py apply +``` + +## Daily Usage + +### Creating Migrations + +When you modify models (add columns, tables, etc.): + +```bash +# Generate migration automatically +flask db migrate -m "Add user avatar field" + +# Or use the helper script +python manage_migrations.py create -m "Add user avatar field" +``` + +**Always review the generated migration** in `migrations/versions/` before applying! 
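+
+For orientation, a freshly generated file in `migrations/versions/` follows this shape (a hand-written sketch — the revision IDs are illustrative, and the avatar column mirrors the example above rather than this repo's actual output):
+
+```python
+"""Add user avatar field
+
+Revision ID: a1b2c3d4e5f6
+Revises: 9f8e7d6c5b4a
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# Alembic uses these identifiers to order migrations in the chain
+revision = 'a1b2c3d4e5f6'
+down_revision = '9f8e7d6c5b4a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # Autogenerated from the model diff; verify before applying
+    op.add_column('user', sa.Column('avatar_url', sa.String(length=255), nullable=True))
+
+
+def downgrade():
+    # Must exactly undo upgrade() so rollbacks keep working
+    op.drop_column('user', 'avatar_url')
+```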
+ +### Applying Migrations + +```bash +# Apply all pending migrations +flask db upgrade + +# Or use the helper script +python manage_migrations.py apply +``` + +### Rolling Back + +```bash +# Rollback one migration +flask db downgrade + +# Or use the helper script +python manage_migrations.py rollback +``` + +### Viewing Status + +```bash +# Current migration version +flask db current + +# Migration history +flask db history + +# Or use the helper script +python manage_migrations.py history +``` + +## Important Considerations + +### 1. PostgreSQL Enums + +Flask-Migrate may not perfectly handle PostgreSQL enum types. When adding new enum values: + +```python +# In the migration file, you may need to add: +from alembic import op +import sqlalchemy as sa + +def upgrade(): + # Add new enum value + op.execute("ALTER TYPE taskstatus ADD VALUE 'NEW_STATUS'") +``` + +### 2. Data Migrations + +For complex data transformations, add custom code to migration files: + +```python +def upgrade(): + # Schema changes + op.add_column('user', sa.Column('new_field', sa.String())) + + # Data migration + connection = op.get_bind() + result = connection.execute('SELECT id, old_field FROM user') + for row in result: + connection.execute( + f"UPDATE user SET new_field = '{process(row.old_field)}' WHERE id = {row.id}" + ) +``` + +### 3. Production Deployments + +The startup scripts have been updated to automatically run migrations: + +```bash +# startup_postgres.sh now includes: +flask db upgrade +``` + +### 4. Development Workflow + +1. Pull latest code +2. Run `flask db upgrade` to apply any new migrations +3. Make your model changes +4. Run `flask db migrate -m "Description"` +5. Review the generated migration +6. Test with `flask db upgrade` +7. Commit both model changes and migration file + +## Troubleshooting + +### "Target database is not up to date" + +```bash +# Check current version +flask db current + +# Force upgrade +flask db stamp head # Mark as latest without running +flask db upgrade # Apply any pending +``` + +### "Can't locate revision" + +Your database revision doesn't match any migration file. This happens when switching branches. + +```bash +# See all migrations +flask db history + +# Stamp to a specific revision +flask db stamp +``` + +### Migration Conflicts + +When multiple developers create migrations: + +1. Merge the migration files carefully +2. Update the `down_revision` in the newer migration +3. Test thoroughly + +## Best Practices + +1. **One migration per feature** - Don't bundle unrelated changes +2. **Descriptive messages** - Use clear migration messages +3. **Review before applying** - Always check generated SQL +4. **Test rollbacks** - Ensure downgrade() works +5. **Backup before major migrations** - Especially in production + +## Migration File Structure + +``` +migrations/ +├── README.md # Quick reference +├── alembic.ini # Alembic configuration +├── env.py # Migration environment +├── script.py.mako # Migration template +└── versions/ # Migration files + ├── 001_initial_migration.py + ├── 002_add_user_avatars.py + └── ... 
+``` + +## Helper Scripts + +- `manage_migrations.py` - Simplified migration management +- `migrate_to_alembic.py` - One-time transition from old system +- `init_migrations.py` - Quick initialization script + +## Environment Variables + +```bash +# Required for migrations +export FLASK_APP=app.py +export DATABASE_URL=postgresql://user:pass@host/db +``` + +## References + +- [Flask-Migrate Documentation](https://flask-migrate.readthedocs.io/) +- [Alembic Documentation](https://alembic.sqlalchemy.org/) +- [SQLAlchemy Documentation](https://docs.sqlalchemy.org/) \ No newline at end of file diff --git a/MIGRATION_QUICK_REFERENCE.md b/MIGRATION_QUICK_REFERENCE.md new file mode 100644 index 0000000..ab4a6eb --- /dev/null +++ b/MIGRATION_QUICK_REFERENCE.md @@ -0,0 +1,78 @@ +# Flask-Migrate Quick Reference + +## 🚀 Quick Start (For Existing Database) + +```bash +# One-time setup for existing database +python migrate_to_alembic.py +``` + +## 📝 Common Commands + +### Make Changes & Generate Migration +```bash +# 1. Edit your models in models/*.py +# 2. Generate migration +flask db migrate -m "Add user preferences table" + +# 3. Review the generated file in migrations/versions/ +# 4. Apply the migration +flask db upgrade +``` + +### Check Status +```bash +flask db current # Show current version +flask db history # Show all migrations +``` + +### Rollback +```bash +flask db downgrade # Go back one version +``` + +## 🔧 Helper Scripts + +```bash +# Interactive migration manager +python manage_migrations.py create # Create new migration +python manage_migrations.py apply # Apply migrations +python manage_migrations.py history # View history +``` + +## ⚠️ Important Notes + +1. **Always review generated migrations** before applying +2. **PostgreSQL enums** may need manual tweaking +3. **Test on development first** +4. **Migrations run automatically on startup** + +## 🆘 Troubleshooting + +### "Target database is not up to date" +```bash +flask db stamp head # Mark as current +flask db upgrade # Apply pending +``` + +### Migration conflicts after git pull +```bash +flask db merge -m "Merge migrations" # Resolve conflicts +``` + +## 📁 Structure +``` +migrations/ +├── versions/ # Your migration files +├── alembic.ini # Config (don't edit) +└── README.md # Detailed docs +``` + +## 🔄 Workflow + +1. `git pull` → `flask db upgrade` → Make changes +2. `flask db migrate -m "..."` → Review → Test +3. `git add migrations/versions/*.py` → Commit → Push + +--- +See `FLASK_MIGRATE_GUIDE.md` for detailed documentation. \ No newline at end of file diff --git a/POST_BASELINE_MIGRATIONS.md b/POST_BASELINE_MIGRATIONS.md new file mode 100644 index 0000000..cf8431e --- /dev/null +++ b/POST_BASELINE_MIGRATIONS.md @@ -0,0 +1,179 @@ +# Post-Baseline Migrations Required + +After establishing the baseline at commit `4214e88d18fce7a9c75927753b8d4e9222771e14`, the following schema changes need to be recreated as Flask-Migrate migrations: + +## Required Migrations (in order) + +### 1. Add company.updated_at +```bash +flask db migrate -m "Add updated_at to company table" +``` + +Expected changes: +- ADD COLUMN company.updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + +### 2. Add user 2FA and avatar columns +```bash +flask db migrate -m "Add two-factor auth and avatar columns to user" +``` + +Expected changes: +- ADD COLUMN user.two_factor_enabled BOOLEAN DEFAULT FALSE +- ADD COLUMN user.two_factor_secret VARCHAR(32) +- ADD COLUMN user.avatar_url VARCHAR(255) + +### 3. 
Create company_invitation table +```bash +flask db migrate -m "Create company invitation system" +``` + +Expected changes: +- CREATE TABLE company_invitation (with all columns as defined in models/invitation.py) +- Note: The current model has slightly different columns than the old migration + +### 4. Add user_preferences columns +```bash +flask db migrate -m "Add missing columns to user preferences" +``` + +Expected changes: +- Multiple columns for theme, language, timezone, notifications, etc. + +### 5. Add user_dashboard layout columns +```bash +flask db migrate -m "Add layout and lock columns to user dashboard" +``` + +Expected changes: +- ADD COLUMN user_dashboard.layout JSON DEFAULT '{}' +- ADD COLUMN user_dashboard.is_locked BOOLEAN DEFAULT FALSE + +### 6. Add company_work_config columns +```bash +flask db migrate -m "Add work configuration columns" +``` + +Expected changes: +- Multiple columns for overtime, rates, thresholds + +### 7. Add company_settings columns +```bash +flask db migrate -m "Add company settings columns" +``` + +Expected changes: +- Multiple columns for work week, time tracking, features + +### 8. Add dashboard_widget config columns +```bash +flask db migrate -m "Add widget configuration columns" +``` + +Expected changes: +- ADD COLUMN dashboard_widget.config JSON DEFAULT '{}' +- ADD COLUMN dashboard_widget.is_visible BOOLEAN DEFAULT TRUE + +### 9. Update enums +```bash +# These might need manual migration files +flask db migrate -m "Add GERMANY to WorkRegion enum" +flask db migrate -m "Add ARCHIVED to TaskStatus enum" +flask db migrate -m "Add new WidgetType enum values" +``` + +### 10. Add note sharing functionality +```bash +flask db migrate -m "Add note sharing tables and columns" +``` + +Expected changes: +- CREATE TABLE note_share +- ADD COLUMN note.folder VARCHAR(100) +- CREATE TABLE note_folder +- Cascade delete constraints on note_link + +### 11. Add time preferences +```bash +flask db migrate -m "Add time formatting preferences" +``` + +Expected changes: +- ADD COLUMN user_preferences.time_format_24h BOOLEAN DEFAULT TRUE +- ADD COLUMN user_preferences.time_rounding_minutes INTEGER DEFAULT 0 +- ADD COLUMN user_preferences.round_to_nearest BOOLEAN DEFAULT FALSE + +## Migration Order Script + +Create a script to apply all migrations in order: + +```bash +#!/bin/bash +# apply_post_baseline_migrations.sh + +echo "Applying post-baseline migrations..." + +# Mark database at baseline if not already done +flask db stamp head + +# Generate and apply each migration +flask db migrate -m "Add updated_at to company table" +flask db upgrade + +flask db migrate -m "Add two-factor auth and avatar columns to user" +flask db upgrade + +flask db migrate -m "Create company invitation system" +flask db upgrade + +# ... continue for all migrations +``` + +## Manual Migration Adjustments + +Some migrations may need manual adjustments: + +### PostgreSQL Enums +Edit the generated migration files to add enum values: + +```python +def upgrade(): + # For WorkRegion enum + op.execute("ALTER TYPE workregion ADD VALUE IF NOT EXISTS 'GERMANY'") + + # For TaskStatus enum + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'") + + # For WidgetType enum + op.execute("ALTER TYPE widgettype ADD VALUE IF NOT EXISTS 'REVENUE_CHART'") + # ... 
add other widget types +``` + +### Foreign Key Constraints +Ensure CASCADE deletes are properly set: + +```python +def upgrade(): + # For note_link table + op.create_foreign_key( + 'note_link_source_note_id_fkey', + 'note_link', 'note', + ['source_note_id'], ['id'], + ondelete='CASCADE' + ) +``` + +## Verification + +After applying all migrations: + +1. Compare schema with production database +2. Verify all enum values are present +3. Check foreign key constraints +4. Test rollback functionality + +## Notes + +- Review `migrations_old/postgres_only_migration.py` for the complete list of changes +- Some columns in the old migrations may not exist in current models - skip those +- Always test on development database first +- Keep this document updated as migrations are applied \ No newline at end of file diff --git a/Pipfile b/Pipfile index 1328b9b..e7c3dba 100644 --- a/Pipfile +++ b/Pipfile @@ -12,6 +12,7 @@ itsdangerous = "==2.0.1" click = "==8.0.1" flask-sqlalchemy = "==2.5.1" sqlalchemy = "==1.4.23" +flask-migrate = "==3.1.0" [dev-packages] diff --git a/app.py b/app.py index 8d1e146..9ccc23f 100644 --- a/app.py +++ b/app.py @@ -1,4 +1,5 @@ from flask import Flask, render_template, request, redirect, url_for, jsonify, flash, session, g, Response, send_file, abort +from flask_migrate import Migrate from models import db, TimeEntry, WorkConfig, User, SystemSettings, Team, Role, Project, Company, CompanyWorkConfig, CompanySettings, UserPreferences, WorkRegion, AccountType, ProjectCategory, Task, SubTask, TaskStatus, TaskPriority, TaskDependency, Sprint, SprintStatus, Announcement, SystemEvent, WidgetType, UserDashboard, DashboardWidget, WidgetTemplate, Comment, CommentVisibility, BrandingSettings, CompanyInvitation, Note, NoteFolder, NoteShare from data_formatting import ( format_duration, prepare_export_data, prepare_team_hours_export_data, @@ -85,6 +86,9 @@ # Initialize the database with the app db.init_app(app) +# Initialize Flask-Migrate +migrate = Migrate(app, db) + # Register blueprints app.register_blueprint(notes_bp) app.register_blueprint(notes_download_bp) diff --git a/establish_baseline_4214e88.py b/establish_baseline_4214e88.py new file mode 100755 index 0000000..fd0a7aa --- /dev/null +++ b/establish_baseline_4214e88.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python3 +""" +Establish Flask-Migrate baseline from git commit 4214e88d18fce7a9c75927753b8d4e9222771e14. + +This script: +1. Checks out the models from commit 4214e88 +2. Initializes Flask-Migrate +3. Creates an initial migration representing that schema +4. Stamps the database to mark it as up-to-date with that baseline + +This allows all migrations after commit 4214e88 to be managed by Flask-Migrate. 
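+
+Usage (a sketch of the expected flow; the script sets FLASK_APP itself and
+refuses to run on a dirty git working tree):
+
+    python establish_baseline_4214e88.py
+    flask db stamp head   # existing databases already at/after 4214e88
+    flask db upgrade      # fresh, empty databases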
+""" + +import os +import sys +import subprocess +import tempfile +import shutil +from datetime import datetime + +def run_command(cmd, description, check=True): + """Run a command and handle errors.""" + print(f"\n{description}...") + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + print(f"✓ {description} completed") + if result.stdout.strip(): + print(result.stdout) + return True + else: + print(f"✗ {description} failed") + if result.stderr: + print(f"Error: {result.stderr}") + if result.stdout: + print(f"Output: {result.stdout}") + if check: + sys.exit(1) + return False + +def check_git_status(): + """Ensure git working directory is clean.""" + result = subprocess.run("git status --porcelain", shell=True, capture_output=True, text=True) + if result.stdout.strip(): + print("❌ Git working directory is not clean!") + print("Please commit or stash your changes before running this script.") + return False + return True + +def get_commit_date(commit_hash): + """Get the date of a specific commit.""" + result = subprocess.run( + f"git show -s --format=%ci {commit_hash}", + shell=True, + capture_output=True, + text=True + ) + if result.returncode == 0: + return result.stdout.strip() + return datetime.now().isoformat() + +def main(): + """Main function to establish baseline.""" + print("=== Establishing Flask-Migrate Baseline from Commit 4214e88 ===") + + # Configuration + BASELINE_COMMIT = "4214e88d18fce7a9c75927753b8d4e9222771e14" + BASELINE_DATE = get_commit_date(BASELINE_COMMIT) + + print(f"Baseline commit: {BASELINE_COMMIT}") + print(f"Commit date: {BASELINE_DATE}") + + # Check prerequisites + if not check_git_status(): + return 1 + + # Set Flask app + os.environ['FLASK_APP'] = 'app.py' + + # Step 1: Clean up any existing migrations + if os.path.exists('migrations'): + response = input("\n⚠️ Migrations directory already exists. Remove it? 
(y/N): ") + if response.lower() != 'y': + print("Aborting...") + return 1 + run_command("rm -rf migrations", "Removing existing migrations directory") + + # Step 2: Create a temporary directory for baseline models + with tempfile.TemporaryDirectory() as tmpdir: + print(f"\nCreating temporary directory: {tmpdir}") + + # Step 3: Extract models from baseline commit + print(f"\nExtracting models from commit {BASELINE_COMMIT}...") + + # Get the models directory from the baseline commit + models_files = [ + "models/__init__.py", + "models/base.py", + "models/enums.py", + "models/company.py", + "models/user.py", + "models/team.py", + "models/project.py", + "models/task.py", + "models/time_entry.py", + "models/sprint.py", + "models/system.py", + "models/announcement.py", + "models/dashboard.py", + "models/work_config.py", + "models/invitation.py", + "models/note.py", + "models/note_share.py" + ] + + # Also check if models_old.py exists in baseline (fallback) + result = subprocess.run( + f"git show {BASELINE_COMMIT}:models_old.py", + shell=True, + capture_output=True + ) + use_models_old = result.returncode == 0 + + if use_models_old: + print("Using models_old.py from baseline commit") + # Save current models + run_command("cp -r models models_current", "Backing up current models") + + # Get models_old.py from baseline + run_command( + f"git show {BASELINE_COMMIT}:models_old.py > models_baseline.py", + "Extracting baseline models" + ) + + # Temporarily replace models with baseline + # This is a bit hacky but ensures we generate the right migration + print("\nPreparing baseline schema...") + with open('models_baseline.py', 'r') as f: + baseline_content = f.read() + + # We need to be careful here - save current state and restore later + else: + print("Using models/ directory from baseline commit") + # Extract each model file from baseline + os.makedirs(os.path.join(tmpdir, "models"), exist_ok=True) + + for model_file in models_files: + result = subprocess.run( + f"git show {BASELINE_COMMIT}:{model_file}", + shell=True, + capture_output=True, + text=True + ) + if result.returncode == 0: + file_path = os.path.join(tmpdir, model_file) + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'w') as f: + f.write(result.stdout) + print(f" ✓ Extracted {model_file}") + else: + print(f" ⚠️ Could not extract {model_file}") + + # Step 4: Initialize Flask-Migrate + run_command("flask db init", "Initializing Flask-Migrate") + + # Step 5: Create the baseline migration + print("\n📝 Creating baseline migration...") + print("This migration represents the schema at commit 4214e88") + + migration_message = f"Baseline schema from commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})" + run_command( + f'flask db migrate -m "{migration_message}"', + "Generating baseline migration" + ) + + # Step 6: Add a note to the migration file + migration_files = os.listdir("migrations/versions") + if migration_files: + latest_migration = sorted(migration_files)[-1] + migration_path = os.path.join("migrations/versions", latest_migration) + + with open(migration_path, 'r') as f: + content = f.read() + + # Add comment at the top of the file + baseline_note = f'''""" +BASELINE MIGRATION - DO NOT MODIFY + +This migration represents the database schema at commit {BASELINE_COMMIT}. +Date: {BASELINE_DATE} + +This is the starting point for Flask-Migrate. All future schema changes +should be managed through Flask-Migrate migrations. + +If you have a database that was created before this point, you should: +1. 
Ensure your database schema matches this migration +2. Run: flask db stamp head + +If you're creating a new database: +1. Run: flask db upgrade +""" + +''' + + with open(migration_path, 'w') as f: + f.write(baseline_note + content) + + print(f"✓ Added baseline note to migration: {latest_migration}") + + # Step 7: Create documentation + doc_content = f"""# Flask-Migrate Baseline Information + +## Baseline Commit +- Commit: {BASELINE_COMMIT} +- Date: {BASELINE_DATE} +- Description: This is the baseline schema for Flask-Migrate + +## For Existing Databases + +If your database was created from the schema at or after commit {BASELINE_COMMIT[:8]}: + +```bash +# Mark your database as being at the baseline +flask db stamp head +``` + +## For New Databases + +```bash +# Create all tables from the baseline +flask db upgrade +``` + +## Post-Baseline Migrations + +All migrations after commit {BASELINE_COMMIT[:8]} that were previously in the +old migration system need to be recreated as Flask-Migrate migrations: + +1. Company settings additions +2. User preferences columns +3. Dashboard widget updates +4. Work configuration changes +5. Note sharing functionality +6. Time preferences + +Use `flask db migrate -m "description"` to create these migrations. + +## Important Notes + +- Do NOT modify the baseline migration +- Always review generated migrations before applying +- Test migrations on a development database first +""" + + with open('migrations/BASELINE_INFO.md', 'w') as f: + f.write(doc_content) + + print("\n✅ Created migrations/BASELINE_INFO.md") + + # Step 8: Show summary + print("\n" + "="*60) + print("✨ Baseline establishment completed!") + print("="*60) + print(f"\nBaseline: Commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})") + print("\nNext steps:") + print("\n1. For existing databases at or after this commit:") + print(" flask db stamp head") + print("\n2. For new databases:") + print(" flask db upgrade") + print("\n3. To add post-baseline changes:") + print(" - Review migrations_old/postgres_only_migration.py") + print(" - Create new migrations for changes after 4214e88") + print(" - Example: flask db migrate -m 'Add company settings columns'") + print("\n4. Always review generated migrations in migrations/versions/") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/init_migrations.py b/init_migrations.py new file mode 100755 index 0000000..7d31334 --- /dev/null +++ b/init_migrations.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +""" +Initialize Flask-Migrate for the TimeTrack application. +This script sets up the migrations directory and creates the initial migration. 
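+
+Usage (sketch; FLASK_APP is set to app.py by the script itself):
+
+    python init_migrations.py   # runs flask db init + the initial flask db migrate
+    flask db upgrade            # apply after reviewing migrations/versions/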
+""" + +import os +import sys +import subprocess + +def run_command(cmd, description): + """Run a command and handle errors.""" + print(f"\n{description}...") + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + print(f"✓ {description} completed successfully") + if result.stdout: + print(result.stdout) + else: + print(f"✗ {description} failed") + if result.stderr: + print(f"Error: {result.stderr}") + if result.stdout: + print(f"Output: {result.stdout}") + return False + return True + +def main(): + """Main initialization function.""" + print("=== Flask-Migrate Initialization ===") + + # Set Flask app environment variable + os.environ['FLASK_APP'] = 'app.py' + + # Initialize migrations directory + if not run_command("flask db init", "Initializing migrations directory"): + return 1 + + # Create initial migration + if not run_command( + 'flask db migrate -m "Initial migration from existing schema"', + "Creating initial migration" + ): + return 1 + + print("\n✨ Flask-Migrate initialization completed!") + print("\nNext steps:") + print("1. Review the generated migration in migrations/versions/") + print("2. Apply the migration with: flask db upgrade") + print("3. For future schema changes, use: flask db migrate -m 'Description'") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/manage_migrations.py b/manage_migrations.py new file mode 100755 index 0000000..f9f8619 --- /dev/null +++ b/manage_migrations.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 +""" +Migration management script for TimeTrack. +Handles both development and production migration scenarios. +""" + +import os +import sys +import subprocess +import argparse +from datetime import datetime + +def run_command(cmd, description, check=True): + """Run a command and handle errors.""" + print(f"\n{description}...") + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + print(f"✓ {description} completed") + if result.stdout: + print(result.stdout) + return True + else: + print(f"✗ {description} failed") + if result.stderr: + print(f"Error: {result.stderr}") + if result.stdout: + print(f"Output: {result.stdout}") + if check: + sys.exit(1) + return False + +def init_migrations(): + """Initialize Flask-Migrate.""" + os.environ['FLASK_APP'] = 'app.py' + + if os.path.exists('migrations'): + print("⚠️ Migrations directory already exists!") + response = input("Do you want to reinitialize? This will delete existing migrations. (y/N): ") + if response.lower() != 'y': + print("Aborting...") + return False + run_command("rm -rf migrations", "Removing existing migrations directory") + + run_command("flask db init", "Initializing Flask-Migrate") + return True + +def create_migration(message): + """Create a new migration.""" + os.environ['FLASK_APP'] = 'app.py' + + if not message: + message = input("Enter migration message: ") + + run_command(f'flask db migrate -m "{message}"', "Creating migration") + print("\n📝 Please review the generated migration before applying it!") + return True + +def apply_migrations(): + """Apply pending migrations.""" + os.environ['FLASK_APP'] = 'app.py' + + # Show current version + run_command("flask db current", "Current database version", check=False) + + # Show pending migrations + print("\nPending migrations:") + run_command("flask db show", "Migration history", check=False) + + response = input("\nApply pending migrations? 
(y/N): ") + if response.lower() == 'y': + run_command("flask db upgrade", "Applying migrations") + return True + +def rollback_migration(): + """Rollback to previous migration.""" + os.environ['FLASK_APP'] = 'app.py' + + run_command("flask db current", "Current database version") + response = input("\nRollback to previous version? (y/N): ") + + if response.lower() == 'y': + run_command("flask db downgrade", "Rolling back migration") + return True + +def show_history(): + """Show migration history.""" + os.environ['FLASK_APP'] = 'app.py' + + run_command("flask db history", "Migration history") + return True + +def stamp_database(revision='head'): + """Stamp database with a specific revision without running migrations.""" + os.environ['FLASK_APP'] = 'app.py' + + print(f"⚠️ This will mark the database as being at revision '{revision}' without running any migrations.") + response = input("Continue? (y/N): ") + + if response.lower() == 'y': + run_command(f"flask db stamp {revision}", f"Stamping database with revision {revision}") + return True + +def main(): + """Main function.""" + parser = argparse.ArgumentParser(description='Manage Flask-Migrate migrations') + parser.add_argument('command', choices=['init', 'create', 'apply', 'rollback', 'history', 'stamp'], + help='Command to execute') + parser.add_argument('-m', '--message', help='Migration message (for create command)') + parser.add_argument('-r', '--revision', default='head', help='Revision to stamp (for stamp command)') + + args = parser.parse_args() + + commands = { + 'init': init_migrations, + 'create': lambda: create_migration(args.message), + 'apply': apply_migrations, + 'rollback': rollback_migration, + 'history': show_history, + 'stamp': lambda: stamp_database(args.revision) + } + + print(f"=== TimeTrack Migration Manager ===") + print(f"Command: {args.command}") + print(f"Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + + success = commands[args.command]() + + if success: + print("\n✨ Operation completed successfully!") + else: + print("\n❌ Operation failed or was cancelled") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/migrate_to_alembic.py b/migrate_to_alembic.py new file mode 100755 index 0000000..27bf6c9 --- /dev/null +++ b/migrate_to_alembic.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 +""" +Special migration script to transition from manual migrations to Flask-Migrate/Alembic. +This script handles the existing database schema and creates a baseline migration. 
+""" + +import os +import sys +import subprocess +import psycopg2 +from urllib.parse import urlparse + +def check_database_exists(): + """Check if database exists and has tables.""" + database_url = os.environ.get('DATABASE_URL', 'sqlite:////data/timetrack.db') + + if database_url.startswith('sqlite'): + db_path = database_url.replace('sqlite:///', '') + return os.path.exists(db_path) + + # PostgreSQL + try: + parsed = urlparse(database_url) + conn = psycopg2.connect( + host=parsed.hostname, + port=parsed.port or 5432, + database=parsed.path[1:], + user=parsed.username, + password=parsed.password + ) + cursor = conn.cursor() + cursor.execute(""" + SELECT COUNT(*) FROM information_schema.tables + WHERE table_schema = 'public' AND table_type = 'BASE TABLE' + """) + table_count = cursor.fetchone()[0] + cursor.close() + conn.close() + return table_count > 0 + except Exception as e: + print(f"Error checking database: {e}") + return False + +def run_command(cmd, description): + """Run a command and handle errors.""" + print(f"\n{description}...") + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + print(f"✓ {description} completed") + if result.stdout: + print(result.stdout) + return True + else: + print(f"✗ {description} failed") + if result.stderr: + print(f"Error: {result.stderr}") + if result.stdout: + print(f"Output: {result.stdout}") + return False + +def main(): + """Main migration function.""" + print("=== Migrating to Flask-Migrate/Alembic ===") + print("\n⚠️ IMPORTANT: This script assumes your database is at the current schema.") + print("For baseline at commit 4214e88, use: python establish_baseline_4214e88.py") + + # Set Flask app + os.environ['FLASK_APP'] = 'app.py' + + # Check if we have an existing database + has_existing_db = check_database_exists() + + if has_existing_db: + print("\n📊 Existing database detected!") + print("This process will:") + print("1. Initialize Flask-Migrate") + print("2. Create a baseline migration matching your CURRENT schema") + print("3. Mark the database as up-to-date without running migrations") + print("\nThis allows you to start using Flask-Migrate for future changes.") + print("\n⚠️ If your database is at commit 4214e88, use establish_baseline_4214e88.py instead!") + + response = input("\nContinue with current schema? (y/N): ") + if response.lower() != 'y': + print("Aborting...") + return 1 + else: + print("\n🆕 No existing database detected.") + print("This will set up a fresh Flask-Migrate installation.") + + # Step 1: Initialize Flask-Migrate + if not run_command("flask db init", "Initializing Flask-Migrate"): + return 1 + + if has_existing_db: + # Step 2: Create initial migration from existing schema + if not run_command( + 'flask db migrate -m "Initial migration from existing database"', + "Creating migration from existing schema" + ): + return 1 + + print("\n📝 Review the generated migration!") + print("The migration file is in migrations/versions/") + print("Make sure it matches your existing schema.") + + response = input("\nHave you reviewed the migration? Continue? 
(y/N): ") + if response.lower() != 'y': + print("Please review and run: flask db stamp head") + return 0 + + # Step 3: Stamp the database without running migrations + if not run_command("flask db stamp head", "Marking database as up-to-date"): + return 1 + + print("\n✅ Database marked as up-to-date with current schema") + else: + # Fresh installation - create tables + if not run_command( + 'flask db migrate -m "Initial database creation"', + "Creating initial migration" + ): + return 1 + + if not run_command("flask db upgrade", "Creating database tables"): + return 1 + + print("\n✅ Database tables created successfully") + + print("\n✨ Migration to Flask-Migrate completed!") + print("\nFuture migrations:") + print("1. Make changes to your models") + print("2. Run: flask db migrate -m 'Description of changes'") + print("3. Review the generated migration") + print("4. Run: flask db upgrade") + + # Create a README for the team + readme_content = """# Flask-Migrate Usage + +This project now uses Flask-Migrate (Alembic) for database migrations. + +## Common Commands + +### Create a new migration +```bash +flask db migrate -m "Description of changes" +``` + +### Apply migrations +```bash +flask db upgrade +``` + +### View migration history +```bash +flask db history +``` + +### Rollback one migration +```bash +flask db downgrade +``` + +### View current migration +```bash +flask db current +``` + +## Important Notes + +1. **Always review generated migrations** before applying them +2. **Test migrations** on a development database first +3. **Back up your database** before applying migrations in production +4. **Custom enums** may need manual adjustment in migration files + +## Migration Files + +- `migrations/` - Main migrations directory +- `migrations/versions/` - Individual migration files +- `migrations/alembic.ini` - Alembic configuration + +## For Production + +The startup scripts have been updated to automatically run migrations. 
+""" + + with open('migrations/README.md', 'w') as f: + f.write(readme_content) + + print("\n📄 Created migrations/README.md with usage instructions") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/migrations/add_cascade_delete_note_links.sql b/migrations_old/add_cascade_delete_note_links.sql similarity index 100% rename from migrations/add_cascade_delete_note_links.sql rename to migrations_old/add_cascade_delete_note_links.sql diff --git a/migrations/add_cascade_delete_note_links_sqlite.sql b/migrations_old/add_cascade_delete_note_links_sqlite.sql similarity index 100% rename from migrations/add_cascade_delete_note_links_sqlite.sql rename to migrations_old/add_cascade_delete_note_links_sqlite.sql diff --git a/migrations/add_folder_to_notes.sql b/migrations_old/add_folder_to_notes.sql similarity index 100% rename from migrations/add_folder_to_notes.sql rename to migrations_old/add_folder_to_notes.sql diff --git a/migrations/add_note_folder_table.sql b/migrations_old/add_note_folder_table.sql similarity index 100% rename from migrations/add_note_folder_table.sql rename to migrations_old/add_note_folder_table.sql diff --git a/migrations/add_note_sharing.sql b/migrations_old/add_note_sharing.sql similarity index 100% rename from migrations/add_note_sharing.sql rename to migrations_old/add_note_sharing.sql diff --git a/migrations/add_time_preferences.sql b/migrations_old/add_time_preferences.sql similarity index 100% rename from migrations/add_time_preferences.sql rename to migrations_old/add_time_preferences.sql diff --git a/migrations/migration_list.txt b/migrations_old/migration_list.txt similarity index 100% rename from migrations/migration_list.txt rename to migrations_old/migration_list.txt diff --git a/migrations/old_migrations/00_migration_summary.py b/migrations_old/old_migrations/00_migration_summary.py similarity index 100% rename from migrations/old_migrations/00_migration_summary.py rename to migrations_old/old_migrations/00_migration_summary.py diff --git a/migrations/old_migrations/01_migrate_db.py b/migrations_old/old_migrations/01_migrate_db.py similarity index 100% rename from migrations/old_migrations/01_migrate_db.py rename to migrations_old/old_migrations/01_migrate_db.py diff --git a/migrations/old_migrations/02_migrate_sqlite_to_postgres.py b/migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py similarity index 100% rename from migrations/old_migrations/02_migrate_sqlite_to_postgres.py rename to migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py diff --git a/migrations/old_migrations/02_migrate_sqlite_to_postgres_fixed.py b/migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py similarity index 100% rename from migrations/old_migrations/02_migrate_sqlite_to_postgres_fixed.py rename to migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py diff --git a/migrations/old_migrations/03_add_dashboard_columns.py b/migrations_old/old_migrations/03_add_dashboard_columns.py similarity index 100% rename from migrations/old_migrations/03_add_dashboard_columns.py rename to migrations_old/old_migrations/03_add_dashboard_columns.py diff --git a/migrations/old_migrations/04_add_user_preferences_columns.py b/migrations_old/old_migrations/04_add_user_preferences_columns.py similarity index 100% rename from migrations/old_migrations/04_add_user_preferences_columns.py rename to migrations_old/old_migrations/04_add_user_preferences_columns.py diff --git 
a/migrations/old_migrations/05_fix_task_status_enum.py b/migrations_old/old_migrations/05_fix_task_status_enum.py similarity index 100% rename from migrations/old_migrations/05_fix_task_status_enum.py rename to migrations_old/old_migrations/05_fix_task_status_enum.py diff --git a/migrations/old_migrations/06_add_archived_status.py b/migrations_old/old_migrations/06_add_archived_status.py similarity index 100% rename from migrations/old_migrations/06_add_archived_status.py rename to migrations_old/old_migrations/06_add_archived_status.py diff --git a/migrations/old_migrations/07_fix_company_work_config_columns.py b/migrations_old/old_migrations/07_fix_company_work_config_columns.py similarity index 100% rename from migrations/old_migrations/07_fix_company_work_config_columns.py rename to migrations_old/old_migrations/07_fix_company_work_config_columns.py diff --git a/migrations/old_migrations/08_fix_work_region_enum.py b/migrations_old/old_migrations/08_fix_work_region_enum.py similarity index 100% rename from migrations/old_migrations/08_fix_work_region_enum.py rename to migrations_old/old_migrations/08_fix_work_region_enum.py diff --git a/migrations/old_migrations/09_add_germany_to_workregion.py b/migrations_old/old_migrations/09_add_germany_to_workregion.py similarity index 100% rename from migrations/old_migrations/09_add_germany_to_workregion.py rename to migrations_old/old_migrations/09_add_germany_to_workregion.py diff --git a/migrations/old_migrations/10_add_company_settings_columns.py b/migrations_old/old_migrations/10_add_company_settings_columns.py similarity index 100% rename from migrations/old_migrations/10_add_company_settings_columns.py rename to migrations_old/old_migrations/10_add_company_settings_columns.py diff --git a/migrations/old_migrations/11_fix_company_work_config_usage.py b/migrations_old/old_migrations/11_fix_company_work_config_usage.py similarity index 100% rename from migrations/old_migrations/11_fix_company_work_config_usage.py rename to migrations_old/old_migrations/11_fix_company_work_config_usage.py diff --git a/migrations/old_migrations/12_fix_task_status_usage.py b/migrations_old/old_migrations/12_fix_task_status_usage.py similarity index 100% rename from migrations/old_migrations/12_fix_task_status_usage.py rename to migrations_old/old_migrations/12_fix_task_status_usage.py diff --git a/migrations/old_migrations/13_fix_work_region_usage.py b/migrations_old/old_migrations/13_fix_work_region_usage.py similarity index 100% rename from migrations/old_migrations/13_fix_work_region_usage.py rename to migrations_old/old_migrations/13_fix_work_region_usage.py diff --git a/migrations/old_migrations/14_fix_removed_fields.py b/migrations_old/old_migrations/14_fix_removed_fields.py similarity index 100% rename from migrations/old_migrations/14_fix_removed_fields.py rename to migrations_old/old_migrations/14_fix_removed_fields.py diff --git a/migrations/old_migrations/15_repair_user_roles.py b/migrations_old/old_migrations/15_repair_user_roles.py similarity index 100% rename from migrations/old_migrations/15_repair_user_roles.py rename to migrations_old/old_migrations/15_repair_user_roles.py diff --git a/migrations/old_migrations/19_add_company_invitations.py b/migrations_old/old_migrations/19_add_company_invitations.py similarity index 100% rename from migrations/old_migrations/19_add_company_invitations.py rename to migrations_old/old_migrations/19_add_company_invitations.py diff --git a/migrations/old_migrations/20_add_company_updated_at.py 
b/migrations_old/old_migrations/20_add_company_updated_at.py similarity index 100% rename from migrations/old_migrations/20_add_company_updated_at.py rename to migrations_old/old_migrations/20_add_company_updated_at.py diff --git a/migrations/old_migrations/run_all_db_migrations.py b/migrations_old/old_migrations/run_all_db_migrations.py similarity index 100% rename from migrations/old_migrations/run_all_db_migrations.py rename to migrations_old/old_migrations/run_all_db_migrations.py diff --git a/migrations/old_migrations/run_code_migrations.py b/migrations_old/old_migrations/run_code_migrations.py similarity index 100% rename from migrations/old_migrations/run_code_migrations.py rename to migrations_old/old_migrations/run_code_migrations.py diff --git a/migrations/postgres_only_migration.py b/migrations_old/postgres_only_migration.py similarity index 100% rename from migrations/postgres_only_migration.py rename to migrations_old/postgres_only_migration.py diff --git a/migrations/remove_email_preferences.sql b/migrations_old/remove_email_preferences.sql similarity index 100% rename from migrations/remove_email_preferences.sql rename to migrations_old/remove_email_preferences.sql diff --git a/migrations/run_postgres_migrations.py b/migrations_old/run_postgres_migrations.py similarity index 100% rename from migrations/run_postgres_migrations.py rename to migrations_old/run_postgres_migrations.py diff --git a/requirements.txt b/requirements.txt index 82c0062..e8e7320 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,7 @@ numpy==1.26.4 pandas==1.5.3 xlsxwriter==3.1.2 Flask-Mail==0.9.1 +Flask-Migrate==3.1.0 psycopg2-binary==2.9.9 markdown==3.4.4 PyYAML==6.0.1 diff --git a/startup.sh b/startup.sh index 09371c1..6c8d9a4 100755 --- a/startup.sh +++ b/startup.sh @@ -11,43 +11,40 @@ while ! pg_isready -h db -p 5432 -U "$POSTGRES_USER" > /dev/null 2>&1; do done echo "PostgreSQL is ready!" -# SQLite to PostgreSQL migration is now handled by the migration system below - -# Initialize database tables if they don't exist -echo "Ensuring database tables exist..." -python -c " -from app import app, db -with app.app_context(): - db.create_all() - print('Database tables created/verified') -" - -# Run all database schema migrations +# Run Flask-Migrate migrations echo "" -echo "=== Running Database Schema Migrations ===" -if [ -d "migrations" ] && [ -f "migrations/run_all_db_migrations.py" ]; then - echo "Checking and applying database schema updates..." - python migrations/run_all_db_migrations.py +echo "=== Running Database Migrations ===" +export FLASK_APP=app.py + +# Check if migrations directory exists +if [ -d "migrations" ]; then + echo "Applying database migrations..." + flask db upgrade if [ $? -ne 0 ]; then - echo "⚠️ Some database migrations had issues, but continuing..." + echo "❌ Migration failed! Check the logs above." + exit 1 fi + echo "✅ Database migrations completed successfully" else - echo "No migrations directory found, skipping database migrations..." + echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." + echo "Using baseline from commit 4214e88..." + python establish_baseline_4214e88.py + if [ $? -ne 0 ]; then + echo "❌ Migration initialization failed!" 
+ echo "Please run manually: python establish_baseline_4214e88.py" + exit 1 + fi + # Stamp the database as being at baseline + flask db stamp head + echo "✅ Database marked at baseline commit 4214e88" fi -# Run code migrations to update code for model changes -echo "" -echo "=== Running Code Migrations ===" -echo "Code migrations temporarily disabled for debugging" -# if [ -d "migrations" ] && [ -f "migrations/run_code_migrations.py" ]; then -# echo "Checking and applying code updates for model changes..." -# python migrations/run_code_migrations.py -# if [ $? -ne 0 ]; then -# echo "⚠️ Code migrations had issues, but continuing..." -# fi -# else -# echo "No migrations directory found, skipping code migrations..." -# fi +# Legacy migration support (can be removed after full transition) +if [ -f "migrations_old/run_all_db_migrations.py" ]; then + echo "" + echo "=== Checking Legacy Migrations ===" + echo "Found old migration system. Consider removing after confirming Flask-Migrate is working." +fi # Start the Flask application with gunicorn echo "" diff --git a/startup_postgres.sh b/startup_postgres.sh index 4107b2f..8133289 100755 --- a/startup_postgres.sh +++ b/startup_postgres.sh @@ -11,26 +11,39 @@ while ! pg_isready -h db -p 5432 -U "$POSTGRES_USER" > /dev/null 2>&1; do done echo "PostgreSQL is ready!" -# Initialize database tables if they don't exist -echo "Ensuring database tables exist..." -python -c " -from app import app, db -with app.app_context(): - db.create_all() - print('Database tables created/verified') -" - -# Run PostgreSQL-only migrations +# Run Flask-Migrate migrations echo "" -echo "=== Running PostgreSQL Migrations ===" -if [ -f "migrations/run_postgres_migrations.py" ]; then - echo "Applying PostgreSQL schema updates..." - python migrations/run_postgres_migrations.py +echo "=== Running Database Migrations ===" +export FLASK_APP=app.py + +# Check if migrations directory exists +if [ -d "migrations" ]; then + echo "Applying database migrations..." + flask db upgrade if [ $? -ne 0 ]; then - echo "⚠️ Some migrations failed, but continuing..." + echo "❌ Migration failed! Check the logs above." + exit 1 fi + echo "✅ Database migrations completed successfully" else - echo "PostgreSQL migration runner not found, skipping..." + echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." + echo "Using baseline from commit 4214e88..." + python establish_baseline_4214e88.py + if [ $? -ne 0 ]; then + echo "❌ Migration initialization failed!" + echo "Please run manually: python establish_baseline_4214e88.py" + exit 1 + fi + # Stamp the database as being at baseline + flask db stamp head + echo "✅ Database marked at baseline commit 4214e88" +fi + +# Legacy migration support (can be removed after full transition) +if [ -f "migrations_old/run_postgres_migrations.py" ]; then + echo "" + echo "=== Checking Legacy Migrations ===" + echo "Found old migration system. Consider removing after confirming Flask-Migrate is working." fi # Start the Flask application with gunicorn From 1416035ae55edffde5bd25ad30f158f69e67e5f3 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 19:49:34 +0200 Subject: [PATCH 02/14] Change DB Migration scripts. 
--- FLASK_MIGRATE_GUIDE.md | 3 +- FLASK_MIGRATE_TROUBLESHOOTING.md | 223 +++++++++++++++++++++++++++++ diagnose_migrations.py | 236 +++++++++++++++++++++++++++++++ establish_baseline_4214e88.py | 210 +++++++++++++++------------ fix_migration_sequence.py | 73 ++++++++++ fix_revision_mismatch.py | 183 ++++++++++++++++++++++++ quick_fix_revision.sh | 95 +++++++++++++ simple_baseline_4214e88.py | 142 +++++++++++++++++++ 8 files changed, 1073 insertions(+), 92 deletions(-) create mode 100644 FLASK_MIGRATE_TROUBLESHOOTING.md create mode 100755 diagnose_migrations.py create mode 100755 fix_migration_sequence.py create mode 100755 fix_revision_mismatch.py create mode 100755 quick_fix_revision.sh create mode 100755 simple_baseline_4214e88.py diff --git a/FLASK_MIGRATE_GUIDE.md b/FLASK_MIGRATE_GUIDE.md index e570f4b..d787629 100644 --- a/FLASK_MIGRATE_GUIDE.md +++ b/FLASK_MIGRATE_GUIDE.md @@ -17,7 +17,8 @@ If you have an existing database with the old migration system: pip install -r requirements.txt # 2. Establish baseline from commit 4214e88 -python establish_baseline_4214e88.py +python simple_baseline_4214e88.py +# Note: Use simple_baseline_4214e88.py as it handles the models.py transition correctly # 3. Mark your database as being at the baseline flask db stamp head diff --git a/FLASK_MIGRATE_TROUBLESHOOTING.md b/FLASK_MIGRATE_TROUBLESHOOTING.md new file mode 100644 index 0000000..5617119 --- /dev/null +++ b/FLASK_MIGRATE_TROUBLESHOOTING.md @@ -0,0 +1,223 @@ +# Flask-Migrate Troubleshooting Guide + +## Common Issues and Solutions + +### 0. Baseline Script Fails - "Could not extract models/" + +**Error**: When running `establish_baseline_4214e88.py`: +``` +⚠️ Could not extract models/__init__.py +⚠️ Could not extract models/base.py +``` + +**Cause**: Commit 4214e88 uses a single `models.py` file, not the modular `models/` directory. + +**Solution**: +```bash +# Use the simplified baseline script instead +python simple_baseline_4214e88.py + +# This properly handles the models.py → models/ transition +``` + +### 1. "Target database is not up to date" + +**Error**: When running `flask db migrate`, you get: +``` +ERROR [flask_migrate] Target database is not up to date. +``` + +**Solution**: +```bash +# Apply pending migrations first +flask db upgrade + +# Then create new migration +flask db migrate -m "Your changes" +``` + +### 2. "No changes in schema detected" + +**Possible Causes**: +1. No actual model changes were made +2. Model not imported in `models/__init__.py` +3. Database already has the changes + +**Solutions**: +```bash +# Check what Flask-Migrate sees +flask db compare + +# Force detection by editing a model slightly +# (add a comment, save, then remove it) + +# Check current state +python diagnose_migrations.py +``` + +### 3. After First Migration, Second One Fails + +**This is the most common issue!** + +After creating the baseline migration, you must apply it before creating new ones: + +```bash +# Sequence: +flask db migrate -m "Initial migration" # Works ✓ +flask db migrate -m "Add new column" # Fails ✗ + +# Fix: +flask db upgrade # Apply first migration +flask db migrate -m "Add new column" # Now works ✓ +``` + +### 4. Import Errors + +**Error**: `ModuleNotFoundError` or `ImportError` + +**Solution**: +```bash +# Ensure FLASK_APP is set +export FLASK_APP=app.py + +# Check imports +python -c "from app import app, db; print('OK')" +``` + +### 5. 
PostgreSQL Enum Issues + +**Error**: Cannot add new enum value in migration + +**Solution**: Edit the generated migration file: +```python +def upgrade(): + # Instead of using Enum type directly + # Use raw SQL for PostgreSQL enums + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'NEW_VALUE'") +``` + +### 6. Migration Conflicts After Git Pull + +**Error**: Conflicting migration heads + +**Solution**: +```bash +# Merge the migrations +flask db merge -m "Merge migrations" + +# Then upgrade +flask db upgrade +``` + +## Quick Diagnostic Commands + +```bash +# Run full diagnostics +python diagnose_migrations.py + +# Fix sequence issues +python fix_migration_sequence.py + +# Check current state +flask db current # Current DB revision +flask db heads # Latest file revision +flask db history # All migrations + +# Compare DB with models +flask db compare # Shows differences +``` + +## Best Practices to Avoid Issues + +1. **Always upgrade before new migrations**: + ```bash + flask db upgrade + flask db migrate -m "New changes" + ``` + +2. **Review generated migrations**: + - Check `migrations/versions/` folder + - Look for DROP commands you didn't intend + +3. **Test on development first**: + ```bash + # Test the migration + flask db upgrade + + # Test rollback + flask db downgrade + ``` + +4. **Handle enums carefully**: + - PostgreSQL enums need special handling + - Consider using String columns instead + +5. **Commit migrations with code**: + - Always commit migration files with model changes + - This keeps database and code in sync + +## Revision Mismatch Errors + +### "Can't locate revision identified by 'xxxxx'" + +This means your database thinks it's at a revision that doesn't exist in your migration files. + +**Quick Fix**: +```bash +# Run the automated fix +./quick_fix_revision.sh + +# Or manually: +# 1. Find your latest migration +ls migrations/versions/*.py + +# 2. Get the revision from the file +grep "revision = " migrations/versions/latest_file.py + +# 3. Stamp database to that revision +flask db stamp +``` + +**Detailed Diagnosis**: +```bash +python fix_revision_mismatch.py +``` + +## Emergency Fixes + +### Reset Migration State (Development Only!) + +```bash +# Remove migrations and start over +rm -rf migrations +python establish_baseline_4214e88.py +flask db stamp head +``` + +### Force Database to Current State + +```bash +# Mark database as up-to-date without running migrations +flask db stamp head + +# Or stamp to specific revision +flask db stamp +``` + +### Manual Migration Edit + +Sometimes you need to edit the generated migration: + +1. Generate migration: `flask db migrate -m "Changes"` +2. Edit file in `migrations/versions/` +3. Test with: `flask db upgrade` +4. Test rollback: `flask db downgrade` + +## Getting Help + +If these solutions don't work: + +1. Run diagnostics: `python diagnose_migrations.py` +2. Check the full error message +3. Look at the generated SQL: `flask db upgrade --sql` +4. Check Flask-Migrate logs in detail \ No newline at end of file diff --git a/diagnose_migrations.py b/diagnose_migrations.py new file mode 100755 index 0000000..3c150fe --- /dev/null +++ b/diagnose_migrations.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python3 +""" +Diagnostic script for Flask-Migrate issues. +Helps identify common problems with migrations. 
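+
+Usage (sketch; runs the diagnostic checks below and prints suggested fixes):
+
+    python diagnose_migrations.py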
+""" + +import os +import sys +import subprocess +import json +from pathlib import Path + +def run_command(cmd, capture=True): + """Run a command and return result.""" + if capture: + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + return result.returncode, result.stdout, result.stderr + else: + return subprocess.run(cmd, shell=True).returncode, "", "" + +def check_environment(): + """Check environment setup.""" + print("=== Environment Check ===") + + # Check FLASK_APP + flask_app = os.environ.get('FLASK_APP', 'Not set') + print(f"FLASK_APP: {flask_app}") + if flask_app == 'Not set': + print(" ⚠️ FLASK_APP not set. Setting to app.py") + os.environ['FLASK_APP'] = 'app.py' + + # Check DATABASE_URL + db_url = os.environ.get('DATABASE_URL', 'Not set') + if db_url != 'Not set': + # Hide password in output + if '@' in db_url: + parts = db_url.split('@') + proto_user = parts[0].split('://') + if len(proto_user) > 1 and ':' in proto_user[1]: + user_pass = proto_user[1].split(':') + safe_url = f"{proto_user[0]}://{user_pass[0]}:****@{parts[1]}" + else: + safe_url = db_url + else: + safe_url = db_url + print(f"DATABASE_URL: {safe_url}") + else: + print("DATABASE_URL: Using default from app.py") + + print() + +def check_migrations_directory(): + """Check migrations directory structure.""" + print("=== Migrations Directory Check ===") + + if not os.path.exists('migrations'): + print("❌ migrations/ directory not found!") + print(" Run: python establish_baseline_4214e88.py") + return False + + print("✓ migrations/ directory exists") + + # Check for required files + required_files = ['alembic.ini', 'env.py', 'script.py.mako'] + for file in required_files: + path = os.path.join('migrations', file) + if os.path.exists(path): + print(f"✓ {file} exists") + else: + print(f"❌ {file} missing!") + return False + + # Check versions directory + versions_dir = os.path.join('migrations', 'versions') + if not os.path.exists(versions_dir): + print("❌ versions/ directory missing!") + return False + + print("✓ versions/ directory exists") + + # List migration files + migration_files = [f for f in os.listdir(versions_dir) if f.endswith('.py')] + print(f"\nMigration files found: {len(migration_files)}") + for f in sorted(migration_files): + print(f" - {f}") + + print() + return True + +def check_database_state(): + """Check current database migration state.""" + print("=== Database State Check ===") + + # Check current revision + code, stdout, stderr = run_command("flask db current") + if code == 0: + print(f"Current revision: {stdout.strip()}") + else: + print("❌ Failed to get current revision") + print(f"Error: {stderr}") + return False + + # Check if database is up to date + code, stdout, stderr = run_command("flask db check") + if code == 0: + if "Database is up to date" in stdout: + print("✓ Database is up to date") + else: + print("⚠️ Database may need upgrade") + print(stdout) + else: + print("⚠️ Database check returned non-zero") + if stderr: + print(f"Error: {stderr}") + + print() + return True + +def check_model_imports(): + """Check if models can be imported.""" + print("=== Model Import Check ===") + + try: + from app import app, db + print("✓ Successfully imported app and db") + + with app.app_context(): + # Try to import all models + from models import ( + Company, User, Project, Task, TimeEntry, + CompanySettings, UserPreferences, Sprint + ) + print("✓ Successfully imported all main models") + + # Check if models have tables + print("\nModel tables:") + for model in [Company, 
User, Project, Task, TimeEntry]:
+                table_name = model.__tablename__
+                print(f"  - {model.__name__}: {table_name}")
+
+    except Exception as e:
+        print(f"❌ Import error: {e}")
+        import traceback
+        traceback.print_exc()
+        return False
+
+    print()
+    return True
+
+def test_migration_detection():
+    """Test if Flask-Migrate can detect changes."""
+    print("=== Migration Detection Test ===")
+
+    # 'flask db check' exits 0 when the models match the migrations and
+    # non-zero when an upgrade or new upgrade operations are needed
+    code, stdout, stderr = run_command("flask db check")
+
+    if code == 0:
+        print("ℹ️ No schema changes detected")
+        print("   This means your models match the current migration state")
+    elif "Target database is not up to date" in stdout + stderr:
+        print("❌ Migration detection failed")
+        print("   ⚠️ Database needs upgrade first!")
+        print("   Run: flask db upgrade")
+    elif "New upgrade operations detected" in stdout + stderr:
+        print("✓ Flask-Migrate can detect changes")
+        print("\nDetected changes:")
+        print(stdout)
+    else:
+        print("❌ Migration detection failed")
+        print(f"Error: {stderr}")
+
+    print()
+
+def suggest_fixes():
+    """Suggest fixes based on diagnostics."""
+    print("=== Suggested Actions ===")
+
+    # Check if we need to upgrade
+    code, stdout, stderr = run_command("flask db heads")
+    if code == 0:
+        heads = stdout.strip()
+        code2, current, _ = run_command("flask db current")
+        if code2 == 0 and current.strip() != heads:
+            print("1. Your database is not at the latest migration:")
+            print("   flask db upgrade")
+            print()
+
+    # Check for pending migrations
+    code, stdout, stderr = run_command("flask db show")
+    if code == 0 and "pending upgrade" in stdout.lower():
+        print("2. You have pending migrations to apply:")
+        print("   flask db upgrade")
+        print()
+
+    print("3. To create a new migration after making model changes:")
+    print("   flask db migrate -m 'Description of changes'")
+    print("   flask db upgrade")
+    print()
+
+    print("4. If you're getting 'No changes detected':")
+    print("   - Ensure you've actually modified a model")
+    print("   - Check that the model is imported in models/__init__.py")
+    print("   - Check pending operations with: flask db check")
+    print()
+
+    print("5. 
For 'Target database is not up to date' errors:") + print(" flask db stamp head # Force mark as current") + print(" flask db migrate -m 'Your changes'") + print() + +def main(): + """Run all diagnostics.""" + print("=== Flask-Migrate Diagnostic Tool ===\n") + + # Run checks + check_environment() + + if not check_migrations_directory(): + print("\n❌ Migrations not properly initialized") + print("Run: python establish_baseline_4214e88.py") + return 1 + + if not check_model_imports(): + print("\n❌ Model import issues detected") + return 1 + + check_database_state() + test_migration_detection() + suggest_fixes() + + print("=== Diagnostic Complete ===") + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/establish_baseline_4214e88.py b/establish_baseline_4214e88.py index fd0a7aa..464c418 100755 --- a/establish_baseline_4214e88.py +++ b/establish_baseline_4214e88.py @@ -85,105 +85,133 @@ def main(): return 1 run_command("rm -rf migrations", "Removing existing migrations directory") - # Step 2: Create a temporary directory for baseline models - with tempfile.TemporaryDirectory() as tmpdir: - print(f"\nCreating temporary directory: {tmpdir}") + # Step 2: Backup current models and extract baseline + print(f"\nPreparing baseline models from commit {BASELINE_COMMIT}...") + + # Check if baseline commit has models.py or models/ directory + result = subprocess.run( + f"git show {BASELINE_COMMIT}:models.py", + shell=True, + capture_output=True + ) + has_single_models_file = result.returncode == 0 + + if has_single_models_file: + print("✓ Found models.py in baseline commit (monolithic structure)") + + # Backup current models directory + if os.path.exists('models'): + print("Backing up current models/ directory...") + run_command("mv models models_backup_temp", "Backing up current models") + + # Extract baseline models.py + run_command( + f"git show {BASELINE_COMMIT}:models.py > models.py", + "Extracting baseline models.py" + ) - # Step 3: Extract models from baseline commit - print(f"\nExtracting models from commit {BASELINE_COMMIT}...") + # We need to ensure the models.py imports db correctly + # The old file might have different imports + print("Adjusting imports in baseline models.py...") + with open('models.py', 'r') as f: + content = f.read() - # Get the models directory from the baseline commit - models_files = [ - "models/__init__.py", - "models/base.py", - "models/enums.py", - "models/company.py", - "models/user.py", - "models/team.py", - "models/project.py", - "models/task.py", - "models/time_entry.py", - "models/sprint.py", - "models/system.py", - "models/announcement.py", - "models/dashboard.py", - "models/work_config.py", - "models/invitation.py", - "models/note.py", - "models/note_share.py" - ] + # Ensure it has proper imports for Flask-Migrate + if 'from flask_sqlalchemy import SQLAlchemy' not in content: + # Add the import at the top if missing + lines = content.split('\n') + for i, line in enumerate(lines): + if line.strip() and not line.startswith('#'): + lines.insert(i, 'from flask_sqlalchemy import SQLAlchemy\ndb = SQLAlchemy()\n') + break + content = '\n'.join(lines) + + with open('models.py', 'w') as f: + f.write(content) + else: + print("⚠️ No models.py found in baseline commit") + print("Checking for models/ directory...") - # Also check if models_old.py exists in baseline (fallback) + # Try to check if models/ exists result = subprocess.run( - f"git show {BASELINE_COMMIT}:models_old.py", + f"git show 
{BASELINE_COMMIT}:models/__init__.py", shell=True, capture_output=True ) - use_models_old = result.returncode == 0 - if use_models_old: - print("Using models_old.py from baseline commit") - # Save current models - run_command("cp -r models models_current", "Backing up current models") - - # Get models_old.py from baseline - run_command( - f"git show {BASELINE_COMMIT}:models_old.py > models_baseline.py", - "Extracting baseline models" - ) - - # Temporarily replace models with baseline - # This is a bit hacky but ensures we generate the right migration - print("\nPreparing baseline schema...") - with open('models_baseline.py', 'r') as f: - baseline_content = f.read() - - # We need to be careful here - save current state and restore later + if result.returncode == 0: + print("Found models/ directory in baseline commit") + # This shouldn't happen for commit 4214e88, but handle it anyway + # ... existing code for models/ directory ... else: - print("Using models/ directory from baseline commit") - # Extract each model file from baseline - os.makedirs(os.path.join(tmpdir, "models"), exist_ok=True) - - for model_file in models_files: - result = subprocess.run( - f"git show {BASELINE_COMMIT}:{model_file}", - shell=True, - capture_output=True, - text=True - ) - if result.returncode == 0: - file_path = os.path.join(tmpdir, model_file) - os.makedirs(os.path.dirname(file_path), exist_ok=True) - with open(file_path, 'w') as f: - f.write(result.stdout) - print(f" ✓ Extracted {model_file}") - else: - print(f" ⚠️ Could not extract {model_file}") - - # Step 4: Initialize Flask-Migrate - run_command("flask db init", "Initializing Flask-Migrate") + print("❌ Neither models.py nor models/ found in baseline commit!") + print("This commit might not have SQLAlchemy models yet.") + return 1 - # Step 5: Create the baseline migration - print("\n📝 Creating baseline migration...") - print("This migration represents the schema at commit 4214e88") + # Step 3: Initialize Flask-Migrate + run_command("flask db init", "Initializing Flask-Migrate") + + # Step 4: Create the baseline migration + print("\n📝 Creating baseline migration...") + print("This migration represents the schema at commit 4214e88") + + migration_message = f"Baseline schema from commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})" + + # Need to temporarily update app.py imports if using old models.py + if has_single_models_file: + print("Temporarily adjusting app.py imports...") + with open('app.py', 'r') as f: + app_content = f.read() - migration_message = f"Baseline schema from commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})" - run_command( - f'flask db migrate -m "{migration_message}"', - "Generating baseline migration" + # Replace models imports temporarily + app_content_backup = app_content + app_content = app_content.replace( + 'from models import db,', + 'from models import db,' + ).replace( + 'from models import', + 'from models import' ) - # Step 6: Add a note to the migration file - migration_files = os.listdir("migrations/versions") - if migration_files: - latest_migration = sorted(migration_files)[-1] - migration_path = os.path.join("migrations/versions", latest_migration) - - with open(migration_path, 'r') as f: - content = f.read() - - # Add comment at the top of the file - baseline_note = f'''""" + with open('app.py', 'w') as f: + f.write(app_content) + + # Generate the migration + result = run_command( + f'flask db migrate -m "{migration_message}"', + "Generating baseline migration" + ) + + # Step 5: Restore current models structure + if 
has_single_models_file: + print("\nRestoring current models structure...") + + # Remove temporary models.py + if os.path.exists('models.py'): + os.remove('models.py') + print("✓ Removed temporary models.py") + + # Restore models directory + if os.path.exists('models_backup_temp'): + run_command("mv models_backup_temp models", "Restoring models directory") + + # Restore app.py if we modified it + if 'app_content_backup' in locals(): + with open('app.py', 'w') as f: + f.write(app_content_backup) + print("✓ Restored app.py") + + # Step 6: Add a note to the migration file + migration_files = os.listdir("migrations/versions") + if migration_files: + latest_migration = sorted(migration_files)[-1] + migration_path = os.path.join("migrations/versions", latest_migration) + + with open(migration_path, 'r') as f: + content = f.read() + + # Add comment at the top of the file + baseline_note = f'''""" BASELINE MIGRATION - DO NOT MODIFY This migration represents the database schema at commit {BASELINE_COMMIT}. @@ -201,11 +229,11 @@ def main(): """ ''' - - with open(migration_path, 'w') as f: - f.write(baseline_note + content) - - print(f"✓ Added baseline note to migration: {latest_migration}") + + with open(migration_path, 'w') as f: + f.write(baseline_note + content) + + print(f"✓ Added baseline note to migration: {latest_migration}") # Step 7: Create documentation doc_content = f"""# Flask-Migrate Baseline Information diff --git a/fix_migration_sequence.py b/fix_migration_sequence.py new file mode 100755 index 0000000..f10fa31 --- /dev/null +++ b/fix_migration_sequence.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 +""" +Fix common Flask-Migrate sequencing issues. +Handles the case where you need to apply migrations before creating new ones. +""" + +import os +import sys +import subprocess + +def run_command(cmd, description): + """Run a command with output.""" + print(f"\n➜ {description}") + print(f" Command: {cmd}") + result = subprocess.run(cmd, shell=True) + return result.returncode == 0 + +def main(): + """Fix migration sequence issues.""" + print("=== Flask-Migrate Sequence Fix ===") + + # Set environment + os.environ['FLASK_APP'] = 'app.py' + + print("\nThis script will:") + print("1. Show current migration status") + print("2. Apply any pending migrations") + print("3. Prepare for creating new migrations") + + input("\nPress Enter to continue...") + + # Step 1: Show current status + print("\n" + "="*50) + print("STEP 1: Current Status") + print("="*50) + + run_command("flask db current", "Current database revision") + run_command("flask db heads", "Latest migration in files") + + # Step 2: Check if upgrade needed + print("\n" + "="*50) + print("STEP 2: Checking for pending migrations") + print("="*50) + + # Try to upgrade + if run_command("flask db upgrade", "Applying pending migrations"): + print("✅ Database is now up to date") + else: + print("⚠️ Upgrade failed. Trying to fix...") + + # Try stamping head + response = input("\nStamp database as current? 
(y/N): ") + if response.lower() == 'y': + if run_command("flask db stamp head", "Stamping database"): + print("✅ Database stamped as current") + + # Step 3: Test creating a migration + print("\n" + "="*50) + print("STEP 3: Testing migration creation") + print("="*50) + + if run_command("flask db migrate --dry-run", "Dry run of migration"): + print("✅ Ready to create new migrations") + print("\nYou can now run:") + print(" flask db migrate -m 'Your migration message'") + else: + print("❌ Still having issues") + print("\nTry running: python diagnose_migrations.py") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/fix_revision_mismatch.py b/fix_revision_mismatch.py new file mode 100755 index 0000000..5317c27 --- /dev/null +++ b/fix_revision_mismatch.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 +""" +Fix Flask-Migrate revision mismatch errors. +Handles cases where database references a revision that doesn't exist in files. +""" + +import os +import sys +import subprocess +import glob +from pathlib import Path + +def run_command(cmd, capture=True): + """Run a command and return result.""" + if capture: + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + return result.returncode, result.stdout, result.stderr + else: + result = subprocess.run(cmd, shell=True) + return result.returncode, "", "" + +def get_database_revision(): + """Get current revision from database.""" + print("Checking database revision...") + code, stdout, stderr = run_command("flask db current") + + if code != 0: + if "Can't locate revision" in stderr: + # Extract the problematic revision + import re + match = re.search(r"Can't locate revision identified by '([^']+)'", stderr) + if match: + return match.group(1), True # revision, is_missing + print(f"Error getting current revision: {stderr}") + return None, False + + # Extract revision from output + revision = stdout.strip().split()[0] if stdout.strip() else None + return revision, False + +def get_file_revisions(): + """Get all revisions from migration files.""" + versions_dir = Path("migrations/versions") + if not versions_dir.exists(): + return [] + + revisions = [] + for file in versions_dir.glob("*.py"): + if file.name == "__pycache__": + continue + + with open(file, 'r') as f: + content = f.read() + + # Extract revision + import re + revision_match = re.search(r"^revision = ['\"]([^'\"]+)['\"]", content, re.MULTILINE) + down_revision_match = re.search(r"^down_revision = ['\"]([^'\"]+)['\"]", content, re.MULTILINE) + + if revision_match: + revisions.append({ + 'file': file.name, + 'revision': revision_match.group(1), + 'down_revision': down_revision_match.group(1) if down_revision_match else None + }) + + return revisions + +def check_alembic_version_table(): + """Check the alembic_version table directly.""" + print("\nChecking alembic_version table...") + + # Try to connect to database and check + try: + from app import app, db + with app.app_context(): + result = db.engine.execute("SELECT version_num FROM alembic_version") + versions = [row[0] for row in result] + return versions + except Exception as e: + print(f"Could not check alembic_version table: {e}") + return [] + +def main(): + """Main repair function.""" + print("=== Flask-Migrate Revision Mismatch Repair ===\n") + + # Set environment + os.environ['FLASK_APP'] = 'app.py' + + # Step 1: Diagnose the problem + print("Step 1: Diagnosing the issue...") + + db_revision, is_missing = get_database_revision() + if is_missing: + print(f"❌ 
Database references missing revision: {db_revision}") + elif db_revision: + print(f"📍 Current database revision: {db_revision}") + else: + print("⚠️ Could not determine database revision") + + # Step 2: Check migration files + print("\nStep 2: Checking migration files...") + file_revisions = get_file_revisions() + + if not file_revisions: + print("❌ No migration files found!") + print("\nSolution: Re-initialize migrations") + print(" rm -rf migrations") + print(" python establish_baseline_4214e88.py") + return 1 + + print(f"Found {len(file_revisions)} migration files:") + for rev in file_revisions: + print(f" - {rev['revision'][:8]} in {rev['file']}") + + # Check if problematic revision exists in files + if is_missing and db_revision: + revision_exists = any(r['revision'] == db_revision for r in file_revisions) + if not revision_exists: + print(f"\n❌ Revision {db_revision} not found in migration files!") + + # Step 3: Check alembic_version table + db_versions = check_alembic_version_table() + if db_versions: + print(f"\nDatabase alembic_version table contains: {db_versions}") + + # Step 4: Provide solutions + print("\n" + "="*50) + print("SOLUTIONS") + print("="*50) + + print("\nOption 1: Reset to latest migration file (Recommended)") + print("-" * 40) + if file_revisions: + latest_revision = file_revisions[-1]['revision'] + print(f"Latest revision in files: {latest_revision}") + print("\nRun these commands:") + print(f" flask db stamp {latest_revision}") + print(" flask db upgrade") + + print("\nOption 2: Start fresh (Nuclear option)") + print("-" * 40) + print("⚠️ Only do this if Option 1 fails!") + print("\nRun these commands:") + print(" # Clear alembic version from database") + print(" python -c \"from app import app, db; app.app_context().push(); db.engine.execute('DELETE FROM alembic_version')\"") + print(" # Stamp with latest revision") + if file_revisions: + print(f" flask db stamp {file_revisions[-1]['revision']}") + + print("\nOption 3: Complete reset (Last resort)") + print("-" * 40) + print("⚠️ This will recreate all migrations!") + print("\nRun these commands:") + print(" rm -rf migrations") + print(" python establish_baseline_4214e88.py") + print(" flask db stamp head") + + # Step 5: Automated fix attempt + print("\n" + "="*50) + print("AUTOMATED FIX") + print("="*50) + + if is_missing and file_revisions: + response = input(f"\nAttempt to fix by stamping to latest revision? (y/N): ") + if response.lower() == 'y': + latest_revision = file_revisions[-1]['revision'] + print(f"\nStamping database to revision: {latest_revision}") + code, stdout, stderr = run_command(f"flask db stamp {latest_revision}") + + if code == 0: + print("✅ Successfully stamped database!") + print("\nNow run: flask db upgrade") + else: + print(f"❌ Stamping failed: {stderr}") + print("\nTry manual SQL fix:") + print(f" UPDATE alembic_version SET version_num = '{latest_revision}';") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/quick_fix_revision.sh b/quick_fix_revision.sh new file mode 100755 index 0000000..6432881 --- /dev/null +++ b/quick_fix_revision.sh @@ -0,0 +1,95 @@ +#!/bin/bash +# Quick fix for revision mismatch error + +echo "=== Quick Fix for Revision 838055206ef5 Error ===" +echo "" +echo "This error occurs when the database references a migration that doesn't exist." +echo "We'll fix this by resetting to the current migration files." 
+echo "" + +# Set Flask app +export FLASK_APP=app.py + +# Show current situation +echo "Current migration files:" +ls -la migrations/versions/*.py 2>/dev/null || echo "No migration files found!" + +echo "" +echo "Attempting to get current database state:" +flask db current 2>&1 || true + +echo "" +echo "Available options:" +echo "1. Reset to latest migration file (safest)" +echo "2. Clear migration history and start fresh" +echo "3. Cancel and investigate manually" +echo "" +read -p "Choose option (1-3): " choice + +case $choice in + 1) + echo "" + echo "Finding latest migration..." + # Get the latest migration revision + latest_revision=$(ls -t migrations/versions/*.py 2>/dev/null | head -1 | xargs grep "^revision = " | cut -d"'" -f2) + + if [ -z "$latest_revision" ]; then + echo "❌ No migration files found!" + echo "Run: python establish_baseline_4214e88.py" + exit 1 + fi + + echo "Latest revision: $latest_revision" + echo "Stamping database to this revision..." + + flask db stamp $latest_revision + + if [ $? -eq 0 ]; then + echo "✅ Success! Database stamped to $latest_revision" + echo "" + echo "Next steps:" + echo "1. Run: flask db upgrade" + echo "2. Then you can create new migrations" + else + echo "❌ Stamping failed. Try option 2." + fi + ;; + + 2) + echo "" + echo "⚠️ This will clear all migration history!" + read -p "Are you sure? (y/N): " confirm + + if [ "$confirm" = "y" ]; then + echo "Clearing alembic_version table..." + python -c " +from app import app, db +with app.app_context(): + try: + db.engine.execute('DELETE FROM alembic_version') + print('✅ Cleared alembic_version table') + except Exception as e: + print(f'❌ Error: {e}') +" + + echo "" + echo "Now re-establishing baseline..." + python establish_baseline_4214e88.py + + if [ $? -eq 0 ]; then + flask db stamp head + echo "✅ Migration state reset successfully!" + fi + else + echo "Cancelled." + fi + ;; + + 3) + echo "Cancelled. Run 'python fix_revision_mismatch.py' for detailed diagnostics." + ;; + + *) + echo "Invalid option" + ;; +esac \ No newline at end of file diff --git a/simple_baseline_4214e88.py b/simple_baseline_4214e88.py new file mode 100755 index 0000000..c3af910 --- /dev/null +++ b/simple_baseline_4214e88.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python3 +""" +Simplified baseline establishment for commit 4214e88. +Handles the models.py (monolithic) to models/ (modular) transition properly. +""" + +import os +import sys +import subprocess +import shutil + +def run_command(cmd, description, check=True): + """Run a command and handle errors.""" + print(f"\n➜ {description}") + print(f" Command: {cmd}") + result = subprocess.run(cmd, shell=True) + if result.returncode != 0 and check: + print(f"❌ Command failed!") + sys.exit(1) + return result.returncode == 0 + +def main(): + """Main function.""" + print("=== Simplified Baseline Setup for Commit 4214e88 ===") + print("\nThis script will:") + print("1. Extract models.py from commit 4214e88") + print("2. Create a baseline migration") + print("3. Restore your current models structure") + + response = input("\nContinue? 
(y/N): ") + if response.lower() != 'y': + print("Aborting...") + return 1 + + # Set environment + os.environ['FLASK_APP'] = 'app.py' + BASELINE_COMMIT = "4214e88d18fce7a9c75927753b8d4e9222771e14" + + # Step 1: Clean up + if os.path.exists('migrations'): + print("\n⚠️ Removing existing migrations directory...") + shutil.rmtree('migrations') + + # Step 2: Backup current structure + print("\nBacking up current models...") + if os.path.exists('models'): + shutil.move('models', 'models_backup') + print("✓ Backed up models/ to models_backup/") + + if os.path.exists('models.py'): + shutil.move('models.py', 'models.py.backup') + print("✓ Backed up models.py to models.py.backup") + + try: + # Step 3: Get models.py from baseline commit + print(f"\nExtracting models.py from commit {BASELINE_COMMIT[:8]}...") + result = subprocess.run( + f"git show {BASELINE_COMMIT}:models.py > models.py", + shell=True, + capture_output=True, + text=True + ) + + if result.returncode != 0: + print("❌ Failed to extract models.py from baseline commit!") + print("Error:", result.stderr) + return 1 + + print("✓ Extracted models.py") + + # Step 4: Initialize Flask-Migrate + print("\nInitializing Flask-Migrate...") + run_command("flask db init", "Creating migrations directory") + + # Step 5: Create baseline migration + print("\nCreating baseline migration...") + run_command( + 'flask db migrate -m "Baseline schema from commit 4214e88"', + "Generating migration" + ) + + print("✅ Baseline migration created!") + + finally: + # Step 6: Always restore original structure + print("\nRestoring original models structure...") + + if os.path.exists('models.py'): + os.remove('models.py') + print("✓ Removed temporary models.py") + + if os.path.exists('models.py.backup'): + shutil.move('models.py.backup', 'models.py') + print("✓ Restored models.py.backup") + + if os.path.exists('models_backup'): + shutil.move('models_backup', 'models') + print("✓ Restored models/ directory") + + # Step 7: Add note to migration + print("\nFinalizing migration...") + try: + import glob + migration_files = glob.glob('migrations/versions/*.py') + if migration_files: + latest = max(migration_files, key=os.path.getctime) + + with open(latest, 'r') as f: + content = f.read() + + note = '''"""BASELINE MIGRATION FROM COMMIT 4214e88 + +This represents the database schema from the monolithic models.py file. +DO NOT MODIFY THIS MIGRATION. + +For existing databases: flask db stamp head +For new databases: flask db upgrade +""" + +''' + with open(latest, 'w') as f: + f.write(note + content) + + print(f"✓ Added baseline note to {os.path.basename(latest)}") + except Exception as e: + print(f"⚠️ Could not add note to migration: {e}") + + # Step 8: Summary + print("\n" + "="*60) + print("✨ SUCCESS!") + print("="*60) + print("\nBaseline migration created from commit 4214e88") + print("\nNext steps:") + print("1. For existing database: flask db stamp head") + print("2. For new database: flask db upgrade") + print("3. Create new migrations: flask db migrate -m 'Your changes'") + print("\nIMPORTANT: Review the migration in migrations/versions/ before applying!") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file From 2895748907cf4b35da92876fc96196115a12ae62 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 19:57:30 +0200 Subject: [PATCH 03/14] Adapt DB Migration Scripts for Docker. 
--- DOCKER_MIGRATIONS_GUIDE.md | 189 +++++++++++++++++++++++++ FLASK_MIGRATE_GUIDE.md | 2 + FLASK_MIGRATE_TROUBLESHOOTING.md | 5 +- MIGRATIONS_README.md | 76 ++++++++++ docker_migrate_init.py | 231 +++++++++++++++++++++++++++++++ startup.sh | 31 ++++- startup_postgres.sh | 31 ++++- 7 files changed, 551 insertions(+), 14 deletions(-) create mode 100644 DOCKER_MIGRATIONS_GUIDE.md create mode 100644 MIGRATIONS_README.md create mode 100755 docker_migrate_init.py diff --git a/DOCKER_MIGRATIONS_GUIDE.md b/DOCKER_MIGRATIONS_GUIDE.md new file mode 100644 index 0000000..2cfd8e1 --- /dev/null +++ b/DOCKER_MIGRATIONS_GUIDE.md @@ -0,0 +1,189 @@ +# Flask-Migrate in Docker Deployments + +## Overview + +Docker containers typically don't include Git repositories, so we can't use Git commands to extract historical schemas. This guide explains how to use Flask-Migrate in Docker environments. + +## Initial Setup (First Deployment) + +When deploying with Flask-Migrate for the first time: + +### Automatic Setup (via startup scripts) + +The `startup.sh` and `startup_postgres.sh` scripts now automatically handle migration initialization: + +1. **For existing databases with data:** + - Creates a baseline migration from current models + - Stamps the database as current (no changes applied) + - Ready for future migrations + +2. **For empty databases:** + - Creates a baseline migration from current models + - Applies it to create all tables + - Ready for future migrations + +### Manual Setup + +If you need to set up manually: + +```bash +# Inside your Docker container +python docker_migrate_init.py + +# For existing database with tables: +flask db stamp head + +# For new empty database: +flask db upgrade +``` + +## Creating New Migrations + +After initial setup, create new migrations normally: + +```bash +# 1. Make changes to your models + +# 2. Generate migration +flask db migrate -m "Add user preferences" + +# 3. Review the generated migration +cat migrations/versions/*.py + +# 4. Apply the migration +flask db upgrade +``` + +## Helper Script + +The `docker_migrate_init.py` script creates a `migrate.sh` helper: + +```bash +# Check current migration status +./migrate.sh status + +# Apply pending migrations +./migrate.sh apply + +# Create new migration +./migrate.sh create "Add company settings" + +# Mark database as current (existing DBs) +./migrate.sh mark-current +``` + +## Docker Compose Example + +```yaml +version: '3.8' +services: + web: + build: . + environment: + - DATABASE_URL=postgresql://user:pass@db:5432/timetrack + - FLASK_APP=app.py + volumes: + # Persist migrations between container restarts + - ./migrations:/app/migrations + depends_on: + - db + command: ./startup_postgres.sh + + db: + image: postgres:13 + environment: + - POSTGRES_DB=timetrack + - POSTGRES_USER=user + - POSTGRES_PASSWORD=pass + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: +``` + +## Important Notes + +### 1. Migrations Directory + +- The `migrations/` directory should be persisted between deployments +- Either use a volume mount or include it in your Docker image +- Don't regenerate migrations on each deployment + +### 2. Environment Variables + +Always set these in your Docker environment: +```bash +FLASK_APP=app.py +DATABASE_URL=your_database_url +``` + +### 3. Production Workflow + +1. **Development**: Create and test migrations locally +2. **Commit**: Add migration files to Git +3. **Build**: Include migrations in Docker image +4. 
**Deploy**: Startup script applies migrations automatically + +### 4. Rollback Strategy + +To rollback a migration: +```bash +# Inside container +flask db downgrade # Go back one migration +flask db downgrade -2 # Go back two migrations +``` + +## Troubleshooting + +### "No Git repository found" + +This is expected in Docker. Use `docker_migrate_init.py` instead of the Git-based scripts. + +### "Can't locate revision" + +Your database references a migration that doesn't exist: +```bash +# Reset to current state +python docker_migrate_init.py +flask db stamp head +``` + +### Migration conflicts after deployment + +If migrations were created in different environments: +```bash +# Merge migrations +flask db merge -m "Merge production and development" +flask db upgrade +``` + +## Best Practices + +1. **Always test migrations** in a staging environment first +2. **Back up your database** before applying migrations in production +3. **Include migrations in your Docker image** for consistency +4. **Don't generate migrations in production** - only apply pre-tested ones +5. **Monitor the startup logs** to ensure migrations apply successfully + +## Migration State in Different Scenarios + +### Scenario 1: Fresh deployment, empty database +- Startup script runs `docker_migrate_init.py` +- Creates baseline migration +- Applies it to create all tables + +### Scenario 2: Existing database, first Flask-Migrate setup +- Startup script runs `docker_migrate_init.py` +- Creates baseline migration matching current schema +- Stamps database as current (no changes) + +### Scenario 3: Subsequent deployments with new migrations +- Startup script detects `migrations/` exists +- Runs `flask db upgrade` to apply new migrations + +### Scenario 4: Container restart (no new code) +- Startup script detects `migrations/` exists +- Runs `flask db upgrade` (no-op if already current) + +This approach ensures migrations work correctly in all Docker deployment scenarios! \ No newline at end of file diff --git a/FLASK_MIGRATE_GUIDE.md b/FLASK_MIGRATE_GUIDE.md index d787629..c21f802 100644 --- a/FLASK_MIGRATE_GUIDE.md +++ b/FLASK_MIGRATE_GUIDE.md @@ -6,6 +6,8 @@ TimeTrack has been refactored to use Flask-Migrate (which wraps Alembic) for dat **IMPORTANT**: The baseline for Flask-Migrate is set at git commit `4214e88d18fce7a9c75927753b8d4e9222771e14`. All schema changes after this commit need to be recreated as Flask-Migrate migrations. +**For Docker Deployments**: See `DOCKER_MIGRATIONS_GUIDE.md` for Docker-specific instructions (no Git required). + ## Migration from Old System ### For Existing Deployments diff --git a/FLASK_MIGRATE_TROUBLESHOOTING.md b/FLASK_MIGRATE_TROUBLESHOOTING.md index 5617119..47600f6 100644 --- a/FLASK_MIGRATE_TROUBLESHOOTING.md +++ b/FLASK_MIGRATE_TROUBLESHOOTING.md @@ -14,10 +14,11 @@ **Solution**: ```bash -# Use the simplified baseline script instead +# For local development with Git: python simple_baseline_4214e88.py -# This properly handles the models.py → models/ transition +# For Docker deployments (no Git): +python docker_migrate_init.py ``` ### 1. "Target database is not up to date" diff --git a/MIGRATIONS_README.md b/MIGRATIONS_README.md new file mode 100644 index 0000000..b4d810c --- /dev/null +++ b/MIGRATIONS_README.md @@ -0,0 +1,76 @@ +# TimeTrack Database Migrations + +## Quick Start + +### Docker Deployments +```bash +# Automatic: startup scripts handle everything +# Manual: python docker_migrate_init.py +``` +See `DOCKER_MIGRATIONS_GUIDE.md` for details. 
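+
+For example, a first-time manual setup inside a running container might look
+like this (the container name `timetrack_web_1` is illustrative - use yours):
+
+```bash
+docker exec -it timetrack_web_1 bash    # enter the web container
+python docker_migrate_init.py           # baseline from current models (no Git)
+flask db stamp head                     # existing DB: mark as current
+flask db upgrade                        # empty DB: create all tables
+```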
+ +### Local Development +```bash +# With Git history: +python simple_baseline_4214e88.py + +# Without Git history: +python docker_migrate_init.py +``` + +## Documentation Structure + +1. **FLASK_MIGRATE_GUIDE.md** - Complete Flask-Migrate documentation +2. **DOCKER_MIGRATIONS_GUIDE.md** - Docker-specific instructions +3. **FLASK_MIGRATE_TROUBLESHOOTING.md** - Common issues and solutions +4. **POST_BASELINE_MIGRATIONS.md** - Required migrations after baseline +5. **MIGRATION_QUICK_REFERENCE.md** - Command cheat sheet + +## Key Scripts + +### For Docker (No Git Required) +- `docker_migrate_init.py` - Initialize from current schema +- `migrate.sh` - Helper script (created by docker_migrate_init.py) + +### For Development (Git Required) +- `simple_baseline_4214e88.py` - Initialize from commit 4214e88 +- `establish_baseline_4214e88.py` - Advanced baseline setup + +### Troubleshooting +- `diagnose_migrations.py` - Comprehensive diagnostics +- `fix_migration_sequence.py` - Fix sequence issues +- `fix_revision_mismatch.py` - Fix revision errors +- `quick_fix_revision.sh` - Quick revision fix + +## Common Workflows + +### First Deployment (Docker) +Handled automatically by startup scripts, or: +```bash +python docker_migrate_init.py +flask db stamp head # For existing DB +flask db upgrade # For new DB +``` + +### Create New Migration +```bash +flask db migrate -m "Add user preferences" +flask db upgrade +``` + +### Check Status +```bash +flask db current # Current revision +flask db history # All migrations +./migrate.sh status # In Docker +``` + +## Important Notes + +1. **Docker containers don't have Git** - Use docker_migrate_init.py +2. **Always review generated migrations** before applying +3. **Test on staging first** before production +4. **Include migrations/ in Docker image** or use volume mount +5. **Startup scripts handle initialization** automatically + +Choose the appropriate guide based on your deployment environment! \ No newline at end of file diff --git a/docker_migrate_init.py b/docker_migrate_init.py new file mode 100755 index 0000000..8b7554c --- /dev/null +++ b/docker_migrate_init.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python3 +""" +Docker-friendly Flask-Migrate initialization. +No Git required - works with current schema as baseline. 
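+
+The script detects whether the database already has tables and prints the
+matching next step: `flask db stamp head` for an existing database, or
+`flask db upgrade` for an empty one.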
+""" + +import os +import sys +import subprocess +import shutil +from datetime import datetime + +def run_command(cmd, description, check=True): + """Run a command and handle errors.""" + print(f"\n➜ {description}") + print(f" Command: {cmd}") + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0: + print(f"✓ Success") + if result.stdout.strip(): + print(f" {result.stdout.strip()}") + return True + else: + print(f"✗ Failed") + if result.stderr: + print(f" Error: {result.stderr}") + if check: + sys.exit(1) + return False + +def check_database_connection(): + """Check if we can connect to the database.""" + print("\nChecking database connection...") + try: + from app import app, db + with app.app_context(): + # Try a simple query + db.engine.execute("SELECT 1") + print("✓ Database connection successful") + return True + except Exception as e: + print(f"✗ Database connection failed: {e}") + return False + +def check_existing_tables(): + """Check what tables exist in the database.""" + print("\nChecking existing tables...") + try: + from app import app, db + with app.app_context(): + # Get table names + inspector = db.inspect(db.engine) + tables = inspector.get_table_names() + + if tables: + print(f"✓ Found {len(tables)} existing tables:") + for table in sorted(tables): + if table != 'alembic_version': + print(f" - {table}") + return True + else: + print("ℹ️ No tables found (empty database)") + return False + except Exception as e: + print(f"✗ Error checking tables: {e}") + return False + +def main(): + """Main initialization function.""" + print("=== Flask-Migrate Docker Initialization ===") + print("\nThis script will set up Flask-Migrate for your Docker deployment.") + print("It uses your CURRENT schema as the baseline (no Git required).") + + # Set environment + os.environ['FLASK_APP'] = 'app.py' + + # Check prerequisites + if not check_database_connection(): + print("\n❌ Cannot connect to database. Check your DATABASE_URL.") + return 1 + + has_tables = check_existing_tables() + + print("\n" + "="*50) + if has_tables: + print("SCENARIO: Existing database with tables") + print("="*50) + print("\nYour database already has tables. We'll create a baseline") + print("migration and mark it as already applied.") + else: + print("SCENARIO: Empty database") + print("="*50) + print("\nYour database is empty. We'll create a baseline") + print("migration that can be applied to create all tables.") + + response = input("\nContinue? 
(y/N): ") + if response.lower() != 'y': + print("Aborting...") + return 1 + + # Step 1: Clean up any existing migrations + if os.path.exists('migrations'): + print("\n⚠️ Removing existing migrations directory...") + shutil.rmtree('migrations') + + # Step 2: Initialize Flask-Migrate + print("\nInitializing Flask-Migrate...") + if not run_command("flask db init", "Creating migrations directory"): + return 1 + + # Step 3: Create baseline migration + print("\nCreating baseline migration from current models...") + baseline_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + if not run_command( + f'flask db migrate -m "Docker baseline migration - {baseline_date}"', + "Generating migration" + ): + return 1 + + # Step 4: Add documentation to the migration + print("\nDocumenting the migration...") + try: + import glob + migration_files = glob.glob('migrations/versions/*.py') + if migration_files: + latest = max(migration_files, key=os.path.getctime) + + with open(latest, 'r') as f: + content = f.read() + + note = f'''"""DOCKER BASELINE MIGRATION +Generated: {baseline_date} + +This migration represents the current state of your models. +It serves as the baseline for all future migrations. + +For existing databases with tables: + flask db stamp head # Mark as current without running + +For new empty databases: + flask db upgrade # Create all tables + +DO NOT MODIFY THIS MIGRATION +""" + +''' + with open(latest, 'w') as f: + f.write(note + content) + + print(f"✓ Documented {os.path.basename(latest)}") + except Exception as e: + print(f"⚠️ Could not document migration: {e}") + + # Step 5: Handle based on database state + print("\n" + "="*50) + print("NEXT STEPS") + print("="*50) + + if has_tables: + print("\nYour database already has tables. Run this command to") + print("mark it as up-to-date WITHOUT running the migration:") + print("\n flask db stamp head") + print("\nThen you can create new migrations normally:") + print(" flask db migrate -m 'Add new feature'") + print(" flask db upgrade") + else: + print("\nYour database is empty. Run this command to") + print("create all tables from the baseline migration:") + print("\n flask db upgrade") + print("\nThen you can create new migrations normally:") + print(" flask db migrate -m 'Add new feature'") + print(" flask db upgrade") + + # Create a helper script + helper_content = f"""#!/bin/bash +# Flask-Migrate helper for Docker +# Generated: {baseline_date} + +export FLASK_APP=app.py + +case "$1" in + status) + echo "Current migration status:" + flask db current + ;; + + apply) + echo "Applying pending migrations..." + flask db upgrade + ;; + + create) + if [ -z "$2" ]; then + echo "Usage: $0 create 'Migration message'" + exit 1 + fi + echo "Creating new migration: $2" + flask db migrate -m "$2" + echo "Review the migration, then run: $0 apply" + ;; + + mark-current) + echo "Marking database as current (no changes)..." 
+ flask db stamp head + ;; + + *) + echo "Flask-Migrate Docker Helper" + echo "Usage:" + echo " $0 status - Show current migration status" + echo " $0 apply - Apply pending migrations" + echo " $0 create 'msg' - Create new migration" + echo " $0 mark-current - Mark DB as current (existing DBs)" + ;; +esac +""" + + with open('migrate.sh', 'w') as f: + f.write(helper_content) + + os.chmod('migrate.sh', 0o755) + print("\n✓ Created migrate.sh helper script") + + print("\n✨ Initialization complete!") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/startup.sh b/startup.sh index 6c8d9a4..7d067af 100755 --- a/startup.sh +++ b/startup.sh @@ -27,16 +27,35 @@ if [ -d "migrations" ]; then echo "✅ Database migrations completed successfully" else echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." - echo "Using baseline from commit 4214e88..." - python establish_baseline_4214e88.py + + # Use Docker-friendly initialization (no Git required) + python docker_migrate_init.py if [ $? -ne 0 ]; then echo "❌ Migration initialization failed!" - echo "Please run manually: python establish_baseline_4214e88.py" exit 1 fi - # Stamp the database as being at baseline - flask db stamp head - echo "✅ Database marked at baseline commit 4214e88" + + # Check if database has existing tables + python -c " +from app import app, db +with app.app_context(): + inspector = db.inspect(db.engine) + tables = [t for t in inspector.get_table_names() if t != 'alembic_version'] + if tables: + print('has_tables') +" > /tmp/db_check.txt + + if grep -q "has_tables" /tmp/db_check.txt 2>/dev/null; then + echo "📊 Existing database detected. Marking as current..." + flask db stamp head + echo "✅ Database marked as current" + else + echo "🆕 Empty database detected. Creating tables..." + flask db upgrade + echo "✅ Database tables created" + fi + + rm -f /tmp/db_check.txt fi # Legacy migration support (can be removed after full transition) diff --git a/startup_postgres.sh b/startup_postgres.sh index 8133289..251b9dd 100755 --- a/startup_postgres.sh +++ b/startup_postgres.sh @@ -27,16 +27,35 @@ if [ -d "migrations" ]; then echo "✅ Database migrations completed successfully" else echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." - echo "Using baseline from commit 4214e88..." - python establish_baseline_4214e88.py + + # Use Docker-friendly initialization (no Git required) + python docker_migrate_init.py if [ $? -ne 0 ]; then echo "❌ Migration initialization failed!" - echo "Please run manually: python establish_baseline_4214e88.py" exit 1 fi - # Stamp the database as being at baseline - flask db stamp head - echo "✅ Database marked at baseline commit 4214e88" + + # Check if database has existing tables + python -c " +from app import app, db +with app.app_context(): + inspector = db.inspect(db.engine) + tables = [t for t in inspector.get_table_names() if t != 'alembic_version'] + if tables: + print('has_tables') +" > /tmp/db_check.txt + + if grep -q "has_tables" /tmp/db_check.txt 2>/dev/null; then + echo "📊 Existing database detected. Marking as current..." + flask db stamp head + echo "✅ Database marked as current" + else + echo "🆕 Empty database detected. Creating tables..." 
+ flask db upgrade + echo "✅ Database tables created" + fi + + rm -f /tmp/db_check.txt fi # Legacy migration support (can be removed after full transition) From ae565bd11c9c4191804239fbf4454e5b819a0c22 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 20:09:00 +0200 Subject: [PATCH 04/14] Integration Hell. --- DEBUGGING_MIGRATIONS.md | 205 +++++++++++++++++++++++++++++++++++++++ debug_entrypoint.sh | 83 ++++++++++++++++ docker-compose.debug.yml | 53 ++++++++++ startup_postgres_safe.sh | 124 +++++++++++++++++++++++ 4 files changed, 465 insertions(+) create mode 100644 DEBUGGING_MIGRATIONS.md create mode 100755 debug_entrypoint.sh create mode 100644 docker-compose.debug.yml create mode 100755 startup_postgres_safe.sh diff --git a/DEBUGGING_MIGRATIONS.md b/DEBUGGING_MIGRATIONS.md new file mode 100644 index 0000000..1fc743a --- /dev/null +++ b/DEBUGGING_MIGRATIONS.md @@ -0,0 +1,205 @@ +# Debugging Migration Issues in Docker + +## Quick Solutions + +### Container Exits Immediately + +Use one of these approaches: + +1. **Debug Mode (Recommended)** + ```bash + docker-compose down + DEBUG_MODE=true docker-compose up + ``` + +2. **Skip Migrations Temporarily** + ```bash + docker-compose down + SKIP_MIGRATIONS=true docker-compose up + ``` + +3. **Use Debug Compose File** + ```bash + docker-compose -f docker-compose.debug.yml up + docker exec -it timetrack_web_1 bash + ``` + +## Debug Entrypoint + +The `debug_entrypoint.sh` keeps the container running and provides diagnostic info: + +```bash +# In docker-compose.yml, change: +command: ["./startup_postgres.sh"] +# To: +entrypoint: ["./debug_entrypoint.sh"] + +# Then: +docker-compose up -d +docker exec -it bash +``` + +## Safe Startup Script + +`startup_postgres_safe.sh` has three modes: + +1. **Normal Mode**: Exits on migration failure (default) +2. **Debug Mode**: Continues running even if migrations fail + ```bash + DEBUG_MODE=true docker-compose up + ``` +3. **Skip Mode**: Skips migrations entirely + ```bash + SKIP_MIGRATIONS=true docker-compose up + ``` + +## Common Debugging Steps + +### 1. Get Into the Container +```bash +# If container keeps exiting, use debug compose: +docker-compose -f docker-compose.debug.yml up -d web +docker exec -it timetrack_web_1 bash + +# Or modify your docker-compose.yml: +# Add: stdin_open: true +# Add: tty: true +# Change: entrypoint: ["/bin/bash"] +``` + +### 2. Manual Migration Setup +```bash +# Inside container: +export FLASK_APP=app.py + +# Check what's wrong +python diagnose_migrations.py + +# Initialize migrations +python docker_migrate_init.py + +# Fix revision issues +python fix_revision_mismatch.py +``` + +### 3. Database Connection Issues +```bash +# Test connection +python -c "from app import app, db; app.app_context().push(); db.engine.execute('SELECT 1')" + +# Check environment +echo $DATABASE_URL +echo $POSTGRES_HOST +``` + +### 4. 
Reset Everything +```bash +# Inside container: +rm -rf migrations +python docker_migrate_init.py +flask db stamp head # For existing DB +flask db upgrade # For new DB +``` + +## Docker Compose Examples + +### Development with Auto-Restart +```yaml +services: + web: + environment: + - DEBUG_MODE=true + restart: unless-stopped # Auto-restart on failure +``` + +### Interactive Debugging +```yaml +services: + web: + entrypoint: ["/app/debug_entrypoint.sh"] + stdin_open: true + tty: true +``` + +### Skip Migrations for Testing +```yaml +services: + web: + environment: + - SKIP_MIGRATIONS=true +``` + +## Environment Variables + +- `DEBUG_MODE=true` - Continue running even if migrations fail +- `SKIP_MIGRATIONS=true` - Skip all migration steps +- `FLASK_APP=app.py` - Required for Flask-Migrate +- `DATABASE_URL` - PostgreSQL connection string + +## Step-by-Step Troubleshooting + +1. **Container won't start?** + ```bash + # Use debug compose + docker-compose -f docker-compose.debug.yml up + ``` + +2. **Migration fails?** + ```bash + # Get into container + docker exec -it bash + + # Run diagnostics + python diagnose_migrations.py + ``` + +3. **Revision mismatch?** + ```bash + # Quick fix + ./quick_fix_revision.sh + + # Or manual fix + flask db stamp + ``` + +4. **Can't initialize migrations?** + ```bash + # Check database connection first + python -c "from app import app; print(app.config['SQLALCHEMY_DATABASE_URI'])" + + # Then initialize + python docker_migrate_init.py + ``` + +## Tips + +1. **Always use volumes** for migrations directory in development +2. **Check logs carefully** - the error is usually clear +3. **Don't run migrations in production containers** - include pre-tested migrations in image +4. **Use DEBUG_MODE** during development for easier troubleshooting +5. **Test locally first** before deploying to production + +## Recovery Commands + +If everything is broken: + +```bash +# 1. Start with debug entrypoint +docker-compose -f docker-compose.debug.yml up -d web + +# 2. Get into container +docker exec -it timetrack_web_1 bash + +# 3. Reset migrations +rm -rf migrations +python docker_migrate_init.py + +# 4. Mark as current (existing DB) or create tables (new DB) +flask db stamp head # Existing +flask db upgrade # New + +# 5. Test the app +python app.py # Run in debug mode + +# 6. If working, update docker-compose.yml and restart normally +``` \ No newline at end of file diff --git a/debug_entrypoint.sh b/debug_entrypoint.sh new file mode 100755 index 0000000..315c29e --- /dev/null +++ b/debug_entrypoint.sh @@ -0,0 +1,83 @@ +#!/bin/bash +# Debug entrypoint for troubleshooting migration issues + +echo "=== TimeTrack Debug Entrypoint ===" +echo "" +echo "This entrypoint keeps the container running for debugging." +echo "The application is NOT started automatically." +echo "" + +# Set Flask app +export FLASK_APP=app.py + +# Wait for PostgreSQL if needed +if [ -n "$DATABASE_URL" ] || [ -n "$POSTGRES_HOST" ]; then + echo "Waiting for PostgreSQL to be ready..." + while ! pg_isready -h ${POSTGRES_HOST:-db} -p ${POSTGRES_PORT:-5432} -U "$POSTGRES_USER" > /dev/null 2>&1; do + echo "PostgreSQL is not ready yet. Waiting..." + sleep 2 + done + echo "✅ PostgreSQL is ready!" 
+fi + +echo "" +echo "=== Environment Info ===" +echo "FLASK_APP: $FLASK_APP" +echo "DATABASE_URL: ${DATABASE_URL:-(not set)}" +echo "Working directory: $(pwd)" +echo "Python version: $(python --version)" +echo "" + +echo "=== Quick Diagnostics ===" + +# Check if migrations directory exists +if [ -d "migrations" ]; then + echo "✅ migrations/ directory exists" + + # Try to check current migration + echo -n "Current migration: " + flask db current 2>&1 || echo "❌ Failed to get current migration" +else + echo "❌ migrations/ directory not found" +fi + +# Check database connection +echo -n "Database connection: " +python -c " +from app import app, db +try: + with app.app_context(): + db.engine.execute('SELECT 1') + print('✅ Connected') +except Exception as e: + print(f'❌ Failed: {e}') +" 2>&1 + +echo "" +echo "=== Available Commands ===" +echo "" +echo "Migration commands:" +echo " python docker_migrate_init.py # Initialize migrations (Docker-friendly)" +echo " flask db current # Show current migration" +echo " flask db history # Show migration history" +echo " flask db upgrade # Apply migrations" +echo " flask db stamp head # Mark DB as current" +echo "" +echo "Diagnostic commands:" +echo " python diagnose_migrations.py # Full diagnostics" +echo " python fix_revision_mismatch.py # Fix revision errors" +echo " ./quick_fix_revision.sh # Quick revision fix" +echo "" +echo "Start application manually:" +echo " ./startup_postgres.sh # Normal startup" +echo " ./startup_postgres_safe.sh # Safe startup (won't exit)" +echo " python app.py # Development server" +echo "" +echo "To exit this container:" +echo " exit" +echo "" +echo "=== Container Ready for Debugging ===" +echo "" + +# Keep container running +exec /bin/bash \ No newline at end of file diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml new file mode 100644 index 0000000..3b5b16f --- /dev/null +++ b/docker-compose.debug.yml @@ -0,0 +1,53 @@ +version: '3.8' + +# Debug version of docker-compose for troubleshooting migration issues +# Usage: docker-compose -f docker-compose.debug.yml up + +services: + web: + build: . + ports: + - "5000:5000" + environment: + - DATABASE_URL=postgresql://timetrack:timetrack@db:5432/timetrack + - FLASK_APP=app.py + - FLASK_ENV=development + # Debug options - uncomment as needed: + - DEBUG_MODE=true # Continue running even if migrations fail + # - SKIP_MIGRATIONS=true # Skip migrations entirely + volumes: + - .:/app # Mount entire directory for easy debugging + depends_on: + - db + # Use debug entrypoint that keeps container running + entrypoint: ["/app/debug_entrypoint.sh"] + stdin_open: true # Keep stdin open + tty: true # Allocate a pseudo-TTY + + web_safe: + build: . + ports: + - "5001:5000" + environment: + - DATABASE_URL=postgresql://timetrack:timetrack@db:5432/timetrack + - FLASK_APP=app.py + - DEBUG_MODE=true # Won't exit on migration failure + volumes: + - .:/app + depends_on: + - db + command: ["/app/startup_postgres_safe.sh"] + + db: + image: postgres:13 + environment: + - POSTGRES_DB=timetrack + - POSTGRES_USER=timetrack + - POSTGRES_PASSWORD=timetrack + ports: + - "5432:5432" # Expose for external debugging + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: \ No newline at end of file diff --git a/startup_postgres_safe.sh b/startup_postgres_safe.sh new file mode 100755 index 0000000..58baf0d --- /dev/null +++ b/startup_postgres_safe.sh @@ -0,0 +1,124 @@ +#!/bin/bash +set -e + +echo "Starting TimeTrack application (PostgreSQL-only mode)..." 
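+
+# Supported toggles (see DEBUGGING_MIGRATIONS.md):
+#   SKIP_MIGRATIONS=true  - skip all migration steps entirely
+#   DEBUG_MODE=true       - keep running even if migrations fail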
+ +# Check for debug/bypass mode +if [ "$SKIP_MIGRATIONS" = "true" ]; then + echo "⚠️ SKIP_MIGRATIONS=true - Skipping all migration steps!" +else + # Wait for PostgreSQL to be ready + echo "Waiting for PostgreSQL to be ready..." + while ! pg_isready -h db -p 5432 -U "$POSTGRES_USER" > /dev/null 2>&1; do + echo "PostgreSQL is not ready yet. Waiting..." + sleep 2 + done + echo "PostgreSQL is ready!" + + # Run Flask-Migrate migrations + echo "" + echo "=== Running Database Migrations ===" + export FLASK_APP=app.py + + # Check if migrations directory exists + if [ -d "migrations" ]; then + echo "Applying database migrations..." + flask db upgrade + if [ $? -ne 0 ]; then + echo "❌ Migration failed! Check the logs above." + + # Don't exit in debug mode + if [ "$DEBUG_MODE" = "true" ]; then + echo "⚠️ DEBUG_MODE=true - Continuing despite migration failure..." + echo "⚠️ The application may not work correctly!" + echo "" + echo "To debug, you can:" + echo " 1. docker exec -it bash" + echo " 2. python diagnose_migrations.py" + echo " 3. flask db current" + echo "" + else + echo "To bypass migrations for debugging, restart with:" + echo " SKIP_MIGRATIONS=true docker-compose up" + echo "Or:" + echo " DEBUG_MODE=true docker-compose up" + exit 1 + fi + else + echo "✅ Database migrations completed successfully" + fi + else + echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." + + # Try to initialize, but don't exit if it fails + python docker_migrate_init.py + if [ $? -ne 0 ]; then + echo "❌ Migration initialization failed!" + + if [ "$DEBUG_MODE" = "true" ]; then + echo "⚠️ DEBUG_MODE=true - Continuing without migrations..." + echo "⚠️ The database may not be properly initialized!" + else + echo "To debug the issue:" + echo " 1. Set DEBUG_MODE=true and restart" + echo " 2. docker exec -it bash" + echo " 3. python docker_migrate_init.py" + exit 1 + fi + else + # Check if database has existing tables + python -c " +from app import app, db +with app.app_context(): + inspector = db.inspect(db.engine) + tables = [t for t in inspector.get_table_names() if t != 'alembic_version'] + if tables: + print('has_tables') +" > /tmp/db_check.txt 2>/dev/null || echo "db_check_failed" > /tmp/db_check.txt + + if grep -q "has_tables" /tmp/db_check.txt 2>/dev/null; then + echo "📊 Existing database detected. Marking as current..." + flask db stamp head + echo "✅ Database marked as current" + elif grep -q "db_check_failed" /tmp/db_check.txt 2>/dev/null; then + echo "⚠️ Could not check database tables" + if [ "$DEBUG_MODE" != "true" ]; then + exit 1 + fi + else + echo "🆕 Empty database detected. Creating tables..." + flask db upgrade + if [ $? -ne 0 ]; then + echo "❌ Failed to create database tables!" + if [ "$DEBUG_MODE" != "true" ]; then + exit 1 + fi + else + echo "✅ Database tables created" + fi + fi + + rm -f /tmp/db_check.txt + fi + fi + + # Legacy migration support (can be removed after full transition) + if [ -f "migrations_old/run_postgres_migrations.py" ]; then + echo "" + echo "=== Checking Legacy Migrations ===" + echo "Found old migration system. Consider removing after confirming Flask-Migrate is working." + fi +fi + +# Start the Flask application with gunicorn +echo "" +echo "=== Starting Application ===" +echo "Starting Flask application with gunicorn..." 
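+
+# Note: debug mode runs a single worker so log output stays readable;
+# normal mode runs 4 workers for production throughput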
+ +# In debug mode, start with more verbose logging +if [ "$DEBUG_MODE" = "true" ]; then + echo "🐛 Running in DEBUG MODE with verbose logging" + exec gunicorn --bind 0.0.0.0:5000 --workers 1 --threads 2 --timeout 30 --log-level debug --access-logfile - --error-logfile - app:app +else + exec gunicorn --bind 0.0.0.0:5000 --workers 4 --threads 2 --timeout 30 app:app +fi \ No newline at end of file From eabf020afd76daecf43e9d824d7783f9358a8e6a Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 20:17:17 +0200 Subject: [PATCH 05/14] Integration Hell #2 --- fix_migration_838055206ef5.py | 88 +++++++++++++++++++++++++++++++++++ manual_migration_fix.sql | 17 +++++++ reset_migrations.sh | 69 +++++++++++++++++++++++++++ 3 files changed, 174 insertions(+) create mode 100755 fix_migration_838055206ef5.py create mode 100644 manual_migration_fix.sql create mode 100755 reset_migrations.sh diff --git a/fix_migration_838055206ef5.py b/fix_migration_838055206ef5.py new file mode 100755 index 0000000..9bbd0fa --- /dev/null +++ b/fix_migration_838055206ef5.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +""" +Fix the specific revision error: Can't locate revision identified by '838055206ef5' +This script will clean up the database migration state and re-initialize. +""" + +import os +import sys + +def main(): + """Fix the revision mismatch.""" + print("=== Fixing Revision 838055206ef5 Error ===\n") + + os.environ['FLASK_APP'] = 'app.py' + + print("This error means your database thinks it's at revision '838055206ef5'") + print("but that revision doesn't exist in your migration files.\n") + + print("We'll fix this by:") + print("1. Clearing the incorrect revision from the database") + print("2. Re-initializing migrations from current schema") + print("3. Marking the database as up-to-date\n") + + response = input("Continue? (y/N): ") + if response.lower() != 'y': + print("Aborting...") + return 1 + + # Step 1: Clear the alembic_version table + print("\nStep 1: Clearing migration history from database...") + try: + from app import app, db + with app.app_context(): + # Check if alembic_version exists + result = db.engine.execute(""" + SELECT EXISTS ( + SELECT 1 FROM information_schema.tables + WHERE table_name = 'alembic_version' + ) + """) + exists = result.fetchone()[0] + + if exists: + # Clear the incorrect revision + db.engine.execute("DELETE FROM alembic_version") + print("✓ Cleared alembic_version table") + else: + print("ℹ️ No alembic_version table found (this is OK)") + + except Exception as e: + print(f"❌ Error clearing alembic_version: {e}") + print("\nTry running this SQL manually:") + print(" DELETE FROM alembic_version;") + return 1 + + # Step 2: Remove and recreate migrations directory + print("\nStep 2: Re-initializing migrations...") + + import shutil + if os.path.exists('migrations'): + print("Removing old migrations directory...") + shutil.rmtree('migrations') + + # Run the Docker-friendly initialization + print("Running docker_migrate_init.py...") + result = os.system("python docker_migrate_init.py") + + if result != 0: + print("❌ Failed to initialize migrations") + return 1 + + # Step 3: Stamp the database + print("\nStep 3: Marking database as current...") + result = os.system("flask db stamp head") + + if result == 0: + print("\n✅ Success! The revision error has been fixed.") + print("\nYou can now:") + print("1. Create new migrations: flask db migrate -m 'Your changes'") + print("2. 
Apply migrations: flask db upgrade") + else: + print("\n⚠️ Failed to stamp database.") + print("Try running manually: flask db stamp head") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/manual_migration_fix.sql b/manual_migration_fix.sql new file mode 100644 index 0000000..3a5fac5 --- /dev/null +++ b/manual_migration_fix.sql @@ -0,0 +1,17 @@ +-- Manual SQL commands to fix migration revision error +-- Run these commands in your PostgreSQL database if the scripts fail + +-- 1. Check current revision (optional) +SELECT * FROM alembic_version; + +-- 2. Clear the incorrect revision +DELETE FROM alembic_version; + +-- 3. If you want to set a specific revision manually: +-- First, check what revisions you have: +-- ls migrations/versions/*.py +-- Then insert the revision ID from one of those files: +-- INSERT INTO alembic_version (version_num) VALUES ('your_revision_id_here'); + +-- 4. Or just leave it empty and let Flask-Migrate handle it +-- The next 'flask db stamp head' will set the correct revision \ No newline at end of file diff --git a/reset_migrations.sh b/reset_migrations.sh new file mode 100755 index 0000000..e559efc --- /dev/null +++ b/reset_migrations.sh @@ -0,0 +1,69 @@ +#!/bin/bash +# Quick reset script for migration issues + +echo "=== Migration Reset Script ===" +echo "" +echo "This will completely reset your Flask-Migrate setup." +echo "Your data will NOT be affected, only migration tracking." +echo "" +read -p "Continue? (y/N): " response + +if [ "$response" != "y" ]; then + echo "Aborting..." + exit 0 +fi + +export FLASK_APP=app.py + +echo "" +echo "Step 1: Clearing database migration history..." +python -c " +from app import app, db +with app.app_context(): + try: + db.engine.execute('DELETE FROM alembic_version') + print('✓ Cleared alembic_version table') + except Exception as e: + print(f'⚠️ Could not clear alembic_version: {e}') + print(' (This is OK if the table does not exist)') +" + +echo "" +echo "Step 2: Removing migrations directory..." +rm -rf migrations +echo "✓ Removed migrations directory" + +echo "" +echo "Step 3: Re-initializing migrations..." +flask db init +if [ $? -ne 0 ]; then + echo "❌ Failed to initialize migrations" + exit 1 +fi +echo "✓ Initialized Flask-Migrate" + +echo "" +echo "Step 4: Creating baseline migration..." +flask db migrate -m "Reset baseline migration $(date +%Y%m%d_%H%M%S)" +if [ $? -ne 0 ]; then + echo "❌ Failed to create migration" + exit 1 +fi +echo "✓ Created baseline migration" + +echo "" +echo "Step 5: Marking database as current..." +flask db stamp head +if [ $? -ne 0 ]; then + echo "❌ Failed to stamp database" + exit 1 +fi +echo "✓ Database marked as current" + +echo "" +echo "✨ Migration reset complete!" +echo "" +echo "Next steps:" +echo "1. Review the generated migration in migrations/versions/" +echo "2. Create new migrations: flask db migrate -m 'Your changes'" +echo "3. Apply migrations: flask db upgrade" \ No newline at end of file From 3ac44fdaba34d4976955eb016b4ef3a8cb5f3aca Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 20:32:44 +0200 Subject: [PATCH 06/14] Fix for Task Model --- models/task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/task.py b/models/task.py index 7810319..869fc1c 100644 --- a/models/task.py +++ b/models/task.py @@ -5,7 +5,7 @@ from datetime import datetime from . 
import db from .enums import TaskStatus, TaskPriority, CommentVisibility, Role - +from .project import Project class Task(db.Model): """Task model for project management""" From b406b9394949f879ec6ec51fdeb495f004ebb117 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 20:40:31 +0200 Subject: [PATCH 07/14] Enum handling for Postgres. --- fix_postgres_enums.py | 117 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100755 fix_postgres_enums.py diff --git a/fix_postgres_enums.py b/fix_postgres_enums.py new file mode 100755 index 0000000..4bd9a99 --- /dev/null +++ b/fix_postgres_enums.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python3 +""" +Fix PostgreSQL enum issues for Flask-Migrate. +Handles the TODO vs To Do issue and other enum mismatches. +""" + +import os +import sys + +def main(): + """Fix enum issues.""" + print("=== PostgreSQL Enum Fix ===\n") + + os.environ['FLASK_APP'] = 'app.py' + + from app import app, db + from models import TaskStatus, TaskPriority, Role, WorkRegion + + with app.app_context(): + print("Checking enum values in database vs models...\n") + + # Check all enums + enums_to_check = [ + ('taskstatus', TaskStatus, 'task', 'status'), + ('taskpriority', TaskPriority, 'task', 'priority'), + ('role', Role, 'user', 'role'), + ('workregion', WorkRegion, 'company_work_config', 'work_region') + ] + + fixes_needed = [] + + for enum_name, enum_class, table_name, column_name in enums_to_check: + print(f"Checking {enum_name}:") + + try: + # Get database values + result = db.engine.execute(f""" + SELECT enumlabel + FROM pg_enum + WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = '{enum_name}') + ORDER BY enumsortorder + """) + db_values = [row[0] for row in result] + print(f" Database values: {db_values}") + + # Get model values + model_values = [item.value for item in enum_class] + print(f" Model values: {model_values}") + + # Check for mismatches + missing_in_db = set(model_values) - set(db_values) + extra_in_db = set(db_values) - set(model_values) + + if missing_in_db: + print(f" ⚠️ Missing in database: {missing_in_db}") + for value in missing_in_db: + fixes_needed.append(f"ALTER TYPE {enum_name} ADD VALUE '{value}';") + + if extra_in_db: + print(f" ⚠️ Extra in database (not in model): {extra_in_db}") + # Note: Can't easily remove enum values in PostgreSQL + + if not missing_in_db and not extra_in_db: + print(" ✅ All values match") + + except Exception as e: + print(f" ❌ Error checking {enum_name}: {e}") + + print() + + if fixes_needed: + print("\nRequired fixes:") + print("Create a new migration and add these to the upgrade() function:\n") + + for fix in fixes_needed: + print(f" op.execute(\"{fix}\")") + + print("\nOr run this SQL directly:") + for fix in fixes_needed: + print(fix) + + # Create a migration file + print("\n\nCreating migration file...") + migration_content = '''"""Fix enum values + +Revision ID: fix_enums +Revises: +Create Date: 2024-01-01 00:00:00.000000 + +""" +from alembic import op +import sqlalchemy as sa + +def upgrade(): + # Fix enum values +''' + for fix in fixes_needed: + migration_content += f' op.execute("{fix}")\n' + + migration_content += ''' +def downgrade(): + # Note: PostgreSQL doesn't support removing enum values + pass +''' + + with open('fix_enums_migration.py', 'w') as f: + f.write(migration_content) + + print("✅ Created fix_enums_migration.py") + print("\nTo apply, either:") + print("1. Copy this content to a new migration file") + print("2. 
Run the SQL commands directly") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file From 29aed2c750e5a997204a9c18b6cedab1e13711b2 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 20:50:41 +0200 Subject: [PATCH 08/14] Enum Handling for Postgres. --- POSTGRES_ENUM_GUIDE.md | 184 +++++++++++++++++++++++++++++++++++++++++ fix_enum_mismatch.py | 132 +++++++++++++++++++++++++++++ quick_enum_fix.sql | 27 ++++++ 3 files changed, 343 insertions(+) create mode 100644 POSTGRES_ENUM_GUIDE.md create mode 100755 fix_enum_mismatch.py create mode 100644 quick_enum_fix.sql diff --git a/POSTGRES_ENUM_GUIDE.md b/POSTGRES_ENUM_GUIDE.md new file mode 100644 index 0000000..f2b1560 --- /dev/null +++ b/POSTGRES_ENUM_GUIDE.md @@ -0,0 +1,184 @@ +# PostgreSQL Enums with Flask-Migrate + +## The Problem + +PostgreSQL enums are **immutable** in many ways: +- Can't remove values +- Can't rename values +- Can't change order (in older PostgreSQL versions) +- Flask-Migrate often doesn't detect enum changes + +Your specific issue: The model has `TODO` but the database might have `To Do` or vice versa. + +## Best Practices + +### 1. Always Use UPPERCASE for Enum Values + +```python +class TaskStatus(enum.Enum): + TODO = "TODO" # Good + IN_PROGRESS = "IN_PROGRESS" # Good + # To Do = "To Do" # Bad - spaces cause issues +``` + +### 2. Handle Enum Changes Manually + +Flask-Migrate won't automatically handle enum changes. You must: + +```python +# In your migration file's upgrade() function: +def upgrade(): + # Add new enum value + op.execute("ALTER TYPE taskstatus ADD VALUE 'NEW_STATUS'") + + # Note: You CANNOT remove enum values in PostgreSQL! +``` + +### 3. Check Enum State Before Migrations + +```bash +# Run this to see current state +python fix_postgres_enums.py + +# Or manually check in psql: +\dT+ taskstatus +``` + +## Fixing Current Enum Issues + +### Option 1: Quick Fix (Add Missing Values) + +```sql +-- If model expects 'TODO' but DB has 'To Do' +ALTER TYPE taskstatus ADD VALUE 'TODO'; + +-- If model expects 'IN_PROGRESS' but DB has 'In Progress' +ALTER TYPE taskstatus ADD VALUE 'IN_PROGRESS'; +``` + +### Option 2: Create Migration + +```bash +# Generate empty migration +flask db revision -m "Fix enum values" + +# Edit migrations/versions/xxx_fix_enum_values.py +``` + +Add to upgrade(): +```python +def upgrade(): + # Add missing enum values + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO'") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS'") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE'") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'") +``` + +### Option 3: Data Migration (Complex) + +If you need to change existing data from old to new values: + +```python +def upgrade(): + # Add new value + op.execute("ALTER TYPE taskstatus ADD VALUE 'TODO'") + + # Update existing data + op.execute("UPDATE task SET status = 'TODO' WHERE status = 'To Do'") + + # Note: Can't remove 'To Do' - it stays forever! +``` + +## Enum Strategy Going Forward + +### 1. Use String Columns Instead + +Consider replacing enums with string columns + check constraints: + +```python +# Instead of enum +status = db.Column(db.Enum(TaskStatus), default=TaskStatus.TODO) + +# Use string with constraint +status = db.Column(db.String(20), default='TODO') +__table_args__ = ( + db.CheckConstraint("status IN ('TODO', 'IN_PROGRESS', 'DONE')"), +) +``` + +### 2. 
Create Enum Tables + +Use a separate table for statuses: + +```python +class TaskStatus(db.Model): + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(50), unique=True) + +class Task(db.Model): + status_id = db.Column(db.Integer, db.ForeignKey('task_status.id')) +``` + +### 3. If Keeping Enums, Document Them + +```python +class TaskStatus(enum.Enum): + """ + Task status enum values. + WARNING: These are PostgreSQL enums. + - NEVER change existing values + - ONLY add new values at the end + - To deprecate, mark in comments but don't remove + """ + TODO = "TODO" + IN_PROGRESS = "IN_PROGRESS" + DONE = "DONE" + ARCHIVED = "ARCHIVED" + # DEPRECATED - DO NOT USE + # OLD_STATUS = "OLD_STATUS" # Deprecated 2024-01-01 +``` + +## Debugging Enum Issues + +```bash +# 1. Check what's in the database +psql $DATABASE_URL -c "SELECT enum_range(NULL::taskstatus)" + +# 2. Check what's in the model +python -c "from models import TaskStatus; print([e.value for e in TaskStatus])" + +# 3. Run diagnostic +python fix_postgres_enums.py +``` + +## Emergency Fixes + +If completely stuck: + +```sql +-- Nuclear option: Drop and recreate +-- WARNING: This will fail if column is in use! + +-- 1. Change column to text temporarily +ALTER TABLE task ALTER COLUMN status TYPE TEXT; + +-- 2. Drop the enum +DROP TYPE taskstatus; + +-- 3. Recreate with correct values +CREATE TYPE taskstatus AS ENUM ('TODO', 'IN_PROGRESS', 'DONE', 'ARCHIVED'); + +-- 4. Change column back +ALTER TABLE task ALTER COLUMN status TYPE taskstatus USING status::taskstatus; +``` + +## Prevention + +1. **Always test enum migrations** on a copy of production data +2. **Keep enum values simple** - no spaces, all uppercase +3. **Document all enum values** in the model +4. **Consider alternatives** to enums for frequently changing values +5. **Add CHECK constraints** in addition to enums for validation + +Remember: PostgreSQL enums are powerful but inflexible. Choose wisely! \ No newline at end of file diff --git a/fix_enum_mismatch.py b/fix_enum_mismatch.py new file mode 100755 index 0000000..336a3e6 --- /dev/null +++ b/fix_enum_mismatch.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +""" +Fix enum value mismatches between Python models and PostgreSQL. +The issue: Python sends enum.name ('TODO') but PostgreSQL expects enum.value ('To Do'). 
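+
+Illustration of the mismatch (using the TaskStatus model in this repo):
+    TaskStatus.TODO.name  -> 'TODO'   (what SQLAlchemy sends to the DB)
+    TaskStatus.TODO.value -> 'To Do'  (what the PostgreSQL enum contains)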
+""" + +import os +import sys + +def main(): + """Fix enum mismatches.""" + print("=== Enum Value Mismatch Fix ===\n") + + os.environ['FLASK_APP'] = 'app.py' + + from app import app, db + from models import TaskStatus, TaskPriority, Role, WorkRegion + + print("The Problem:") + print("- Your Python code sends: task.status = TaskStatus.TODO") + print("- SQLAlchemy sends to DB: 'TODO' (the enum name)") + print("- But the enum value is: 'To Do'") + print("- PostgreSQL expects: 'To Do' (the enum value)\n") + + with app.app_context(): + # Show the mismatch + print("TaskStatus enum mapping:") + for status in TaskStatus: + print(f" {status.name} -> '{status.value}'") + print(f" Python sends: '{status.name}'") + print(f" DB expects: '{status.value}'") + + print("\n" + "="*50) + print("SOLUTION OPTIONS") + print("="*50) + + print("\nOption 1: Add enum NAMES to PostgreSQL (Recommended)") + print("This allows both 'TODO' and 'To Do' to work:\n") + + # Generate SQL to add enum names + sql_fixes = [] + + # Check what's in the database + try: + result = db.engine.execute(""" + SELECT enumlabel + FROM pg_enum + WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'taskstatus') + """) + db_values = set(row[0] for row in result) + + print(f"Current database values: {list(db_values)}\n") + + # Add missing enum NAMES + for status in TaskStatus: + if status.name not in db_values: + sql_fixes.append(f"ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS '{status.name}';") + + if sql_fixes: + print("SQL to run:") + for sql in sql_fixes: + print(f" {sql}") + + except Exception as e: + print(f"Error checking database: {e}") + + print("\n\nOption 2: Fix Python enum definitions") + print("Change enums to use name as value:\n") + print("# In models/enums.py:") + print("class TaskStatus(enum.Enum):") + print(" TODO = 'TODO' # Instead of 'To Do'") + print(" IN_PROGRESS = 'IN_PROGRESS' # Instead of 'In Progress'") + + print("\n\nOption 3: Create migration to fix this properly") + + # Create a migration file + migration_content = '''"""Fix enum value mismatches + +Revision ID: fix_enum_values +Create Date: 2024-01-01 + +""" +from alembic import op +import sqlalchemy as sa + +def upgrade(): + # Add enum NAMES as valid values (keeping the display values too) + # This allows both 'TODO' and 'To Do' to work + + # TaskStatus + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO';") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS';") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_REVIEW';") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE';") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'CANCELLED';") + op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED';") + + # TaskPriority + op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'LOW';") + op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'MEDIUM';") + op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'HIGH';") + op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'URGENT';") + + # Role (if using enum in DB) + op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_MEMBER';") + op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_LEADER';") + op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'SUPERVISOR';") + op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'ADMIN';") + op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'SYSTEM_ADMIN';") + +def downgrade(): + # Cannot remove enum values in PostgreSQL + pass +''' + + with 
open('fix_enum_values_migration.py', 'w') as f: + f.write(migration_content) + + print("Created: fix_enum_values_migration.py") + print("\nTo apply Option 1:") + print("1. Copy the migration content to a new migration") + print("2. Run: flask db upgrade") + + print("\n\nWHY THIS HAPPENS:") + print("- SQLAlchemy sends the enum NAME (TODO) not VALUE ('To Do')") + print("- This is a common issue with PostgreSQL enums") + print("- Best practice: Make enum name == enum value") + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file diff --git a/quick_enum_fix.sql b/quick_enum_fix.sql new file mode 100644 index 0000000..f51eaf3 --- /dev/null +++ b/quick_enum_fix.sql @@ -0,0 +1,27 @@ +-- Quick fix for enum value mismatches +-- Run this directly in PostgreSQL to fix the immediate issue + +-- TaskStatus: Add enum NAMES as valid values +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO'; +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS'; +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_REVIEW'; +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE'; +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'CANCELLED'; +ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'; + +-- TaskPriority: Add enum NAMES as valid values +ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'LOW'; +ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'MEDIUM'; +ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'HIGH'; +ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'URGENT'; + +-- Role: Add enum NAMES as valid values (if used as enum) +-- ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_MEMBER'; +-- ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_LEADER'; +-- ALTER TYPE role ADD VALUE IF NOT EXISTS 'SUPERVISOR'; +-- ALTER TYPE role ADD VALUE IF NOT EXISTS 'ADMIN'; +-- ALTER TYPE role ADD VALUE IF NOT EXISTS 'SYSTEM_ADMIN'; + +-- To check what values are in each enum: +-- SELECT enum_range(NULL::taskstatus); +-- SELECT enum_range(NULL::taskpriority); \ No newline at end of file From 3038a07db73406d1eb43a60b77096c25b9b231f9 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 21:04:54 +0200 Subject: [PATCH 09/14] Improve Postgres Enum Handling, --- fix_postgres_enums.py | 7 ++- models/enums_as_integers.py | 101 ++++++++++++++++++++++++++++++++++++ startup_postgres.sh | 8 +++ 3 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 models/enums_as_integers.py diff --git a/fix_postgres_enums.py b/fix_postgres_enums.py index 4bd9a99..3a33c9a 100755 --- a/fix_postgres_enums.py +++ b/fix_postgres_enums.py @@ -43,10 +43,15 @@ def main(): db_values = [row[0] for row in result] print(f" Database values: {db_values}") - # Get model values + # Get model values - use the actual enum values, not names model_values = [item.value for item in enum_class] print(f" Model values: {model_values}") + # Debug: also show enum names vs values + print(f" Model enum mapping:") + for item in enum_class: + print(f" {item.name} = '{item.value}'") + # Check for mismatches missing_in_db = set(model_values) - set(db_values) extra_in_db = set(db_values) - set(model_values) diff --git a/models/enums_as_integers.py b/models/enums_as_integers.py new file mode 100644 index 0000000..29ad50d --- /dev/null +++ b/models/enums_as_integers.py @@ -0,0 +1,101 @@ +""" +Alternative enum implementation using integers instead of PostgreSQL enums. +This avoids all PostgreSQL enum issues by using simple integers with Python-side validation. 
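+
+Example (illustrative): TaskStatus.TODO is stored in the database as the
+integer 1, while TaskStatus.TODO.display_name renders as "To Do" in the UI.
+
+Caveat: on some Python versions, Enum may convert plain class attributes
+such as _display_names into members; if this module fails to import there,
+move the lookup dicts to module level.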
+""" + +import enum + +class IntEnum(enum.IntEnum): + """Base class for integer-based enums.""" + + @classmethod + def choices(cls): + """Return choices for forms.""" + return [(item.value, item.display_name) for item in cls] + + @property + def display_name(self): + """Get display name for the enum value.""" + return self._display_names.get(self, self.name.replace('_', ' ').title()) + + +class TaskStatus(IntEnum): + """Task status using integers.""" + TODO = 1 + IN_PROGRESS = 2 + IN_REVIEW = 3 + DONE = 4 + CANCELLED = 5 + ARCHIVED = 6 + + _display_names = { + TODO: "To Do", + IN_PROGRESS: "In Progress", + IN_REVIEW: "In Review", + DONE: "Done", + CANCELLED: "Cancelled", + ARCHIVED: "Archived" + } + + +class TaskPriority(IntEnum): + """Task priority using integers.""" + LOW = 1 + MEDIUM = 2 + HIGH = 3 + URGENT = 4 + + _display_names = { + LOW: "Low", + MEDIUM: "Medium", + HIGH: "High", + URGENT: "Urgent" + } + + +class Role(IntEnum): + """User roles using integers.""" + TEAM_MEMBER = 1 + TEAM_LEADER = 2 + SUPERVISOR = 3 + ADMIN = 4 + SYSTEM_ADMIN = 5 + + _display_names = { + TEAM_MEMBER: "Team Member", + TEAM_LEADER: "Team Leader", + SUPERVISOR: "Supervisor", + ADMIN: "Administrator", + SYSTEM_ADMIN: "System Administrator" + } + + +# Example model usage: +""" +from sqlalchemy import Integer, CheckConstraint +from models.enums_as_integers import TaskStatus, TaskPriority + +class Task(db.Model): + # Instead of: status = db.Column(db.Enum(TaskStatus)) + status = db.Column(db.Integer, default=TaskStatus.TODO) + priority = db.Column(db.Integer, default=TaskPriority.MEDIUM) + + __table_args__ = ( + CheckConstraint( + status.in_([s.value for s in TaskStatus]), + name='check_task_status' + ), + CheckConstraint( + priority.in_([p.value for p in TaskPriority]), + name='check_task_priority' + ), + ) + + @property + def status_display(self): + return TaskStatus(self.status).display_name if self.status else None + + @property + def priority_display(self): + return TaskPriority(self.priority).display_name if self.priority else None +""" \ No newline at end of file diff --git a/startup_postgres.sh b/startup_postgres.sh index 251b9dd..8361e09 100755 --- a/startup_postgres.sh +++ b/startup_postgres.sh @@ -58,6 +58,14 @@ with app.app_context(): rm -f /tmp/db_check.txt fi +# Sync PostgreSQL enums with Python models +echo "" +echo "=== Syncing PostgreSQL Enums ===" +python sync_postgres_enums.py +if [ $? -ne 0 ]; then + echo "⚠️ Enum sync failed, but continuing..." +fi + # Legacy migration support (can be removed after full transition) if [ -f "migrations_old/run_postgres_migrations.py" ]; then echo "" From f8a756a6a98119f7b26574d71992f80c81217ac0 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 21:09:30 +0200 Subject: [PATCH 10/14] Improve Postgres Enum Handling --- sync_postgres_enums.py | 111 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100755 sync_postgres_enums.py diff --git a/sync_postgres_enums.py b/sync_postgres_enums.py new file mode 100755 index 0000000..fc58fb7 --- /dev/null +++ b/sync_postgres_enums.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 +""" +Automatically sync PostgreSQL enums with Python models. +Run this before starting the application to ensure all enum values exist. 
+""" + +import os +import sys +from sqlalchemy import create_engine, text +from sqlalchemy.exc import ProgrammingError + +def get_enum_values_from_db(engine, enum_name): + """Get current enum values from PostgreSQL.""" + try: + result = engine.execute(text(f""" + SELECT enumlabel + FROM pg_enum + WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = :enum_name) + ORDER BY enumsortorder + """), {"enum_name": enum_name}) + return set(row[0] for row in result) + except Exception: + return set() + +def sync_enum(engine, enum_name, python_enum_class): + """Sync a PostgreSQL enum with Python enum values.""" + print(f"\nSyncing {enum_name}...") + + # Get current DB values + db_values = get_enum_values_from_db(engine, enum_name) + if not db_values: + print(f" ⚠️ Enum {enum_name} not found in database (might not be used)") + return + + print(f" DB values: {sorted(db_values)}") + + # Get Python values - BOTH name and value + python_values = set() + for item in python_enum_class: + python_values.add(item.name) # Add the NAME (what SQLAlchemy sends) + python_values.add(item.value) # Add the VALUE (for compatibility) + + print(f" Python values: {sorted(python_values)}") + + # Find missing values + missing_values = python_values - db_values + + if not missing_values: + print(f" ✅ All values present") + return + + # Add missing values + print(f" 📝 Adding missing values: {missing_values}") + for value in missing_values: + try: + # Use parameterized query for safety, but we need dynamic SQL for ALTER TYPE + # Validate that value is safe (alphanumeric, spaces, underscores only) + if not all(c.isalnum() or c in ' _-' for c in value): + print(f" ⚠️ Skipping unsafe value: {value}") + continue + + engine.execute(text(f"ALTER TYPE {enum_name} ADD VALUE IF NOT EXISTS '{value}'")) + print(f" ✅ Added: {value}") + except Exception as e: + print(f" ❌ Failed to add {value}: {e}") + +def main(): + """Main sync function.""" + print("=== PostgreSQL Enum Sync ===") + + # Get database URL + database_url = os.environ.get('DATABASE_URL') + if not database_url: + print("❌ DATABASE_URL not set") + return 1 + + # Create engine + engine = create_engine(database_url) + + # Import enums + try: + from models.enums import TaskStatus, TaskPriority, Role, WorkRegion, SprintStatus + + # Define enum mappings (db_type_name, python_enum_class) + enum_mappings = [ + ('taskstatus', TaskStatus), + ('taskpriority', TaskPriority), + ('role', Role), + ('workregion', WorkRegion), + ('sprintstatus', SprintStatus), + ] + + # Sync each enum + for db_enum_name, python_enum in enum_mappings: + sync_enum(engine, db_enum_name, python_enum) + + print("\n✅ Enum sync complete!") + + except Exception as e: + print(f"\n❌ Error: {e}") + import traceback + traceback.print_exc() + return 1 + + finally: + engine.dispose() + + return 0 + +if __name__ == "__main__": + sys.exit(main()) \ No newline at end of file From ca2c64b2eeb7d455712f145206a221c398fd3e8f Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 21:22:37 +0200 Subject: [PATCH 11/14] Delete DB migration/debugging cruft. 
--- MIGRATIONS_README.md | 76 - MIGRATION_FREELANCERS.md | 180 -- MIGRATION_PROJECTS.md | 174 -- MIGRATION_QUICK_REFERENCE.md | 78 - MIGRATION_SETUP_FINAL.md | 74 + POSTGRES_ENUM_GUIDE.md | 184 -- POST_BASELINE_MIGRATIONS.md | 179 -- SCHEMA_CHANGES_SUMMARY.md | 176 -- debug_entrypoint.sh | 83 - diagnose_migrations.py | 236 -- establish_baseline_4214e88.py | 307 --- fix_enum_mismatch.py | 132 -- fix_migration_838055206ef5.py | 88 - fix_migration_sequence.py | 73 - fix_postgres_enums.py | 122 -- fix_revision_mismatch.py | 183 -- fly.toml | 34 - init_migrations.py | 56 - manage_migrations.py | 136 -- manual_migration_fix.sql | 17 - migrate_to_alembic.py | 195 -- .../add_cascade_delete_note_links.sql | 20 - .../add_cascade_delete_note_links_sqlite.sql | 25 - migrations_old/add_folder_to_notes.sql | 5 - migrations_old/add_note_folder_table.sql | 17 - migrations_old/add_note_sharing.sql | 21 - migrations_old/add_time_preferences.sql | 20 - migrations_old/migration_list.txt | 24 - .../old_migrations/00_migration_summary.py | 79 - .../old_migrations/01_migrate_db.py | 1897 ----------------- .../02_migrate_sqlite_to_postgres.py | 408 ---- .../02_migrate_sqlite_to_postgres_fixed.py | 361 ---- .../03_add_dashboard_columns.py | 104 - .../04_add_user_preferences_columns.py | 159 -- .../old_migrations/05_fix_task_status_enum.py | 244 --- .../old_migrations/06_add_archived_status.py | 77 - .../07_fix_company_work_config_columns.py | 141 -- .../old_migrations/08_fix_work_region_enum.py | 145 -- .../09_add_germany_to_workregion.py | 78 - .../10_add_company_settings_columns.py | 108 - .../11_fix_company_work_config_usage.py | 188 -- .../12_fix_task_status_usage.py | 172 -- .../13_fix_work_region_usage.py | 154 -- .../old_migrations/14_fix_removed_fields.py | 227 -- .../old_migrations/15_repair_user_roles.py | 67 - .../19_add_company_invitations.py | 65 - .../20_add_company_updated_at.py | 94 - .../old_migrations/run_all_db_migrations.py | 138 -- .../old_migrations/run_code_migrations.py | 166 -- migrations_old/postgres_only_migration.py | 327 --- migrations_old/remove_email_preferences.sql | 8 - migrations_old/run_postgres_migrations.py | 161 -- models/enums_as_integers.py | 101 - models_old.py | 1508 ------------- quick_enum_fix.sql | 27 - quick_fix_revision.sh | 95 - reset_migrations.sh | 69 - simple_baseline_4214e88.py | 142 -- startup_postgres_safe.sh | 124 -- uwsgi.ini | 55 - 60 files changed, 74 insertions(+), 10530 deletions(-) delete mode 100644 MIGRATIONS_README.md delete mode 100644 MIGRATION_FREELANCERS.md delete mode 100644 MIGRATION_PROJECTS.md delete mode 100644 MIGRATION_QUICK_REFERENCE.md create mode 100644 MIGRATION_SETUP_FINAL.md delete mode 100644 POSTGRES_ENUM_GUIDE.md delete mode 100644 POST_BASELINE_MIGRATIONS.md delete mode 100644 SCHEMA_CHANGES_SUMMARY.md delete mode 100755 debug_entrypoint.sh delete mode 100755 diagnose_migrations.py delete mode 100755 establish_baseline_4214e88.py delete mode 100755 fix_enum_mismatch.py delete mode 100755 fix_migration_838055206ef5.py delete mode 100755 fix_migration_sequence.py delete mode 100755 fix_postgres_enums.py delete mode 100755 fix_revision_mismatch.py delete mode 100644 fly.toml delete mode 100755 init_migrations.py delete mode 100755 manage_migrations.py delete mode 100644 manual_migration_fix.sql delete mode 100755 migrate_to_alembic.py delete mode 100644 migrations_old/add_cascade_delete_note_links.sql delete mode 100644 migrations_old/add_cascade_delete_note_links_sqlite.sql delete mode 100644 migrations_old/add_folder_to_notes.sql 
delete mode 100644 migrations_old/add_note_folder_table.sql delete mode 100644 migrations_old/add_note_sharing.sql delete mode 100644 migrations_old/add_time_preferences.sql delete mode 100644 migrations_old/migration_list.txt delete mode 100755 migrations_old/old_migrations/00_migration_summary.py delete mode 100644 migrations_old/old_migrations/01_migrate_db.py delete mode 100644 migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py delete mode 100644 migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py delete mode 100644 migrations_old/old_migrations/03_add_dashboard_columns.py delete mode 100755 migrations_old/old_migrations/04_add_user_preferences_columns.py delete mode 100755 migrations_old/old_migrations/05_fix_task_status_enum.py delete mode 100755 migrations_old/old_migrations/06_add_archived_status.py delete mode 100755 migrations_old/old_migrations/07_fix_company_work_config_columns.py delete mode 100755 migrations_old/old_migrations/08_fix_work_region_enum.py delete mode 100755 migrations_old/old_migrations/09_add_germany_to_workregion.py delete mode 100755 migrations_old/old_migrations/10_add_company_settings_columns.py delete mode 100755 migrations_old/old_migrations/11_fix_company_work_config_usage.py delete mode 100755 migrations_old/old_migrations/12_fix_task_status_usage.py delete mode 100755 migrations_old/old_migrations/13_fix_work_region_usage.py delete mode 100755 migrations_old/old_migrations/14_fix_removed_fields.py delete mode 100644 migrations_old/old_migrations/15_repair_user_roles.py delete mode 100644 migrations_old/old_migrations/19_add_company_invitations.py delete mode 100755 migrations_old/old_migrations/20_add_company_updated_at.py delete mode 100755 migrations_old/old_migrations/run_all_db_migrations.py delete mode 100755 migrations_old/old_migrations/run_code_migrations.py delete mode 100755 migrations_old/postgres_only_migration.py delete mode 100644 migrations_old/remove_email_preferences.sql delete mode 100755 migrations_old/run_postgres_migrations.py delete mode 100644 models/enums_as_integers.py delete mode 100644 models_old.py delete mode 100644 quick_enum_fix.sql delete mode 100755 quick_fix_revision.sh delete mode 100755 reset_migrations.sh delete mode 100755 simple_baseline_4214e88.py delete mode 100755 startup_postgres_safe.sh delete mode 100644 uwsgi.ini diff --git a/MIGRATIONS_README.md b/MIGRATIONS_README.md deleted file mode 100644 index b4d810c..0000000 --- a/MIGRATIONS_README.md +++ /dev/null @@ -1,76 +0,0 @@ -# TimeTrack Database Migrations - -## Quick Start - -### Docker Deployments -```bash -# Automatic: startup scripts handle everything -# Manual: python docker_migrate_init.py -``` -See `DOCKER_MIGRATIONS_GUIDE.md` for details. - -### Local Development -```bash -# With Git history: -python simple_baseline_4214e88.py - -# Without Git history: -python docker_migrate_init.py -``` - -## Documentation Structure - -1. **FLASK_MIGRATE_GUIDE.md** - Complete Flask-Migrate documentation -2. **DOCKER_MIGRATIONS_GUIDE.md** - Docker-specific instructions -3. **FLASK_MIGRATE_TROUBLESHOOTING.md** - Common issues and solutions -4. **POST_BASELINE_MIGRATIONS.md** - Required migrations after baseline -5. 
**MIGRATION_QUICK_REFERENCE.md** - Command cheat sheet - -## Key Scripts - -### For Docker (No Git Required) -- `docker_migrate_init.py` - Initialize from current schema -- `migrate.sh` - Helper script (created by docker_migrate_init.py) - -### For Development (Git Required) -- `simple_baseline_4214e88.py` - Initialize from commit 4214e88 -- `establish_baseline_4214e88.py` - Advanced baseline setup - -### Troubleshooting -- `diagnose_migrations.py` - Comprehensive diagnostics -- `fix_migration_sequence.py` - Fix sequence issues -- `fix_revision_mismatch.py` - Fix revision errors -- `quick_fix_revision.sh` - Quick revision fix - -## Common Workflows - -### First Deployment (Docker) -Handled automatically by startup scripts, or: -```bash -python docker_migrate_init.py -flask db stamp head # For existing DB -flask db upgrade # For new DB -``` - -### Create New Migration -```bash -flask db migrate -m "Add user preferences" -flask db upgrade -``` - -### Check Status -```bash -flask db current # Current revision -flask db history # All migrations -./migrate.sh status # In Docker -``` - -## Important Notes - -1. **Docker containers don't have Git** - Use docker_migrate_init.py -2. **Always review generated migrations** before applying -3. **Test on staging first** before production -4. **Include migrations/ in Docker image** or use volume mount -5. **Startup scripts handle initialization** automatically - -Choose the appropriate guide based on your deployment environment! \ No newline at end of file diff --git a/MIGRATION_FREELANCERS.md b/MIGRATION_FREELANCERS.md deleted file mode 100644 index df37547..0000000 --- a/MIGRATION_FREELANCERS.md +++ /dev/null @@ -1,180 +0,0 @@ -# Freelancer Migration Guide - -This document explains the database migration for freelancer support in TimeTrack. - -## Overview - -The freelancer migration adds support for independent users who can register without a company token. It introduces: - -1. **Account Types**: Users can be either "Company User" or "Freelancer" -2. **Personal Companies**: Freelancers automatically get their own company workspace -3. 
**Business Names**: Optional field for freelancers to specify their business name - -## Database Changes - -### User Table Changes -- `account_type` VARCHAR(20) DEFAULT 'COMPANY_USER' - Type of account -- `business_name` VARCHAR(100) - Optional business name for freelancers -- `company_id` INTEGER - Foreign key to company table (for multi-tenancy) - -### Company Table Changes -- `is_personal` BOOLEAN DEFAULT 0 - Marks companies auto-created for freelancers - -## Migration Options - -### Option 1: Automatic Migration (Recommended) -The main migration script (`migrate_db.py`) now includes freelancer support: - -```bash -python migrate_db.py -``` - -This will: -- Add new columns to existing tables -- Create company table if it doesn't exist -- Set default values for existing users - -### Option 2: Dedicated Freelancer Migration -Use the dedicated freelancer migration script: - -```bash -python migrate_freelancers.py -``` - -### Option 3: Manual SQL Migration -If you prefer manual control: - -```sql --- Add columns to user table -ALTER TABLE user ADD COLUMN account_type VARCHAR(20) DEFAULT 'COMPANY_USER'; -ALTER TABLE user ADD COLUMN business_name VARCHAR(100); -ALTER TABLE user ADD COLUMN company_id INTEGER; - --- Create company table (if it doesn't exist) -CREATE TABLE company ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) UNIQUE NOT NULL, - slug VARCHAR(50) UNIQUE NOT NULL, - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - is_personal BOOLEAN DEFAULT 0, - is_active BOOLEAN DEFAULT 1, - max_users INTEGER DEFAULT 100 -); - --- Or add column to existing company table -ALTER TABLE company ADD COLUMN is_personal BOOLEAN DEFAULT 0; - --- Update existing users -UPDATE user SET account_type = 'COMPANY_USER' WHERE account_type IS NULL; -``` - -## Post-Migration Steps - -### For Existing Installations -1. **Create Default Company**: If you have existing users without a company, create one: - ```python - # In Python/Flask shell - from models import db, Company, User - - # Create default company - company = Company( - name="Default Company", - slug="default-company", - description="Default company for existing users" - ) - db.session.add(company) - db.session.flush() - - # Assign existing users to default company - User.query.filter_by(company_id=None).update({'company_id': company.id}) - db.session.commit() - ``` - -2. **Verify Migration**: Check that all users have a company_id: - ```sql - SELECT COUNT(*) FROM user WHERE company_id IS NULL; - -- Should return 0 - ``` - -### Testing Freelancer Registration -1. Visit `/register/freelancer` -2. Register a new freelancer account -3. Verify the personal company was created -4. 
Test login and time tracking functionality - -## New Features Available - -### Freelancer Registration -- **URL**: `/register/freelancer` -- **Features**: - - No company token required - - Auto-creates personal workspace - - Optional business name field - - Immediate account activation - -### Registration Options -- **Company Registration**: `/register` (existing) -- **Freelancer Registration**: `/register/freelancer` (new) -- **Login Page**: Shows both registration options - -### User Experience -- Freelancers get admin privileges in their personal company -- Can create projects and track time immediately -- Personal workspace is limited to 1 user by default -- Can optionally expand to hire employees later - -## Troubleshooting - -### Common Issues - -**Migration fails with "column already exists"** -- This is normal if you've run the migration before -- The migration script checks for existing columns - -**Users missing company_id after migration** -- Run the post-migration steps above to assign a default company - -**Freelancer registration fails** -- Check that the AccountType enum is imported correctly -- Verify database migration completed successfully - -### Rollback (Limited) -SQLite doesn't support dropping columns, so rollback is limited: - -```bash -python migrate_freelancers.py rollback -``` - -For full rollback, you would need to: -1. Export user data -2. Recreate tables without freelancer columns -3. Re-import data - -## Verification Commands - -```bash -# Verify migration applied -python migrate_freelancers.py verify - -# Check table structure -sqlite3 timetrack.db ".schema user" -sqlite3 timetrack.db ".schema company" - -# Check data -sqlite3 timetrack.db "SELECT account_type, COUNT(*) FROM user GROUP BY account_type;" -``` - -## Security Considerations - -- Freelancers get unique usernames/emails globally (not per-company) -- Personal companies are limited to 1 user by default -- Freelancers have admin privileges only in their personal workspace -- Multi-tenant isolation is maintained - -## Future Enhancements - -- Allow freelancers to upgrade to team accounts -- Billing integration for freelancer vs company accounts -- Advanced freelancer-specific features -- Integration with invoicing systems \ No newline at end of file diff --git a/MIGRATION_PROJECTS.md b/MIGRATION_PROJECTS.md deleted file mode 100644 index 39a25a3..0000000 --- a/MIGRATION_PROJECTS.md +++ /dev/null @@ -1,174 +0,0 @@ -# Project Time Logging Migration Guide - -This document explains how to migrate your TimeTrack database to support the new Project Time Logging feature. 
- -## Overview - -The Project Time Logging feature adds the ability to: -- Track time against specific projects -- Manage projects with role-based access control -- Filter and report on project-based time entries -- Export data with project information - -## Database Changes - -### New Tables -- **`project`**: Stores project information including name, code, description, team assignment, and dates - -### Modified Tables -- **`time_entry`**: Added `project_id` (foreign key) and `notes` (text) columns -- **Existing data**: All existing time entries remain unchanged and will show as "No project assigned" - -## Migration Options - -### Option 1: Run Main Migration Script (Recommended) -The main migration script has been updated to include project functionality: - -```bash -python migrate_db.py -``` - -This will: -- Create the project table -- Add project_id and notes columns to time_entry -- Create 3 sample projects (if no admin user exists) -- Maintain all existing data - -### Option 2: Run Project-Specific Migration -For existing installations, you can run the project-specific migration: - -```bash -python migrate_projects.py -``` - -### Option 3: Manual Migration -If you prefer to handle the migration manually, execute these SQL commands: - -```sql --- Create project table -CREATE TABLE project ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - description TEXT, - code VARCHAR(20) NOT NULL UNIQUE, - is_active BOOLEAN DEFAULT 1, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - team_id INTEGER, - start_date DATE, - end_date DATE, - FOREIGN KEY (created_by_id) REFERENCES user (id), - FOREIGN KEY (team_id) REFERENCES team (id) -); - --- Add columns to time_entry table -ALTER TABLE time_entry ADD COLUMN project_id INTEGER; -ALTER TABLE time_entry ADD COLUMN notes TEXT; -``` - -## Sample Projects - -The migration creates these sample projects (if admin user exists): - -1. **ADMIN001** - General Administration -2. **DEV001** - Development Project -3. **SUPPORT001** - Customer Support - -These can be modified or deleted after migration. - -## Rollback - -To rollback the project functionality (removes projects but keeps time entry columns): - -```bash -python migrate_projects.py rollback -``` - -**Note**: Due to SQLite limitations, the `project_id` and `notes` columns cannot be removed from the `time_entry` table during rollback. - -## Post-Migration Steps - -1. **Verify Migration**: Check that the migration completed successfully -2. **Create Projects**: Admin/Supervisor users can create projects via the web interface -3. **Assign Teams**: Optionally assign projects to specific teams -4. **User Training**: Inform users about the new project selection feature - -## Migration Verification - -After running the migration, verify it worked by: - -1. **Check Tables**: - ```sql - .tables -- Should show 'project' table - .schema project -- Verify project table structure - .schema time_entry -- Verify project_id and notes columns - ``` - -2. **Check Web Interface**: - - Admin/Supervisor users should see "Manage Projects" in their dropdown menu - - Time tracking interface should show project selection dropdown - - History page should have project filtering - -3. 
**Check Sample Projects**: - ```sql - SELECT * FROM project; -- Should show 3 sample projects - ``` - -## Troubleshooting - -### Migration Fails -- Ensure no active connections to the database -- Check file permissions -- Verify admin user exists in the database - -### Missing Navigation Links -- Clear browser cache -- Verify user has Admin or Supervisor role -- Check that the templates have been updated - -### Project Selection Not Available -- Verify migration completed successfully -- Check that active projects exist in the database -- Ensure user has permission to access projects - -## Feature Access - -### Admin Users -- Create, edit, delete, and manage all projects -- Access project management interface -- View all project reports - -### Supervisor Users -- Create, edit, and manage projects -- Access project management interface -- View project reports - -### Team Leader Users -- View team hours with project breakdown -- No project creation/management access - -### Team Member Users -- Select projects when tracking time -- View personal history with project filtering -- No project management access - -## File Changes - -The migration affects these files: -- `migrate_db.py` - Updated main migration script -- `migrate_projects.py` - New project-specific migration -- `models.py` - Added Project model and updated TimeEntry -- `app.py` - Added project routes and updated existing routes -- Templates - Updated with project functionality -- `static/js/script.js` - Updated time tracking JavaScript - -## Backup Recommendation - -Before running any migration, it's recommended to backup your database: - -```bash -cp timetrack.db timetrack.db.backup -``` - -This allows you to restore the original database if needed. \ No newline at end of file diff --git a/MIGRATION_QUICK_REFERENCE.md b/MIGRATION_QUICK_REFERENCE.md deleted file mode 100644 index ab4a6eb..0000000 --- a/MIGRATION_QUICK_REFERENCE.md +++ /dev/null @@ -1,78 +0,0 @@ -# Flask-Migrate Quick Reference - -## 🚀 Quick Start (For Existing Database) - -```bash -# One-time setup for existing database -python migrate_to_alembic.py -``` - -## 📝 Common Commands - -### Make Changes & Generate Migration -```bash -# 1. Edit your models in models/*.py -# 2. Generate migration -flask db migrate -m "Add user preferences table" - -# 3. Review the generated file in migrations/versions/ -# 4. Apply the migration -flask db upgrade -``` - -### Check Status -```bash -flask db current # Show current version -flask db history # Show all migrations -``` - -### Rollback -```bash -flask db downgrade # Go back one version -``` - -## 🔧 Helper Scripts - -```bash -# Interactive migration manager -python manage_migrations.py create # Create new migration -python manage_migrations.py apply # Apply migrations -python manage_migrations.py history # View history -``` - -## ⚠️ Important Notes - -1. **Always review generated migrations** before applying -2. **PostgreSQL enums** may need manual tweaking -3. **Test on development first** -4. **Migrations run automatically on startup** - -## 🆘 Troubleshooting - -### "Target database is not up to date" -```bash -flask db stamp head # Mark as current -flask db upgrade # Apply pending -``` - -### Migration conflicts after git pull -```bash -flask db merge -m "Merge migrations" # Resolve conflicts -``` - -## 📁 Structure -``` -migrations/ -├── versions/ # Your migration files -├── alembic.ini # Config (don't edit) -└── README.md # Detailed docs -``` - -## 🔄 Workflow - -1. `git pull` → `flask db upgrade` → Make changes -2. 
`flask db migrate -m "..."` → Review → Test -3. `git add migrations/versions/*.py` → Commit → Push - ---- -See `FLASK_MIGRATE_GUIDE.md` for detailed documentation. \ No newline at end of file diff --git a/MIGRATION_SETUP_FINAL.md b/MIGRATION_SETUP_FINAL.md new file mode 100644 index 0000000..5c933b1 --- /dev/null +++ b/MIGRATION_SETUP_FINAL.md @@ -0,0 +1,74 @@ +# Final Migration Setup for TimeTrack + +## What's Working Now + +Your migration system is now fully functional with: + +1. **Flask-Migrate** - Handles database schema changes +2. **Automatic Enum Sync** - Handles PostgreSQL enum values +3. **Docker Support** - Works without Git in containers + +## Essential Files to Keep + +### Core Migration Files +- `migrations/` - Flask-Migrate directory (required) +- `sync_postgres_enums.py` - Auto-syncs enum values on startup +- `docker_migrate_init.py` - Initializes migrations in Docker + +### Updated Startup Scripts +- `startup_postgres.sh` - Now includes enum sync +- `startup_postgres_safe.sh` - Debug version with error handling +- `startup.sh` - Updated for Flask-Migrate + +### Debug Tools (Optional) +- `debug_entrypoint.sh` - For troubleshooting +- `docker-compose.debug.yml` - Debug Docker setup + +### Documentation +- `FLASK_MIGRATE_GUIDE.md` - Complete guide +- `DOCKER_MIGRATIONS_GUIDE.md` - Docker-specific guide +- `POSTGRES_ENUM_GUIDE.md` - Enum handling guide +- `FLASK_MIGRATE_TROUBLESHOOTING.md` - Troubleshooting guide + +## Workflow Summary + +### For New Schema Changes +```bash +# 1. Modify your models +# 2. Generate migration +flask db migrate -m "Add new feature" +# 3. Review the generated file +# 4. Apply migration +flask db upgrade +``` + +### For New Enum Values +```python +# Just add to Python enum - sync happens automatically +class TaskStatus(enum.Enum): + NEW_STATUS = "New Status" +``` + +### Docker Deployment +```bash +# Everything is automatic in startup scripts: +# 1. Migrations applied +# 2. Enums synced +# 3. App starts +``` + +## Cleanup + +Run the cleanup script to remove all temporary files: +```bash +./cleanup_migration_cruft.sh +``` + +This removes ~20+ temporary scripts while keeping the essential ones. + +## Notes + +- The old migration system (`migrations_old/`) can be removed after confirming everything works +- PostgreSQL enums now support both names (TODO) and values (To Do) +- All future migrations are handled by Flask-Migrate +- Enum sync runs automatically on every startup \ No newline at end of file diff --git a/POSTGRES_ENUM_GUIDE.md b/POSTGRES_ENUM_GUIDE.md deleted file mode 100644 index f2b1560..0000000 --- a/POSTGRES_ENUM_GUIDE.md +++ /dev/null @@ -1,184 +0,0 @@ -# PostgreSQL Enums with Flask-Migrate - -## The Problem - -PostgreSQL enums are **immutable** in many ways: -- Can't remove values -- Can't rename values -- Can't change order (in older PostgreSQL versions) -- Flask-Migrate often doesn't detect enum changes - -Your specific issue: The model has `TODO` but the database might have `To Do` or vice versa. - -## Best Practices - -### 1. Always Use UPPERCASE for Enum Values - -```python -class TaskStatus(enum.Enum): - TODO = "TODO" # Good - IN_PROGRESS = "IN_PROGRESS" # Good - # To Do = "To Do" # Bad - spaces cause issues -``` - -### 2. Handle Enum Changes Manually - -Flask-Migrate won't automatically handle enum changes. You must: - -```python -# In your migration file's upgrade() function: -def upgrade(): - # Add new enum value - op.execute("ALTER TYPE taskstatus ADD VALUE 'NEW_STATUS'") - - # Note: You CANNOT remove enum values in PostgreSQL! 
-``` - -### 3. Check Enum State Before Migrations - -```bash -# Run this to see current state -python fix_postgres_enums.py - -# Or manually check in psql: -\dT+ taskstatus -``` - -## Fixing Current Enum Issues - -### Option 1: Quick Fix (Add Missing Values) - -```sql --- If model expects 'TODO' but DB has 'To Do' -ALTER TYPE taskstatus ADD VALUE 'TODO'; - --- If model expects 'IN_PROGRESS' but DB has 'In Progress' -ALTER TYPE taskstatus ADD VALUE 'IN_PROGRESS'; -``` - -### Option 2: Create Migration - -```bash -# Generate empty migration -flask db revision -m "Fix enum values" - -# Edit migrations/versions/xxx_fix_enum_values.py -``` - -Add to upgrade(): -```python -def upgrade(): - # Add missing enum values - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO'") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS'") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE'") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'") -``` - -### Option 3: Data Migration (Complex) - -If you need to change existing data from old to new values: - -```python -def upgrade(): - # Add new value - op.execute("ALTER TYPE taskstatus ADD VALUE 'TODO'") - - # Update existing data - op.execute("UPDATE task SET status = 'TODO' WHERE status = 'To Do'") - - # Note: Can't remove 'To Do' - it stays forever! -``` - -## Enum Strategy Going Forward - -### 1. Use String Columns Instead - -Consider replacing enums with string columns + check constraints: - -```python -# Instead of enum -status = db.Column(db.Enum(TaskStatus), default=TaskStatus.TODO) - -# Use string with constraint -status = db.Column(db.String(20), default='TODO') -__table_args__ = ( - db.CheckConstraint("status IN ('TODO', 'IN_PROGRESS', 'DONE')"), -) -``` - -### 2. Create Enum Tables - -Use a separate table for statuses: - -```python -class TaskStatus(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(50), unique=True) - -class Task(db.Model): - status_id = db.Column(db.Integer, db.ForeignKey('task_status.id')) -``` - -### 3. If Keeping Enums, Document Them - -```python -class TaskStatus(enum.Enum): - """ - Task status enum values. - WARNING: These are PostgreSQL enums. - - NEVER change existing values - - ONLY add new values at the end - - To deprecate, mark in comments but don't remove - """ - TODO = "TODO" - IN_PROGRESS = "IN_PROGRESS" - DONE = "DONE" - ARCHIVED = "ARCHIVED" - # DEPRECATED - DO NOT USE - # OLD_STATUS = "OLD_STATUS" # Deprecated 2024-01-01 -``` - -## Debugging Enum Issues - -```bash -# 1. Check what's in the database -psql $DATABASE_URL -c "SELECT enum_range(NULL::taskstatus)" - -# 2. Check what's in the model -python -c "from models import TaskStatus; print([e.value for e in TaskStatus])" - -# 3. Run diagnostic -python fix_postgres_enums.py -``` - -## Emergency Fixes - -If completely stuck: - -```sql --- Nuclear option: Drop and recreate --- WARNING: This will fail if column is in use! - --- 1. Change column to text temporarily -ALTER TABLE task ALTER COLUMN status TYPE TEXT; - --- 2. Drop the enum -DROP TYPE taskstatus; - --- 3. Recreate with correct values -CREATE TYPE taskstatus AS ENUM ('TODO', 'IN_PROGRESS', 'DONE', 'ARCHIVED'); - --- 4. Change column back -ALTER TABLE task ALTER COLUMN status TYPE taskstatus USING status::taskstatus; -``` - -## Prevention - -1. **Always test enum migrations** on a copy of production data -2. **Keep enum values simple** - no spaces, all uppercase -3. 
**Document all enum values** in the model -4. **Consider alternatives** to enums for frequently changing values -5. **Add CHECK constraints** in addition to enums for validation - -Remember: PostgreSQL enums are powerful but inflexible. Choose wisely! \ No newline at end of file diff --git a/POST_BASELINE_MIGRATIONS.md b/POST_BASELINE_MIGRATIONS.md deleted file mode 100644 index cf8431e..0000000 --- a/POST_BASELINE_MIGRATIONS.md +++ /dev/null @@ -1,179 +0,0 @@ -# Post-Baseline Migrations Required - -After establishing the baseline at commit `4214e88d18fce7a9c75927753b8d4e9222771e14`, the following schema changes need to be recreated as Flask-Migrate migrations: - -## Required Migrations (in order) - -### 1. Add company.updated_at -```bash -flask db migrate -m "Add updated_at to company table" -``` - -Expected changes: -- ADD COLUMN company.updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - -### 2. Add user 2FA and avatar columns -```bash -flask db migrate -m "Add two-factor auth and avatar columns to user" -``` - -Expected changes: -- ADD COLUMN user.two_factor_enabled BOOLEAN DEFAULT FALSE -- ADD COLUMN user.two_factor_secret VARCHAR(32) -- ADD COLUMN user.avatar_url VARCHAR(255) - -### 3. Create company_invitation table -```bash -flask db migrate -m "Create company invitation system" -``` - -Expected changes: -- CREATE TABLE company_invitation (with all columns as defined in models/invitation.py) -- Note: The current model has slightly different columns than the old migration - -### 4. Add user_preferences columns -```bash -flask db migrate -m "Add missing columns to user preferences" -``` - -Expected changes: -- Multiple columns for theme, language, timezone, notifications, etc. - -### 5. Add user_dashboard layout columns -```bash -flask db migrate -m "Add layout and lock columns to user dashboard" -``` - -Expected changes: -- ADD COLUMN user_dashboard.layout JSON DEFAULT '{}' -- ADD COLUMN user_dashboard.is_locked BOOLEAN DEFAULT FALSE - -### 6. Add company_work_config columns -```bash -flask db migrate -m "Add work configuration columns" -``` - -Expected changes: -- Multiple columns for overtime, rates, thresholds - -### 7. Add company_settings columns -```bash -flask db migrate -m "Add company settings columns" -``` - -Expected changes: -- Multiple columns for work week, time tracking, features - -### 8. Add dashboard_widget config columns -```bash -flask db migrate -m "Add widget configuration columns" -``` - -Expected changes: -- ADD COLUMN dashboard_widget.config JSON DEFAULT '{}' -- ADD COLUMN dashboard_widget.is_visible BOOLEAN DEFAULT TRUE - -### 9. Update enums -```bash -# These might need manual migration files -flask db migrate -m "Add GERMANY to WorkRegion enum" -flask db migrate -m "Add ARCHIVED to TaskStatus enum" -flask db migrate -m "Add new WidgetType enum values" -``` - -### 10. Add note sharing functionality -```bash -flask db migrate -m "Add note sharing tables and columns" -``` - -Expected changes: -- CREATE TABLE note_share -- ADD COLUMN note.folder VARCHAR(100) -- CREATE TABLE note_folder -- Cascade delete constraints on note_link - -### 11. 
Add time preferences -```bash -flask db migrate -m "Add time formatting preferences" -``` - -Expected changes: -- ADD COLUMN user_preferences.time_format_24h BOOLEAN DEFAULT TRUE -- ADD COLUMN user_preferences.time_rounding_minutes INTEGER DEFAULT 0 -- ADD COLUMN user_preferences.round_to_nearest BOOLEAN DEFAULT FALSE - -## Migration Order Script - -Create a script to apply all migrations in order: - -```bash -#!/bin/bash -# apply_post_baseline_migrations.sh - -echo "Applying post-baseline migrations..." - -# Mark database at baseline if not already done -flask db stamp head - -# Generate and apply each migration -flask db migrate -m "Add updated_at to company table" -flask db upgrade - -flask db migrate -m "Add two-factor auth and avatar columns to user" -flask db upgrade - -flask db migrate -m "Create company invitation system" -flask db upgrade - -# ... continue for all migrations -``` - -## Manual Migration Adjustments - -Some migrations may need manual adjustments: - -### PostgreSQL Enums -Edit the generated migration files to add enum values: - -```python -def upgrade(): - # For WorkRegion enum - op.execute("ALTER TYPE workregion ADD VALUE IF NOT EXISTS 'GERMANY'") - - # For TaskStatus enum - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'") - - # For WidgetType enum - op.execute("ALTER TYPE widgettype ADD VALUE IF NOT EXISTS 'REVENUE_CHART'") - # ... add other widget types -``` - -### Foreign Key Constraints -Ensure CASCADE deletes are properly set: - -```python -def upgrade(): - # For note_link table - op.create_foreign_key( - 'note_link_source_note_id_fkey', - 'note_link', 'note', - ['source_note_id'], ['id'], - ondelete='CASCADE' - ) -``` - -## Verification - -After applying all migrations: - -1. Compare schema with production database -2. Verify all enum values are present -3. Check foreign key constraints -4. Test rollback functionality - -## Notes - -- Review `migrations_old/postgres_only_migration.py` for the complete list of changes -- Some columns in the old migrations may not exist in current models - skip those -- Always test on development database first -- Keep this document updated as migrations are applied \ No newline at end of file diff --git a/SCHEMA_CHANGES_SUMMARY.md b/SCHEMA_CHANGES_SUMMARY.md deleted file mode 100644 index 6793597..0000000 --- a/SCHEMA_CHANGES_SUMMARY.md +++ /dev/null @@ -1,176 +0,0 @@ -# Database Schema Changes Summary - -This document summarizes all database schema changes between commit 4214e88 and the current state of the TimeTrack application. - -## Architecture Changes - -### 1. **Model Structure Refactoring** -- **Before**: Single monolithic `models.py` file containing all models -- **After**: Models split into domain-specific modules: - - `models/__init__.py` - Package initialization - - `models/base.py` - Base model definitions - - `models/company.py` - Company-related models - - `models/user.py` - User-related models - - `models/project.py` - Project-related models - - `models/task.py` - Task-related models - - `models/time_entry.py` - Time entry model - - `models/sprint.py` - Sprint model - - `models/team.py` - Team model - - `models/system.py` - System settings models - - `models/announcement.py` - Announcement model - - `models/dashboard.py` - Dashboard-related models - - `models/work_config.py` - Work configuration model - - `models/invitation.py` - Company invitation model - - `models/enums.py` - All enum definitions - -## New Tables Added - -### 1. 
**company_invitation** (NEW) -- Purpose: Email-based company registration invitations -- Columns: - - `id` (INTEGER, PRIMARY KEY) - - `company_id` (INTEGER, FOREIGN KEY → company.id) - - `email` (VARCHAR(120), NOT NULL) - - `token` (VARCHAR(64), UNIQUE, NOT NULL) - - `role` (VARCHAR(50), DEFAULT 'Team Member') - - `invited_by_id` (INTEGER, FOREIGN KEY → user.id) - - `created_at` (TIMESTAMP, DEFAULT CURRENT_TIMESTAMP) - - `expires_at` (TIMESTAMP, NOT NULL) - - `accepted` (BOOLEAN, DEFAULT FALSE) - - `accepted_at` (TIMESTAMP) - - `accepted_by_user_id` (INTEGER, FOREIGN KEY → user.id) -- Indexes: - - `idx_invitation_token` on token - - `idx_invitation_email` on email - - `idx_invitation_company` on company_id - - `idx_invitation_expires` on expires_at - -## Modified Tables - -### 1. **company** -- Added columns: - - `updated_at` (TIMESTAMP, DEFAULT CURRENT_TIMESTAMP) - NEW - -### 2. **user** -- Added columns: - - `two_factor_enabled` (BOOLEAN, DEFAULT FALSE) - NEW - - `two_factor_secret` (VARCHAR(32), NULLABLE) - NEW - - `avatar_url` (VARCHAR(255), NULLABLE) - NEW - -### 3. **user_preferences** -- Added columns: - - `theme` (VARCHAR(20), DEFAULT 'light') - - `language` (VARCHAR(10), DEFAULT 'en') - - `timezone` (VARCHAR(50), DEFAULT 'UTC') - - `date_format` (VARCHAR(20), DEFAULT 'YYYY-MM-DD') - - `time_format` (VARCHAR(10), DEFAULT '24h') - - `email_notifications` (BOOLEAN, DEFAULT TRUE) - - `email_daily_summary` (BOOLEAN, DEFAULT FALSE) - - `email_weekly_summary` (BOOLEAN, DEFAULT TRUE) - - `default_project_id` (INTEGER, FOREIGN KEY → project.id) - - `timer_reminder_enabled` (BOOLEAN, DEFAULT TRUE) - - `timer_reminder_interval` (INTEGER, DEFAULT 60) - - `dashboard_layout` (JSON, NULLABLE) - -### 4. **user_dashboard** -- Added columns: - - `layout` (JSON, NULLABLE) - Alternative grid layout configuration - - `is_locked` (BOOLEAN, DEFAULT FALSE) - Prevent accidental changes - -### 5. **company_work_config** -- Added columns: - - `standard_hours_per_day` (FLOAT, DEFAULT 8.0) - - `standard_hours_per_week` (FLOAT, DEFAULT 40.0) - - `overtime_enabled` (BOOLEAN, DEFAULT TRUE) - - `overtime_rate` (FLOAT, DEFAULT 1.5) - - `double_time_enabled` (BOOLEAN, DEFAULT FALSE) - - `double_time_threshold` (FLOAT, DEFAULT 12.0) - - `double_time_rate` (FLOAT, DEFAULT 2.0) - - `require_breaks` (BOOLEAN, DEFAULT TRUE) - - `break_duration_minutes` (INTEGER, DEFAULT 30) - - `break_after_hours` (FLOAT, DEFAULT 6.0) - - `weekly_overtime_threshold` (FLOAT, DEFAULT 40.0) - - `weekly_overtime_rate` (FLOAT, DEFAULT 1.5) - -### 6. **company_settings** -- Added columns: - - `work_week_start` (INTEGER, DEFAULT 1) - - `work_days` (VARCHAR(20), DEFAULT '1,2,3,4,5') - - `allow_overlapping_entries` (BOOLEAN, DEFAULT FALSE) - - `require_project_for_time_entry` (BOOLEAN, DEFAULT TRUE) - - `allow_future_entries` (BOOLEAN, DEFAULT FALSE) - - `max_hours_per_entry` (FLOAT, DEFAULT 24.0) - - `enable_tasks` (BOOLEAN, DEFAULT TRUE) - - `enable_sprints` (BOOLEAN, DEFAULT FALSE) - - `enable_client_access` (BOOLEAN, DEFAULT FALSE) - - `notify_on_overtime` (BOOLEAN, DEFAULT TRUE) - - `overtime_threshold_daily` (FLOAT, DEFAULT 8.0) - - `overtime_threshold_weekly` (FLOAT, DEFAULT 40.0) - -### 7. **dashboard_widget** -- Added columns: - - `config` (JSON) - Widget-specific configuration - - `is_visible` (BOOLEAN, DEFAULT TRUE) - -## Enum Changes - -### 1. **WorkRegion** enum -- Added value: - - `GERMANY = "Germany"` - NEW - -### 2. **TaskStatus** enum -- Added value: - - `ARCHIVED = "Archived"` - NEW - -### 3. 
**WidgetType** enum -- Expanded with many new widget types: - - Time Tracking: `CURRENT_TIMER`, `DAILY_SUMMARY`, `WEEKLY_CHART`, `BREAK_REMINDER`, `TIME_SUMMARY` - - Project Management: `ACTIVE_PROJECTS`, `PROJECT_PROGRESS`, `PROJECT_ACTIVITY`, `PROJECT_DEADLINES`, `PROJECT_STATUS` - - Task Management: `ASSIGNED_TASKS`, `TASK_PRIORITY`, `TASK_CALENDAR`, `UPCOMING_TASKS`, `TASK_LIST` - - Sprint: `SPRINT_OVERVIEW`, `SPRINT_BURNDOWN`, `SPRINT_PROGRESS` - - Team & Analytics: `TEAM_WORKLOAD`, `TEAM_PRESENCE`, `TEAM_ACTIVITY` - - Performance: `PRODUCTIVITY_STATS`, `TIME_DISTRIBUTION`, `PERSONAL_STATS` - - Actions: `QUICK_ACTIONS`, `RECENT_ACTIVITY` - -## Migration Requirements - -### PostgreSQL Migration Steps: - -1. **Add company_invitation table** (migration 19) -2. **Add updated_at to company table** (migration 20) -3. **Add new columns to user table** for 2FA and avatar -4. **Add new columns to user_preferences table** -5. **Add new columns to user_dashboard table** -6. **Add new columns to company_work_config table** -7. **Add new columns to company_settings table** -8. **Add new columns to dashboard_widget table** -9. **Update enum types** for WorkRegion and TaskStatus -10. **Update WidgetType enum** with new values - -### Data Migration Considerations: - -1. **Default values**: All new columns have appropriate defaults -2. **Nullable fields**: Most new fields are nullable or have defaults -3. **Foreign keys**: New invitation table has proper FK constraints -4. **Indexes**: Performance indexes added for invitation lookups -5. **Enum migrations**: Need to handle enum type changes carefully in PostgreSQL - -### Breaking Changes: - -- None identified - all changes are additive or have defaults - -### Rollback Strategy: - -1. Drop new tables (company_invitation) -2. Drop new columns from existing tables -3. Revert enum changes (remove new values) - -## Summary - -The main changes involve: -1. Adding email invitation functionality with a new table -2. Enhancing user features with 2FA and avatars -3. Expanding dashboard and widget capabilities -4. Adding comprehensive work configuration options -5. Better tracking with updated_at timestamps -6. Regional compliance support with expanded WorkRegion enum \ No newline at end of file diff --git a/debug_entrypoint.sh b/debug_entrypoint.sh deleted file mode 100755 index 315c29e..0000000 --- a/debug_entrypoint.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/bash -# Debug entrypoint for troubleshooting migration issues - -echo "=== TimeTrack Debug Entrypoint ===" -echo "" -echo "This entrypoint keeps the container running for debugging." -echo "The application is NOT started automatically." -echo "" - -# Set Flask app -export FLASK_APP=app.py - -# Wait for PostgreSQL if needed -if [ -n "$DATABASE_URL" ] || [ -n "$POSTGRES_HOST" ]; then - echo "Waiting for PostgreSQL to be ready..." - while ! pg_isready -h ${POSTGRES_HOST:-db} -p ${POSTGRES_PORT:-5432} -U "$POSTGRES_USER" > /dev/null 2>&1; do - echo "PostgreSQL is not ready yet. Waiting..." - sleep 2 - done - echo "✅ PostgreSQL is ready!" 
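    # Note: pg_isready only confirms the server is accepting connections; it
    # does not authenticate or verify that the target database exists. The
    # "Database connection" SELECT 1 check further down covers that part.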
-fi - -echo "" -echo "=== Environment Info ===" -echo "FLASK_APP: $FLASK_APP" -echo "DATABASE_URL: ${DATABASE_URL:-(not set)}" -echo "Working directory: $(pwd)" -echo "Python version: $(python --version)" -echo "" - -echo "=== Quick Diagnostics ===" - -# Check if migrations directory exists -if [ -d "migrations" ]; then - echo "✅ migrations/ directory exists" - - # Try to check current migration - echo -n "Current migration: " - flask db current 2>&1 || echo "❌ Failed to get current migration" -else - echo "❌ migrations/ directory not found" -fi - -# Check database connection -echo -n "Database connection: " -python -c " -from app import app, db -try: - with app.app_context(): - db.engine.execute('SELECT 1') - print('✅ Connected') -except Exception as e: - print(f'❌ Failed: {e}') -" 2>&1 - -echo "" -echo "=== Available Commands ===" -echo "" -echo "Migration commands:" -echo " python docker_migrate_init.py # Initialize migrations (Docker-friendly)" -echo " flask db current # Show current migration" -echo " flask db history # Show migration history" -echo " flask db upgrade # Apply migrations" -echo " flask db stamp head # Mark DB as current" -echo "" -echo "Diagnostic commands:" -echo " python diagnose_migrations.py # Full diagnostics" -echo " python fix_revision_mismatch.py # Fix revision errors" -echo " ./quick_fix_revision.sh # Quick revision fix" -echo "" -echo "Start application manually:" -echo " ./startup_postgres.sh # Normal startup" -echo " ./startup_postgres_safe.sh # Safe startup (won't exit)" -echo " python app.py # Development server" -echo "" -echo "To exit this container:" -echo " exit" -echo "" -echo "=== Container Ready for Debugging ===" -echo "" - -# Keep container running -exec /bin/bash \ No newline at end of file diff --git a/diagnose_migrations.py b/diagnose_migrations.py deleted file mode 100755 index 3c150fe..0000000 --- a/diagnose_migrations.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python3 -""" -Diagnostic script for Flask-Migrate issues. -Helps identify common problems with migrations. -""" - -import os -import sys -import subprocess -import json -from pathlib import Path - -def run_command(cmd, capture=True): - """Run a command and return result.""" - if capture: - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - return result.returncode, result.stdout, result.stderr - else: - return subprocess.run(cmd, shell=True).returncode, "", "" - -def check_environment(): - """Check environment setup.""" - print("=== Environment Check ===") - - # Check FLASK_APP - flask_app = os.environ.get('FLASK_APP', 'Not set') - print(f"FLASK_APP: {flask_app}") - if flask_app == 'Not set': - print(" ⚠️ FLASK_APP not set. 
Setting to app.py") - os.environ['FLASK_APP'] = 'app.py' - - # Check DATABASE_URL - db_url = os.environ.get('DATABASE_URL', 'Not set') - if db_url != 'Not set': - # Hide password in output - if '@' in db_url: - parts = db_url.split('@') - proto_user = parts[0].split('://') - if len(proto_user) > 1 and ':' in proto_user[1]: - user_pass = proto_user[1].split(':') - safe_url = f"{proto_user[0]}://{user_pass[0]}:****@{parts[1]}" - else: - safe_url = db_url - else: - safe_url = db_url - print(f"DATABASE_URL: {safe_url}") - else: - print("DATABASE_URL: Using default from app.py") - - print() - -def check_migrations_directory(): - """Check migrations directory structure.""" - print("=== Migrations Directory Check ===") - - if not os.path.exists('migrations'): - print("❌ migrations/ directory not found!") - print(" Run: python establish_baseline_4214e88.py") - return False - - print("✓ migrations/ directory exists") - - # Check for required files - required_files = ['alembic.ini', 'env.py', 'script.py.mako'] - for file in required_files: - path = os.path.join('migrations', file) - if os.path.exists(path): - print(f"✓ {file} exists") - else: - print(f"❌ {file} missing!") - return False - - # Check versions directory - versions_dir = os.path.join('migrations', 'versions') - if not os.path.exists(versions_dir): - print("❌ versions/ directory missing!") - return False - - print("✓ versions/ directory exists") - - # List migration files - migration_files = [f for f in os.listdir(versions_dir) if f.endswith('.py')] - print(f"\nMigration files found: {len(migration_files)}") - for f in sorted(migration_files): - print(f" - {f}") - - print() - return True - -def check_database_state(): - """Check current database migration state.""" - print("=== Database State Check ===") - - # Check current revision - code, stdout, stderr = run_command("flask db current") - if code == 0: - print(f"Current revision: {stdout.strip()}") - else: - print("❌ Failed to get current revision") - print(f"Error: {stderr}") - return False - - # Check if database is up to date - code, stdout, stderr = run_command("flask db check") - if code == 0: - if "Database is up to date" in stdout: - print("✓ Database is up to date") - else: - print("⚠️ Database may need upgrade") - print(stdout) - else: - print("⚠️ Database check returned non-zero") - if stderr: - print(f"Error: {stderr}") - - print() - return True - -def check_model_imports(): - """Check if models can be imported.""" - print("=== Model Import Check ===") - - try: - from app import app, db - print("✓ Successfully imported app and db") - - with app.app_context(): - # Try to import all models - from models import ( - Company, User, Project, Task, TimeEntry, - CompanySettings, UserPreferences, Sprint - ) - print("✓ Successfully imported all main models") - - # Check if models have tables - print("\nModel tables:") - for model in [Company, User, Project, Task, TimeEntry]: - table_name = model.__tablename__ - print(f" - {model.__name__}: {table_name}") - - except Exception as e: - print(f"❌ Import error: {e}") - import traceback - traceback.print_exc() - return False - - print() - return True - -def test_migration_detection(): - """Test if Flask-Migrate can detect changes.""" - print("=== Migration Detection Test ===") - - # Try a dry run - code, stdout, stderr = run_command("flask db migrate --dry-run") - - if code == 0: - if "No changes in schema detected" in stdout: - print("ℹ️ No schema changes detected") - print(" This means your models match the current migration state") - 
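        # Portability note: a --dry-run flag for `flask db migrate` is not
        # guaranteed across Flask-Migrate releases. On Flask-Migrate 4.x the
        # supported way to detect drift is `flask db check` (already used in
        # check_database_state above), which exits non-zero when the models
        # and the migration history disagree.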
else: - print("✓ Flask-Migrate can detect changes") - print("\nDetected changes:") - print(stdout) - else: - print("❌ Migration detection failed") - if "Target database is not up to date" in stderr: - print(" ⚠️ Database needs upgrade first!") - print(" Run: flask db upgrade") - else: - print(f"Error: {stderr}") - - print() - -def suggest_fixes(): - """Suggest fixes based on diagnostics.""" - print("=== Suggested Actions ===") - - # Check if we need to upgrade - code, stdout, stderr = run_command("flask db heads") - if code == 0: - heads = stdout.strip() - code2, current, _ = run_command("flask db current") - if code2 == 0 and current.strip() != heads: - print("1. Your database is not at the latest migration:") - print(" flask db upgrade") - print() - - # Check for pending migrations - code, stdout, stderr = run_command("flask db show") - if code == 0 and "pending upgrade" in stdout.lower(): - print("2. You have pending migrations to apply:") - print(" flask db upgrade") - print() - - print("3. To create a new migration after making model changes:") - print(" flask db migrate -m 'Description of changes'") - print(" flask db upgrade") - print() - - print("4. If you're getting 'No changes detected':") - print(" - Ensure you've actually modified a model") - print(" - Check that the model is imported in models/__init__.py") - print(" - Try comparing with: flask db compare") - print() - - print("5. For 'Target database is not up to date' errors:") - print(" flask db stamp head # Force mark as current") - print(" flask db migrate -m 'Your changes'") - print() - -def main(): - """Run all diagnostics.""" - print("=== Flask-Migrate Diagnostic Tool ===\n") - - # Run checks - check_environment() - - if not check_migrations_directory(): - print("\n❌ Migrations not properly initialized") - print("Run: python establish_baseline_4214e88.py") - return 1 - - if not check_model_imports(): - print("\n❌ Model import issues detected") - return 1 - - check_database_state() - test_migration_detection() - suggest_fixes() - - print("=== Diagnostic Complete ===") - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/establish_baseline_4214e88.py b/establish_baseline_4214e88.py deleted file mode 100755 index 464c418..0000000 --- a/establish_baseline_4214e88.py +++ /dev/null @@ -1,307 +0,0 @@ -#!/usr/bin/env python3 -""" -Establish Flask-Migrate baseline from git commit 4214e88d18fce7a9c75927753b8d4e9222771e14. - -This script: -1. Checks out the models from commit 4214e88 -2. Initializes Flask-Migrate -3. Creates an initial migration representing that schema -4. Stamps the database to mark it as up-to-date with that baseline - -This allows all migrations after commit 4214e88 to be managed by Flask-Migrate. 
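Prerequisites: a clean git working tree (enforced by check_git_status below),
commit 4214e88 reachable in local history, and FLASK_APP=app.py (the script
sets this itself).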
-""" - -import os -import sys -import subprocess -import tempfile -import shutil -from datetime import datetime - -def run_command(cmd, description, check=True): - """Run a command and handle errors.""" - print(f"\n{description}...") - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - print(f"✓ {description} completed") - if result.stdout.strip(): - print(result.stdout) - return True - else: - print(f"✗ {description} failed") - if result.stderr: - print(f"Error: {result.stderr}") - if result.stdout: - print(f"Output: {result.stdout}") - if check: - sys.exit(1) - return False - -def check_git_status(): - """Ensure git working directory is clean.""" - result = subprocess.run("git status --porcelain", shell=True, capture_output=True, text=True) - if result.stdout.strip(): - print("❌ Git working directory is not clean!") - print("Please commit or stash your changes before running this script.") - return False - return True - -def get_commit_date(commit_hash): - """Get the date of a specific commit.""" - result = subprocess.run( - f"git show -s --format=%ci {commit_hash}", - shell=True, - capture_output=True, - text=True - ) - if result.returncode == 0: - return result.stdout.strip() - return datetime.now().isoformat() - -def main(): - """Main function to establish baseline.""" - print("=== Establishing Flask-Migrate Baseline from Commit 4214e88 ===") - - # Configuration - BASELINE_COMMIT = "4214e88d18fce7a9c75927753b8d4e9222771e14" - BASELINE_DATE = get_commit_date(BASELINE_COMMIT) - - print(f"Baseline commit: {BASELINE_COMMIT}") - print(f"Commit date: {BASELINE_DATE}") - - # Check prerequisites - if not check_git_status(): - return 1 - - # Set Flask app - os.environ['FLASK_APP'] = 'app.py' - - # Step 1: Clean up any existing migrations - if os.path.exists('migrations'): - response = input("\n⚠️ Migrations directory already exists. Remove it? 
(y/N): ") - if response.lower() != 'y': - print("Aborting...") - return 1 - run_command("rm -rf migrations", "Removing existing migrations directory") - - # Step 2: Backup current models and extract baseline - print(f"\nPreparing baseline models from commit {BASELINE_COMMIT}...") - - # Check if baseline commit has models.py or models/ directory - result = subprocess.run( - f"git show {BASELINE_COMMIT}:models.py", - shell=True, - capture_output=True - ) - has_single_models_file = result.returncode == 0 - - if has_single_models_file: - print("✓ Found models.py in baseline commit (monolithic structure)") - - # Backup current models directory - if os.path.exists('models'): - print("Backing up current models/ directory...") - run_command("mv models models_backup_temp", "Backing up current models") - - # Extract baseline models.py - run_command( - f"git show {BASELINE_COMMIT}:models.py > models.py", - "Extracting baseline models.py" - ) - - # We need to ensure the models.py imports db correctly - # The old file might have different imports - print("Adjusting imports in baseline models.py...") - with open('models.py', 'r') as f: - content = f.read() - - # Ensure it has proper imports for Flask-Migrate - if 'from flask_sqlalchemy import SQLAlchemy' not in content: - # Add the import at the top if missing - lines = content.split('\n') - for i, line in enumerate(lines): - if line.strip() and not line.startswith('#'): - lines.insert(i, 'from flask_sqlalchemy import SQLAlchemy\ndb = SQLAlchemy()\n') - break - content = '\n'.join(lines) - - with open('models.py', 'w') as f: - f.write(content) - else: - print("⚠️ No models.py found in baseline commit") - print("Checking for models/ directory...") - - # Try to check if models/ exists - result = subprocess.run( - f"git show {BASELINE_COMMIT}:models/__init__.py", - shell=True, - capture_output=True - ) - - if result.returncode == 0: - print("Found models/ directory in baseline commit") - # This shouldn't happen for commit 4214e88, but handle it anyway - # ... existing code for models/ directory ... 
- else: - print("❌ Neither models.py nor models/ found in baseline commit!") - print("This commit might not have SQLAlchemy models yet.") - return 1 - - # Step 3: Initialize Flask-Migrate - run_command("flask db init", "Initializing Flask-Migrate") - - # Step 4: Create the baseline migration - print("\n📝 Creating baseline migration...") - print("This migration represents the schema at commit 4214e88") - - migration_message = f"Baseline schema from commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})" - - # Need to temporarily update app.py imports if using old models.py - if has_single_models_file: - print("Temporarily adjusting app.py imports...") - with open('app.py', 'r') as f: - app_content = f.read() - - # Replace models imports temporarily - app_content_backup = app_content - app_content = app_content.replace( - 'from models import db,', - 'from models import db,' - ).replace( - 'from models import', - 'from models import' - ) - - with open('app.py', 'w') as f: - f.write(app_content) - - # Generate the migration - result = run_command( - f'flask db migrate -m "{migration_message}"', - "Generating baseline migration" - ) - - # Step 5: Restore current models structure - if has_single_models_file: - print("\nRestoring current models structure...") - - # Remove temporary models.py - if os.path.exists('models.py'): - os.remove('models.py') - print("✓ Removed temporary models.py") - - # Restore models directory - if os.path.exists('models_backup_temp'): - run_command("mv models_backup_temp models", "Restoring models directory") - - # Restore app.py if we modified it - if 'app_content_backup' in locals(): - with open('app.py', 'w') as f: - f.write(app_content_backup) - print("✓ Restored app.py") - - # Step 6: Add a note to the migration file - migration_files = os.listdir("migrations/versions") - if migration_files: - latest_migration = sorted(migration_files)[-1] - migration_path = os.path.join("migrations/versions", latest_migration) - - with open(migration_path, 'r') as f: - content = f.read() - - # Add comment at the top of the file - baseline_note = f'''""" -BASELINE MIGRATION - DO NOT MODIFY - -This migration represents the database schema at commit {BASELINE_COMMIT}. -Date: {BASELINE_DATE} - -This is the starting point for Flask-Migrate. All future schema changes -should be managed through Flask-Migrate migrations. - -If you have a database that was created before this point, you should: -1. Ensure your database schema matches this migration -2. Run: flask db stamp head - -If you're creating a new database: -1. Run: flask db upgrade -""" - -''' - - with open(migration_path, 'w') as f: - f.write(baseline_note + content) - - print(f"✓ Added baseline note to migration: {latest_migration}") - - # Step 7: Create documentation - doc_content = f"""# Flask-Migrate Baseline Information - -## Baseline Commit -- Commit: {BASELINE_COMMIT} -- Date: {BASELINE_DATE} -- Description: This is the baseline schema for Flask-Migrate - -## For Existing Databases - -If your database was created from the schema at or after commit {BASELINE_COMMIT[:8]}: - -```bash -# Mark your database as being at the baseline -flask db stamp head -``` - -## For New Databases - -```bash -# Create all tables from the baseline -flask db upgrade -``` - -## Post-Baseline Migrations - -All migrations after commit {BASELINE_COMMIT[:8]} that were previously in the -old migration system need to be recreated as Flask-Migrate migrations: - -1. Company settings additions -2. User preferences columns -3. Dashboard widget updates -4. 
Work configuration changes -5. Note sharing functionality -6. Time preferences - -Use `flask db migrate -m "description"` to create these migrations. - -## Important Notes - -- Do NOT modify the baseline migration -- Always review generated migrations before applying -- Test migrations on a development database first -""" - - with open('migrations/BASELINE_INFO.md', 'w') as f: - f.write(doc_content) - - print("\n✅ Created migrations/BASELINE_INFO.md") - - # Step 8: Show summary - print("\n" + "="*60) - print("✨ Baseline establishment completed!") - print("="*60) - print(f"\nBaseline: Commit {BASELINE_COMMIT[:8]} ({BASELINE_DATE})") - print("\nNext steps:") - print("\n1. For existing databases at or after this commit:") - print(" flask db stamp head") - print("\n2. For new databases:") - print(" flask db upgrade") - print("\n3. To add post-baseline changes:") - print(" - Review migrations_old/postgres_only_migration.py") - print(" - Create new migrations for changes after 4214e88") - print(" - Example: flask db migrate -m 'Add company settings columns'") - print("\n4. Always review generated migrations in migrations/versions/") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fix_enum_mismatch.py b/fix_enum_mismatch.py deleted file mode 100755 index 336a3e6..0000000 --- a/fix_enum_mismatch.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix enum value mismatches between Python models and PostgreSQL. -The issue: Python sends enum.name ('TODO') but PostgreSQL expects enum.value ('To Do'). -""" - -import os -import sys - -def main(): - """Fix enum mismatches.""" - print("=== Enum Value Mismatch Fix ===\n") - - os.environ['FLASK_APP'] = 'app.py' - - from app import app, db - from models import TaskStatus, TaskPriority, Role, WorkRegion - - print("The Problem:") - print("- Your Python code sends: task.status = TaskStatus.TODO") - print("- SQLAlchemy sends to DB: 'TODO' (the enum name)") - print("- But the enum value is: 'To Do'") - print("- PostgreSQL expects: 'To Do' (the enum value)\n") - - with app.app_context(): - # Show the mismatch - print("TaskStatus enum mapping:") - for status in TaskStatus: - print(f" {status.name} -> '{status.value}'") - print(f" Python sends: '{status.name}'") - print(f" DB expects: '{status.value}'") - - print("\n" + "="*50) - print("SOLUTION OPTIONS") - print("="*50) - - print("\nOption 1: Add enum NAMES to PostgreSQL (Recommended)") - print("This allows both 'TODO' and 'To Do' to work:\n") - - # Generate SQL to add enum names - sql_fixes = [] - - # Check what's in the database - try: - result = db.engine.execute(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'taskstatus') - """) - db_values = set(row[0] for row in result) - - print(f"Current database values: {list(db_values)}\n") - - # Add missing enum NAMES - for status in TaskStatus: - if status.name not in db_values: - sql_fixes.append(f"ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS '{status.name}';") - - if sql_fixes: - print("SQL to run:") - for sql in sql_fixes: - print(f" {sql}") - - except Exception as e: - print(f"Error checking database: {e}") - - print("\n\nOption 2: Fix Python enum definitions") - print("Change enums to use name as value:\n") - print("# In models/enums.py:") - print("class TaskStatus(enum.Enum):") - print(" TODO = 'TODO' # Instead of 'To Do'") - print(" IN_PROGRESS = 'IN_PROGRESS' # Instead of 'In Progress'") - - print("\n\nOption 3: Create migration to fix this 
properly") - - # Create a migration file - migration_content = '''"""Fix enum value mismatches - -Revision ID: fix_enum_values -Create Date: 2024-01-01 - -""" -from alembic import op -import sqlalchemy as sa - -def upgrade(): - # Add enum NAMES as valid values (keeping the display values too) - # This allows both 'TODO' and 'To Do' to work - - # TaskStatus - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO';") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS';") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_REVIEW';") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE';") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'CANCELLED';") - op.execute("ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED';") - - # TaskPriority - op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'LOW';") - op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'MEDIUM';") - op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'HIGH';") - op.execute("ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'URGENT';") - - # Role (if using enum in DB) - op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_MEMBER';") - op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_LEADER';") - op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'SUPERVISOR';") - op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'ADMIN';") - op.execute("ALTER TYPE role ADD VALUE IF NOT EXISTS 'SYSTEM_ADMIN';") - -def downgrade(): - # Cannot remove enum values in PostgreSQL - pass -''' - - with open('fix_enum_values_migration.py', 'w') as f: - f.write(migration_content) - - print("Created: fix_enum_values_migration.py") - print("\nTo apply Option 1:") - print("1. Copy the migration content to a new migration") - print("2. Run: flask db upgrade") - - print("\n\nWHY THIS HAPPENS:") - print("- SQLAlchemy sends the enum NAME (TODO) not VALUE ('To Do')") - print("- This is a common issue with PostgreSQL enums") - print("- Best practice: Make enum name == enum value") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fix_migration_838055206ef5.py b/fix_migration_838055206ef5.py deleted file mode 100755 index 9bbd0fa..0000000 --- a/fix_migration_838055206ef5.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix the specific revision error: Can't locate revision identified by '838055206ef5' -This script will clean up the database migration state and re-initialize. -""" - -import os -import sys - -def main(): - """Fix the revision mismatch.""" - print("=== Fixing Revision 838055206ef5 Error ===\n") - - os.environ['FLASK_APP'] = 'app.py' - - print("This error means your database thinks it's at revision '838055206ef5'") - print("but that revision doesn't exist in your migration files.\n") - - print("We'll fix this by:") - print("1. Clearing the incorrect revision from the database") - print("2. Re-initializing migrations from current schema") - print("3. Marking the database as up-to-date\n") - - response = input("Continue? 
(y/N): ") - if response.lower() != 'y': - print("Aborting...") - return 1 - - # Step 1: Clear the alembic_version table - print("\nStep 1: Clearing migration history from database...") - try: - from app import app, db - with app.app_context(): - # Check if alembic_version exists - result = db.engine.execute(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.tables - WHERE table_name = 'alembic_version' - ) - """) - exists = result.fetchone()[0] - - if exists: - # Clear the incorrect revision - db.engine.execute("DELETE FROM alembic_version") - print("✓ Cleared alembic_version table") - else: - print("ℹ️ No alembic_version table found (this is OK)") - - except Exception as e: - print(f"❌ Error clearing alembic_version: {e}") - print("\nTry running this SQL manually:") - print(" DELETE FROM alembic_version;") - return 1 - - # Step 2: Remove and recreate migrations directory - print("\nStep 2: Re-initializing migrations...") - - import shutil - if os.path.exists('migrations'): - print("Removing old migrations directory...") - shutil.rmtree('migrations') - - # Run the Docker-friendly initialization - print("Running docker_migrate_init.py...") - result = os.system("python docker_migrate_init.py") - - if result != 0: - print("❌ Failed to initialize migrations") - return 1 - - # Step 3: Stamp the database - print("\nStep 3: Marking database as current...") - result = os.system("flask db stamp head") - - if result == 0: - print("\n✅ Success! The revision error has been fixed.") - print("\nYou can now:") - print("1. Create new migrations: flask db migrate -m 'Your changes'") - print("2. Apply migrations: flask db upgrade") - else: - print("\n⚠️ Failed to stamp database.") - print("Try running manually: flask db stamp head") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fix_migration_sequence.py b/fix_migration_sequence.py deleted file mode 100755 index f10fa31..0000000 --- a/fix_migration_sequence.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix common Flask-Migrate sequencing issues. -Handles the case where you need to apply migrations before creating new ones. -""" - -import os -import sys -import subprocess - -def run_command(cmd, description): - """Run a command with output.""" - print(f"\n➜ {description}") - print(f" Command: {cmd}") - result = subprocess.run(cmd, shell=True) - return result.returncode == 0 - -def main(): - """Fix migration sequence issues.""" - print("=== Flask-Migrate Sequence Fix ===") - - # Set environment - os.environ['FLASK_APP'] = 'app.py' - - print("\nThis script will:") - print("1. Show current migration status") - print("2. Apply any pending migrations") - print("3. Prepare for creating new migrations") - - input("\nPress Enter to continue...") - - # Step 1: Show current status - print("\n" + "="*50) - print("STEP 1: Current Status") - print("="*50) - - run_command("flask db current", "Current database revision") - run_command("flask db heads", "Latest migration in files") - - # Step 2: Check if upgrade needed - print("\n" + "="*50) - print("STEP 2: Checking for pending migrations") - print("="*50) - - # Try to upgrade - if run_command("flask db upgrade", "Applying pending migrations"): - print("✅ Database is now up to date") - else: - print("⚠️ Upgrade failed. Trying to fix...") - - # Try stamping head - response = input("\nStamp database as current? 
(y/N): ") - if response.lower() == 'y': - if run_command("flask db stamp head", "Stamping database"): - print("✅ Database stamped as current") - - # Step 3: Test creating a migration - print("\n" + "="*50) - print("STEP 3: Testing migration creation") - print("="*50) - - if run_command("flask db migrate --dry-run", "Dry run of migration"): - print("✅ Ready to create new migrations") - print("\nYou can now run:") - print(" flask db migrate -m 'Your migration message'") - else: - print("❌ Still having issues") - print("\nTry running: python diagnose_migrations.py") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fix_postgres_enums.py b/fix_postgres_enums.py deleted file mode 100755 index 3a33c9a..0000000 --- a/fix_postgres_enums.py +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix PostgreSQL enum issues for Flask-Migrate. -Handles the TODO vs To Do issue and other enum mismatches. -""" - -import os -import sys - -def main(): - """Fix enum issues.""" - print("=== PostgreSQL Enum Fix ===\n") - - os.environ['FLASK_APP'] = 'app.py' - - from app import app, db - from models import TaskStatus, TaskPriority, Role, WorkRegion - - with app.app_context(): - print("Checking enum values in database vs models...\n") - - # Check all enums - enums_to_check = [ - ('taskstatus', TaskStatus, 'task', 'status'), - ('taskpriority', TaskPriority, 'task', 'priority'), - ('role', Role, 'user', 'role'), - ('workregion', WorkRegion, 'company_work_config', 'work_region') - ] - - fixes_needed = [] - - for enum_name, enum_class, table_name, column_name in enums_to_check: - print(f"Checking {enum_name}:") - - try: - # Get database values - result = db.engine.execute(f""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = '{enum_name}') - ORDER BY enumsortorder - """) - db_values = [row[0] for row in result] - print(f" Database values: {db_values}") - - # Get model values - use the actual enum values, not names - model_values = [item.value for item in enum_class] - print(f" Model values: {model_values}") - - # Debug: also show enum names vs values - print(f" Model enum mapping:") - for item in enum_class: - print(f" {item.name} = '{item.value}'") - - # Check for mismatches - missing_in_db = set(model_values) - set(db_values) - extra_in_db = set(db_values) - set(model_values) - - if missing_in_db: - print(f" ⚠️ Missing in database: {missing_in_db}") - for value in missing_in_db: - fixes_needed.append(f"ALTER TYPE {enum_name} ADD VALUE '{value}';") - - if extra_in_db: - print(f" ⚠️ Extra in database (not in model): {extra_in_db}") - # Note: Can't easily remove enum values in PostgreSQL - - if not missing_in_db and not extra_in_db: - print(" ✅ All values match") - - except Exception as e: - print(f" ❌ Error checking {enum_name}: {e}") - - print() - - if fixes_needed: - print("\nRequired fixes:") - print("Create a new migration and add these to the upgrade() function:\n") - - for fix in fixes_needed: - print(f" op.execute(\"{fix}\")") - - print("\nOr run this SQL directly:") - for fix in fixes_needed: - print(fix) - - # Create a migration file - print("\n\nCreating migration file...") - migration_content = '''"""Fix enum values - -Revision ID: fix_enums -Revises: -Create Date: 2024-01-01 00:00:00.000000 - -""" -from alembic import op -import sqlalchemy as sa - -def upgrade(): - # Fix enum values -''' - for fix in fixes_needed: - migration_content += f' op.execute("{fix}")\n' - - migration_content += ''' -def 
downgrade(): - # Note: PostgreSQL doesn't support removing enum values - pass -''' - - with open('fix_enums_migration.py', 'w') as f: - f.write(migration_content) - - print("✅ Created fix_enums_migration.py") - print("\nTo apply, either:") - print("1. Copy this content to a new migration file") - print("2. Run the SQL commands directly") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fix_revision_mismatch.py b/fix_revision_mismatch.py deleted file mode 100755 index 5317c27..0000000 --- a/fix_revision_mismatch.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix Flask-Migrate revision mismatch errors. -Handles cases where database references a revision that doesn't exist in files. -""" - -import os -import sys -import subprocess -import glob -from pathlib import Path - -def run_command(cmd, capture=True): - """Run a command and return result.""" - if capture: - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - return result.returncode, result.stdout, result.stderr - else: - result = subprocess.run(cmd, shell=True) - return result.returncode, "", "" - -def get_database_revision(): - """Get current revision from database.""" - print("Checking database revision...") - code, stdout, stderr = run_command("flask db current") - - if code != 0: - if "Can't locate revision" in stderr: - # Extract the problematic revision - import re - match = re.search(r"Can't locate revision identified by '([^']+)'", stderr) - if match: - return match.group(1), True # revision, is_missing - print(f"Error getting current revision: {stderr}") - return None, False - - # Extract revision from output - revision = stdout.strip().split()[0] if stdout.strip() else None - return revision, False - -def get_file_revisions(): - """Get all revisions from migration files.""" - versions_dir = Path("migrations/versions") - if not versions_dir.exists(): - return [] - - revisions = [] - for file in versions_dir.glob("*.py"): - if file.name == "__pycache__": - continue - - with open(file, 'r') as f: - content = f.read() - - # Extract revision - import re - revision_match = re.search(r"^revision = ['\"]([^'\"]+)['\"]", content, re.MULTILINE) - down_revision_match = re.search(r"^down_revision = ['\"]([^'\"]+)['\"]", content, re.MULTILINE) - - if revision_match: - revisions.append({ - 'file': file.name, - 'revision': revision_match.group(1), - 'down_revision': down_revision_match.group(1) if down_revision_match else None - }) - - return revisions - -def check_alembic_version_table(): - """Check the alembic_version table directly.""" - print("\nChecking alembic_version table...") - - # Try to connect to database and check - try: - from app import app, db - with app.app_context(): - result = db.engine.execute("SELECT version_num FROM alembic_version") - versions = [row[0] for row in result] - return versions - except Exception as e: - print(f"Could not check alembic_version table: {e}") - return [] - -def main(): - """Main repair function.""" - print("=== Flask-Migrate Revision Mismatch Repair ===\n") - - # Set environment - os.environ['FLASK_APP'] = 'app.py' - - # Step 1: Diagnose the problem - print("Step 1: Diagnosing the issue...") - - db_revision, is_missing = get_database_revision() - if is_missing: - print(f"❌ Database references missing revision: {db_revision}") - elif db_revision: - print(f"📍 Current database revision: {db_revision}") - else: - print("⚠️ Could not determine database revision") - - # Step 2: Check migration files - 
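    # Background: alembic_version normally holds a single row whose
    # version_num (VARCHAR(32)) names the current revision. The "Can't
    # locate revision" error means that row points at a revision with no
    # matching file under migrations/versions/.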
print("\nStep 2: Checking migration files...") - file_revisions = get_file_revisions() - - if not file_revisions: - print("❌ No migration files found!") - print("\nSolution: Re-initialize migrations") - print(" rm -rf migrations") - print(" python establish_baseline_4214e88.py") - return 1 - - print(f"Found {len(file_revisions)} migration files:") - for rev in file_revisions: - print(f" - {rev['revision'][:8]} in {rev['file']}") - - # Check if problematic revision exists in files - if is_missing and db_revision: - revision_exists = any(r['revision'] == db_revision for r in file_revisions) - if not revision_exists: - print(f"\n❌ Revision {db_revision} not found in migration files!") - - # Step 3: Check alembic_version table - db_versions = check_alembic_version_table() - if db_versions: - print(f"\nDatabase alembic_version table contains: {db_versions}") - - # Step 4: Provide solutions - print("\n" + "="*50) - print("SOLUTIONS") - print("="*50) - - print("\nOption 1: Reset to latest migration file (Recommended)") - print("-" * 40) - if file_revisions: - latest_revision = file_revisions[-1]['revision'] - print(f"Latest revision in files: {latest_revision}") - print("\nRun these commands:") - print(f" flask db stamp {latest_revision}") - print(" flask db upgrade") - - print("\nOption 2: Start fresh (Nuclear option)") - print("-" * 40) - print("⚠️ Only do this if Option 1 fails!") - print("\nRun these commands:") - print(" # Clear alembic version from database") - print(" python -c \"from app import app, db; app.app_context().push(); db.engine.execute('DELETE FROM alembic_version')\"") - print(" # Stamp with latest revision") - if file_revisions: - print(f" flask db stamp {file_revisions[-1]['revision']}") - - print("\nOption 3: Complete reset (Last resort)") - print("-" * 40) - print("⚠️ This will recreate all migrations!") - print("\nRun these commands:") - print(" rm -rf migrations") - print(" python establish_baseline_4214e88.py") - print(" flask db stamp head") - - # Step 5: Automated fix attempt - print("\n" + "="*50) - print("AUTOMATED FIX") - print("="*50) - - if is_missing and file_revisions: - response = input(f"\nAttempt to fix by stamping to latest revision? (y/N): ") - if response.lower() == 'y': - latest_revision = file_revisions[-1]['revision'] - print(f"\nStamping database to revision: {latest_revision}") - code, stdout, stderr = run_command(f"flask db stamp {latest_revision}") - - if code == 0: - print("✅ Successfully stamped database!") - print("\nNow run: flask db upgrade") - else: - print(f"❌ Stamping failed: {stderr}") - print("\nTry manual SQL fix:") - print(f" UPDATE alembic_version SET version_num = '{latest_revision}';") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/fly.toml b/fly.toml deleted file mode 100644 index a46b6e7..0000000 --- a/fly.toml +++ /dev/null @@ -1,34 +0,0 @@ -# fly.toml app configuration file generated for timetrack-2whuug on 2025-07-01T09:27:14Z -# -# See https://fly.io/docs/reference/configuration/ for information about how to use this file. 
-# - -app = 'timetrack-2whuug' -primary_region = 'fra' - -[build] - -[http_service] - internal_port = 5000 - force_https = true - auto_stop_machines = 'stop' - auto_start_machines = true - min_machines_running = 0 - processes = ['app'] - -[env] - MAIL_SERVER = "smtp.ionos.de" - MAIL_PORT = 587 - MAIL_USE_TLS = 1 - MAIL_USERNAME = "jens@luedicke.cloud" - MAIL_DEFAULT_SENDER = "jens@luedicke.cloud" - - -[mounts] - source = "timetrack_data" - destination = "/data" - -[[vm]] - cpu_kind = 'shared' - cpus = 1 - memory_mb = 256 diff --git a/init_migrations.py b/init_migrations.py deleted file mode 100755 index 7d31334..0000000 --- a/init_migrations.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -""" -Initialize Flask-Migrate for the TimeTrack application. -This script sets up the migrations directory and creates the initial migration. -""" - -import os -import sys -import subprocess - -def run_command(cmd, description): - """Run a command and handle errors.""" - print(f"\n{description}...") - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - print(f"✓ {description} completed successfully") - if result.stdout: - print(result.stdout) - else: - print(f"✗ {description} failed") - if result.stderr: - print(f"Error: {result.stderr}") - if result.stdout: - print(f"Output: {result.stdout}") - return False - return True - -def main(): - """Main initialization function.""" - print("=== Flask-Migrate Initialization ===") - - # Set Flask app environment variable - os.environ['FLASK_APP'] = 'app.py' - - # Initialize migrations directory - if not run_command("flask db init", "Initializing migrations directory"): - return 1 - - # Create initial migration - if not run_command( - 'flask db migrate -m "Initial migration from existing schema"', - "Creating initial migration" - ): - return 1 - - print("\n✨ Flask-Migrate initialization completed!") - print("\nNext steps:") - print("1. Review the generated migration in migrations/versions/") - print("2. Apply the migration with: flask db upgrade") - print("3. For future schema changes, use: flask db migrate -m 'Description'") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/manage_migrations.py b/manage_migrations.py deleted file mode 100755 index f9f8619..0000000 --- a/manage_migrations.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 -""" -Migration management script for TimeTrack. -Handles both development and production migration scenarios. -""" - -import os -import sys -import subprocess -import argparse -from datetime import datetime - -def run_command(cmd, description, check=True): - """Run a command and handle errors.""" - print(f"\n{description}...") - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - print(f"✓ {description} completed") - if result.stdout: - print(result.stdout) - return True - else: - print(f"✗ {description} failed") - if result.stderr: - print(f"Error: {result.stderr}") - if result.stdout: - print(f"Output: {result.stdout}") - if check: - sys.exit(1) - return False - -def init_migrations(): - """Initialize Flask-Migrate.""" - os.environ['FLASK_APP'] = 'app.py' - - if os.path.exists('migrations'): - print("⚠️ Migrations directory already exists!") - response = input("Do you want to reinitialize? This will delete existing migrations. 
(y/N): ") - if response.lower() != 'y': - print("Aborting...") - return False - run_command("rm -rf migrations", "Removing existing migrations directory") - - run_command("flask db init", "Initializing Flask-Migrate") - return True - -def create_migration(message): - """Create a new migration.""" - os.environ['FLASK_APP'] = 'app.py' - - if not message: - message = input("Enter migration message: ") - - run_command(f'flask db migrate -m "{message}"', "Creating migration") - print("\n📝 Please review the generated migration before applying it!") - return True - -def apply_migrations(): - """Apply pending migrations.""" - os.environ['FLASK_APP'] = 'app.py' - - # Show current version - run_command("flask db current", "Current database version", check=False) - - # Show pending migrations - print("\nPending migrations:") - run_command("flask db show", "Migration history", check=False) - - response = input("\nApply pending migrations? (y/N): ") - if response.lower() == 'y': - run_command("flask db upgrade", "Applying migrations") - return True - -def rollback_migration(): - """Rollback to previous migration.""" - os.environ['FLASK_APP'] = 'app.py' - - run_command("flask db current", "Current database version") - response = input("\nRollback to previous version? (y/N): ") - - if response.lower() == 'y': - run_command("flask db downgrade", "Rolling back migration") - return True - -def show_history(): - """Show migration history.""" - os.environ['FLASK_APP'] = 'app.py' - - run_command("flask db history", "Migration history") - return True - -def stamp_database(revision='head'): - """Stamp database with a specific revision without running migrations.""" - os.environ['FLASK_APP'] = 'app.py' - - print(f"⚠️ This will mark the database as being at revision '{revision}' without running any migrations.") - response = input("Continue? (y/N): ") - - if response.lower() == 'y': - run_command(f"flask db stamp {revision}", f"Stamping database with revision {revision}") - return True - -def main(): - """Main function.""" - parser = argparse.ArgumentParser(description='Manage Flask-Migrate migrations') - parser.add_argument('command', choices=['init', 'create', 'apply', 'rollback', 'history', 'stamp'], - help='Command to execute') - parser.add_argument('-m', '--message', help='Migration message (for create command)') - parser.add_argument('-r', '--revision', default='head', help='Revision to stamp (for stamp command)') - - args = parser.parse_args() - - commands = { - 'init': init_migrations, - 'create': lambda: create_migration(args.message), - 'apply': apply_migrations, - 'rollback': rollback_migration, - 'history': show_history, - 'stamp': lambda: stamp_database(args.revision) - } - - print(f"=== TimeTrack Migration Manager ===") - print(f"Command: {args.command}") - print(f"Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") - - success = commands[args.command]() - - if success: - print("\n✨ Operation completed successfully!") - else: - print("\n❌ Operation failed or was cancelled") - sys.exit(1) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/manual_migration_fix.sql b/manual_migration_fix.sql deleted file mode 100644 index 3a5fac5..0000000 --- a/manual_migration_fix.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Manual SQL commands to fix migration revision error --- Run these commands in your PostgreSQL database if the scripts fail - --- 1. Check current revision (optional) -SELECT * FROM alembic_version; - --- 2. 
Clear the incorrect revision -DELETE FROM alembic_version; - --- 3. If you want to set a specific revision manually: --- First, check what revisions you have: --- ls migrations/versions/*.py --- Then insert the revision ID from one of those files: --- INSERT INTO alembic_version (version_num) VALUES ('your_revision_id_here'); - --- 4. Or just leave it empty and let Flask-Migrate handle it --- The next 'flask db stamp head' will set the correct revision \ No newline at end of file diff --git a/migrate_to_alembic.py b/migrate_to_alembic.py deleted file mode 100755 index 27bf6c9..0000000 --- a/migrate_to_alembic.py +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env python3 -""" -Special migration script to transition from manual migrations to Flask-Migrate/Alembic. -This script handles the existing database schema and creates a baseline migration. -""" - -import os -import sys -import subprocess -import psycopg2 -from urllib.parse import urlparse - -def check_database_exists(): - """Check if database exists and has tables.""" - database_url = os.environ.get('DATABASE_URL', 'sqlite:////data/timetrack.db') - - if database_url.startswith('sqlite'): - db_path = database_url.replace('sqlite:///', '') - return os.path.exists(db_path) - - # PostgreSQL - try: - parsed = urlparse(database_url) - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - database=parsed.path[1:], - user=parsed.username, - password=parsed.password - ) - cursor = conn.cursor() - cursor.execute(""" - SELECT COUNT(*) FROM information_schema.tables - WHERE table_schema = 'public' AND table_type = 'BASE TABLE' - """) - table_count = cursor.fetchone()[0] - cursor.close() - conn.close() - return table_count > 0 - except Exception as e: - print(f"Error checking database: {e}") - return False - -def run_command(cmd, description): - """Run a command and handle errors.""" - print(f"\n{description}...") - result = subprocess.run(cmd, shell=True, capture_output=True, text=True) - - if result.returncode == 0: - print(f"✓ {description} completed") - if result.stdout: - print(result.stdout) - return True - else: - print(f"✗ {description} failed") - if result.stderr: - print(f"Error: {result.stderr}") - if result.stdout: - print(f"Output: {result.stdout}") - return False - -def main(): - """Main migration function.""" - print("=== Migrating to Flask-Migrate/Alembic ===") - print("\n⚠️ IMPORTANT: This script assumes your database is at the current schema.") - print("For baseline at commit 4214e88, use: python establish_baseline_4214e88.py") - - # Set Flask app - os.environ['FLASK_APP'] = 'app.py' - - # Check if we have an existing database - has_existing_db = check_database_exists() - - if has_existing_db: - print("\n📊 Existing database detected!") - print("This process will:") - print("1. Initialize Flask-Migrate") - print("2. Create a baseline migration matching your CURRENT schema") - print("3. Mark the database as up-to-date without running migrations") - print("\nThis allows you to start using Flask-Migrate for future changes.") - print("\n⚠️ If your database is at commit 4214e88, use establish_baseline_4214e88.py instead!") - - response = input("\nContinue with current schema? 
(y/N): ") - if response.lower() != 'y': - print("Aborting...") - return 1 - else: - print("\n🆕 No existing database detected.") - print("This will set up a fresh Flask-Migrate installation.") - - # Step 1: Initialize Flask-Migrate - if not run_command("flask db init", "Initializing Flask-Migrate"): - return 1 - - if has_existing_db: - # Step 2: Create initial migration from existing schema - if not run_command( - 'flask db migrate -m "Initial migration from existing database"', - "Creating migration from existing schema" - ): - return 1 - - print("\n📝 Review the generated migration!") - print("The migration file is in migrations/versions/") - print("Make sure it matches your existing schema.") - - response = input("\nHave you reviewed the migration? Continue? (y/N): ") - if response.lower() != 'y': - print("Please review and run: flask db stamp head") - return 0 - - # Step 3: Stamp the database without running migrations - if not run_command("flask db stamp head", "Marking database as up-to-date"): - return 1 - - print("\n✅ Database marked as up-to-date with current schema") - else: - # Fresh installation - create tables - if not run_command( - 'flask db migrate -m "Initial database creation"', - "Creating initial migration" - ): - return 1 - - if not run_command("flask db upgrade", "Creating database tables"): - return 1 - - print("\n✅ Database tables created successfully") - - print("\n✨ Migration to Flask-Migrate completed!") - print("\nFuture migrations:") - print("1. Make changes to your models") - print("2. Run: flask db migrate -m 'Description of changes'") - print("3. Review the generated migration") - print("4. Run: flask db upgrade") - - # Create a README for the team - readme_content = """# Flask-Migrate Usage - -This project now uses Flask-Migrate (Alembic) for database migrations. - -## Common Commands - -### Create a new migration -```bash -flask db migrate -m "Description of changes" -``` - -### Apply migrations -```bash -flask db upgrade -``` - -### View migration history -```bash -flask db history -``` - -### Rollback one migration -```bash -flask db downgrade -``` - -### View current migration -```bash -flask db current -``` - -## Important Notes - -1. **Always review generated migrations** before applying them -2. **Test migrations** on a development database first -3. **Back up your database** before applying migrations in production -4. **Custom enums** may need manual adjustment in migration files - -## Migration Files - -- `migrations/` - Main migrations directory -- `migrations/versions/` - Individual migration files -- `migrations/alembic.ini` - Alembic configuration - -## For Production - -The startup scripts have been updated to automatically run migrations. 
-""" - - with open('migrations/README.md', 'w') as f: - f.write(readme_content) - - print("\n📄 Created migrations/README.md with usage instructions") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/add_cascade_delete_note_links.sql b/migrations_old/add_cascade_delete_note_links.sql deleted file mode 100644 index 697aa16..0000000 --- a/migrations_old/add_cascade_delete_note_links.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Migration to add CASCADE delete to note_link foreign keys --- This ensures that when a note is deleted, all links to/from it are also deleted - --- For PostgreSQL --- Drop existing foreign key constraints -ALTER TABLE note_link DROP CONSTRAINT IF EXISTS note_link_source_note_id_fkey; -ALTER TABLE note_link DROP CONSTRAINT IF EXISTS note_link_target_note_id_fkey; - --- Add new foreign key constraints with CASCADE -ALTER TABLE note_link - ADD CONSTRAINT note_link_source_note_id_fkey - FOREIGN KEY (source_note_id) - REFERENCES note(id) - ON DELETE CASCADE; - -ALTER TABLE note_link - ADD CONSTRAINT note_link_target_note_id_fkey - FOREIGN KEY (target_note_id) - REFERENCES note(id) - ON DELETE CASCADE; \ No newline at end of file diff --git a/migrations_old/add_cascade_delete_note_links_sqlite.sql b/migrations_old/add_cascade_delete_note_links_sqlite.sql deleted file mode 100644 index 3816bfe..0000000 --- a/migrations_old/add_cascade_delete_note_links_sqlite.sql +++ /dev/null @@ -1,25 +0,0 @@ --- SQLite migration for cascade delete on note_link --- SQLite doesn't support ALTER TABLE for foreign keys, so we need to recreate the table - --- Create new table with CASCADE delete -CREATE TABLE note_link_new ( - id INTEGER PRIMARY KEY, - source_note_id INTEGER NOT NULL, - target_note_id INTEGER NOT NULL, - link_type VARCHAR(50) DEFAULT 'related', - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (source_note_id) REFERENCES note(id) ON DELETE CASCADE, - FOREIGN KEY (target_note_id) REFERENCES note(id) ON DELETE CASCADE, - FOREIGN KEY (created_by_id) REFERENCES user(id), - UNIQUE(source_note_id, target_note_id) -); - --- Copy data from old table -INSERT INTO note_link_new SELECT * FROM note_link; - --- Drop old table -DROP TABLE note_link; - --- Rename new table -ALTER TABLE note_link_new RENAME TO note_link; \ No newline at end of file diff --git a/migrations_old/add_folder_to_notes.sql b/migrations_old/add_folder_to_notes.sql deleted file mode 100644 index e7bf3fb..0000000 --- a/migrations_old/add_folder_to_notes.sql +++ /dev/null @@ -1,5 +0,0 @@ --- Add folder column to notes table -ALTER TABLE note ADD COLUMN IF NOT EXISTS folder VARCHAR(100); - --- Create an index on folder for faster filtering -CREATE INDEX IF NOT EXISTS idx_note_folder ON note(folder) WHERE folder IS NOT NULL; \ No newline at end of file diff --git a/migrations_old/add_note_folder_table.sql b/migrations_old/add_note_folder_table.sql deleted file mode 100644 index 4b1687a..0000000 --- a/migrations_old/add_note_folder_table.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Create note_folder table for tracking folders independently of notes -CREATE TABLE IF NOT EXISTS note_folder ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL, - path VARCHAR(500) NOT NULL, - parent_path VARCHAR(500), - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL REFERENCES "user"(id), - company_id INTEGER NOT NULL REFERENCES company(id), - CONSTRAINT uq_folder_path_company UNIQUE 
(path, company_id) -); - --- Create indexes for better performance -CREATE INDEX IF NOT EXISTS idx_note_folder_company ON note_folder(company_id); -CREATE INDEX IF NOT EXISTS idx_note_folder_parent_path ON note_folder(parent_path); -CREATE INDEX IF NOT EXISTS idx_note_folder_created_by ON note_folder(created_by_id); \ No newline at end of file diff --git a/migrations_old/add_note_sharing.sql b/migrations_old/add_note_sharing.sql deleted file mode 100644 index 5cae4cc..0000000 --- a/migrations_old/add_note_sharing.sql +++ /dev/null @@ -1,21 +0,0 @@ --- Add note_share table for public note sharing functionality -CREATE TABLE IF NOT EXISTS note_share ( - id SERIAL PRIMARY KEY, - note_id INTEGER NOT NULL REFERENCES note(id) ON DELETE CASCADE, - token VARCHAR(64) UNIQUE NOT NULL, - expires_at TIMESTAMP, - password_hash VARCHAR(255), - view_count INTEGER DEFAULT 0, - max_views INTEGER, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL REFERENCES "user"(id), - last_accessed_at TIMESTAMP -); - --- Create indexes for better performance -CREATE INDEX IF NOT EXISTS idx_note_share_token ON note_share(token); -CREATE INDEX IF NOT EXISTS idx_note_share_note_id ON note_share(note_id); -CREATE INDEX IF NOT EXISTS idx_note_share_created_by ON note_share(created_by_id); - --- Add comment -COMMENT ON TABLE note_share IS 'Public sharing links for notes with optional password protection and view limits'; \ No newline at end of file diff --git a/migrations_old/add_time_preferences.sql b/migrations_old/add_time_preferences.sql deleted file mode 100644 index acf47df..0000000 --- a/migrations_old/add_time_preferences.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Add time formatting and rounding preferences to user_preferences table --- These columns support user-specific time display and rounding settings - --- Add time formatting preference (24h vs 12h) -ALTER TABLE user_preferences - ADD COLUMN IF NOT EXISTS time_format_24h BOOLEAN DEFAULT TRUE; - --- Add time rounding preference (0, 5, 10, 15, 30, 60 minutes) -ALTER TABLE user_preferences - ADD COLUMN IF NOT EXISTS time_rounding_minutes INTEGER DEFAULT 0; - --- Add rounding direction preference (false=round down, true=round to nearest) -ALTER TABLE user_preferences - ADD COLUMN IF NOT EXISTS round_to_nearest BOOLEAN DEFAULT FALSE; - --- Update existing date_format column default if needed --- (The column should already exist, but let's ensure the default is correct) -UPDATE user_preferences -SET date_format = 'ISO' -WHERE date_format = 'YYYY-MM-DD' OR date_format IS NULL; \ No newline at end of file diff --git a/migrations_old/migration_list.txt b/migrations_old/migration_list.txt deleted file mode 100644 index 00816f9..0000000 --- a/migrations_old/migration_list.txt +++ /dev/null @@ -1,24 +0,0 @@ -# Database Migration Scripts - In Order of Execution - -## Phase 1: SQLite Schema Updates (Run first) -01_migrate_db.py - Update SQLite schema with all necessary columns and tables - -## Phase 2: Data Migration (Run after SQLite updates) -02_migrate_sqlite_to_postgres.py - Migrate data from updated SQLite to PostgreSQL - -## Phase 3: PostgreSQL Schema Migrations (Run after data migration) -03_add_dashboard_columns.py - Add missing columns to user_dashboard table -04_add_user_preferences_columns.py - Add missing columns to user_preferences table -05_fix_task_status_enum.py - Fix task status enum values in database -06_add_archived_status.py - Add ARCHIVED status to task_status enum -07_fix_company_work_config_columns.py - Fix company work 
config column names -08_fix_work_region_enum.py - Fix work region enum values -09_add_germany_to_workregion.py - Add GERMANY back to work_region enum -10_add_company_settings_columns.py - Add missing columns to company_settings table - -## Phase 4: Code Migrations (Run after all schema migrations) -11_fix_company_work_config_usage.py - Update code references to CompanyWorkConfig fields -12_fix_task_status_usage.py - Update code references to TaskStatus enum values -13_fix_work_region_usage.py - Update code references to WorkRegion enum values -14_fix_removed_fields.py - Handle removed fields in code -15_repair_user_roles.py - Fix user roles from string to enum values \ No newline at end of file diff --git a/migrations_old/old_migrations/00_migration_summary.py b/migrations_old/old_migrations/00_migration_summary.py deleted file mode 100755 index 48c1e61..0000000 --- a/migrations_old/old_migrations/00_migration_summary.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python3 -""" -Summary of all model migrations to be performed -""" - -import os -from pathlib import Path - -def print_section(title, items): - """Print a formatted section""" - print(f"\n{'='*60}") - print(f"📌 {title}") - print('='*60) - for item in items: - print(f" {item}") - -def main(): - print("🔍 Model Migration Summary") - print("="*60) - print("\nThis will update your codebase to match the refactored models.") - - # CompanyWorkConfig changes - print_section("CompanyWorkConfig Field Changes", [ - "✓ work_hours_per_day → standard_hours_per_day", - "✓ mandatory_break_minutes → break_duration_minutes", - "✓ break_threshold_hours → break_after_hours", - "✓ region → work_region", - "✗ REMOVED: additional_break_minutes", - "✗ REMOVED: additional_break_threshold_hours", - "✗ REMOVED: region_name (use work_region.value)", - "✗ REMOVED: created_by_id", - "+ ADDED: standard_hours_per_week, overtime_enabled, overtime_rate, etc." - ]) - - # TaskStatus changes - print_section("TaskStatus Enum Changes", [ - "✓ NOT_STARTED → TODO", - "✓ COMPLETED → DONE", - "✓ ON_HOLD → IN_REVIEW", - "+ KEPT: ARCHIVED (separate from CANCELLED)" - ]) - - # WorkRegion changes - print_section("WorkRegion Enum Changes", [ - "✓ UNITED_STATES → USA", - "✓ UNITED_KINGDOM → UK", - "✓ FRANCE → EU", - "✓ EUROPEAN_UNION → EU", - "✓ CUSTOM → OTHER", - "! 
KEPT: GERMANY (specific labor laws)" - ]) - - # Files to be modified - print_section("Files That Will Be Modified", [ - "Python files: app.py, routes/*.py", - "Templates: admin_company.html, admin_work_policies.html, config.html", - "JavaScript: static/js/*.js (for task status)", - "Removed field references will be commented out" - ]) - - # Safety notes - print_section("⚠️ Important Notes", [ - "BACKUP your code before running migrations", - "Removed fields will be commented with # REMOVED:", - "Review all changes after migration", - "Test thoroughly, especially:", - " - Company work policy configuration", - " - Task status transitions", - " - Regional preset selection", - "Consider implementing audit logging for created_by tracking" - ]) - - print("\n" + "="*60) - print("🎯 To run all migrations: python migrations/run_all_migrations.py") - print("🎯 To run individually: python migrations/01_fix_company_work_config_usage.py") - print("="*60) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/01_migrate_db.py b/migrations_old/old_migrations/01_migrate_db.py deleted file mode 100644 index 4ef819a..0000000 --- a/migrations_old/old_migrations/01_migrate_db.py +++ /dev/null @@ -1,1897 +0,0 @@ -#!/usr/bin/env python3 -""" -Database Migration Script for TimeTrack -Consolidates all database migrations and provides command line interface. -""" - -import sqlite3 -import os -import sys -import argparse -from datetime import datetime - -# Add parent directory to path to import app -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -# Try to import from Flask app context if available -try: - from app import app, db - from models import (User, TimeEntry, WorkConfig, SystemSettings, Team, Role, Project, - Company, CompanyWorkConfig, CompanySettings, UserPreferences, WorkRegion, AccountType, - ProjectCategory, Task, SubTask, TaskStatus, TaskPriority, Announcement, SystemEvent, - WidgetType, UserDashboard, DashboardWidget, WidgetTemplate, Comment, CommentVisibility, - BrandingSettings) - from werkzeug.security import generate_password_hash - FLASK_AVAILABLE = True -except ImportError: - print("Flask app not available. Running in standalone mode.") - FLASK_AVAILABLE = False - # Define Role and AccountType enums for standalone mode - import enum - - class Role(enum.Enum): - TEAM_MEMBER = "Team Member" - TEAM_LEADER = "Team Leader" - SUPERVISOR = "Supervisor" - ADMIN = "Administrator" - SYSTEM_ADMIN = "System Administrator" - - class AccountType(enum.Enum): - COMPANY_USER = "Company User" - FREELANCER = "Freelancer" - - -def get_db_path(db_file=None): - """Determine database path based on environment or provided file.""" - if db_file: - return db_file - - # Check for Docker environment - if os.path.exists('/data'): - return '/data/timetrack.db' - - return 'timetrack.db' - - -def run_all_migrations(db_path=None): - """Run all database migrations in sequence.""" - db_path = get_db_path(db_path) - print(f"Running migrations on database: {db_path}") - - # Check if database exists - if not os.path.exists(db_path): - print("Database doesn't exist. 
Creating new database.") - if FLASK_AVAILABLE: - with app.app_context(): - db.create_all() - init_system_settings() - else: - create_new_database(db_path) - return - - print("Running database migrations...") - - # Run migrations in sequence - run_basic_migrations(db_path) - migrate_to_company_model(db_path) - migrate_work_config_data(db_path) - migrate_task_system(db_path) - migrate_system_events(db_path) - migrate_dashboard_system(db_path) - migrate_comment_system(db_path) - migrate_notes_system(db_path) - update_note_link_cascade(db_path) - - # Run PostgreSQL-specific migrations if applicable - if FLASK_AVAILABLE: - migrate_postgresql_schema() - - if FLASK_AVAILABLE: - with app.app_context(): - # Handle company migration and admin user setup - migrate_data() - - print("Database migrations completed successfully!") - - -def run_basic_migrations(db_path): - """Run basic table structure migrations.""" - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if time_entry table exists first - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='time_entry'") - if not cursor.fetchone(): - print("time_entry table doesn't exist. Creating all tables...") - if FLASK_AVAILABLE: - with app.app_context(): - db.create_all() - init_system_settings() - else: - create_all_tables(cursor) - conn.commit() - conn.close() - return - - # Migrate time_entry table - cursor.execute("PRAGMA table_info(time_entry)") - time_entry_columns = [column[1] for column in cursor.fetchall()] - - migrations = [ - ('is_paused', "ALTER TABLE time_entry ADD COLUMN is_paused BOOLEAN DEFAULT 0"), - ('pause_start_time', "ALTER TABLE time_entry ADD COLUMN pause_start_time TIMESTAMP"), - ('total_break_duration', "ALTER TABLE time_entry ADD COLUMN total_break_duration INTEGER DEFAULT 0"), - ('user_id', "ALTER TABLE time_entry ADD COLUMN user_id INTEGER"), - ('project_id', "ALTER TABLE time_entry ADD COLUMN project_id INTEGER"), - ('notes', "ALTER TABLE time_entry ADD COLUMN notes TEXT"), - ('task_id', "ALTER TABLE time_entry ADD COLUMN task_id INTEGER"), - ('subtask_id', "ALTER TABLE time_entry ADD COLUMN subtask_id INTEGER") - ] - - for column_name, sql_command in migrations: - if column_name not in time_entry_columns: - print(f"Adding {column_name} column to time_entry...") - cursor.execute(sql_command) - - # Migrate work_config table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='work_config'") - if not cursor.fetchone(): - print("Creating work_config table...") - cursor.execute(""" - CREATE TABLE work_config ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - work_hours_per_day FLOAT DEFAULT 8.0, - mandatory_break_minutes INTEGER DEFAULT 30, - break_threshold_hours FLOAT DEFAULT 6.0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - user_id INTEGER, - additional_break_minutes INTEGER DEFAULT 15, - additional_break_threshold_hours FLOAT DEFAULT 9.0 - ) - """) - else: - cursor.execute("PRAGMA table_info(work_config)") - work_config_columns = [column[1] for column in cursor.fetchall()] - - work_config_migrations = [ - ('additional_break_minutes', "ALTER TABLE work_config ADD COLUMN additional_break_minutes INTEGER DEFAULT 15"), - ('additional_break_threshold_hours', "ALTER TABLE work_config ADD COLUMN additional_break_threshold_hours FLOAT DEFAULT 9.0"), - ('user_id', "ALTER TABLE work_config ADD COLUMN user_id INTEGER") - ] - - for column_name, sql_command in work_config_migrations: - if column_name not in 
work_config_columns: - print(f"Adding {column_name} column to work_config...") - cursor.execute(sql_command) - - # Migrate user table - cursor.execute("PRAGMA table_info(user)") - user_columns = [column[1] for column in cursor.fetchall()] - - user_migrations = [ - ('is_verified', "ALTER TABLE user ADD COLUMN is_verified BOOLEAN DEFAULT 0"), - ('verification_token', "ALTER TABLE user ADD COLUMN verification_token VARCHAR(100)"), - ('token_expiry', "ALTER TABLE user ADD COLUMN token_expiry TIMESTAMP"), - ('is_blocked', "ALTER TABLE user ADD COLUMN is_blocked BOOLEAN DEFAULT 0"), - ('role', "ALTER TABLE user ADD COLUMN role VARCHAR(50) DEFAULT 'Team Member'"), - ('team_id', "ALTER TABLE user ADD COLUMN team_id INTEGER"), - ('account_type', f"ALTER TABLE user ADD COLUMN account_type VARCHAR(20) DEFAULT '{AccountType.COMPANY_USER.value}'"), - ('business_name', "ALTER TABLE user ADD COLUMN business_name VARCHAR(100)"), - ('company_id', "ALTER TABLE user ADD COLUMN company_id INTEGER"), - ('two_factor_enabled', "ALTER TABLE user ADD COLUMN two_factor_enabled BOOLEAN DEFAULT 0"), - ('two_factor_secret', "ALTER TABLE user ADD COLUMN two_factor_secret VARCHAR(32)"), - ('avatar_url', "ALTER TABLE user ADD COLUMN avatar_url VARCHAR(255)") - ] - - for column_name, sql_command in user_migrations: - if column_name not in user_columns: - print(f"Adding {column_name} column to user...") - cursor.execute(sql_command) - - # Handle is_admin to role migration - if 'is_admin' in user_columns and 'role' in user_columns: - print("Migrating is_admin column to role...") - cursor.execute("UPDATE user SET role = ? WHERE is_admin = 1 AND (role IS NULL OR role = '')", (Role.ADMIN.value,)) - cursor.execute("UPDATE user SET role = ? WHERE is_admin = 0 AND (role IS NULL OR role = '')", (Role.TEAM_MEMBER.value,)) - - # Create other tables if they don't exist - create_missing_tables(cursor) - - conn.commit() - - except Exception as e: - print(f"Error during basic migrations: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def create_missing_tables(cursor): - """Create missing tables.""" - - # Team table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='team'") - if not cursor.fetchone(): - print("Creating team table...") - cursor.execute(""" - CREATE TABLE team ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - description VARCHAR(255), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - company_id INTEGER NOT NULL, - FOREIGN KEY (company_id) REFERENCES company (id), - UNIQUE(company_id, name) - ) - """) - - # System settings table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='system_settings'") - if not cursor.fetchone(): - print("Creating system_settings table...") - cursor.execute(""" - CREATE TABLE system_settings ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - key VARCHAR(50) UNIQUE NOT NULL, - value VARCHAR(255) NOT NULL, - description VARCHAR(255), - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """) - - # Project table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='project'") - if not cursor.fetchone(): - print("Creating project table...") - cursor.execute(""" - CREATE TABLE project ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - description TEXT, - code VARCHAR(20) NOT NULL, - is_active BOOLEAN DEFAULT 1, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - company_id INTEGER NOT NULL, - created_by_id INTEGER 
NOT NULL, - team_id INTEGER, - category_id INTEGER, - start_date DATE, - end_date DATE, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES user (id), - FOREIGN KEY (team_id) REFERENCES team (id), - FOREIGN KEY (category_id) REFERENCES project_category (id), - UNIQUE(company_id, code) - ) - """) - - # Company table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='company'") - if not cursor.fetchone(): - print("Creating company table...") - cursor.execute(""" - CREATE TABLE company ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - slug VARCHAR(50) UNIQUE NOT NULL, - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - is_personal BOOLEAN DEFAULT 0, - is_active BOOLEAN DEFAULT 1, - max_users INTEGER DEFAULT 100, - UNIQUE(name) - ) - """) - - # Announcement table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='announcement'") - if not cursor.fetchone(): - print("Creating announcement table...") - cursor.execute(""" - CREATE TABLE announcement ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - title VARCHAR(200) NOT NULL, - content TEXT NOT NULL, - is_active BOOLEAN DEFAULT 1, - is_urgent BOOLEAN DEFAULT 0, - announcement_type VARCHAR(20) DEFAULT 'info', - start_date TIMESTAMP, - end_date TIMESTAMP, - target_all_users BOOLEAN DEFAULT 1, - target_roles TEXT, - target_companies TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (created_by_id) REFERENCES user (id) - ) - """) - - # Company Settings table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='company_settings'") - if not cursor.fetchone(): - print("Creating company_settings table...") - cursor.execute(""" - CREATE TABLE company_settings ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - company_id INTEGER NOT NULL, - default_comment_visibility VARCHAR(20) DEFAULT 'Company', - allow_team_visibility_comments BOOLEAN DEFAULT 1, - require_task_assignment BOOLEAN DEFAULT 0, - allow_task_creation_by_members BOOLEAN DEFAULT 1, - restrict_project_access_by_team BOOLEAN DEFAULT 0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES user (id), - UNIQUE(company_id) - ) - """) - - # Branding Settings table - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='branding_settings'") - if not cursor.fetchone(): - print("Creating branding_settings table...") - cursor.execute(""" - CREATE TABLE branding_settings ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - app_name VARCHAR(100) NOT NULL DEFAULT 'Time Tracker', - logo_filename VARCHAR(255), - logo_alt_text VARCHAR(255) DEFAULT 'Logo', - favicon_filename VARCHAR(255), - primary_color VARCHAR(7) DEFAULT '#007bff', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_by_id INTEGER, - FOREIGN KEY (updated_by_id) REFERENCES user (id) - ) - """) - - -def migrate_to_company_model(db_path): - """Migrate to company-based multi-tenancy model.""" - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if company table exists, create if not - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='company'") - if not cursor.fetchone(): - create_missing_tables(cursor) - - # Check 
and add missing columns to existing company table - cursor.execute("PRAGMA table_info(company)") - company_columns = [column[1] for column in cursor.fetchall()] - - company_migrations = [ - ('is_personal', "ALTER TABLE company ADD COLUMN is_personal BOOLEAN DEFAULT 0") - ] - - for column_name, sql_command in company_migrations: - if column_name not in company_columns: - print(f"Adding {column_name} column to company...") - cursor.execute(sql_command) - - # Add company_id to tables that need it - add_company_id_to_tables(cursor) - - # Handle user role enum migration - migrate_user_roles(cursor) - - conn.commit() - - except Exception as e: - print(f"Error during company model migration: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def add_company_id_to_tables(cursor): - """Add company_id columns to tables that need multi-tenancy.""" - - tables_needing_company = ['project', 'team'] - - for table_name in tables_needing_company: - cursor.execute(f"PRAGMA table_info({table_name})") - columns = [column[1] for column in cursor.fetchall()] - - if 'company_id' not in columns: - print(f"Adding company_id column to {table_name}...") - cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN company_id INTEGER") - - -def migrate_user_roles(cursor): - """Handle user role enum migration with constraint updates.""" - - cursor.execute("PRAGMA table_info(user)") - user_columns = cursor.fetchall() - - # Check if we need to migrate the role enum constraint - cursor.execute("SELECT sql FROM sqlite_master WHERE type='table' AND name='user'") - create_table_sql = cursor.fetchone() - - if create_table_sql and 'System Administrator' not in create_table_sql[0]: - print("Updating role enum constraint to include SYSTEM_ADMIN...") - - # Check existing role values - cursor.execute("SELECT DISTINCT role FROM user WHERE role IS NOT NULL") - existing_roles = [row[0] for row in cursor.fetchall()] - print(f"Found existing roles: {existing_roles}") - - # First normalize role values in the existing table - print("Normalizing role values before table recreation...") - role_mapping = { - 'TEAM_MEMBER': Role.TEAM_MEMBER.value, - 'TEAM_LEADER': Role.TEAM_LEADER.value, - 'SUPERVISOR': Role.SUPERVISOR.value, - 'ADMIN': Role.ADMIN.value, - 'SYSTEM_ADMIN': Role.SYSTEM_ADMIN.value - } - - for old_role, new_role in role_mapping.items(): - cursor.execute("UPDATE user SET role = ? WHERE role = ?", (new_role, old_role)) - updated_count = cursor.rowcount - if updated_count > 0: - print(f"Updated {updated_count} users from role '{old_role}' to '{new_role}'") - - # Set any NULL or invalid roles to defaults - cursor.execute("UPDATE user SET role = ? 
WHERE role IS NULL OR role NOT IN (?, ?, ?, ?, ?)", - (Role.TEAM_MEMBER.value, Role.TEAM_MEMBER.value, Role.TEAM_LEADER.value, - Role.SUPERVISOR.value, Role.ADMIN.value, Role.SYSTEM_ADMIN.value)) - null_roles = cursor.rowcount - if null_roles > 0: - print(f"Set {null_roles} NULL/invalid roles to 'Team Member'") - - # Ensure all users have a company_id before creating NOT NULL constraint - print("Checking for users without company_id...") - cursor.execute("SELECT COUNT(*) FROM user WHERE company_id IS NULL") - null_company_count = cursor.fetchone()[0] - print(f"Found {null_company_count} users without company_id") - - if null_company_count > 0: - print(f"Assigning {null_company_count} users to default company...") - - # Get or create a default company - cursor.execute("SELECT id FROM company ORDER BY id LIMIT 1") - company_result = cursor.fetchone() - - if company_result: - default_company_id = company_result[0] - print(f"Using existing company ID {default_company_id} as default") - else: - # Create a default company if none exists - print("No companies found, creating default company...") - cursor.execute(""" - INSERT INTO company (name, slug, description, created_at, is_personal, is_active, max_users) - VALUES (?, ?, ?, CURRENT_TIMESTAMP, 0, 1, 100) - """, ("Default Company", "default-company", "Auto-created default company for migration")) - default_company_id = cursor.lastrowid - print(f"Created default company with ID {default_company_id}") - - # Assign all users without company_id to the default company - cursor.execute("UPDATE user SET company_id = ? WHERE company_id IS NULL", (default_company_id,)) - updated_users = cursor.rowcount - print(f"Assigned {updated_users} users to default company") - - # Verify the fix - cursor.execute("SELECT COUNT(*) FROM user WHERE company_id IS NULL") - remaining_null = cursor.fetchone()[0] - print(f"After assignment, {remaining_null} users still have NULL company_id") - else: - print("All users already have company_id assigned") - - # Drop user_new table if it exists from previous failed migration - cursor.execute("DROP TABLE IF EXISTS user_new") - - # Create a backup table with the new enum constraint - cursor.execute(""" - CREATE TABLE user_new ( - id INTEGER PRIMARY KEY, - username VARCHAR(80) NOT NULL, - email VARCHAR(120) NOT NULL, - password_hash VARCHAR(128), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - company_id INTEGER NOT NULL, - is_verified BOOLEAN DEFAULT 0, - verification_token VARCHAR(100), - token_expiry TIMESTAMP, - is_blocked BOOLEAN DEFAULT 0, - role VARCHAR(50) DEFAULT 'Team Member' CHECK (role IN ('Team Member', 'Team Leader', 'Supervisor', 'Administrator', 'System Administrator')), - team_id INTEGER, - account_type VARCHAR(20) DEFAULT 'Company User' CHECK (account_type IN ('Company User', 'Freelancer')), - business_name VARCHAR(100), - two_factor_enabled BOOLEAN DEFAULT 0, - two_factor_secret VARCHAR(32), - avatar_url VARCHAR(255), - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (team_id) REFERENCES team (id) - ) - """) - - # Get default company ID for any remaining NULL company_id values - cursor.execute("SELECT id FROM company ORDER BY id LIMIT 1") - company_result = cursor.fetchone() - default_company_id = company_result[0] if company_result else 1 - - # Copy all data from old table to new table with validation - cursor.execute(""" - INSERT INTO user_new - SELECT id, username, email, password_hash, created_at, - COALESCE(company_id, ?) 
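-                   -- fall back to the default company for rows still missing company_id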
as company_id, - is_verified, verification_token, token_expiry, is_blocked, - CASE - WHEN role IN (?, ?, ?, ?, ?) THEN role - ELSE ? - END as role, - team_id, - CASE - WHEN account_type IN (?, ?) THEN account_type - ELSE ? - END as account_type, - business_name, two_factor_enabled, two_factor_secret, avatar_url - FROM user - """, (default_company_id, Role.TEAM_MEMBER.value, Role.TEAM_LEADER.value, Role.SUPERVISOR.value, - Role.ADMIN.value, Role.SYSTEM_ADMIN.value, Role.TEAM_MEMBER.value, - AccountType.COMPANY_USER.value, AccountType.FREELANCER.value, - AccountType.COMPANY_USER.value)) - - # Drop the old table and rename the new one - cursor.execute("DROP TABLE user") - cursor.execute("ALTER TABLE user_new RENAME TO user") - - print("✓ Role enum constraint updated successfully") - - # Additional normalization for account_type values - print("Normalizing account_type values...") - account_type_mapping = { - 'COMPANY_USER': AccountType.COMPANY_USER.value, - 'FREELANCER': AccountType.FREELANCER.value - } - - for old_type, new_type in account_type_mapping.items(): - cursor.execute("UPDATE user SET account_type = ? WHERE account_type = ?", (new_type, old_type)) - updated_count = cursor.rowcount - if updated_count > 0: - print(f"Updated {updated_count} users account_type from '{old_type}' to '{new_type}'") - - # Set any remaining NULL values to defaults - cursor.execute("UPDATE user SET account_type = ? WHERE account_type IS NULL", (AccountType.COMPANY_USER.value,)) - null_accounts = cursor.rowcount - if null_accounts > 0: - print(f"Set {null_accounts} NULL account_types to 'Company User'") - - -def migrate_work_config_data(db_path): - """Migrate work configuration data to new company-based model.""" - if not FLASK_AVAILABLE: - print("Skipping work config data migration - Flask not available") - return - - with app.app_context(): - try: - # Create CompanyWorkConfig for all companies that don't have one - companies = Company.query.all() - for company in companies: - existing_config = CompanyWorkConfig.query.filter_by(company_id=company.id).first() - if not existing_config: - print(f"Creating CompanyWorkConfig for {company.name}") - - # Use Germany defaults (existing system default) - preset = CompanyWorkConfig.get_regional_preset(WorkRegion.GERMANY) - - company_config = CompanyWorkConfig( - company_id=company.id, - work_hours_per_day=preset['work_hours_per_day'], - mandatory_break_minutes=preset['mandatory_break_minutes'], - break_threshold_hours=preset['break_threshold_hours'], - additional_break_minutes=preset['additional_break_minutes'], - additional_break_threshold_hours=preset['additional_break_threshold_hours'], - region=preset['region'], - region_name=preset['region_name'] - ) - db.session.add(company_config) - - # Migrate existing WorkConfig user preferences to UserPreferences - old_configs = WorkConfig.query.filter(WorkConfig.user_id.isnot(None)).all() - for old_config in old_configs: - user = User.query.get(old_config.user_id) - if user: - existing_prefs = UserPreferences.query.filter_by(user_id=user.id).first() - if not existing_prefs: - print(f"Migrating preferences for user {user.username}") - - user_prefs = UserPreferences( - user_id=user.id, - time_format_24h=getattr(old_config, 'time_format_24h', True), - date_format=getattr(old_config, 'date_format', 'YYYY-MM-DD'), - round_minutes_interval=getattr(old_config, 'round_minutes_interval', 0), - round_to_nearest=getattr(old_config, 'round_to_nearest', True) - ) - db.session.add(user_prefs) - - db.session.commit() - print("Work config 
data migration completed successfully") - - except Exception as e: - print(f"Error during work config migration: {e}") - db.session.rollback() - - -def migrate_task_system(db_path): - """Create tables for the task management system.""" - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if project_category table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='project_category'") - if not cursor.fetchone(): - print("Creating project_category table...") - cursor.execute(""" - CREATE TABLE project_category ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - description TEXT, - color VARCHAR(7) DEFAULT '#007bff', - icon VARCHAR(50), - company_id INTEGER NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES user (id), - UNIQUE(company_id, name) - ) - """) - - # Check if task table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='task'") - if not cursor.fetchone(): - print("Creating task table...") - cursor.execute(""" - CREATE TABLE task ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - task_number VARCHAR(20) NOT NULL UNIQUE, - name VARCHAR(200) NOT NULL, - description TEXT, - status VARCHAR(50) DEFAULT 'Not Started', - priority VARCHAR(50) DEFAULT 'Medium', - estimated_hours FLOAT, - project_id INTEGER NOT NULL, - sprint_id INTEGER, - assigned_to_id INTEGER, - start_date DATE, - due_date DATE, - completed_date DATE, - archived_date DATE, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (project_id) REFERENCES project (id), - FOREIGN KEY (sprint_id) REFERENCES sprint (id), - FOREIGN KEY (assigned_to_id) REFERENCES user (id), - FOREIGN KEY (created_by_id) REFERENCES user (id) - ) - """) - else: - # Add missing columns to existing task table - cursor.execute("PRAGMA table_info(task)") - task_columns = [column[1] for column in cursor.fetchall()] - - task_migrations = [ - ('task_number', "ALTER TABLE task ADD COLUMN task_number VARCHAR(20)"), - ('sprint_id', "ALTER TABLE task ADD COLUMN sprint_id INTEGER"), - ('archived_date', "ALTER TABLE task ADD COLUMN archived_date DATE") - ] - - for column_name, sql_command in task_migrations: - if column_name not in task_columns: - print(f"Adding {column_name} column to task table...") - cursor.execute(sql_command) - - # Add unique constraint for task_number if it was just added - if 'task_number' not in task_columns: - print("Adding unique constraint for task_number...") - # For SQLite, we need to recreate the table to add unique constraint - cursor.execute("CREATE UNIQUE INDEX idx_task_number ON task(task_number)") - - # Generate task numbers for existing tasks that don't have them - print("Generating task numbers for existing tasks...") - cursor.execute("SELECT id FROM task WHERE task_number IS NULL ORDER BY id") - tasks_without_numbers = cursor.fetchall() - - for i, (task_id,) in enumerate(tasks_without_numbers, 1): - task_number = f"TSK-{i:03d}" - cursor.execute("UPDATE task SET task_number = ? 
WHERE id = ?", (task_number, task_id)) - - if tasks_without_numbers: - print(f"Generated {len(tasks_without_numbers)} task numbers") - - # Check if sub_task table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='sub_task'") - if not cursor.fetchone(): - print("Creating sub_task table...") - cursor.execute(""" - CREATE TABLE sub_task ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(200) NOT NULL, - description TEXT, - status VARCHAR(50) DEFAULT 'Not Started', - priority VARCHAR(50) DEFAULT 'Medium', - estimated_hours FLOAT, - task_id INTEGER NOT NULL, - assigned_to_id INTEGER, - start_date DATE, - due_date DATE, - completed_date DATE, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (task_id) REFERENCES task (id) ON DELETE CASCADE, - FOREIGN KEY (assigned_to_id) REFERENCES user (id), - FOREIGN KEY (created_by_id) REFERENCES user (id) - ) - """) - - # Create index for better performance - print("Creating index on sub_task.task_id...") - cursor.execute("CREATE INDEX idx_subtask_task_id ON sub_task(task_id)") - else: - # Check if the index exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND name='idx_subtask_task_id'") - if not cursor.fetchone(): - print("Creating missing index on sub_task.task_id...") - cursor.execute("CREATE INDEX idx_subtask_task_id ON sub_task(task_id)") - - # Check if task_dependency table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='task_dependency'") - if not cursor.fetchone(): - print("Creating task_dependency table...") - cursor.execute(""" - CREATE TABLE task_dependency ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - blocked_task_id INTEGER NOT NULL, - blocking_task_id INTEGER NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (blocked_task_id) REFERENCES task (id), - FOREIGN KEY (blocking_task_id) REFERENCES task (id), - UNIQUE(blocked_task_id, blocking_task_id), - CHECK (blocked_task_id != blocking_task_id) - ) - """) - - # Check if sprint table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='sprint'") - if not cursor.fetchone(): - print("Creating sprint table...") - cursor.execute(""" - CREATE TABLE sprint ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(200) NOT NULL, - description TEXT, - status VARCHAR(50) DEFAULT 'PLANNING', - goal TEXT, - start_date DATE NOT NULL, - end_date DATE NOT NULL, - capacity_hours INTEGER, - project_id INTEGER, - company_id INTEGER NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (project_id) REFERENCES project (id), - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES user (id), - UNIQUE(company_id, name) - ) - """) - - # Add category_id to project table if it doesn't exist - cursor.execute("PRAGMA table_info(project)") - project_columns = [column[1] for column in cursor.fetchall()] - if 'category_id' not in project_columns: - print("Adding category_id column to project table...") - cursor.execute("ALTER TABLE project ADD COLUMN category_id INTEGER") - - # Add task_id and subtask_id to time_entry table if they don't exist - cursor.execute("PRAGMA table_info(time_entry)") - time_entry_columns = [column[1] for column in cursor.fetchall()] - - task_migrations = [ - ('task_id', "ALTER TABLE time_entry ADD COLUMN task_id 
INTEGER"), - ('subtask_id', "ALTER TABLE time_entry ADD COLUMN subtask_id INTEGER") - ] - - for column_name, sql_command in task_migrations: - if column_name not in time_entry_columns: - print(f"Adding {column_name} column to time_entry...") - cursor.execute(sql_command) - - conn.commit() - print("Task system migration completed successfully!") - - except Exception as e: - print(f"Error during task system migration: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def migrate_system_events(db_path): - """Create system_event table for activity logging.""" - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if system_event table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='system_event'") - if not cursor.fetchone(): - print("Creating system_event table...") - cursor.execute(""" - CREATE TABLE system_event ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - event_type VARCHAR(50) NOT NULL, - event_category VARCHAR(30) NOT NULL, - description TEXT NOT NULL, - severity VARCHAR(20) DEFAULT 'info', - timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - user_id INTEGER, - company_id INTEGER, - event_metadata TEXT, - ip_address VARCHAR(45), - user_agent TEXT, - FOREIGN KEY (user_id) REFERENCES user (id), - FOREIGN KEY (company_id) REFERENCES company (id) - ) - """) - - # Add an initial system event if Flask is available - if FLASK_AVAILABLE: - # We'll add the initial event after the table is created - cursor.execute(""" - INSERT INTO system_event (event_type, event_category, description, severity, timestamp) - VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP) - """, ('system_migration', 'system', 'SystemEvent table created and initialized', 'info')) - print("Added initial system event") - - conn.commit() - print("System events migration completed successfully!") - - except Exception as e: - print(f"Error during system events migration: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def migrate_data(): - """Handle data migration with Flask app context.""" - if not FLASK_AVAILABLE: - print("Skipping data migration - Flask not available") - return - - try: - # Update existing users with null/invalid data - users = User.query.all() - for user in users: - if user.role is None: - user.role = Role.TEAM_MEMBER - if user.two_factor_enabled is None: - user.two_factor_enabled = False - - # Check if any system admin users exist - system_admin_count = User.query.filter_by(role=Role.SYSTEM_ADMIN).count() - if system_admin_count == 0: - print("No system administrators found. 
Consider promoting a user to SYSTEM_ADMIN role manually.") - print(f"To promote a user: UPDATE user SET role = '{Role.SYSTEM_ADMIN.value}' WHERE username = 'your_username';") - else: - print(f"Found {system_admin_count} system administrator(s)") - - db.session.commit() - print("Data migration completed successfully") - - except Exception as e: - print(f"Error during data migration: {e}") - db.session.rollback() - - -def init_system_settings(): - """Initialize system settings with default values if they don't exist.""" - if not FLASK_AVAILABLE: - print("Skipping system settings initialization - Flask not available") - return - - # Check if registration_enabled setting exists - reg_setting = SystemSettings.query.filter_by(key='registration_enabled').first() - if not reg_setting: - print("Adding registration_enabled system setting...") - reg_setting = SystemSettings( - key='registration_enabled', - value='true', - description='Controls whether new user registration is allowed' - ) - db.session.add(reg_setting) - db.session.commit() - print("Registration setting initialized to enabled") - - # Check if email_verification_required setting exists - email_verification_setting = SystemSettings.query.filter_by(key='email_verification_required').first() - if not email_verification_setting: - print("Adding email_verification_required system setting...") - email_verification_setting = SystemSettings( - key='email_verification_required', - value='true', - description='Controls whether email verification is required for new user accounts' - ) - db.session.add(email_verification_setting) - db.session.commit() - print("Email verification setting initialized to enabled") - - # Check if tracking_script_enabled setting exists - tracking_script_setting = SystemSettings.query.filter_by(key='tracking_script_enabled').first() - if not tracking_script_setting: - print("Adding tracking_script_enabled system setting...") - tracking_script_setting = SystemSettings( - key='tracking_script_enabled', - value='false', - description='Controls whether custom tracking script is enabled' - ) - db.session.add(tracking_script_setting) - db.session.commit() - print("Tracking script setting initialized to disabled") - - # Check if tracking_script_code setting exists - tracking_script_code_setting = SystemSettings.query.filter_by(key='tracking_script_code').first() - if not tracking_script_code_setting: - print("Adding tracking_script_code system setting...") - tracking_script_code_setting = SystemSettings( - key='tracking_script_code', - value='', - description='Custom tracking script code (HTML/JavaScript)' - ) - db.session.add(tracking_script_code_setting) - db.session.commit() - print("Tracking script code setting initialized") - - -def create_new_database(db_path): - """Create a new database with all tables.""" - print(f"Creating new database at {db_path}") - - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - create_all_tables(cursor) - conn.commit() - print("New database created successfully") - except Exception as e: - print(f"Error creating new database: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def create_all_tables(cursor): - """Create all tables from scratch.""" - # This would contain all CREATE TABLE statements - # For brevity, showing key tables only - - cursor.execute(""" - CREATE TABLE company ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - slug VARCHAR(50) UNIQUE NOT NULL, - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - 
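-                -- tenant flags: is_personal marks personal (single-user) companies,
-                -- is_active soft-disables a tenant, max_users caps the seat count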
is_personal BOOLEAN DEFAULT 0, - is_active BOOLEAN DEFAULT 1, - max_users INTEGER DEFAULT 100, - UNIQUE(name) - ) - """) - - cursor.execute(""" - CREATE TABLE user ( - id INTEGER PRIMARY KEY, - username VARCHAR(80) NOT NULL, - email VARCHAR(120) NOT NULL, - password_hash VARCHAR(128), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - company_id INTEGER NOT NULL, - is_verified BOOLEAN DEFAULT 0, - verification_token VARCHAR(100), - token_expiry TIMESTAMP, - is_blocked BOOLEAN DEFAULT 0, - role VARCHAR(50) DEFAULT 'Team Member' CHECK (role IN ('Team Member', 'Team Leader', 'Supervisor', 'Administrator', 'System Administrator')), - team_id INTEGER, - account_type VARCHAR(20) DEFAULT 'Company User' CHECK (account_type IN ('Company User', 'Freelancer')), - business_name VARCHAR(100), - two_factor_enabled BOOLEAN DEFAULT 0, - two_factor_secret VARCHAR(32), - avatar_url VARCHAR(255), - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (team_id) REFERENCES team (id) - ) - """) - - # Add other table creation statements as needed - print("All tables created") - - -def migrate_postgresql_schema(): - """Migrate PostgreSQL schema for archive functionality.""" - if not FLASK_AVAILABLE: - print("Skipping PostgreSQL migration - Flask not available") - return - - try: - import psycopg2 - from sqlalchemy import text - - with app.app_context(): - # Check if we're using PostgreSQL - database_url = app.config['SQLALCHEMY_DATABASE_URI'] - if not ('postgresql://' in database_url or 'postgres://' in database_url): - print("Not using PostgreSQL - skipping PostgreSQL migration") - return - - print("Running PostgreSQL schema migrations...") - - # Check if archived_date column exists - result = db.session.execute(text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'task' AND column_name = 'archived_date' - """)) - - if not result.fetchone(): - print("Adding archived_date column to task table...") - db.session.execute(text("ALTER TABLE task ADD COLUMN archived_date DATE")) - db.session.commit() - - # Check if ARCHIVED status exists in enum - result = db.session.execute(text(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'taskstatus') - AND enumlabel = 'ARCHIVED' - """)) - - if not result.fetchone(): - print("Adding ARCHIVED status to TaskStatus enum...") - db.session.execute(text("ALTER TYPE taskstatus ADD VALUE 'ARCHIVED'")) - db.session.commit() - - # Check if task_number column exists - result = db.session.execute(text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'task' AND column_name = 'task_number' - """)) - - if not result.fetchone(): - print("Adding task_number column to task table...") - db.session.execute(text("ALTER TABLE task ADD COLUMN task_number VARCHAR(20) UNIQUE")) - - # Generate task numbers for existing tasks - print("Generating task numbers for existing tasks...") - result = db.session.execute(text("SELECT id FROM task WHERE task_number IS NULL ORDER BY id")) - tasks_without_numbers = result.fetchall() - - for i, (task_id,) in enumerate(tasks_without_numbers, 1): - task_number = f"TSK-{i:03d}" - db.session.execute(text("UPDATE task SET task_number = :task_number WHERE id = :task_id"), - {"task_number": task_number, "task_id": task_id}) - - db.session.commit() - if tasks_without_numbers: - print(f"Generated {len(tasks_without_numbers)} task numbers") - - # Check if sprint_id column exists - result = db.session.execute(text(""" - SELECT column_name - FROM 
information_schema.columns - WHERE table_name = 'task' AND column_name = 'sprint_id' - """)) - - if not result.fetchone(): - print("Adding sprint_id column to task table...") - db.session.execute(text("ALTER TABLE task ADD COLUMN sprint_id INTEGER")) - db.session.execute(text("ALTER TABLE task ADD CONSTRAINT fk_task_sprint FOREIGN KEY (sprint_id) REFERENCES sprint (id)")) - db.session.commit() - - # Check if task_dependency table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'task_dependency' - """)) - - if not result.fetchone(): - print("Creating task_dependency table...") - db.session.execute(text(""" - CREATE TABLE task_dependency ( - id SERIAL PRIMARY KEY, - blocked_task_id INTEGER NOT NULL, - blocking_task_id INTEGER NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (blocked_task_id) REFERENCES task (id), - FOREIGN KEY (blocking_task_id) REFERENCES task (id), - UNIQUE(blocked_task_id, blocking_task_id), - CHECK (blocked_task_id <> blocking_task_id) - ) - """)) - db.session.commit() - - # Check if sub_task table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'sub_task' - """)) - - if not result.fetchone(): - print("Creating sub_task table...") - db.session.execute(text(""" - CREATE TABLE sub_task ( - id SERIAL PRIMARY KEY, - name VARCHAR(200) NOT NULL, - description TEXT, - status taskstatus DEFAULT 'NOT_STARTED', - priority taskpriority DEFAULT 'MEDIUM', - estimated_hours FLOAT, - task_id INTEGER NOT NULL, - assigned_to_id INTEGER, - start_date DATE, - due_date DATE, - completed_date DATE, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (task_id) REFERENCES task (id) ON DELETE CASCADE, - FOREIGN KEY (assigned_to_id) REFERENCES "user" (id), - FOREIGN KEY (created_by_id) REFERENCES "user" (id) - ) - """)) - - # Create index for better performance - db.session.execute(text("CREATE INDEX idx_subtask_task_id ON sub_task(task_id)")) - db.session.commit() - - # Check if avatar_url column exists in user table - result = db.session.execute(text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'user' AND column_name = 'avatar_url' - """)) - - if not result.fetchone(): - print("Adding avatar_url column to user table...") - db.session.execute(text('ALTER TABLE "user" ADD COLUMN avatar_url VARCHAR(255)')) - db.session.commit() - - # Check if comment table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'comment' - """)) - - if not result.fetchone(): - print("Creating comment table...") - - # Create comment visibility enum type if it doesn't exist - db.session.execute(text(""" - DO $$ BEGIN - CREATE TYPE commentvisibility AS ENUM ('TEAM', 'COMPANY'); - EXCEPTION - WHEN duplicate_object THEN null; - END $$; - """)) - - db.session.execute(text(""" - CREATE TABLE comment ( - id SERIAL PRIMARY KEY, - content TEXT NOT NULL, - task_id INTEGER NOT NULL, - parent_comment_id INTEGER, - visibility commentvisibility DEFAULT 'COMPANY', - is_edited BOOLEAN DEFAULT FALSE, - edited_at TIMESTAMP, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (task_id) REFERENCES task (id) ON DELETE CASCADE, - FOREIGN KEY (parent_comment_id) REFERENCES comment (id), - FOREIGN KEY (created_by_id) REFERENCES "user" 
(id) - ) - """)) - - # Create indexes for better performance - db.session.execute(text("CREATE INDEX idx_comment_task ON comment(task_id)")) - db.session.execute(text("CREATE INDEX idx_comment_parent ON comment(parent_comment_id)")) - db.session.execute(text("CREATE INDEX idx_comment_created_by ON comment(created_by_id)")) - db.session.execute(text("CREATE INDEX idx_comment_created_at ON comment(created_at DESC)")) - db.session.commit() - - # Check if branding_settings table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'branding_settings' - """)) - - if not result.fetchone(): - print("Creating branding_settings table...") - db.session.execute(text(""" - CREATE TABLE branding_settings ( - id SERIAL PRIMARY KEY, - app_name VARCHAR(100) NOT NULL DEFAULT 'Time Tracker', - logo_filename VARCHAR(255), - logo_alt_text VARCHAR(255) DEFAULT 'Logo', - favicon_filename VARCHAR(255), - primary_color VARCHAR(7) DEFAULT '#007bff', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_by_id INTEGER, - FOREIGN KEY (updated_by_id) REFERENCES "user" (id) - ) - """)) - db.session.commit() - - # Check if company_settings table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'company_settings' - """)) - - if not result.fetchone(): - print("Creating company_settings table...") - db.session.execute(text(""" - CREATE TABLE company_settings ( - id SERIAL PRIMARY KEY, - company_id INTEGER NOT NULL, - default_comment_visibility commentvisibility DEFAULT 'COMPANY', - allow_team_visibility_comments BOOLEAN DEFAULT TRUE, - require_task_assignment BOOLEAN DEFAULT FALSE, - allow_task_creation_by_members BOOLEAN DEFAULT TRUE, - restrict_project_access_by_team BOOLEAN DEFAULT FALSE, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES "user" (id), - UNIQUE(company_id) - ) - """)) - db.session.commit() - - # Check if note table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'note' - """)) - - if result.fetchone(): - # Table exists, check for folder column - result = db.session.execute(text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'note' AND column_name = 'folder' - """)) - - if not result.fetchone(): - print("Adding folder column to note table...") - db.session.execute(text("ALTER TABLE note ADD COLUMN folder VARCHAR(100)")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_note_folder ON note(folder)")) - db.session.commit() - print("Folder column added successfully!") - else: - print("Creating note and note_link tables...") - - # Create NoteVisibility enum type - db.session.execute(text(""" - DO $$ BEGIN - CREATE TYPE notevisibility AS ENUM ('Private', 'Team', 'Company'); - EXCEPTION - WHEN duplicate_object THEN null; - END $$; - """)) - - db.session.execute(text(""" - CREATE TABLE note ( - id SERIAL PRIMARY KEY, - title VARCHAR(200) NOT NULL, - content TEXT NOT NULL, - slug VARCHAR(100) NOT NULL, - visibility notevisibility NOT NULL DEFAULT 'Private', - folder VARCHAR(100), - company_id INTEGER NOT NULL, - created_by_id INTEGER NOT NULL, - project_id INTEGER, - task_id INTEGER, - tags TEXT[], - is_archived BOOLEAN DEFAULT FALSE, - created_at 
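# --- Editor's note: the DO $$ ... WHEN duplicate_object block used above is
# the standard idempotent way to create a PostgreSQL enum type. The
# repetition could be factored into a small helper; a sketch, assuming
# SQLAlchemy's text() and an active db.session:
from sqlalchemy import text

def create_enum_if_missing(session, type_name, labels):
    quoted = ", ".join(f"'{label}'" for label in labels)
    session.execute(text(f"""
        DO $$ BEGIN
            CREATE TYPE {type_name} AS ENUM ({quoted});
        EXCEPTION
            WHEN duplicate_object THEN NULL;
        END $$;
    """))

# e.g. create_enum_if_missing(db.session, "commentvisibility", ["TEAM", "COMPANY"])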
TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES "user" (id), - FOREIGN KEY (project_id) REFERENCES project (id), - FOREIGN KEY (task_id) REFERENCES task (id) - ) - """)) - - # Create note_link table - db.session.execute(text(""" - CREATE TABLE note_link ( - source_note_id INTEGER NOT NULL, - target_note_id INTEGER NOT NULL, - link_type VARCHAR(50) DEFAULT 'related', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (source_note_id, target_note_id), - FOREIGN KEY (source_note_id) REFERENCES note (id) ON DELETE CASCADE, - FOREIGN KEY (target_note_id) REFERENCES note (id) ON DELETE CASCADE - ) - """)) - - # Check if note_folder table exists - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'note_folder' - """)) - - if not result.fetchone(): - print("Creating note_folder table...") - db.session.execute(text(""" - CREATE TABLE note_folder ( - id SERIAL PRIMARY KEY, - name VARCHAR(100) NOT NULL, - path VARCHAR(500) NOT NULL, - parent_path VARCHAR(500), - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - company_id INTEGER NOT NULL, - FOREIGN KEY (created_by_id) REFERENCES "user" (id), - FOREIGN KEY (company_id) REFERENCES company (id), - CONSTRAINT uq_folder_path_company UNIQUE (path, company_id) - ) - """)) - - # Create indexes - db.session.execute(text("CREATE INDEX idx_note_company ON note(company_id)")) - db.session.execute(text("CREATE INDEX idx_note_created_by ON note(created_by_id)")) - db.session.execute(text("CREATE INDEX idx_note_project ON note(project_id)")) - db.session.execute(text("CREATE INDEX idx_note_task ON note(task_id)")) - db.session.execute(text("CREATE INDEX idx_note_slug ON note(company_id, slug)")) - db.session.execute(text("CREATE INDEX idx_note_visibility ON note(visibility)")) - db.session.execute(text("CREATE INDEX idx_note_archived ON note(is_archived)")) - db.session.execute(text("CREATE INDEX idx_note_created_at ON note(created_at DESC)")) - db.session.execute(text("CREATE INDEX idx_note_folder ON note(folder)")) - db.session.execute(text("CREATE INDEX idx_note_link_source ON note_link(source_note_id)")) - db.session.execute(text("CREATE INDEX idx_note_link_target ON note_link(target_note_id)")) - - # Create indexes for note_folder if table was created - result = db.session.execute(text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_name = 'note_folder' - """)) - if result.fetchone(): - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_note_folder_company ON note_folder(company_id)")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_note_folder_parent_path ON note_folder(parent_path)")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_note_folder_created_by ON note_folder(created_by_id)")) - - db.session.commit() - - print("PostgreSQL schema migration completed successfully!") - - except Exception as e: - print(f"Error during PostgreSQL migration: {e}") - if FLASK_AVAILABLE: - db.session.rollback() - raise - - -def migrate_dashboard_system(db_file=None): - """Migrate to add Dashboard widget system.""" - db_path = get_db_path(db_file) - - print(f"Migrating Dashboard system in {db_path}...") - - if not os.path.exists(db_path): - print(f"Database file {db_path} does not exist. 
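# --- Editor's note: the repeated information_schema probes in this script
# could likewise be two tiny helpers (names hypothetical), matching the
# queries used above:
from sqlalchemy import text

def pg_table_exists(session, table):
    row = session.execute(
        text("SELECT 1 FROM information_schema.tables WHERE table_name = :t"),
        {"t": table},
    ).fetchone()
    return row is not None

def pg_column_exists(session, table, column):
    row = session.execute(
        text("SELECT 1 FROM information_schema.columns "
             "WHERE table_name = :t AND column_name = :c"),
        {"t": table, "c": column},
    ).fetchone()
    return row is not None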
Run basic migration first.") - return False - - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if user_dashboard table already exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='user_dashboard'") - if cursor.fetchone(): - print("Dashboard tables already exist. Skipping migration.") - return True - - print("Creating Dashboard system tables...") - - # Create user_dashboard table - cursor.execute(""" - CREATE TABLE user_dashboard ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - name VARCHAR(100) DEFAULT 'My Dashboard', - is_default BOOLEAN DEFAULT 1, - layout_config TEXT, - grid_columns INTEGER DEFAULT 6, - theme VARCHAR(20) DEFAULT 'light', - auto_refresh INTEGER DEFAULT 300, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (user_id) REFERENCES user (id) - ) - """) - - # Create dashboard_widget table - cursor.execute(""" - CREATE TABLE dashboard_widget ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - dashboard_id INTEGER NOT NULL, - widget_type VARCHAR(50) NOT NULL, - grid_x INTEGER NOT NULL DEFAULT 0, - grid_y INTEGER NOT NULL DEFAULT 0, - grid_width INTEGER NOT NULL DEFAULT 1, - grid_height INTEGER NOT NULL DEFAULT 1, - title VARCHAR(100), - config TEXT, - refresh_interval INTEGER DEFAULT 60, - is_visible BOOLEAN DEFAULT 1, - is_minimized BOOLEAN DEFAULT 0, - z_index INTEGER DEFAULT 1, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (dashboard_id) REFERENCES user_dashboard (id) - ) - """) - - # Create widget_template table - cursor.execute(""" - CREATE TABLE widget_template ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - widget_type VARCHAR(50) NOT NULL, - name VARCHAR(100) NOT NULL, - description TEXT, - icon VARCHAR(50), - default_width INTEGER DEFAULT 1, - default_height INTEGER DEFAULT 1, - default_config TEXT, - required_role VARCHAR(50) DEFAULT 'Team Member', - is_active BOOLEAN DEFAULT 1, - category VARCHAR(50) DEFAULT 'General', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """) - - # Create indexes for better performance - cursor.execute("CREATE INDEX idx_user_dashboard_user ON user_dashboard(user_id)") - cursor.execute("CREATE INDEX idx_user_dashboard_default ON user_dashboard(user_id, is_default)") - cursor.execute("CREATE INDEX idx_dashboard_widget_dashboard ON dashboard_widget(dashboard_id)") - cursor.execute("CREATE INDEX idx_dashboard_widget_type ON dashboard_widget(widget_type)") - cursor.execute("CREATE INDEX idx_widget_template_type ON widget_template(widget_type)") - cursor.execute("CREATE INDEX idx_widget_template_category ON widget_template(category)") - - # Insert default widget templates - default_templates = [ - # Time Tracking Widgets - ('current_timer', 'Current Timer', 'Shows active time tracking session', '⏲️', 2, 1, '{}', 'Team Member', 'Time'), - ('daily_summary', 'Daily Summary', 'Today\'s time tracking summary', '📊', 2, 1, '{}', 'Team Member', 'Time'), - ('weekly_chart', 'Weekly Chart', 'Weekly time distribution chart', '📈', 3, 2, '{}', 'Team Member', 'Time'), - ('break_reminder', 'Break Reminder', 'Reminds when breaks are due', '☕', 1, 1, '{}', 'Team Member', 'Time'), - - # Project Management Widgets - ('active_projects', 'Active Projects', 'List of current active projects', '📁', 2, 2, '{}', 'Team Member', 'Projects'), - ('project_progress', 'Project Progress', 'Visual progress of projects', '🎯', 2, 1, '{}', 'Team Member', 'Projects'), - 
('project_activity', 'Recent Activity', 'Recent project activities', '🔄', 2, 1, '{}', 'Team Member', 'Projects'), - ('project_deadlines', 'Upcoming Deadlines', 'Projects with approaching deadlines', '⚠️', 2, 1, '{}', 'Team Member', 'Projects'), - - # Task Management Widgets - ('assigned_tasks', 'My Tasks', 'Tasks assigned to me', '✅', 2, 2, '{}', 'Team Member', 'Tasks'), - ('task_priority', 'Priority Matrix', 'Tasks organized by priority', '🔥', 2, 2, '{}', 'Team Member', 'Tasks'), - ('task_trends', 'Task Trends', 'Task completion trends', '📉', 2, 1, '{}', 'Team Member', 'Tasks'), - - # Analytics Widgets - ('productivity_metrics', 'Productivity', 'Personal productivity metrics', '⚡', 1, 1, '{}', 'Team Member', 'Analytics'), - ('time_distribution', 'Time Distribution', 'How time is distributed', '🥧', 2, 2, '{}', 'Team Member', 'Analytics'), - ('goal_progress', 'Goals', 'Progress towards goals', '🎯', 1, 1, '{}', 'Team Member', 'Analytics'), - ('performance_comparison', 'Performance', 'Performance comparison over time', '📊', 2, 1, '{}', 'Team Member', 'Analytics'), - - # Team Widgets (Role-based) - ('team_overview', 'Team Overview', 'Overview of team performance', '👥', 3, 2, '{}', 'Team Leader', 'Team'), - ('resource_allocation', 'Resources', 'Team resource allocation', '📊', 2, 2, '{}', 'Administrator', 'Team'), - ('team_performance', 'Team Performance', 'Team performance metrics', '📈', 3, 1, '{}', 'Supervisor', 'Team'), - ('company_metrics', 'Company Metrics', 'Company-wide metrics', '🏢', 3, 2, '{}', 'System Administrator', 'Team'), - - # Quick Action Widgets - ('quick_timer', 'Quick Timer', 'Quick time tracking controls', '▶️', 1, 1, '{}', 'Team Member', 'Actions'), - ('favorite_projects', 'Favorites', 'Quick access to favorite projects', '⭐', 1, 2, '{}', 'Team Member', 'Actions'), - ('recent_actions', 'Recent Actions', 'Recently performed actions', '🕒', 2, 1, '{}', 'Team Member', 'Actions'), - ('shortcuts_panel', 'Shortcuts', 'Quick action shortcuts', '🚀', 1, 1, '{}', 'Team Member', 'Actions'), - ] - - cursor.executemany(""" - INSERT INTO widget_template - (widget_type, name, description, icon, default_width, default_height, default_config, required_role, category) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) - """, default_templates) - - conn.commit() - print("Dashboard system migration completed successfully!") - return True - - except Exception as e: - print(f"Error during Dashboard system migration: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def migrate_comment_system(db_file=None): - """Migrate to add Comment system for tasks.""" - db_path = get_db_path(db_file) - - print(f"Migrating Comment system in {db_path}...") - - if not os.path.exists(db_path): - print(f"Database file {db_path} does not exist. Run basic migration first.") - return False - - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if comment table already exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='comment'") - if cursor.fetchone(): - print("Comment table already exists. 
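# --- Editor's note: the executemany seeding above only runs when the
# widget_template table is freshly created. If widget_type carried a UNIQUE
# index (it does not in the schema above -- an assumption for this sketch),
# re-running could be made safe with INSERT OR IGNORE:
import sqlite3

def seed_widget_templates(conn: sqlite3.Connection, templates):
    conn.executemany(
        """INSERT OR IGNORE INTO widget_template
               (widget_type, name, description, icon, default_width,
                default_height, default_config, required_role, category)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        templates,
    )
    conn.commit()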
Skipping migration.") - return True - - print("Creating Comment system table...") - - # Create comment table - cursor.execute(""" - CREATE TABLE comment ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - content TEXT NOT NULL, - task_id INTEGER NOT NULL, - parent_comment_id INTEGER, - visibility VARCHAR(20) DEFAULT 'Company', - is_edited BOOLEAN DEFAULT 0, - edited_at TIMESTAMP, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (task_id) REFERENCES task (id) ON DELETE CASCADE, - FOREIGN KEY (parent_comment_id) REFERENCES comment (id), - FOREIGN KEY (created_by_id) REFERENCES user (id) - ) - """) - - # Create indexes for better performance - cursor.execute("CREATE INDEX idx_comment_task ON comment(task_id)") - cursor.execute("CREATE INDEX idx_comment_parent ON comment(parent_comment_id)") - cursor.execute("CREATE INDEX idx_comment_created_by ON comment(created_by_id)") - cursor.execute("CREATE INDEX idx_comment_created_at ON comment(created_at DESC)") - - conn.commit() - print("Comment system migration completed successfully!") - return True - - except Exception as e: - print(f"Error during Comment system migration: {e}") - conn.rollback() - raise - finally: - conn.close() - - -def migrate_notes_system(db_file=None): - """Migrate to add Notes system with markdown support.""" - db_path = get_db_path(db_file) - - print(f"Migrating Notes system in {db_path}...") - - if not os.path.exists(db_path): - print(f"Database file {db_path} does not exist. Run basic migration first.") - return False - - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - try: - # Check if note table already exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='note'") - if cursor.fetchone(): - print("Note table already exists. 
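# --- Editor's note: parent_comment_id makes comments a self-referencing
# tree, so reading a whole thread is a natural recursive CTE (supported by
# SQLite since 3.8.3). Purely illustrative; not part of the migration itself:
THREAD_SQL = """
WITH RECURSIVE thread(id, content, parent_comment_id, depth) AS (
    SELECT id, content, parent_comment_id, 0
      FROM comment WHERE id = :root_id
    UNION ALL
    SELECT c.id, c.content, c.parent_comment_id, t.depth + 1
      FROM comment c JOIN thread t ON c.parent_comment_id = t.id
)
SELECT id, content, depth FROM thread ORDER BY depth
"""
# usage: conn.execute(THREAD_SQL, {"root_id": 42}).fetchall()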
Checking for updates...") - - # Check if folder column exists - cursor.execute("PRAGMA table_info(note)") - columns = [column[1] for column in cursor.fetchall()] - - if 'folder' not in columns: - print("Adding folder column to note table...") - cursor.execute("ALTER TABLE note ADD COLUMN folder VARCHAR(100)") - cursor.execute("CREATE INDEX IF NOT EXISTS idx_note_folder ON note(folder)") - conn.commit() - print("Folder column added successfully!") - - # Check if note_folder table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='note_folder'") - if not cursor.fetchone(): - print("Creating note_folder table...") - cursor.execute(""" - CREATE TABLE note_folder ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - path VARCHAR(500) NOT NULL, - parent_path VARCHAR(500), - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - company_id INTEGER NOT NULL, - FOREIGN KEY (created_by_id) REFERENCES user(id), - FOREIGN KEY (company_id) REFERENCES company(id), - UNIQUE(path, company_id) - ) - """) - - # Create indexes for note_folder - cursor.execute("CREATE INDEX idx_note_folder_company ON note_folder(company_id)") - cursor.execute("CREATE INDEX idx_note_folder_parent_path ON note_folder(parent_path)") - cursor.execute("CREATE INDEX idx_note_folder_created_by ON note_folder(created_by_id)") - conn.commit() - print("Note folder table created successfully!") - - return True - - print("Creating Notes system tables...") - - # Create note table - cursor.execute(""" - CREATE TABLE note ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - title VARCHAR(200) NOT NULL, - content TEXT NOT NULL, - slug VARCHAR(100) NOT NULL, - visibility VARCHAR(20) NOT NULL DEFAULT 'Private', - folder VARCHAR(100), - company_id INTEGER NOT NULL, - created_by_id INTEGER NOT NULL, - project_id INTEGER, - task_id INTEGER, - tags TEXT, - archived BOOLEAN DEFAULT 0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (company_id) REFERENCES company (id), - FOREIGN KEY (created_by_id) REFERENCES user (id), - FOREIGN KEY (project_id) REFERENCES project (id), - FOREIGN KEY (task_id) REFERENCES task (id) - ) - """) - - # Create note_link table for linking notes - cursor.execute(""" - CREATE TABLE note_link ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - source_note_id INTEGER NOT NULL, - target_note_id INTEGER NOT NULL, - link_type VARCHAR(50) DEFAULT 'related', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (source_note_id) REFERENCES note (id) ON DELETE CASCADE, - FOREIGN KEY (target_note_id) REFERENCES note (id) ON DELETE CASCADE, - FOREIGN KEY (created_by_id) REFERENCES user (id), - UNIQUE(source_note_id, target_note_id) - ) - """) - - # Create indexes for better performance - cursor.execute("CREATE INDEX idx_note_company ON note(company_id)") - cursor.execute("CREATE INDEX idx_note_created_by ON note(created_by_id)") - cursor.execute("CREATE INDEX idx_note_project ON note(project_id)") - cursor.execute("CREATE INDEX idx_note_task ON note(task_id)") - cursor.execute("CREATE INDEX idx_note_slug ON note(company_id, slug)") - cursor.execute("CREATE INDEX idx_note_visibility ON note(visibility)") - cursor.execute("CREATE INDEX idx_note_archived ON note(archived)") - cursor.execute("CREATE INDEX idx_note_created_at ON note(created_at DESC)") - - # Create indexes for note links - cursor.execute("CREATE INDEX idx_note_link_source ON 
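# --- Editor's note: the PRAGMA table_info check above is SQLite's
# equivalent of an information_schema lookup; factored out as a helper
# (index 1 of each returned row is the column name):
def sqlite_column_exists(conn, table, column):
    rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
    return any(row[1] == column for row in rows)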
note_link(source_note_id)") - cursor.execute("CREATE INDEX idx_note_link_target ON note_link(target_note_id)") - - # Create note_folder table - cursor.execute(""" - CREATE TABLE note_folder ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name VARCHAR(100) NOT NULL, - path VARCHAR(500) NOT NULL, - parent_path VARCHAR(500), - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - company_id INTEGER NOT NULL, - FOREIGN KEY (created_by_id) REFERENCES user(id), - FOREIGN KEY (company_id) REFERENCES company(id), - UNIQUE(path, company_id) - ) - """) - - # Create indexes for note_folder - cursor.execute("CREATE INDEX idx_note_folder_company ON note_folder(company_id)") - cursor.execute("CREATE INDEX idx_note_folder_parent_path ON note_folder(parent_path)") - cursor.execute("CREATE INDEX idx_note_folder_created_by ON note_folder(created_by_id)") - - conn.commit() - print("Notes system migration completed successfully!") - return True - - except Exception as e: - print(f"Error during Notes system migration: {e}") - conn.rollback() - return False - - finally: - conn.close() - - -def update_note_link_cascade(db_path): - """Update note_link table to ensure CASCADE delete is enabled.""" - print("Checking note_link cascade delete constraints...") - - try: - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - # Check if note_link table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='note_link'") - if not cursor.fetchone(): - print("note_link table does not exist, skipping cascade update") - return - - # Check current foreign key constraints - cursor.execute("PRAGMA foreign_key_list(note_link)") - fk_info = cursor.fetchall() - - # Check if CASCADE is already set - has_cascade = any('CASCADE' in str(fk) for fk in fk_info) - - if not has_cascade: - print("Updating note_link table with CASCADE delete...") - - # SQLite doesn't support ALTER TABLE for foreign keys, so recreate the table - cursor.execute(""" - CREATE TABLE note_link_temp ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - source_note_id INTEGER NOT NULL, - target_note_id INTEGER NOT NULL, - link_type VARCHAR(50) DEFAULT 'related', - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - created_by_id INTEGER NOT NULL, - FOREIGN KEY (source_note_id) REFERENCES note(id) ON DELETE CASCADE, - FOREIGN KEY (target_note_id) REFERENCES note(id) ON DELETE CASCADE, - FOREIGN KEY (created_by_id) REFERENCES user(id), - UNIQUE(source_note_id, target_note_id) - ) - """) - - # Copy data - cursor.execute("INSERT INTO note_link_temp SELECT * FROM note_link") - - # Drop old table and rename new one - cursor.execute("DROP TABLE note_link") - cursor.execute("ALTER TABLE note_link_temp RENAME TO note_link") - - # Recreate indexes - cursor.execute("CREATE INDEX idx_note_link_source ON note_link(source_note_id)") - cursor.execute("CREATE INDEX idx_note_link_target ON note_link(target_note_id)") - - print("note_link table updated with CASCADE delete") - else: - print("note_link table already has CASCADE delete") - - conn.commit() - - except Exception as e: - print(f"Error updating note_link cascade: {e}") - if conn: - conn.rollback() - finally: - if conn: - conn.close() - - -def main(): - """Main function with command line interface.""" - parser = argparse.ArgumentParser(description='TimeTrack Database Migration Tool') - parser.add_argument('--db-file', '-d', help='Path to SQLite database file') - parser.add_argument('--create-new', '-c', action='store_true', - help='Create a new database 
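# --- Editor's note: because SQLite cannot ALTER a foreign key in place, the
# function above uses the copy-and-swap recipe. Generalized as a sketch (the
# caller supplies DDL creating "<table>_tmp"); note that Flask-Migrate
# automates exactly this on SQLite via Alembic's op.batch_alter_table():
def swap_table(conn, table, tmp_ddl, shared_columns):
    cols = ", ".join(shared_columns)
    # the PRAGMA is ignored inside an open transaction, so use a fresh connection
    conn.execute("PRAGMA foreign_keys=OFF")
    try:
        conn.execute(tmp_ddl)
        conn.execute(f"INSERT INTO {table}_tmp ({cols}) "
                     f"SELECT {cols} FROM {table}")
        conn.execute(f"DROP TABLE {table}")
        conn.execute(f"ALTER TABLE {table}_tmp RENAME TO {table}")
        conn.commit()
    finally:
        conn.execute("PRAGMA foreign_keys=ON")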
(will overwrite existing)') - parser.add_argument('--migrate-all', '-m', action='store_true', - help='Run all migrations (default action)') - parser.add_argument('--task-system', '-t', action='store_true', - help='Run only task system migration') - parser.add_argument('--company-model', '-p', action='store_true', - help='Run only company model migration') - parser.add_argument('--basic', '-b', action='store_true', - help='Run only basic table migrations') - parser.add_argument('--system-events', '-s', action='store_true', - help='Run only system events migration') - parser.add_argument('--dashboard', '--dash', action='store_true', - help='Run only dashboard system migration') - parser.add_argument('--postgresql', '--pg', action='store_true', - help='Run only PostgreSQL schema migration') - - args = parser.parse_args() - - db_path = get_db_path(args.db_file) - - print(f"TimeTrack Database Migration Tool") - print(f"Database: {db_path}") - print(f"Flask available: {FLASK_AVAILABLE}") - print("-" * 50) - - try: - if args.create_new: - if os.path.exists(db_path): - response = input(f"Database {db_path} exists. Overwrite? (y/N): ") - if response.lower() != 'y': - print("Operation cancelled") - return - os.remove(db_path) - create_new_database(db_path) - - elif args.task_system: - migrate_task_system(db_path) - - elif args.company_model: - migrate_to_company_model(db_path) - - elif args.basic: - run_basic_migrations(db_path) - - elif args.system_events: - migrate_system_events(db_path) - - - elif args.dashboard: - migrate_dashboard_system(db_path) - - elif args.postgresql: - migrate_postgresql_schema() - - else: - # Default: run all migrations - run_all_migrations(db_path) - - print("\nMigration completed successfully!") - - except Exception as e: - print(f"\nError during migration: {e}") - sys.exit(1) - - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py b/migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py deleted file mode 100644 index 2f2a2eb..0000000 --- a/migrations_old/old_migrations/02_migrate_sqlite_to_postgres.py +++ /dev/null @@ -1,408 +0,0 @@ -#!/usr/bin/env python3 -""" -SQLite to PostgreSQL Migration Script for TimeTrack -This script migrates data from SQLite to PostgreSQL database. 
-""" - -import sqlite3 -import psycopg2 -import os -import sys -import logging -from datetime import datetime -from psycopg2.extras import RealDictCursor -import json - -# Add parent directory to path to import app -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -# Configure logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s', - handlers=[ - logging.FileHandler('migration.log'), - logging.StreamHandler() - ] -) -logger = logging.getLogger(__name__) - -class SQLiteToPostgresMigration: - def __init__(self, sqlite_path, postgres_url): - self.sqlite_path = sqlite_path - self.postgres_url = postgres_url - self.sqlite_conn = None - self.postgres_conn = None - self.migration_stats = {} - - def connect_databases(self): - """Connect to both SQLite and PostgreSQL databases""" - try: - # Connect to SQLite - self.sqlite_conn = sqlite3.connect(self.sqlite_path) - self.sqlite_conn.row_factory = sqlite3.Row - logger.info(f"Connected to SQLite database: {self.sqlite_path}") - - # Connect to PostgreSQL - self.postgres_conn = psycopg2.connect(self.postgres_url) - self.postgres_conn.autocommit = False - logger.info("Connected to PostgreSQL database") - - return True - except Exception as e: - logger.error(f"Failed to connect to databases: {e}") - return False - - def close_connections(self): - """Close database connections""" - if self.sqlite_conn: - self.sqlite_conn.close() - if self.postgres_conn: - self.postgres_conn.close() - - def backup_postgres(self): - """Create a backup of existing PostgreSQL data""" - try: - with self.postgres_conn.cursor() as cursor: - # Check if tables exist and have data - cursor.execute(""" - SELECT table_name FROM information_schema.tables - WHERE table_schema = 'public' - """) - tables = cursor.fetchall() - - if tables: - backup_file = f"postgres_backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}.sql" - logger.info(f"Creating PostgreSQL backup: {backup_file}") - - # Use pg_dump for backup - os.system(f"pg_dump '{self.postgres_url}' > {backup_file}") - logger.info(f"Backup created: {backup_file}") - return backup_file - else: - logger.info("No existing PostgreSQL tables found, skipping backup") - return None - except Exception as e: - logger.error(f"Failed to create backup: {e}") - return None - - def check_sqlite_database(self): - """Check if SQLite database exists and has data""" - if not os.path.exists(self.sqlite_path): - logger.error(f"SQLite database not found: {self.sqlite_path}") - return False - - try: - cursor = self.sqlite_conn.cursor() - cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") - tables = cursor.fetchall() - - if not tables: - logger.info("SQLite database is empty, nothing to migrate") - return False - - logger.info(f"Found {len(tables)} tables in SQLite database") - return True - except Exception as e: - logger.error(f"Error checking SQLite database: {e}") - return False - - def create_postgres_tables(self, clear_existing=False): - """Create PostgreSQL tables using Flask-SQLAlchemy models""" - try: - # Import Flask app and create tables - from app import app, db - - with app.app_context(): - # Set the database URI to PostgreSQL - app.config['SQLALCHEMY_DATABASE_URI'] = self.postgres_url - - if clear_existing: - logger.info("Clearing existing PostgreSQL data...") - db.drop_all() - logger.info("Dropped all existing tables") - - # Create all tables - db.create_all() - logger.info("Created PostgreSQL tables") - return True - except Exception as e: - 
logger.error(f"Failed to create PostgreSQL tables: {e}") - return False - - def migrate_table_data(self, table_name, column_mapping=None): - """Migrate data from SQLite table to PostgreSQL""" - try: - sqlite_cursor = self.sqlite_conn.cursor() - postgres_cursor = self.postgres_conn.cursor() - - # Check if table exists in SQLite - sqlite_cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?", (table_name,)) - if not sqlite_cursor.fetchone(): - logger.info(f"Table {table_name} does not exist in SQLite, skipping...") - self.migration_stats[table_name] = 0 - return True - - # Get data from SQLite - sqlite_cursor.execute(f"SELECT * FROM {table_name}") - rows = sqlite_cursor.fetchall() - - if not rows: - logger.info(f"No data found in table: {table_name}") - self.migration_stats[table_name] = 0 - return True - - # Get column names - column_names = [description[0] for description in sqlite_cursor.description] - - # Apply column mapping if provided - if column_mapping: - column_names = [column_mapping.get(col, col) for col in column_names] - - # Prepare insert statement - placeholders = ', '.join(['%s'] * len(column_names)) - columns = ', '.join([f'"{col}"' for col in column_names]) # Quote column names - insert_sql = f'INSERT INTO "{table_name}" ({columns}) VALUES ({placeholders})' # Quote table name - - # Convert rows to list of tuples - data_rows = [] - for row in rows: - data_row = [] - for i, value in enumerate(row): - col_name = column_names[i] - # Handle special data type conversions - if value is None: - data_row.append(None) - elif isinstance(value, str) and value.startswith('{"') and value.endswith('}'): - # Handle JSON strings - data_row.append(value) - elif (col_name.startswith('is_') or col_name.endswith('_enabled') or col_name in ['is_paused']) and isinstance(value, int): - # Convert integer boolean to actual boolean for PostgreSQL - data_row.append(bool(value)) - elif isinstance(value, str) and value == '': - # Convert empty strings to None for PostgreSQL - data_row.append(None) - else: - data_row.append(value) - data_rows.append(tuple(data_row)) - - # Check if we should clear existing data first (for tables with unique constraints) - if table_name in ['company', 'team', 'user']: - postgres_cursor.execute(f'SELECT COUNT(*) FROM "{table_name}"') - existing_count = postgres_cursor.fetchone()[0] - if existing_count > 0: - logger.warning(f"Table {table_name} already has {existing_count} rows. 
Skipping to avoid duplicates.") - self.migration_stats[table_name] = 0 - return True - - # Insert data in batches - batch_size = 1000 - for i in range(0, len(data_rows), batch_size): - batch = data_rows[i:i + batch_size] - try: - postgres_cursor.executemany(insert_sql, batch) - self.postgres_conn.commit() - except Exception as batch_error: - logger.error(f"Error inserting batch {i//batch_size + 1} for table {table_name}: {batch_error}") - # Try inserting rows one by one to identify problematic rows - self.postgres_conn.rollback() - for j, row in enumerate(batch): - try: - postgres_cursor.execute(insert_sql, row) - self.postgres_conn.commit() - except Exception as row_error: - logger.error(f"Error inserting row {i + j} in table {table_name}: {row_error}") - logger.error(f"Problematic row data: {row}") - self.postgres_conn.rollback() - - logger.info(f"Migrated {len(rows)} rows from table: {table_name}") - self.migration_stats[table_name] = len(rows) - return True - - except Exception as e: - logger.error(f"Failed to migrate table {table_name}: {e}") - self.postgres_conn.rollback() - return False - - def update_sequences(self): - """Update PostgreSQL sequences after data migration""" - try: - with self.postgres_conn.cursor() as cursor: - # Get all sequences - fix the query to properly extract sequence names - cursor.execute(""" - SELECT - pg_get_serial_sequence(table_name, column_name) as sequence_name, - column_name, - table_name - FROM information_schema.columns - WHERE column_default LIKE 'nextval%' - AND table_schema = 'public' - """) - sequences = cursor.fetchall() - - for seq_name, col_name, table_name in sequences: - if seq_name is None: - continue - # Get the maximum value for each sequence - cursor.execute(f'SELECT MAX("{col_name}") FROM "{table_name}"') - max_val = cursor.fetchone()[0] - - if max_val is not None: - # Update sequence to start from max_val + 1 - don't quote sequence name from pg_get_serial_sequence - cursor.execute(f'ALTER SEQUENCE {seq_name} RESTART WITH {max_val + 1}') - logger.info(f"Updated sequence {seq_name} to start from {max_val + 1}") - - self.postgres_conn.commit() - logger.info("Updated PostgreSQL sequences") - return True - except Exception as e: - logger.error(f"Failed to update sequences: {e}") - self.postgres_conn.rollback() - return False - - def migrate_all_data(self): - """Migrate all data from SQLite to PostgreSQL""" - # Define table migration order (respecting foreign key constraints) - migration_order = [ - 'company', - 'team', - 'project_category', - 'user', - 'project', - 'task', - 'sub_task', - 'time_entry', - 'work_config', - 'company_work_config', - 'user_preferences', - 'system_settings' - ] - - for table_name in migration_order: - if not self.migrate_table_data(table_name): - logger.error(f"Migration failed at table: {table_name}") - return False - - # Update sequences after all data is migrated - if not self.update_sequences(): - logger.error("Failed to update sequences") - return False - - return True - - def verify_migration(self): - """Verify that migration was successful""" - try: - sqlite_cursor = self.sqlite_conn.cursor() - postgres_cursor = self.postgres_conn.cursor() - - # Get table names from SQLite - sqlite_cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") - sqlite_tables = [row[0] for row in sqlite_cursor.fetchall()] - - verification_results = {} - - for table_name in sqlite_tables: - if table_name == 'sqlite_sequence': - continue - - # Count rows in SQLite - sqlite_cursor.execute(f"SELECT COUNT(*) FROM 
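# --- Editor's note: setval() can perform the sequence bump above without
# assembling ALTER SEQUENCE strings; identifiers still cannot be bound
# parameters, so table/column are interpolated (a sketch):
def sync_sequence(cursor, table, column):
    cursor.execute(
        f'SELECT setval(pg_get_serial_sequence(%s, %s), '
        f'COALESCE((SELECT MAX("{column}") FROM "{table}"), 1))',
        (table, column),
    )
# setval(seq, n) makes the next nextval() return n + 1, matching the
# ALTER SEQUENCE ... RESTART WITH max_val + 1 behavior above.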
{table_name}") - sqlite_count = sqlite_cursor.fetchone()[0] - - # Count rows in PostgreSQL - postgres_cursor.execute(f'SELECT COUNT(*) FROM "{table_name}"') - postgres_count = postgres_cursor.fetchone()[0] - - verification_results[table_name] = { - 'sqlite_count': sqlite_count, - 'postgres_count': postgres_count, - 'match': sqlite_count == postgres_count - } - - if sqlite_count == postgres_count: - logger.info(f"✓ Table {table_name}: {sqlite_count} rows migrated successfully") - else: - logger.error(f"✗ Table {table_name}: SQLite={sqlite_count}, PostgreSQL={postgres_count}") - - return verification_results - - except Exception as e: - logger.error(f"Verification failed: {e}") - return None - - def run_migration(self, clear_existing=False): - """Run the complete migration process""" - logger.info("Starting SQLite to PostgreSQL migration...") - - # Connect to databases - if not self.connect_databases(): - return False - - try: - # Check SQLite database - if not self.check_sqlite_database(): - return False - - # Create backup - backup_file = self.backup_postgres() - - # Create PostgreSQL tables - if not self.create_postgres_tables(clear_existing=clear_existing): - return False - - # Migrate data - if not self.migrate_all_data(): - return False - - # Verify migration - verification = self.verify_migration() - if verification: - logger.info("Migration verification completed") - for table, stats in verification.items(): - if not stats['match']: - logger.error(f"Migration verification failed for table: {table}") - return False - - logger.info("Migration completed successfully!") - logger.info(f"Migration statistics: {self.migration_stats}") - return True - - except Exception as e: - logger.error(f"Migration failed: {e}") - return False - finally: - self.close_connections() - -def main(): - """Main migration function""" - import argparse - - parser = argparse.ArgumentParser(description='Migrate SQLite to PostgreSQL') - parser.add_argument('--clear-existing', action='store_true', - help='Clear existing PostgreSQL data before migration') - parser.add_argument('--sqlite-path', default=os.environ.get('SQLITE_PATH', '/data/timetrack.db'), - help='Path to SQLite database') - args = parser.parse_args() - - # Get database paths from environment variables - sqlite_path = args.sqlite_path - postgres_url = os.environ.get('DATABASE_URL') - - if not postgres_url: - logger.error("DATABASE_URL environment variable not set") - return 1 - - # Check if SQLite database exists - if not os.path.exists(sqlite_path): - logger.info(f"SQLite database not found at {sqlite_path}, skipping migration") - return 0 - - # Run migration - migration = SQLiteToPostgresMigration(sqlite_path, postgres_url) - success = migration.run_migration(clear_existing=args.clear_existing) - - return 0 if success else 1 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py b/migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py deleted file mode 100644 index f6e8004..0000000 --- a/migrations_old/old_migrations/02_migrate_sqlite_to_postgres_fixed.py +++ /dev/null @@ -1,361 +0,0 @@ -#!/usr/bin/env python3 -""" -Fixed SQLite to PostgreSQL Migration Script for TimeTrack -This script properly handles empty SQLite databases and column mapping issues. 
-""" - -import sqlite3 -import psycopg2 -import os -import sys -import logging -from datetime import datetime -from psycopg2.extras import RealDictCursor -import json - -# Add parent directory to path to import app -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -# Configure logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s', - handlers=[ - logging.FileHandler('migration.log'), - logging.StreamHandler() - ] -) -logger = logging.getLogger(__name__) - -class SQLiteToPostgresMigration: - def __init__(self, sqlite_path, postgres_url): - self.sqlite_path = sqlite_path - self.postgres_url = postgres_url - self.sqlite_conn = None - self.postgres_conn = None - self.migration_stats = {} - - # Column mapping for SQLite to PostgreSQL - self.column_mapping = { - 'project': { - # Map SQLite columns to PostgreSQL columns - # Ensure company_id is properly mapped - 'company_id': 'company_id', - 'user_id': 'company_id' # Map user_id to company_id if needed - } - } - - def connect_databases(self): - """Connect to both SQLite and PostgreSQL databases""" - try: - # Connect to SQLite - self.sqlite_conn = sqlite3.connect(self.sqlite_path) - self.sqlite_conn.row_factory = sqlite3.Row - logger.info(f"Connected to SQLite database: {self.sqlite_path}") - - # Connect to PostgreSQL - self.postgres_conn = psycopg2.connect(self.postgres_url) - self.postgres_conn.autocommit = False - logger.info("Connected to PostgreSQL database") - - return True - except Exception as e: - logger.error(f"Failed to connect to databases: {e}") - return False - - def close_connections(self): - """Close database connections""" - if self.sqlite_conn: - self.sqlite_conn.close() - if self.postgres_conn: - self.postgres_conn.close() - - def check_sqlite_database(self): - """Check if SQLite database exists and has data""" - if not os.path.exists(self.sqlite_path): - logger.error(f"SQLite database not found: {self.sqlite_path}") - return False - - try: - cursor = self.sqlite_conn.cursor() - cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") - tables = cursor.fetchall() - - if not tables: - logger.info("SQLite database is empty, nothing to migrate") - return False - - logger.info(f"Found {len(tables)} tables in SQLite database") - for table in tables: - logger.info(f" - {table[0]}") - return True - except Exception as e: - logger.error(f"Error checking SQLite database: {e}") - return False - - def clear_postgres_data(self): - """Clear existing data from PostgreSQL tables that will be migrated""" - try: - with self.postgres_conn.cursor() as cursor: - # Tables to clear in reverse order of dependencies - tables_to_clear = [ - 'time_entry', - 'sub_task', - 'task', - 'project', - 'user', - 'team', - 'company', - 'work_config', - 'system_settings' - ] - - for table in tables_to_clear: - try: - cursor.execute(f'DELETE FROM "{table}"') - logger.info(f"Cleared table: {table}") - except Exception as e: - logger.warning(f"Could not clear table {table}: {e}") - self.postgres_conn.rollback() - - self.postgres_conn.commit() - return True - except Exception as e: - logger.error(f"Failed to clear PostgreSQL data: {e}") - self.postgres_conn.rollback() - return False - - def migrate_table_data(self, table_name): - """Migrate data from SQLite table to PostgreSQL""" - try: - sqlite_cursor = self.sqlite_conn.cursor() - postgres_cursor = self.postgres_conn.cursor() - - # Check if table exists in SQLite - sqlite_cursor.execute("SELECT name FROM sqlite_master WHERE 
type='table' AND name=?", (table_name,)) - if not sqlite_cursor.fetchone(): - logger.info(f"Table {table_name} does not exist in SQLite, skipping...") - self.migration_stats[table_name] = 0 - return True - - # Get data from SQLite - sqlite_cursor.execute(f"SELECT * FROM {table_name}") - rows = sqlite_cursor.fetchall() - - if not rows: - logger.info(f"No data found in table: {table_name}") - self.migration_stats[table_name] = 0 - return True - - # Get column names from SQLite - column_names = [description[0] for description in sqlite_cursor.description] - logger.info(f"SQLite columns for {table_name}: {column_names}") - - # Get PostgreSQL column names - postgres_cursor.execute(f""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = %s - ORDER BY ordinal_position - """, (table_name,)) - pg_columns = [row[0] for row in postgres_cursor.fetchall()] - logger.info(f"PostgreSQL columns for {table_name}: {pg_columns}") - - # For project table, ensure company_id is properly handled - if table_name == 'project': - # Check if company_id exists in the data - for i, row in enumerate(rows): - row_dict = dict(zip(column_names, row)) - if 'company_id' not in row_dict or row_dict['company_id'] is None: - # If user_id exists, use it as company_id - if 'user_id' in row_dict and row_dict['user_id'] is not None: - logger.info(f"Mapping user_id {row_dict['user_id']} to company_id for project {row_dict.get('id')}") - # Update the row data - row_list = list(row) - if 'company_id' in column_names: - company_id_idx = column_names.index('company_id') - user_id_idx = column_names.index('user_id') - row_list[company_id_idx] = row_list[user_id_idx] - else: - # Add company_id column - column_names.append('company_id') - user_id_idx = column_names.index('user_id') - row_list.append(row[user_id_idx]) - rows[i] = tuple(row_list) - - # Filter columns to only those that exist in PostgreSQL - valid_columns = [col for col in column_names if col in pg_columns] - column_indices = [column_names.index(col) for col in valid_columns] - - # Prepare insert statement - placeholders = ', '.join(['%s'] * len(valid_columns)) - columns = ', '.join([f'"{col}"' for col in valid_columns]) - insert_sql = f'INSERT INTO "{table_name}" ({columns}) VALUES ({placeholders})' - - # Convert rows to list of tuples with only valid columns - data_rows = [] - for row in rows: - data_row = [] - for i in column_indices: - value = row[i] - col_name = valid_columns[column_indices.index(i)] - # Handle special data type conversions - if value is None: - data_row.append(None) - elif isinstance(value, str) and value.startswith('{"') and value.endswith('}'): - # Handle JSON strings - data_row.append(value) - elif (col_name.startswith('is_') or col_name.endswith('_enabled') or col_name in ['is_paused']) and isinstance(value, int): - # Convert integer boolean to actual boolean for PostgreSQL - data_row.append(bool(value)) - elif isinstance(value, str) and value == '': - # Convert empty strings to None for PostgreSQL - data_row.append(None) - else: - data_row.append(value) - data_rows.append(tuple(data_row)) - - # Insert data one by one to better handle errors - successful_inserts = 0 - for i, row in enumerate(data_rows): - try: - postgres_cursor.execute(insert_sql, row) - self.postgres_conn.commit() - successful_inserts += 1 - except Exception as row_error: - logger.error(f"Error inserting row {i} in table {table_name}: {row_error}") - logger.error(f"Problematic row data: {row}") - logger.error(f"Columns: {valid_columns}") - 
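# --- Editor's note: the index bookkeeping above
# (valid_columns[column_indices.index(i)]) reads more clearly as a zip over
# column/index pairs, reusing the to_pg_value() converter sketched earlier:
data_rows = [
    tuple(to_pg_value(col, row[idx])
          for col, idx in zip(valid_columns, column_indices))
    for row in rows
]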
self.postgres_conn.rollback() - - logger.info(f"Migrated {successful_inserts}/{len(rows)} rows from table: {table_name}") - self.migration_stats[table_name] = successful_inserts - return True - - except Exception as e: - logger.error(f"Failed to migrate table {table_name}: {e}") - self.postgres_conn.rollback() - return False - - def update_sequences(self): - """Update PostgreSQL sequences after data migration""" - try: - with self.postgres_conn.cursor() as cursor: - # Get all sequences - cursor.execute(""" - SELECT - pg_get_serial_sequence(table_name, column_name) as sequence_name, - column_name, - table_name - FROM information_schema.columns - WHERE column_default LIKE 'nextval%' - AND table_schema = 'public' - """) - sequences = cursor.fetchall() - - for seq_name, col_name, table_name in sequences: - if seq_name is None: - continue - # Get the maximum value for each sequence - cursor.execute(f'SELECT MAX("{col_name}") FROM "{table_name}"') - max_val = cursor.fetchone()[0] - - if max_val is not None: - # Update sequence to start from max_val + 1 - cursor.execute(f'ALTER SEQUENCE {seq_name} RESTART WITH {max_val + 1}') - logger.info(f"Updated sequence {seq_name} to start from {max_val + 1}") - - self.postgres_conn.commit() - logger.info("Updated PostgreSQL sequences") - return True - except Exception as e: - logger.error(f"Failed to update sequences: {e}") - self.postgres_conn.rollback() - return False - - def run_migration(self, clear_existing=False): - """Run the complete migration process""" - logger.info("Starting SQLite to PostgreSQL migration...") - - # Connect to databases - if not self.connect_databases(): - return False - - try: - # Check SQLite database - if not self.check_sqlite_database(): - logger.info("No data to migrate from SQLite") - return True - - # Clear existing PostgreSQL data if requested - if clear_existing: - if not self.clear_postgres_data(): - logger.warning("Failed to clear some PostgreSQL data, continuing anyway...") - - # Define table migration order (respecting foreign key constraints) - migration_order = [ - 'company', - 'team', - 'project_category', - 'user', - 'project', - 'task', - 'sub_task', - 'time_entry', - 'work_config', - 'company_work_config', - 'user_preferences', - 'system_settings' - ] - - # Migrate data - for table_name in migration_order: - if not self.migrate_table_data(table_name): - logger.error(f"Migration failed at table: {table_name}") - - # Update sequences after all data is migrated - if not self.update_sequences(): - logger.error("Failed to update sequences") - - logger.info("Migration completed!") - logger.info(f"Migration statistics: {self.migration_stats}") - return True - - except Exception as e: - logger.error(f"Migration failed: {e}") - return False - finally: - self.close_connections() - -def main(): - """Main migration function""" - import argparse - - parser = argparse.ArgumentParser(description='Migrate SQLite to PostgreSQL') - parser.add_argument('--clear-existing', action='store_true', - help='Clear existing PostgreSQL data before migration') - parser.add_argument('--sqlite-path', default=os.environ.get('SQLITE_PATH', '/data/timetrack.db'), - help='Path to SQLite database') - args = parser.parse_args() - - # Get database paths from environment variables - sqlite_path = args.sqlite_path - postgres_url = os.environ.get('DATABASE_URL') - - if not postgres_url: - logger.error("DATABASE_URL environment variable not set") - return 1 - - # Check if SQLite database exists - if not os.path.exists(sqlite_path): - 
logger.info(f"SQLite database not found at {sqlite_path}, skipping migration") - return 0 - - # Run migration - migration = SQLiteToPostgresMigration(sqlite_path, postgres_url) - success = migration.run_migration(clear_existing=args.clear_existing) - - return 0 if success else 1 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/old_migrations/03_add_dashboard_columns.py b/migrations_old/old_migrations/03_add_dashboard_columns.py deleted file mode 100644 index 2be4d3c..0000000 --- a/migrations_old/old_migrations/03_add_dashboard_columns.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 -""" -Add missing columns to user_dashboard table -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def add_missing_columns(): - """Add missing columns to user_dashboard table""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - # Check if columns exist - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'user_dashboard' - AND column_name IN ('layout', 'is_locked', 'created_at', 'updated_at', - 'name', 'is_default', 'layout_config', 'grid_columns', - 'theme', 'auto_refresh') - """) - existing_columns = [row[0] for row in cur.fetchall()] - - # Add missing columns - if 'name' not in existing_columns: - print("Adding 'name' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN name VARCHAR(100) DEFAULT 'My Dashboard'") - print("Added 'name' column") - - if 'is_default' not in existing_columns: - print("Adding 'is_default' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN is_default BOOLEAN DEFAULT TRUE") - print("Added 'is_default' column") - - if 'layout_config' not in existing_columns: - print("Adding 'layout_config' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN layout_config TEXT") - print("Added 'layout_config' column") - - if 'grid_columns' not in existing_columns: - print("Adding 'grid_columns' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN grid_columns INTEGER DEFAULT 6") - print("Added 'grid_columns' column") - - if 'theme' not in existing_columns: - print("Adding 'theme' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN theme VARCHAR(20) DEFAULT 'light'") - print("Added 'theme' column") - - if 'auto_refresh' not in existing_columns: - print("Adding 'auto_refresh' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN auto_refresh INTEGER DEFAULT 300") - print("Added 'auto_refresh' column") - - if 'layout' not in existing_columns: - print("Adding 'layout' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN layout JSON") - print("Added 'layout' column") - - if 'is_locked' not in existing_columns: - print("Adding 'is_locked' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN is_locked BOOLEAN DEFAULT FALSE") - print("Added 'is_locked' column") - - if 
'created_at' not in existing_columns: - print("Adding 'created_at' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP") - print("Added 'created_at' column") - - if 'updated_at' not in existing_columns: - print("Adding 'updated_at' column to user_dashboard table...") - cur.execute("ALTER TABLE user_dashboard ADD COLUMN updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP") - print("Added 'updated_at' column") - - # Commit changes - conn.commit() - print("Dashboard columns migration completed successfully!") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - add_missing_columns() \ No newline at end of file diff --git a/migrations_old/old_migrations/04_add_user_preferences_columns.py b/migrations_old/old_migrations/04_add_user_preferences_columns.py deleted file mode 100755 index 6ad579b..0000000 --- a/migrations_old/old_migrations/04_add_user_preferences_columns.py +++ /dev/null @@ -1,159 +0,0 @@ -#!/usr/bin/env python3 -""" -Add missing columns to user_preferences table -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def add_missing_columns(): - """Add missing columns to user_preferences table""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - # Check if table exists - cur.execute(""" - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_name = 'user_preferences' - ) - """) - table_exists = cur.fetchone()[0] - - if not table_exists: - print("user_preferences table does not exist. 
Creating it...") - cur.execute(""" - CREATE TABLE user_preferences ( - id SERIAL PRIMARY KEY, - user_id INTEGER UNIQUE NOT NULL REFERENCES "user"(id), - theme VARCHAR(20) DEFAULT 'light', - language VARCHAR(10) DEFAULT 'en', - timezone VARCHAR(50) DEFAULT 'UTC', - date_format VARCHAR(20) DEFAULT 'YYYY-MM-DD', - time_format VARCHAR(10) DEFAULT '24h', - email_notifications BOOLEAN DEFAULT TRUE, - email_daily_summary BOOLEAN DEFAULT FALSE, - email_weekly_summary BOOLEAN DEFAULT TRUE, - default_project_id INTEGER REFERENCES project(id), - timer_reminder_enabled BOOLEAN DEFAULT TRUE, - timer_reminder_interval INTEGER DEFAULT 60, - dashboard_layout JSON, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """) - print("Created user_preferences table") - else: - # Check which columns exist - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'user_preferences' - AND column_name IN ('theme', 'language', 'timezone', 'date_format', - 'time_format', 'email_notifications', 'email_daily_summary', - 'email_weekly_summary', 'default_project_id', - 'timer_reminder_enabled', 'timer_reminder_interval', - 'dashboard_layout', 'created_at', 'updated_at') - """) - existing_columns = [row[0] for row in cur.fetchall()] - - # Add missing columns - if 'theme' not in existing_columns: - print("Adding 'theme' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN theme VARCHAR(20) DEFAULT 'light'") - print("Added 'theme' column") - - if 'language' not in existing_columns: - print("Adding 'language' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN language VARCHAR(10) DEFAULT 'en'") - print("Added 'language' column") - - if 'timezone' not in existing_columns: - print("Adding 'timezone' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN timezone VARCHAR(50) DEFAULT 'UTC'") - print("Added 'timezone' column") - - if 'date_format' not in existing_columns: - print("Adding 'date_format' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN date_format VARCHAR(20) DEFAULT 'YYYY-MM-DD'") - print("Added 'date_format' column") - - if 'time_format' not in existing_columns: - print("Adding 'time_format' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN time_format VARCHAR(10) DEFAULT '24h'") - print("Added 'time_format' column") - - if 'email_notifications' not in existing_columns: - print("Adding 'email_notifications' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN email_notifications BOOLEAN DEFAULT TRUE") - print("Added 'email_notifications' column") - - if 'email_daily_summary' not in existing_columns: - print("Adding 'email_daily_summary' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN email_daily_summary BOOLEAN DEFAULT FALSE") - print("Added 'email_daily_summary' column") - - if 'email_weekly_summary' not in existing_columns: - print("Adding 'email_weekly_summary' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN email_weekly_summary BOOLEAN DEFAULT TRUE") - print("Added 'email_weekly_summary' column") - - if 'default_project_id' not in existing_columns: - print("Adding 'default_project_id' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN 
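# --- Editor's note: the long run of near-identical "if missing, ALTER TABLE"
# blocks above (and in the dashboard-columns script) could be data-driven;
# a sketch with a trimmed column list:
WANTED_COLUMNS = [
    ("theme",      "VARCHAR(20) DEFAULT 'light'"),
    ("language",   "VARCHAR(10) DEFAULT 'en'"),
    ("timezone",   "VARCHAR(50) DEFAULT 'UTC'"),
    ("created_at", "TIMESTAMP DEFAULT CURRENT_TIMESTAMP"),
    # ... remaining (name, ddl) pairs from the checks above
]

def add_missing_columns_for(cur, table, wanted, existing):
    for name, ddl in wanted:
        if name not in existing:
            print(f"Adding '{name}' column to {table} table...")
            cur.execute(f"ALTER TABLE {table} ADD COLUMN {name} {ddl}")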
default_project_id INTEGER REFERENCES project(id)") - print("Added 'default_project_id' column") - - if 'timer_reminder_enabled' not in existing_columns: - print("Adding 'timer_reminder_enabled' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN timer_reminder_enabled BOOLEAN DEFAULT TRUE") - print("Added 'timer_reminder_enabled' column") - - if 'timer_reminder_interval' not in existing_columns: - print("Adding 'timer_reminder_interval' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN timer_reminder_interval INTEGER DEFAULT 60") - print("Added 'timer_reminder_interval' column") - - if 'dashboard_layout' not in existing_columns: - print("Adding 'dashboard_layout' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN dashboard_layout JSON") - print("Added 'dashboard_layout' column") - - if 'created_at' not in existing_columns: - print("Adding 'created_at' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP") - print("Added 'created_at' column") - - if 'updated_at' not in existing_columns: - print("Adding 'updated_at' column to user_preferences table...") - cur.execute("ALTER TABLE user_preferences ADD COLUMN updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP") - print("Added 'updated_at' column") - - # Commit changes - conn.commit() - print("User preferences migration completed successfully!") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - add_missing_columns() \ No newline at end of file diff --git a/migrations_old/old_migrations/05_fix_task_status_enum.py b/migrations_old/old_migrations/05_fix_task_status_enum.py deleted file mode 100755 index fcec4cc..0000000 --- a/migrations_old/old_migrations/05_fix_task_status_enum.py +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix task status enum in the database to match Python enum -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def fix_task_status_enum(): - """Update task status enum in database""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - print("Starting task status enum migration...") - - # First check if the enum already has the correct values - cur.execute(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'taskstatus') - ORDER BY enumsortorder - """) - current_values = [row[0] for row in cur.fetchall()] - print(f"Current enum values: {current_values}") - - # Check if migration is needed - expected_values = ['TODO', 'IN_PROGRESS', 'IN_REVIEW', 'DONE', 'CANCELLED'] - if all(val in current_values for val in expected_values): - print("Task status enum already has correct values. 
Skipping migration.") - return - - # Check if task table exists and has a status column - cur.execute(""" - SELECT column_name, data_type - FROM information_schema.columns - WHERE table_name = 'task' AND column_name = 'status' - """) - if not cur.fetchone(): - print("No task table or status column found. Skipping migration.") - return - - # Check if temporary column already exists - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'task' AND column_name = 'status_temp' - """) - temp_exists = cur.fetchone() is not None - - if not temp_exists: - # First, we need to create a temporary column to hold the data - print("1. Creating temporary column...") - cur.execute("ALTER TABLE task ADD COLUMN status_temp VARCHAR(50)") - cur.execute("ALTER TABLE sub_task ADD COLUMN status_temp VARCHAR(50)") - else: - print("1. Temporary column already exists...") - - # Copy current status values to temp column with mapping - print("2. Copying and mapping status values...") - # First check what values actually exist in the database - cur.execute("SELECT DISTINCT status::text FROM task WHERE status IS NOT NULL") - existing_statuses = [row[0] for row in cur.fetchall()] - print(f" Existing status values in task table: {existing_statuses}") - - # If no statuses exist, skip the mapping - if not existing_statuses: - print(" No existing status values to migrate") - else: - # Build dynamic mapping based on what exists - mapping_sql = "UPDATE task SET status_temp = CASE " - has_cases = False - if 'NOT_STARTED' in existing_statuses: - mapping_sql += "WHEN status::text = 'NOT_STARTED' THEN 'TODO' " - has_cases = True - if 'TODO' in existing_statuses: - mapping_sql += "WHEN status::text = 'TODO' THEN 'TODO' " - has_cases = True - if 'IN_PROGRESS' in existing_statuses: - mapping_sql += "WHEN status::text = 'IN_PROGRESS' THEN 'IN_PROGRESS' " - has_cases = True - if 'ON_HOLD' in existing_statuses: - mapping_sql += "WHEN status::text = 'ON_HOLD' THEN 'IN_REVIEW' " - has_cases = True - if 'IN_REVIEW' in existing_statuses: - mapping_sql += "WHEN status::text = 'IN_REVIEW' THEN 'IN_REVIEW' " - has_cases = True - if 'COMPLETED' in existing_statuses: - mapping_sql += "WHEN status::text = 'COMPLETED' THEN 'DONE' " - has_cases = True - if 'DONE' in existing_statuses: - mapping_sql += "WHEN status::text = 'DONE' THEN 'DONE' " - has_cases = True - if 'CANCELLED' in existing_statuses: - mapping_sql += "WHEN status::text = 'CANCELLED' THEN 'CANCELLED' " - has_cases = True - if 'ARCHIVED' in existing_statuses: - mapping_sql += "WHEN status::text = 'ARCHIVED' THEN 'CANCELLED' " - has_cases = True - - if has_cases: - mapping_sql += "ELSE status::text END WHERE status IS NOT NULL" - cur.execute(mapping_sql) - print(f" Updated {cur.rowcount} tasks") - - # Check sub_task table - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'sub_task' AND column_name = 'status' - """) - if cur.fetchone(): - # Get existing subtask statuses - cur.execute("SELECT DISTINCT status::text FROM sub_task WHERE status IS NOT NULL") - existing_subtask_statuses = [row[0] for row in cur.fetchall()] - print(f" Existing status values in sub_task table: {existing_subtask_statuses}") - - # If no statuses exist, skip the mapping - if not existing_subtask_statuses: - print(" No existing subtask status values to migrate") - else: - # Build dynamic mapping for subtasks - mapping_sql = "UPDATE sub_task SET status_temp = CASE " - has_cases = False - if 'NOT_STARTED' in existing_subtask_statuses: 
- mapping_sql += "WHEN status::text = 'NOT_STARTED' THEN 'TODO' " - has_cases = True - if 'TODO' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'TODO' THEN 'TODO' " - has_cases = True - if 'IN_PROGRESS' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'IN_PROGRESS' THEN 'IN_PROGRESS' " - has_cases = True - if 'ON_HOLD' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'ON_HOLD' THEN 'IN_REVIEW' " - has_cases = True - if 'IN_REVIEW' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'IN_REVIEW' THEN 'IN_REVIEW' " - has_cases = True - if 'COMPLETED' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'COMPLETED' THEN 'DONE' " - has_cases = True - if 'DONE' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'DONE' THEN 'DONE' " - has_cases = True - if 'CANCELLED' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'CANCELLED' THEN 'CANCELLED' " - has_cases = True - if 'ARCHIVED' in existing_subtask_statuses: - mapping_sql += "WHEN status::text = 'ARCHIVED' THEN 'CANCELLED' " - has_cases = True - - if has_cases: - mapping_sql += "ELSE status::text END WHERE status IS NOT NULL" - cur.execute(mapping_sql) - print(f" Updated {cur.rowcount} subtasks") - - # Drop the old status columns - print("3. Dropping old status columns...") - cur.execute("ALTER TABLE task DROP COLUMN status") - cur.execute("ALTER TABLE sub_task DROP COLUMN status") - - # Drop the old enum type - print("4. Dropping old enum type...") - cur.execute("DROP TYPE IF EXISTS taskstatus") - - # Create new enum type with correct values - print("5. Creating new enum type...") - cur.execute(""" - CREATE TYPE taskstatus AS ENUM ( - 'TODO', - 'IN_PROGRESS', - 'IN_REVIEW', - 'DONE', - 'CANCELLED' - ) - """) - - # Add new status columns with correct enum type - print("6. Adding new status columns...") - cur.execute("ALTER TABLE task ADD COLUMN status taskstatus") - cur.execute("ALTER TABLE sub_task ADD COLUMN status taskstatus") - - # Copy data from temp columns to new status columns - print("7. Copying data to new columns...") - cur.execute("UPDATE task SET status = status_temp::taskstatus") - cur.execute("UPDATE sub_task SET status = status_temp::taskstatus") - - # Drop temporary columns - print("8. Dropping temporary columns...") - cur.execute("ALTER TABLE task DROP COLUMN status_temp") - cur.execute("ALTER TABLE sub_task DROP COLUMN status_temp") - - # Add NOT NULL constraint - print("9. Adding NOT NULL constraints...") - cur.execute("ALTER TABLE task ALTER COLUMN status SET NOT NULL") - cur.execute("ALTER TABLE sub_task ALTER COLUMN status SET NOT NULL") - - # Set default value - print("10. 
Setting default values...") - cur.execute("ALTER TABLE task ALTER COLUMN status SET DEFAULT 'TODO'") - cur.execute("ALTER TABLE sub_task ALTER COLUMN status SET DEFAULT 'TODO'") - - # Commit changes - conn.commit() - print("\nTask status enum migration completed successfully!") - - # Verify the new enum values - print("\nVerifying new enum values:") - cur.execute(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = ( - SELECT oid FROM pg_type WHERE typname = 'taskstatus' - ) - ORDER BY enumsortorder - """) - for row in cur.fetchall(): - print(f" - {row[0]}") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - fix_task_status_enum() \ No newline at end of file diff --git a/migrations_old/old_migrations/06_add_archived_status.py b/migrations_old/old_migrations/06_add_archived_status.py deleted file mode 100755 index 65089ad..0000000 --- a/migrations_old/old_migrations/06_add_archived_status.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python3 -""" -Add ARCHIVED status back to task status enum -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def add_archived_status(): - """Add ARCHIVED status to task status enum""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - print("Adding ARCHIVED status to taskstatus enum...") - - # Check if ARCHIVED already exists - cur.execute(""" - SELECT EXISTS ( - SELECT 1 FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'taskstatus') - AND enumlabel = 'ARCHIVED' - ) - """) - - if cur.fetchone()[0]: - print("ARCHIVED status already exists in enum") - return - - # Add ARCHIVED to the enum - cur.execute(""" - ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED' AFTER 'CANCELLED' - """) - - print("Successfully added ARCHIVED status to enum") - - # Verify the enum values - print("\nCurrent taskstatus enum values:") - cur.execute(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = ( - SELECT oid FROM pg_type WHERE typname = 'taskstatus' - ) - ORDER BY enumsortorder - """) - for row in cur.fetchall(): - print(f" - {row[0]}") - - # Commit changes - conn.commit() - print("\nMigration completed successfully!") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - add_archived_status() \ No newline at end of file diff --git a/migrations_old/old_migrations/07_fix_company_work_config_columns.py b/migrations_old/old_migrations/07_fix_company_work_config_columns.py deleted file mode 100755 index f33fbe3..0000000 --- a/migrations_old/old_migrations/07_fix_company_work_config_columns.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix company_work_config table columns to match model definition -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def fix_company_work_config_columns(): - 
"""Rename and add columns to match the new model definition""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - # Check which columns exist - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'company_work_config' - """) - existing_columns = [row[0] for row in cur.fetchall()] - print(f"Existing columns: {existing_columns}") - - # Rename columns if they exist with old names - if 'work_hours_per_day' in existing_columns and 'standard_hours_per_day' not in existing_columns: - print("Renaming work_hours_per_day to standard_hours_per_day...") - cur.execute("ALTER TABLE company_work_config RENAME COLUMN work_hours_per_day TO standard_hours_per_day") - - # Add missing columns - if 'standard_hours_per_day' not in existing_columns and 'work_hours_per_day' not in existing_columns: - print("Adding standard_hours_per_day column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN standard_hours_per_day FLOAT DEFAULT 8.0") - - if 'standard_hours_per_week' not in existing_columns: - print("Adding standard_hours_per_week column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN standard_hours_per_week FLOAT DEFAULT 40.0") - - # Rename region to work_region if needed - if 'region' in existing_columns and 'work_region' not in existing_columns: - print("Renaming region to work_region...") - cur.execute("ALTER TABLE company_work_config RENAME COLUMN region TO work_region") - elif 'work_region' not in existing_columns: - print("Adding work_region column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN work_region VARCHAR(50) DEFAULT 'OTHER'") - - # Add new columns that don't exist - if 'overtime_enabled' not in existing_columns: - print("Adding overtime_enabled column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN overtime_enabled BOOLEAN DEFAULT TRUE") - - if 'overtime_rate' not in existing_columns: - print("Adding overtime_rate column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN overtime_rate FLOAT DEFAULT 1.5") - - if 'double_time_enabled' not in existing_columns: - print("Adding double_time_enabled column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN double_time_enabled BOOLEAN DEFAULT FALSE") - - if 'double_time_threshold' not in existing_columns: - print("Adding double_time_threshold column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN double_time_threshold FLOAT DEFAULT 12.0") - - if 'double_time_rate' not in existing_columns: - print("Adding double_time_rate column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN double_time_rate FLOAT DEFAULT 2.0") - - if 'require_breaks' not in existing_columns: - print("Adding require_breaks column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN require_breaks BOOLEAN DEFAULT TRUE") - - if 'break_duration_minutes' not in existing_columns: - # Rename mandatory_break_minutes if it exists - if 'mandatory_break_minutes' in existing_columns: - print("Renaming mandatory_break_minutes to break_duration_minutes...") - cur.execute("ALTER TABLE company_work_config RENAME COLUMN mandatory_break_minutes TO break_duration_minutes") - else: - print("Adding break_duration_minutes column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN 
break_duration_minutes INTEGER DEFAULT 30") - - if 'break_after_hours' not in existing_columns: - # Rename break_threshold_hours if it exists - if 'break_threshold_hours' in existing_columns: - print("Renaming break_threshold_hours to break_after_hours...") - cur.execute("ALTER TABLE company_work_config RENAME COLUMN break_threshold_hours TO break_after_hours") - else: - print("Adding break_after_hours column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN break_after_hours FLOAT DEFAULT 6.0") - - if 'weekly_overtime_threshold' not in existing_columns: - print("Adding weekly_overtime_threshold column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN weekly_overtime_threshold FLOAT DEFAULT 40.0") - - if 'weekly_overtime_rate' not in existing_columns: - print("Adding weekly_overtime_rate column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN weekly_overtime_rate FLOAT DEFAULT 1.5") - - # Drop columns that are no longer needed - if 'region_name' in existing_columns: - print("Dropping region_name column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN region_name") - - if 'additional_break_minutes' in existing_columns: - print("Dropping additional_break_minutes column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN additional_break_minutes") - - if 'additional_break_threshold_hours' in existing_columns: - print("Dropping additional_break_threshold_hours column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN additional_break_threshold_hours") - - if 'created_by_id' in existing_columns: - print("Dropping created_by_id column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN created_by_id") - - # Commit changes - conn.commit() - print("\nCompany work config migration completed successfully!") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - fix_company_work_config_columns() \ No newline at end of file diff --git a/migrations_old/old_migrations/08_fix_work_region_enum.py b/migrations_old/old_migrations/08_fix_work_region_enum.py deleted file mode 100755 index d06bb90..0000000 --- a/migrations_old/old_migrations/08_fix_work_region_enum.py +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix work region enum values in the database -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def fix_work_region_enum(): - """Update work region enum values in database""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - print("Starting work region enum migration...") - - # First check if work_region column is using enum type - cur.execute(""" - SELECT data_type - FROM information_schema.columns - WHERE table_name = 'company_work_config' - AND column_name = 'work_region' - """) - data_type = cur.fetchone() - - if data_type and data_type[0] == 'USER-DEFINED': - # It's an enum, we need to update it - print("work_region is an enum type, migrating...") - - # Create temporary column - print("1. 
Creating temporary column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN work_region_temp VARCHAR(50)") - - # Copy and map values - print("2. Copying and mapping values...") - cur.execute(""" - UPDATE company_work_config SET work_region_temp = CASE - WHEN work_region::text = 'GERMANY' THEN 'EU' - WHEN work_region::text = 'DE' THEN 'EU' - WHEN work_region::text = 'UNITED_STATES' THEN 'USA' - WHEN work_region::text = 'US' THEN 'USA' - WHEN work_region::text = 'UNITED_KINGDOM' THEN 'UK' - WHEN work_region::text = 'GB' THEN 'UK' - WHEN work_region::text = 'FRANCE' THEN 'EU' - WHEN work_region::text = 'FR' THEN 'EU' - WHEN work_region::text = 'EUROPEAN_UNION' THEN 'EU' - WHEN work_region::text = 'CUSTOM' THEN 'OTHER' - ELSE COALESCE(work_region::text, 'OTHER') - END - """) - print(f" Updated {cur.rowcount} rows") - - # Drop old column - print("3. Dropping old work_region column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN work_region") - - # Check if enum type exists and drop it - cur.execute(""" - SELECT EXISTS ( - SELECT 1 FROM pg_type WHERE typname = 'workregion' - ) - """) - if cur.fetchone()[0]: - print("4. Dropping old workregion enum type...") - cur.execute("DROP TYPE IF EXISTS workregion CASCADE") - - # Create new enum type - print("5. Creating new workregion enum type...") - cur.execute(""" - CREATE TYPE workregion AS ENUM ( - 'USA', - 'CANADA', - 'UK', - 'EU', - 'AUSTRALIA', - 'OTHER' - ) - """) - - # Add new column with enum type - print("6. Adding new work_region column...") - cur.execute("ALTER TABLE company_work_config ADD COLUMN work_region workregion DEFAULT 'OTHER'") - - # Copy data back - print("7. Copying data to new column...") - cur.execute("UPDATE company_work_config SET work_region = work_region_temp::workregion") - - # Drop temporary column - print("8. 
Dropping temporary column...") - cur.execute("ALTER TABLE company_work_config DROP COLUMN work_region_temp") - - else: - # It's already a varchar, just update the values - print("work_region is already a varchar, updating values...") - cur.execute(""" - UPDATE company_work_config SET work_region = CASE - WHEN work_region = 'GERMANY' THEN 'EU' - WHEN work_region = 'DE' THEN 'EU' - WHEN work_region = 'UNITED_STATES' THEN 'USA' - WHEN work_region = 'US' THEN 'USA' - WHEN work_region = 'UNITED_KINGDOM' THEN 'UK' - WHEN work_region = 'GB' THEN 'UK' - WHEN work_region = 'FRANCE' THEN 'EU' - WHEN work_region = 'FR' THEN 'EU' - WHEN work_region = 'EUROPEAN_UNION' THEN 'EU' - WHEN work_region = 'CUSTOM' THEN 'OTHER' - ELSE COALESCE(work_region, 'OTHER') - END - """) - print(f"Updated {cur.rowcount} rows") - - # Commit changes - conn.commit() - print("\nWork region enum migration completed successfully!") - - # Verify the results - print("\nCurrent work_region values in database:") - cur.execute("SELECT DISTINCT work_region FROM company_work_config ORDER BY work_region") - for row in cur.fetchall(): - print(f" - {row[0]}") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - fix_work_region_enum() \ No newline at end of file diff --git a/migrations_old/old_migrations/09_add_germany_to_workregion.py b/migrations_old/old_migrations/09_add_germany_to_workregion.py deleted file mode 100755 index 4546890..0000000 --- a/migrations_old/old_migrations/09_add_germany_to_workregion.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 -""" -Add GERMANY back to work region enum -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def add_germany_to_workregion(): - """Add GERMANY to work region enum""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - print("Adding GERMANY to workregion enum...") - - # Check if GERMANY already exists - cur.execute(""" - SELECT EXISTS ( - SELECT 1 FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'workregion') - AND enumlabel = 'GERMANY' - ) - """) - - if cur.fetchone()[0]: - print("GERMANY already exists in enum") - return - - # Add GERMANY to the enum after UK - cur.execute(""" - ALTER TYPE workregion ADD VALUE IF NOT EXISTS 'GERMANY' AFTER 'UK' - """) - - print("Successfully added GERMANY to enum") - - # Update any EU records that should be Germany based on other criteria - # For now, we'll leave existing EU records as is, but new records can choose Germany - - # Verify the enum values - print("\nCurrent workregion enum values:") - cur.execute(""" - SELECT enumlabel - FROM pg_enum - WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'workregion') - ORDER BY enumsortorder - """) - for row in cur.fetchall(): - print(f" - {row[0]}") - - # Commit changes - conn.commit() - print("\nMigration completed successfully!") - - except Exception as e: - print(f"Error during migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - add_germany_to_workregion() \ No 
newline at end of file diff --git a/migrations_old/old_migrations/10_add_company_settings_columns.py b/migrations_old/old_migrations/10_add_company_settings_columns.py deleted file mode 100755 index fa028a3..0000000 --- a/migrations_old/old_migrations/10_add_company_settings_columns.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 -""" -Add missing columns to company_settings table -""" - -import os -import psycopg2 -from psycopg2 import sql -from urllib.parse import urlparse - -# Get database URL from environment -DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://timetrack:timetrack123@localhost:5432/timetrack') - -def add_missing_columns(): - """Add missing columns to company_settings table""" - # Parse database URL - parsed = urlparse(DATABASE_URL) - - # Connect to database - conn = psycopg2.connect( - host=parsed.hostname, - port=parsed.port or 5432, - user=parsed.username, - password=parsed.password, - database=parsed.path[1:] # Remove leading slash - ) - - try: - with conn.cursor() as cur: - # Check if table exists - cur.execute(""" - SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_name = 'company_settings' - ) - """) - table_exists = cur.fetchone()[0] - - if not table_exists: - print("company_settings table does not exist. Creating it...") - cur.execute(""" - CREATE TABLE company_settings ( - id SERIAL PRIMARY KEY, - company_id INTEGER UNIQUE NOT NULL REFERENCES company(id), - work_week_start INTEGER DEFAULT 1, - work_days VARCHAR(20) DEFAULT '1,2,3,4,5', - allow_overlapping_entries BOOLEAN DEFAULT FALSE, - require_project_for_time_entry BOOLEAN DEFAULT TRUE, - allow_future_entries BOOLEAN DEFAULT FALSE, - max_hours_per_entry FLOAT DEFAULT 24.0, - enable_tasks BOOLEAN DEFAULT TRUE, - enable_sprints BOOLEAN DEFAULT FALSE, - enable_client_access BOOLEAN DEFAULT FALSE, - notify_on_overtime BOOLEAN DEFAULT TRUE, - overtime_threshold_daily FLOAT DEFAULT 8.0, - overtime_threshold_weekly FLOAT DEFAULT 40.0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """) - print("Created company_settings table") - else: - # Check which columns exist - cur.execute(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'company_settings' - """) - existing_columns = [row[0] for row in cur.fetchall()] - print(f"Existing columns: {existing_columns}") - - # Add missing columns - columns_to_add = { - 'work_week_start': 'INTEGER DEFAULT 1', - 'work_days': "VARCHAR(20) DEFAULT '1,2,3,4,5'", - 'allow_overlapping_entries': 'BOOLEAN DEFAULT FALSE', - 'require_project_for_time_entry': 'BOOLEAN DEFAULT TRUE', - 'allow_future_entries': 'BOOLEAN DEFAULT FALSE', - 'max_hours_per_entry': 'FLOAT DEFAULT 24.0', - 'enable_tasks': 'BOOLEAN DEFAULT TRUE', - 'enable_sprints': 'BOOLEAN DEFAULT FALSE', - 'enable_client_access': 'BOOLEAN DEFAULT FALSE', - 'notify_on_overtime': 'BOOLEAN DEFAULT TRUE', - 'overtime_threshold_daily': 'FLOAT DEFAULT 8.0', - 'overtime_threshold_weekly': 'FLOAT DEFAULT 40.0', - 'created_at': 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP', - 'updated_at': 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP' - } - - for column, definition in columns_to_add.items(): - if column not in existing_columns: - print(f"Adding {column} column...") - cur.execute(f"ALTER TABLE company_settings ADD COLUMN {column} {definition}") - print(f"Added {column} column") - - # Commit changes - conn.commit() - print("\nCompany settings migration completed successfully!") - - except Exception as e: - print(f"Error during 
migration: {e}") - conn.rollback() - raise - finally: - conn.close() - -if __name__ == "__main__": - add_missing_columns() \ No newline at end of file diff --git a/migrations_old/old_migrations/11_fix_company_work_config_usage.py b/migrations_old/old_migrations/11_fix_company_work_config_usage.py deleted file mode 100755 index bed6528..0000000 --- a/migrations_old/old_migrations/11_fix_company_work_config_usage.py +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix CompanyWorkConfig field usage throughout the codebase -""" - -import os -import re -from pathlib import Path - -# Define old to new field mappings -FIELD_MAPPINGS = { - 'work_hours_per_day': 'standard_hours_per_day', - 'mandatory_break_minutes': 'break_duration_minutes', - 'break_threshold_hours': 'break_after_hours', - 'region': 'work_region', -} - -# Fields that were removed -REMOVED_FIELDS = [ - 'additional_break_minutes', - 'additional_break_threshold_hours', - 'region_name', - 'created_by_id' -] - -def update_python_files(): - """Update Python files with new field names""" - python_files = [ - 'app.py', - 'routes/company.py', - ] - - for filepath in python_files: - if not os.path.exists(filepath): - print(f"Skipping {filepath} - file not found") - continue - - print(f"Processing {filepath}...") - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - - # Update field references - for old_field, new_field in FIELD_MAPPINGS.items(): - # Update attribute access: .old_field -> .new_field - content = re.sub( - rf'\.{old_field}\b', - f'.{new_field}', - content - ) - - # Update dictionary access: ['old_field'] -> ['new_field'] - content = re.sub( - rf'\[[\'"]{old_field}[\'"]\]', - f"['{new_field}']", - content - ) - - # Update keyword arguments: old_field= -> new_field= - content = re.sub( - rf'\b{old_field}=', - f'{new_field}=', - content - ) - - # Handle special cases for app.py - if filepath == 'app.py': - # Update WorkRegion.GERMANY references where appropriate - content = re.sub( - r'WorkRegion\.GERMANY', - 'WorkRegion.GERMANY # Note: Germany has specific labor laws', - content - ) - - # Handle removed fields - comment them out with explanation - for removed_field in ['additional_break_minutes', 'additional_break_threshold_hours']: - content = re.sub( - rf'^(\s*)(.*{removed_field}.*)$', - r'\1# REMOVED: \2 # This field no longer exists in the model', - content, - flags=re.MULTILINE - ) - - # Handle region_name specially in routes/company.py - if filepath == 'routes/company.py': - # Remove region_name assignments - content = re.sub( - r"work_config\.region_name = .*\n", - "# region_name removed - using work_region enum value instead\n", - content - ) - - # Fix WorkRegion.CUSTOM -> WorkRegion.OTHER - content = re.sub( - r'WorkRegion\.CUSTOM', - 'WorkRegion.OTHER', - content - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def update_template_files(): - """Update template files with new field names""" - template_files = [ - 'templates/admin_company.html', - 'templates/admin_work_policies.html', - 'templates/config.html', - ] - - for filepath in template_files: - if not os.path.exists(filepath): - print(f"Skipping {filepath} - file not found") - continue - - print(f"Processing {filepath}...") - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - - # Update field references in templates - for old_field, new_field in 
FIELD_MAPPINGS.items(): - # Update Jinja2 variable access: {{ obj.old_field }} -> {{ obj.new_field }} - content = re.sub( - r'(\{\{[^}]*\.)' + re.escape(old_field) + r'(\s*\}\})', - r'\1' + new_field + r'\2', - content - ) - - # Update form field names and IDs - content = re.sub( - rf'(name|id)=[\'"]{old_field}[\'"]', - rf'\1="{new_field}"', - content - ) - - # Handle region_name in templates - if 'region_name' in content: - # Replace region_name with work_region.value - content = re.sub( - r'(\{\{[^}]*\.)region_name(\s*\}\})', - r'\1work_region.value\2', - content - ) - - # Handle removed fields in admin_company.html - if filepath == 'templates/admin_company.html' and 'additional_break' in content: - # Remove entire config-item divs for removed fields - content = re.sub( - r'
<div class="config-item">.*?additional_break.*?</div>
\s*', - '', - content, - flags=re.DOTALL - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def main(): - print("=== Fixing CompanyWorkConfig Field Usage ===\n") - - print("1. Updating Python files...") - update_python_files() - - print("\n2. Updating template files...") - update_template_files() - - print("\n✅ CompanyWorkConfig migration complete!") - print("\nNote: Some fields have been removed from the model:") - print(" - additional_break_minutes") - print(" - additional_break_threshold_hours") - print(" - region_name (use work_region.value instead)") - print(" - created_by_id") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/12_fix_task_status_usage.py b/migrations_old/old_migrations/12_fix_task_status_usage.py deleted file mode 100755 index 3f90e76..0000000 --- a/migrations_old/old_migrations/12_fix_task_status_usage.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix TaskStatus enum usage throughout the codebase -""" - -import os -import re -from pathlib import Path - -# Define old to new status mappings -STATUS_MAPPINGS = { - 'NOT_STARTED': 'TODO', - 'COMPLETED': 'DONE', - 'ON_HOLD': 'IN_REVIEW', -} - -def update_python_files(): - """Update Python files with new TaskStatus values""" - # Find all Python files that might use TaskStatus - python_files = [] - - # Add specific known files - known_files = ['app.py', 'routes/tasks.py', 'routes/tasks_api.py', 'routes/sprints.py', 'routes/sprints_api.py'] - python_files.extend([f for f in known_files if os.path.exists(f)]) - - # Search for more Python files in routes/ - if os.path.exists('routes'): - python_files.extend([str(p) for p in Path('routes').glob('*.py')]) - - # Remove duplicates - python_files = list(set(python_files)) - - for filepath in python_files: - print(f"Processing {filepath}...") - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - - # Update TaskStatus enum references - for old_status, new_status in STATUS_MAPPINGS.items(): - # Update enum access: TaskStatus.OLD_STATUS -> TaskStatus.NEW_STATUS - content = re.sub( - rf'TaskStatus\.{old_status}\b', - f'TaskStatus.{new_status}', - content - ) - - # Update string comparisons: == 'OLD_STATUS' -> == 'NEW_STATUS' - content = re.sub( - rf"['\"]({old_status})['\"]", - f"'{new_status}'", - content - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def update_javascript_files(): - """Update JavaScript files with new TaskStatus values""" - js_files = [] - - # Find all JS files - if os.path.exists('static/js'): - js_files.extend([str(p) for p in Path('static/js').glob('*.js')]) - - for filepath in js_files: - print(f"Processing {filepath}...") - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - - # Update status values in JavaScript - for old_status, new_status in STATUS_MAPPINGS.items(): - # Update string literals - content = re.sub( - rf"['\"]({old_status})['\"]", - f"'{new_status}'", - content - ) - - # Update in case statements or object keys - content = re.sub( - rf'\b{old_status}\b:', - f'{new_status}:', - content - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No 
changes needed in {filepath}") - -def update_template_files(): - """Update template files with new TaskStatus values""" - template_files = [] - - # Find all template files that might have task status - if os.path.exists('templates'): - template_files.extend([str(p) for p in Path('templates').glob('*.html')]) - - for filepath in template_files: - # Skip if file doesn't contain task-related content - with open(filepath, 'r') as f: - content = f.read() - - if 'task' not in content.lower() and 'status' not in content.lower(): - continue - - print(f"Processing {filepath}...") - - original_content = content - - # Update status values in templates - for old_status, new_status in STATUS_MAPPINGS.items(): - # Update in option values: value="OLD_STATUS" -> value="NEW_STATUS" - content = re.sub( - rf'value=[\'"]{old_status}[\'"]', - f'value="{new_status}"', - content - ) - - # Update display text (be more careful here) - if old_status == 'NOT_STARTED': - content = re.sub(r'>Not Started<', '>To Do<', content) - elif old_status == 'COMPLETED': - content = re.sub(r'>Completed<', '>Done<', content) - elif old_status == 'ON_HOLD': - content = re.sub(r'>On Hold<', '>In Review<', content) - - # Update in JavaScript within templates - content = re.sub( - rf"['\"]({old_status})['\"]", - f"'{new_status}'", - content - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def main(): - print("=== Fixing TaskStatus Enum Usage ===\n") - - print("1. Updating Python files...") - update_python_files() - - print("\n2. Updating JavaScript files...") - update_javascript_files() - - print("\n3. Updating template files...") - update_template_files() - - print("\n✅ TaskStatus migration complete!") - print("\nStatus mappings applied:") - for old, new in STATUS_MAPPINGS.items(): - print(f" - {old} → {new}") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/13_fix_work_region_usage.py b/migrations_old/old_migrations/13_fix_work_region_usage.py deleted file mode 100755 index bed60a1..0000000 --- a/migrations_old/old_migrations/13_fix_work_region_usage.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix WorkRegion enum usage throughout the codebase -""" - -import os -import re -from pathlib import Path - -# Define old to new region mappings -REGION_MAPPINGS = { - 'UNITED_STATES': 'USA', - 'UNITED_KINGDOM': 'UK', - 'FRANCE': 'EU', - 'EUROPEAN_UNION': 'EU', - 'CUSTOM': 'OTHER', -} - -# Note: GERMANY is kept as is - it has specific labor laws - -def update_python_files(): - """Update Python files with new WorkRegion values""" - python_files = [] - - # Add known files - known_files = ['app.py', 'routes/company.py', 'routes/system_admin.py'] - python_files.extend([f for f in known_files if os.path.exists(f)]) - - # Search for more Python files - if os.path.exists('routes'): - python_files.extend([str(p) for p in Path('routes').glob('*.py')]) - - # Remove duplicates - python_files = list(set(python_files)) - - for filepath in python_files: - with open(filepath, 'r') as f: - content = f.read() - - # Skip if no WorkRegion references - if 'WorkRegion' not in content: - continue - - print(f"Processing {filepath}...") - - original_content = content - - # Update WorkRegion enum references - for old_region, new_region in REGION_MAPPINGS.items(): - # Update enum access: WorkRegion.OLD_REGION -> WorkRegion.NEW_REGION - content = re.sub( - 
rf'WorkRegion\.{old_region}\b', - f'WorkRegion.{new_region}', - content - ) - - # Update string comparisons - content = re.sub( - rf"['\"]({old_region})['\"]", - f"'{new_region}'", - content - ) - - # Add comments for GERMANY usage to note it has specific laws - if 'WorkRegion.GERMANY' in content and '# Note:' not in content: - content = re.sub( - r'(WorkRegion\.GERMANY)', - r'\1 # Germany has specific labor laws beyond EU', - content, - count=1 # Only comment the first occurrence - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def update_template_files(): - """Update template files with new WorkRegion values""" - template_files = [] - - # Find relevant templates - if os.path.exists('templates'): - for template in Path('templates').glob('*.html'): - with open(template, 'r') as f: - if 'region' in f.read().lower(): - template_files.append(str(template)) - - for filepath in template_files: - print(f"Processing {filepath}...") - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - - # Update region values - for old_region, new_region in REGION_MAPPINGS.items(): - # Update in option values - content = re.sub( - rf'value=[\'"]{old_region}[\'"]', - f'value="{new_region}"', - content - ) - - # Update display names - display_mappings = { - 'UNITED_STATES': 'United States', - 'United States': 'United States', - 'UNITED_KINGDOM': 'United Kingdom', - 'United Kingdom': 'United Kingdom', - 'FRANCE': 'European Union', - 'France': 'European Union', - 'EUROPEAN_UNION': 'European Union', - 'European Union': 'European Union', - 'CUSTOM': 'Other', - 'Custom': 'Other' - } - - for old_display, new_display in display_mappings.items(): - if old_display in ['France', 'FRANCE']: - # France is now part of EU - content = re.sub( - rf'>{old_display}<', - f'>{new_display}<', - content - ) - - if content != original_content: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - else: - print(f" - No changes needed in {filepath}") - -def main(): - print("=== Fixing WorkRegion Enum Usage ===\n") - - print("1. Updating Python files...") - update_python_files() - - print("\n2. 
Updating template files...") - update_template_files() - - print("\n✅ WorkRegion migration complete!") - print("\nRegion mappings applied:") - for old, new in REGION_MAPPINGS.items(): - print(f" - {old} → {new}") - print("\nNote: GERMANY remains as a separate option due to specific labor laws") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/14_fix_removed_fields.py b/migrations_old/old_migrations/14_fix_removed_fields.py deleted file mode 100755 index 9c25af6..0000000 --- a/migrations_old/old_migrations/14_fix_removed_fields.py +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env python3 -""" -Fix references to removed fields throughout the codebase -""" - -import os -import re -from pathlib import Path - -# Fields that were removed from various models -REMOVED_FIELDS = { - 'created_by_id': { - 'models': ['Task', 'Project', 'Sprint', 'Announcement', 'CompanyWorkConfig'], - 'replacement': 'None', # or could track via audit log - 'comment': 'Field removed - consider using audit log for creator tracking' - }, - 'region_name': { - 'models': ['CompanyWorkConfig'], - 'replacement': 'work_region.value', - 'comment': 'Use work_region enum value instead' - }, - 'additional_break_minutes': { - 'models': ['CompanyWorkConfig'], - 'replacement': 'None', - 'comment': 'Field removed - simplified break configuration' - }, - 'additional_break_threshold_hours': { - 'models': ['CompanyWorkConfig'], - 'replacement': 'None', - 'comment': 'Field removed - simplified break configuration' - } -} - -def update_python_files(): - """Update Python files to handle removed fields""" - python_files = [] - - # Get all Python files - for root, dirs, files in os.walk('.'): - # Skip virtual environments and cache - if 'venv' in root or '__pycache__' in root or '.git' in root: - continue - for file in files: - if file.endswith('.py'): - python_files.append(os.path.join(root, file)) - - for filepath in python_files: - # Skip migration scripts - if 'migrations/' in filepath: - continue - - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - modified = False - - for field, info in REMOVED_FIELDS.items(): - if field not in content: - continue - - print(f"Processing {filepath} for {field}...") - - # Handle different patterns - if field == 'created_by_id': - # Comment out lines that assign created_by_id - content = re.sub( - rf'^(\s*)([^#\n]*created_by_id\s*=\s*[^,\n]+,?)(.*)$', - rf'\1# REMOVED: \2 # {info["comment"]}\3', - content, - flags=re.MULTILINE - ) - - # Remove from query filters - content = re.sub( - rf'\.filter_by\(created_by_id=[^)]+\)', - '.filter_by() # REMOVED: created_by_id filter', - content - ) - - # Remove from dictionary accesses - content = re.sub( - rf"['\"]created_by_id['\"]\s*:\s*[^,}}]+[,}}]", - '# "created_by_id" removed from model', - content - ) - - elif field == 'region_name': - # Replace with work_region.value - content = re.sub( - rf'\.region_name\b', - '.work_region.value', - content - ) - content = re.sub( - rf"\['region_name'\]", - "['work_region'].value", - content - ) - - elif field in ['additional_break_minutes', 'additional_break_threshold_hours']: - # Comment out references - content = re.sub( - rf'^(\s*)([^#\n]*{field}[^#\n]*)$', - rf'\1# REMOVED: \2 # {info["comment"]}', - content, - flags=re.MULTILINE - ) - - if content != original_content: - modified = True - - if modified: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - -def update_template_files(): - 
"""Update template files to handle removed fields""" - template_files = [] - - if os.path.exists('templates'): - template_files = [str(p) for p in Path('templates').glob('*.html')] - - for filepath in template_files: - with open(filepath, 'r') as f: - content = f.read() - - original_content = content - modified = False - - for field, info in REMOVED_FIELDS.items(): - if field not in content: - continue - - print(f"Processing {filepath} for {field}...") - - if field == 'created_by_id': - # Remove or comment out created_by references in templates - # Match {{...created_by_id...}} patterns - pattern = r'\{\{[^}]*\.created_by_id[^}]*\}\}' - content = re.sub( - pattern, - '', - content - ) - - elif field == 'region_name': - # Replace with work_region.value - # Match {{...region_name...}} and replace region_name with work_region.value - pattern = r'(\{\{[^}]*\.)region_name([^}]*\}\})' - content = re.sub( - pattern, - r'\1work_region.value\2', - content - ) - - elif field in ['additional_break_minutes', 'additional_break_threshold_hours']: - # Remove entire form groups for these fields - pattern = r']*>(?:[^<]|<(?!/div))*' + re.escape(field) + r'.*?\s*' - content = re.sub( - pattern, - f'\n', - content, - flags=re.DOTALL - ) - - if content != original_content: - modified = True - - if modified: - with open(filepath, 'w') as f: - f.write(content) - print(f" ✓ Updated {filepath}") - -def create_audit_log_migration(): - """Create a migration to add audit fields if needed""" - migration_content = '''#!/usr/bin/env python3 -""" -Add audit log fields to replace removed created_by_id -""" - -# This is a template for adding audit logging if needed -# to replace the removed created_by_id functionality - -def add_audit_fields(): - """ - Consider adding these fields to models that lost created_by_id: - - created_by_username (store username instead of ID) - - created_at (if not already present) - - updated_by_username - - updated_at - - Or implement a separate audit log table - """ - pass - -if __name__ == "__main__": - print("Consider implementing audit logging to track who created/modified records") -''' - - with open('migrations/05_add_audit_fields_template.py', 'w') as f: - f.write(migration_content) - print("\n✓ Created template for audit field migration") - -def main(): - print("=== Fixing References to Removed Fields ===\n") - - print("1. Updating Python files...") - update_python_files() - - print("\n2. Updating template files...") - update_template_files() - - print("\n3. 
Creating audit field migration template...") - create_audit_log_migration() - - print("\n✅ Removed fields migration complete!") - print("\nFields handled:") - for field, info in REMOVED_FIELDS.items(): - print(f" - {field}: {info['comment']}") - - print("\n⚠️ Important: Review commented-out code and decide on appropriate replacements") - print(" Consider implementing audit logging for creator tracking") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/migrations_old/old_migrations/15_repair_user_roles.py b/migrations_old/old_migrations/15_repair_user_roles.py deleted file mode 100644 index 06bbed0..0000000 --- a/migrations_old/old_migrations/15_repair_user_roles.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -""" -Repair user roles from string to enum values -""" - -import os -import sys -import logging - -# Add parent directory to path to import app -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -try: - from app import app, db - from models import User, Role -except Exception as e: - print(f"Error importing modules: {e}") - print("This migration requires Flask app context. Skipping...") - sys.exit(0) - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -def repair_user_roles(): - with app.app_context(): - logger.info("Starting user role repair...") - - # Map string role values to enum values - role_mapping = { - 'Team Member': Role.TEAM_MEMBER, - 'TEAM_MEMBER': Role.TEAM_MEMBER, - 'Team Leader': Role.TEAM_LEADER, - 'TEAM_LEADER': Role.TEAM_LEADER, - 'Supervisor': Role.SUPERVISOR, - 'SUPERVISOR': Role.SUPERVISOR, - 'Administrator': Role.ADMIN, - 'ADMIN': Role.ADMIN - } - - users = User.query.all() - fixed_count = 0 - - for user in users: - original_role = user.role - - # Fix role if it's a string or None - if isinstance(user.role, str): - user.role = role_mapping.get(user.role, Role.TEAM_MEMBER) - fixed_count += 1 - elif user.role is None: - user.role = Role.TEAM_MEMBER - fixed_count += 1 - - if fixed_count > 0: - db.session.commit() - logger.info(f"Fixed roles for {fixed_count} users") - else: - logger.info("No role fixes needed") - - logger.info("Role repair completed") - -if __name__ == "__main__": - try: - repair_user_roles() - except Exception as e: - logger.error(f"Migration failed: {e}") - sys.exit(1) \ No newline at end of file diff --git a/migrations_old/old_migrations/19_add_company_invitations.py b/migrations_old/old_migrations/19_add_company_invitations.py deleted file mode 100644 index e4c4069..0000000 --- a/migrations_old/old_migrations/19_add_company_invitations.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 -""" -Add company invitations table for email-based registration -""" - -import sys -import os -sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from flask import Flask -from models import db -from sqlalchemy import text -import logging - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -def migrate(): - """Add company_invitation table""" - app = Flask(__name__) - app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:////data/timetrack.db') - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - - db.init_app(app) - - with app.app_context(): - try: - # Create company_invitation table - create_table_sql = text(""" - CREATE TABLE IF NOT EXISTS company_invitation ( - id SERIAL PRIMARY KEY, - company_id INTEGER NOT NULL REFERENCES company(id), - email VARCHAR(120) NOT 
NULL, - token VARCHAR(64) UNIQUE NOT NULL, - role VARCHAR(50) DEFAULT 'Team Member', - invited_by_id INTEGER NOT NULL REFERENCES "user"(id), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - expires_at TIMESTAMP NOT NULL, - accepted BOOLEAN DEFAULT FALSE, - accepted_at TIMESTAMP, - accepted_by_user_id INTEGER REFERENCES "user"(id) - ); - """) - - db.session.execute(create_table_sql) - - # Create indexes for better performance - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_invitation_token ON company_invitation(token);")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_invitation_email ON company_invitation(email);")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_invitation_company ON company_invitation(company_id);")) - db.session.execute(text("CREATE INDEX IF NOT EXISTS idx_invitation_expires ON company_invitation(expires_at);")) - - db.session.commit() - logger.info("Successfully created company_invitation table") - - return True - - except Exception as e: - logger.error(f"Error creating company_invitation table: {str(e)}") - db.session.rollback() - return False - -if __name__ == '__main__': - success = migrate() - sys.exit(0 if success else 1) \ No newline at end of file diff --git a/migrations_old/old_migrations/20_add_company_updated_at.py b/migrations_old/old_migrations/20_add_company_updated_at.py deleted file mode 100755 index b9adf2d..0000000 --- a/migrations_old/old_migrations/20_add_company_updated_at.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python3 -""" -Add updated_at column to company table -""" - -import os -import sys -import logging -from datetime import datetime - -# Add parent directory to path to import app -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from app import app, db -from sqlalchemy import text - -# Configure logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -def run_migration(): - """Add updated_at column to company table""" - with app.app_context(): - try: - # Check if we're using PostgreSQL or SQLite - database_url = app.config['SQLALCHEMY_DATABASE_URI'] - is_postgres = 'postgresql://' in database_url or 'postgres://' in database_url - - if is_postgres: - # PostgreSQL migration - logger.info("Running PostgreSQL migration to add updated_at to company table...") - - # Check if column exists - result = db.session.execute(text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'company' AND column_name = 'updated_at' - """)) - - if not result.fetchone(): - logger.info("Adding updated_at column to company table...") - db.session.execute(text(""" - ALTER TABLE company - ADD COLUMN updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - """)) - - # Update existing rows to have updated_at = created_at - db.session.execute(text(""" - UPDATE company - SET updated_at = created_at - WHERE updated_at IS NULL - """)) - - db.session.commit() - logger.info("Successfully added updated_at column to company table") - else: - logger.info("updated_at column already exists in company table") - else: - # SQLite migration - logger.info("Running SQLite migration to add updated_at to company table...") - - # For SQLite, we need to check differently - result = db.session.execute(text("PRAGMA table_info(company)")) - columns = [row[1] for row in result.fetchall()] - - if 'updated_at' not in columns: - logger.info("Adding updated_at column to company table...") - db.session.execute(text(""" - 
ALTER TABLE company - ADD COLUMN updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - """)) - - # Update existing rows to have updated_at = created_at - db.session.execute(text(""" - UPDATE company - SET updated_at = created_at - WHERE updated_at IS NULL - """)) - - db.session.commit() - logger.info("Successfully added updated_at column to company table") - else: - logger.info("updated_at column already exists in company table") - - return True - - except Exception as e: - logger.error(f"Migration failed: {e}") - db.session.rollback() - return False - -if __name__ == "__main__": - success = run_migration() - sys.exit(0 if success else 1) \ No newline at end of file diff --git a/migrations_old/old_migrations/run_all_db_migrations.py b/migrations_old/old_migrations/run_all_db_migrations.py deleted file mode 100755 index af837f4..0000000 --- a/migrations_old/old_migrations/run_all_db_migrations.py +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env python3 -""" -Master database migration runner -Runs all database schema migrations in the correct order -""" - -import os -import sys -import subprocess -import json -from datetime import datetime - -# Migration state file -MIGRATION_STATE_FILE = '/data/db_migrations_state.json' - -# List of database schema migrations in order -DB_MIGRATIONS = [ - '01_migrate_db.py', # SQLite schema updates (must run before data migration) - '20_add_company_updated_at.py', # Add updated_at column BEFORE data migration - '02_migrate_sqlite_to_postgres_fixed.py', # Fixed SQLite to PostgreSQL data migration - '03_add_dashboard_columns.py', - '04_add_user_preferences_columns.py', - '05_fix_task_status_enum.py', - '06_add_archived_status.py', - '07_fix_company_work_config_columns.py', - '08_fix_work_region_enum.py', - '09_add_germany_to_workregion.py', - '10_add_company_settings_columns.py', - '19_add_company_invitations.py' -] - -def load_migration_state(): - """Load the migration state from file""" - if os.path.exists(MIGRATION_STATE_FILE): - try: - with open(MIGRATION_STATE_FILE, 'r') as f: - return json.load(f) - except: - return {} - return {} - -def save_migration_state(state): - """Save the migration state to file""" - os.makedirs(os.path.dirname(MIGRATION_STATE_FILE), exist_ok=True) - with open(MIGRATION_STATE_FILE, 'w') as f: - json.dump(state, f, indent=2) - -def run_migration(migration_file): - """Run a single migration script""" - script_path = os.path.join(os.path.dirname(__file__), migration_file) - - if not os.path.exists(script_path): - print(f"⚠️ Migration {migration_file} not found, skipping...") - return False - - print(f"\n🔄 Running migration: {migration_file}") - - try: - # Run the migration script - result = subprocess.run( - [sys.executable, script_path], - capture_output=True, - text=True - ) - - if result.returncode == 0: - print(f"✅ {migration_file} completed successfully") - if result.stdout: - print(result.stdout) - return True - else: - print(f"❌ {migration_file} failed with return code {result.returncode}") - if result.stderr: - print(f"Error output: {result.stderr}") - if result.stdout: - print(f"Standard output: {result.stdout}") - return False - - except Exception as e: - print(f"❌ Error running {migration_file}: {e}") - return False - -def main(): - """Run all database migrations""" - print("=== Database Schema Migrations ===") - print(f"Running {len(DB_MIGRATIONS)} migrations...") - - # Load migration state - state = load_migration_state() - - success_count = 0 - failed_count = 0 - skipped_count = 0 - - for migration in DB_MIGRATIONS: - # 
Check if migration has already been run successfully - if state.get(migration, {}).get('status') == 'success': - print(f"\n⏭️ Skipping {migration} (already completed)") - skipped_count += 1 - continue - - # Run the migration - success = run_migration(migration) - - # Update state - state[migration] = { - 'status': 'success' if success else 'failed', - 'timestamp': datetime.now().isoformat(), - 'attempts': state.get(migration, {}).get('attempts', 0) + 1 - } - - if success: - success_count += 1 - else: - failed_count += 1 - # Don't stop on failure, continue with other migrations - print(f"⚠️ Continuing despite failure in {migration}") - - # Save state after each migration - save_migration_state(state) - - # Summary - print("\n" + "="*50) - print("Database Migration Summary:") - print(f"✅ Successful: {success_count}") - print(f"❌ Failed: {failed_count}") - print(f"⏭️ Skipped: {skipped_count}") - print(f"📊 Total: {len(DB_MIGRATIONS)}") - - if failed_count > 0: - print("\n⚠️ Some migrations failed. Check the logs above for details.") - return 1 - else: - print("\n✨ All database migrations completed successfully!") - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/old_migrations/run_code_migrations.py b/migrations_old/old_migrations/run_code_migrations.py deleted file mode 100755 index 54d630c..0000000 --- a/migrations_old/old_migrations/run_code_migrations.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python3 -""" -Run code migrations during startup - updates code to match model changes -""" - -import os -import sys -import subprocess -from pathlib import Path -import hashlib -import json -from datetime import datetime - -MIGRATION_STATE_FILE = '/data/code_migrations_state.json' - -def get_migration_hash(script_path): - """Get hash of migration script to detect changes""" - with open(script_path, 'rb') as f: - return hashlib.md5(f.read()).hexdigest() - -def load_migration_state(): - """Load state of previously run migrations""" - if os.path.exists(MIGRATION_STATE_FILE): - try: - with open(MIGRATION_STATE_FILE, 'r') as f: - return json.load(f) - except: - return {} - return {} - -def save_migration_state(state): - """Save migration state""" - os.makedirs(os.path.dirname(MIGRATION_STATE_FILE), exist_ok=True) - with open(MIGRATION_STATE_FILE, 'w') as f: - json.dump(state, f, indent=2) - -def should_run_migration(script_path, state): - """Check if migration should run based on state""" - script_name = os.path.basename(script_path) - current_hash = get_migration_hash(script_path) - - if script_name not in state: - return True - - # Re-run if script has changed - if state[script_name].get('hash') != current_hash: - return True - - # Skip if already run successfully - if state[script_name].get('status') == 'success': - return False - - return True - -def run_migration(script_path, state): - """Run a single migration script""" - script_name = os.path.basename(script_path) - print(f"\n{'='*60}") - print(f"Running code migration: {script_name}") - print('='*60) - - try: - result = subprocess.run( - [sys.executable, script_path], - capture_output=True, - text=True, - check=True, - timeout=300 # 5 minute timeout - ) - - print(result.stdout) - if result.stderr: - print("Warnings:", result.stderr) - - # Update state - state[script_name] = { - 'hash': get_migration_hash(script_path), - 'status': 'success', - 'last_run': str(datetime.now()), - 'output': result.stdout[-1000:] if result.stdout else '' # Last 1000 chars - } - 
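The two runner scripts above share one idempotency pattern: record each script's status (plus, for code migrations, an md5 content hash) in a JSON state file, skip anything already marked `success`, and re-run a script whose hash has changed. A minimal sketch of that shared pattern, with a hypothetical state-file path:

```python
import hashlib
import json
import os

STATE_FILE = "/data/migrations_state.json"  # hypothetical path for this sketch


def load_state():
    """Missing or unreadable state simply means nothing has run yet."""
    try:
        with open(STATE_FILE) as f:
            return json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        return {}


def file_hash(path):
    """md5 of the script contents, used to detect edited migrations."""
    with open(path, "rb") as f:
        return hashlib.md5(f.read()).hexdigest()


def should_run(path, state):
    """Run if never recorded, if the script changed, or if the last run failed."""
    entry = state.get(os.path.basename(path), {})
    return (
        not entry
        or entry.get("hash") != file_hash(path)
        or entry.get("status") != "success"
    )
```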
save_migration_state(state) - return True - - except subprocess.CalledProcessError as e: - print(f"❌ Error running {script_name}:") - print(e.stdout) - print(e.stderr) - - # Update state with failure - state[script_name] = { - 'hash': get_migration_hash(script_path), - 'status': 'failed', - 'last_run': str(datetime.now()), - 'error': str(e) - } - save_migration_state(state) - return False - except subprocess.TimeoutExpired: - print(f"❌ Migration {script_name} timed out!") - state[script_name] = { - 'hash': get_migration_hash(script_path), - 'status': 'timeout', - 'last_run': str(datetime.now()) - } - save_migration_state(state) - return False - -def main(): - """Run all code migrations that need to be run""" - - print("🔄 Checking for code migrations...") - - # Get migration state - state = load_migration_state() - - # Get all migration scripts - migrations_dir = Path(__file__).parent - migration_scripts = sorted([ - str(p) for p in migrations_dir.glob('*.py') - if p.name.startswith(('11_', '12_', '13_', '14_', '15_')) - and 'template' not in p.name.lower() - ]) - - if not migration_scripts: - print("No code migration scripts found.") - return 0 - - # Check which migrations need to run - to_run = [] - for script in migration_scripts: - if should_run_migration(script, state): - to_run.append(script) - - if not to_run: - print("✅ All code migrations are up to date.") - return 0 - - print(f"\n📋 Found {len(to_run)} code migrations to run:") - for script in to_run: - print(f" - {Path(script).name}") - - # Run migrations - failed = [] - for script in to_run: - if not run_migration(script, state): - failed.append(script) - # Continue with other migrations even if one fails - print(f"\n⚠️ Migration {Path(script).name} failed, continuing with others...") - - # Summary - print("\n" + "="*60) - if failed: - print(f"⚠️ {len(failed)} code migrations failed:") - for script in failed: - print(f" - {Path(script).name}") - print("\nThe application may not work correctly.") - print("Check the logs and fix the issues.") - # Don't exit with error - let the app start anyway - return 0 - else: - print("✅ All code migrations completed successfully!") - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/postgres_only_migration.py b/migrations_old/postgres_only_migration.py deleted file mode 100755 index 545e03c..0000000 --- a/migrations_old/postgres_only_migration.py +++ /dev/null @@ -1,327 +0,0 @@ -#!/usr/bin/env python3 -""" -PostgreSQL-only migration script for TimeTrack -Applies all schema changes from commit 4214e88 onward -""" - -import os -import sys -import psycopg2 -from psycopg2.extras import RealDictCursor -import logging -from datetime import datetime - -# Configure logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s' -) -logger = logging.getLogger(__name__) - - -class PostgresMigration: - def __init__(self, database_url): - self.database_url = database_url - self.conn = None - - def connect(self): - """Connect to PostgreSQL database""" - try: - self.conn = psycopg2.connect(self.database_url) - self.conn.autocommit = False - logger.info("Connected to PostgreSQL database") - return True - except Exception as e: - logger.error(f"Failed to connect to database: {e}") - return False - - def close(self): - """Close database connection""" - if self.conn: - self.conn.close() - - def execute_migration(self, name, sql_statements): - """Execute a migration with proper error handling""" - 
logger.info(f"Running migration: {name}") - cursor = self.conn.cursor() - - try: - for statement in sql_statements: - if statement.strip(): - cursor.execute(statement) - self.conn.commit() - logger.info(f"✓ {name} completed successfully") - return True - except Exception as e: - self.conn.rollback() - logger.error(f"✗ {name} failed: {e}") - return False - finally: - cursor.close() - - def check_column_exists(self, table_name, column_name): - """Check if a column exists in a table""" - cursor = self.conn.cursor() - cursor.execute(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.columns - WHERE table_name = %s AND column_name = %s - ) - """, (table_name, column_name)) - exists = cursor.fetchone()[0] - cursor.close() - return exists - - def check_table_exists(self, table_name): - """Check if a table exists""" - cursor = self.conn.cursor() - cursor.execute(""" - SELECT EXISTS ( - SELECT 1 FROM information_schema.tables - WHERE table_name = %s - ) - """, (table_name,)) - exists = cursor.fetchone()[0] - cursor.close() - return exists - - def check_enum_value_exists(self, enum_name, value): - """Check if an enum value exists""" - cursor = self.conn.cursor() - cursor.execute(""" - SELECT EXISTS ( - SELECT 1 FROM pg_enum - WHERE enumlabel = %s - AND enumtypid = (SELECT oid FROM pg_type WHERE typname = %s) - ) - """, (value, enum_name)) - exists = cursor.fetchone()[0] - cursor.close() - return exists - - def run_all_migrations(self): - """Run all migrations in order""" - if not self.connect(): - return False - - success = True - - # 1. Add company.updated_at - if not self.check_column_exists('company', 'updated_at'): - success &= self.execute_migration("Add company.updated_at", [ - """ - ALTER TABLE company - ADD COLUMN updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP; - """, - """ - UPDATE company SET updated_at = created_at WHERE updated_at IS NULL; - """ - ]) - - # 2. Add user columns for 2FA and avatar - if not self.check_column_exists('user', 'two_factor_enabled'): - success &= self.execute_migration("Add user 2FA and avatar columns", [ - """ - ALTER TABLE "user" - ADD COLUMN two_factor_enabled BOOLEAN DEFAULT FALSE, - ADD COLUMN two_factor_secret VARCHAR(32), - ADD COLUMN avatar_url VARCHAR(255); - """ - ]) - - # 3. Create company_invitation table - if not self.check_table_exists('company_invitation'): - success &= self.execute_migration("Create company_invitation table", [ - """ - CREATE TABLE company_invitation ( - id SERIAL PRIMARY KEY, - company_id INTEGER NOT NULL REFERENCES company(id), - email VARCHAR(255) NOT NULL, - role VARCHAR(50) NOT NULL, - token VARCHAR(255) UNIQUE NOT NULL, - invited_by_id INTEGER REFERENCES "user"(id), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - expires_at TIMESTAMP NOT NULL, - used_at TIMESTAMP, - used_by_id INTEGER REFERENCES "user"(id) - ); - """, - """ - CREATE INDEX idx_invitation_token ON company_invitation(token); - """, - """ - CREATE INDEX idx_invitation_company ON company_invitation(company_id); - """, - """ - CREATE INDEX idx_invitation_email ON company_invitation(email); - """ - ]) - - # 4. 
Add user_preferences columns - if self.check_table_exists('user_preferences'): - columns_to_add = [ - ('theme', 'VARCHAR(20) DEFAULT \'light\''), - ('language', 'VARCHAR(10) DEFAULT \'en\''), - ('timezone', 'VARCHAR(50) DEFAULT \'UTC\''), - ('date_format', 'VARCHAR(20) DEFAULT \'YYYY-MM-DD\''), - ('time_format', 'VARCHAR(10) DEFAULT \'24h\''), - ('week_start', 'INTEGER DEFAULT 1'), - ('show_weekends', 'BOOLEAN DEFAULT TRUE'), - ('compact_mode', 'BOOLEAN DEFAULT FALSE'), - ('email_notifications', 'BOOLEAN DEFAULT TRUE'), - ('push_notifications', 'BOOLEAN DEFAULT FALSE'), - ('task_reminders', 'BOOLEAN DEFAULT TRUE'), - ('daily_summary', 'BOOLEAN DEFAULT FALSE'), - ('weekly_report', 'BOOLEAN DEFAULT TRUE'), - ('mention_notifications', 'BOOLEAN DEFAULT TRUE'), - ('task_assigned_notifications', 'BOOLEAN DEFAULT TRUE'), - ('task_completed_notifications', 'BOOLEAN DEFAULT FALSE'), - ('sound_enabled', 'BOOLEAN DEFAULT TRUE'), - ('keyboard_shortcuts', 'BOOLEAN DEFAULT TRUE'), - ('auto_start_timer', 'BOOLEAN DEFAULT FALSE'), - ('idle_time_detection', 'BOOLEAN DEFAULT TRUE'), - ('pomodoro_enabled', 'BOOLEAN DEFAULT FALSE'), - ('pomodoro_duration', 'INTEGER DEFAULT 25'), - ('pomodoro_break', 'INTEGER DEFAULT 5') - ] - - for col_name, col_def in columns_to_add: - if not self.check_column_exists('user_preferences', col_name): - success &= self.execute_migration(f"Add user_preferences.{col_name}", [ - f'ALTER TABLE user_preferences ADD COLUMN {col_name} {col_def};' - ]) - - # 5. Add user_dashboard columns - if self.check_table_exists('user_dashboard'): - if not self.check_column_exists('user_dashboard', 'layout'): - success &= self.execute_migration("Add user_dashboard layout columns", [ - """ - ALTER TABLE user_dashboard - ADD COLUMN layout JSON DEFAULT '{}', - ADD COLUMN is_locked BOOLEAN DEFAULT FALSE; - """ - ]) - - # 6. Add company_work_config columns - if self.check_table_exists('company_work_config'): - columns_to_add = [ - ('standard_hours_per_day', 'FLOAT DEFAULT 8.0'), - ('standard_hours_per_week', 'FLOAT DEFAULT 40.0'), - ('overtime_rate', 'FLOAT DEFAULT 1.5'), - ('double_time_enabled', 'BOOLEAN DEFAULT FALSE'), - ('double_time_threshold', 'FLOAT DEFAULT 12.0'), - ('double_time_rate', 'FLOAT DEFAULT 2.0'), - ('weekly_overtime_threshold', 'FLOAT DEFAULT 40.0'), - ('weekly_overtime_rate', 'FLOAT DEFAULT 1.5'), - ('created_at', 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP'), - ('updated_at', 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP') - ] - - for col_name, col_def in columns_to_add: - if not self.check_column_exists('company_work_config', col_name): - success &= self.execute_migration(f"Add company_work_config.{col_name}", [ - f'ALTER TABLE company_work_config ADD COLUMN {col_name} {col_def};' - ]) - - # 7. 
Add company_settings columns - if self.check_table_exists('company_settings'): - columns_to_add = [ - ('work_week_start', 'INTEGER DEFAULT 1'), - ('work_days', 'VARCHAR(20) DEFAULT \'1,2,3,4,5\''), - ('time_tracking_mode', 'VARCHAR(20) DEFAULT \'flexible\''), - ('allow_manual_time', 'BOOLEAN DEFAULT TRUE'), - ('require_project_selection', 'BOOLEAN DEFAULT TRUE'), - ('allow_future_entries', 'BOOLEAN DEFAULT FALSE'), - ('max_hours_per_entry', 'FLOAT DEFAULT 24.0'), - ('min_hours_per_entry', 'FLOAT DEFAULT 0.0'), - ('round_time_to', 'INTEGER DEFAULT 1'), - ('auto_break_deduction', 'BOOLEAN DEFAULT FALSE'), - ('allow_overlapping_entries', 'BOOLEAN DEFAULT FALSE'), - ('require_daily_notes', 'BOOLEAN DEFAULT FALSE'), - ('enable_tasks', 'BOOLEAN DEFAULT TRUE'), - ('enable_projects', 'BOOLEAN DEFAULT TRUE'), - ('enable_teams', 'BOOLEAN DEFAULT TRUE'), - ('enable_reports', 'BOOLEAN DEFAULT TRUE'), - ('enable_invoicing', 'BOOLEAN DEFAULT FALSE'), - ('enable_client_access', 'BOOLEAN DEFAULT FALSE'), - ('default_currency', 'VARCHAR(3) DEFAULT \'USD\''), - ('created_at', 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP'), - ('updated_at', 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP') - ] - - for col_name, col_def in columns_to_add: - if not self.check_column_exists('company_settings', col_name): - success &= self.execute_migration(f"Add company_settings.{col_name}", [ - f'ALTER TABLE company_settings ADD COLUMN {col_name} {col_def};' - ]) - - # 8. Add dashboard_widget columns - if self.check_table_exists('dashboard_widget'): - if not self.check_column_exists('dashboard_widget', 'config'): - success &= self.execute_migration("Add dashboard_widget config columns", [ - """ - ALTER TABLE dashboard_widget - ADD COLUMN config JSON DEFAULT '{}', - ADD COLUMN is_visible BOOLEAN DEFAULT TRUE; - """ - ]) - - # 9. Update WorkRegion enum - if not self.check_enum_value_exists('workregion', 'GERMANY'): - success &= self.execute_migration("Add GERMANY to WorkRegion enum", [ - """ - ALTER TYPE workregion ADD VALUE IF NOT EXISTS 'GERMANY'; - """ - ]) - - # 10. Update TaskStatus enum - if not self.check_enum_value_exists('taskstatus', 'ARCHIVED'): - success &= self.execute_migration("Add ARCHIVED to TaskStatus enum", [ - """ - ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'; - """ - ]) - - # 11. Update WidgetType enum - widget_types_to_add = [ - 'REVENUE_CHART', 'EXPENSE_CHART', 'PROFIT_CHART', 'CASH_FLOW', - 'INVOICE_STATUS', 'CLIENT_LIST', 'PROJECT_BUDGET', 'TEAM_CAPACITY', - 'SPRINT_BURNDOWN', 'VELOCITY_CHART', 'BACKLOG_STATUS', 'RELEASE_TIMELINE', - 'CODE_COMMITS', 'BUILD_STATUS', 'DEPLOYMENT_HISTORY', 'ERROR_RATE', - 'SYSTEM_HEALTH', 'USER_ACTIVITY', 'SECURITY_ALERTS', 'AUDIT_LOG' - ] - - for widget_type in widget_types_to_add: - if not self.check_enum_value_exists('widgettype', widget_type): - success &= self.execute_migration(f"Add {widget_type} to WidgetType enum", [ - f"ALTER TYPE widgettype ADD VALUE IF NOT EXISTS '{widget_type}';" - ]) - - self.close() - - if success: - logger.info("\n✅ All migrations completed successfully!") - else: - logger.error("\n❌ Some migrations failed. 
Check the logs above.") - - return success - - -def main(): - """Main migration function""" - # Get database URL from environment - database_url = os.environ.get('DATABASE_URL') - - if not database_url: - logger.error("DATABASE_URL environment variable not set") - return 1 - - # Run migrations - migration = PostgresMigration(database_url) - success = migration.run_all_migrations() - - return 0 if success else 1 - - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/migrations_old/remove_email_preferences.sql b/migrations_old/remove_email_preferences.sql deleted file mode 100644 index 36a0b69..0000000 --- a/migrations_old/remove_email_preferences.sql +++ /dev/null @@ -1,8 +0,0 @@ --- Remove unused columns from user_preferences table --- These columns were defined in the model but never used in the application - -ALTER TABLE user_preferences - DROP COLUMN IF EXISTS email_daily_summary, - DROP COLUMN IF EXISTS email_notifications, - DROP COLUMN IF EXISTS email_weekly_summary, - DROP COLUMN IF EXISTS default_project_id; \ No newline at end of file diff --git a/migrations_old/run_postgres_migrations.py b/migrations_old/run_postgres_migrations.py deleted file mode 100755 index 1ed0216..0000000 --- a/migrations_old/run_postgres_migrations.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env python3 -""" -PostgreSQL-only migration runner -Manages migration state and runs migrations in order -""" - -import os -import sys -import json -import subprocess -from datetime import datetime -from pathlib import Path - -# Migration state file -MIGRATION_STATE_FILE = '/data/postgres_migrations_state.json' - -# List of PostgreSQL migrations in order -POSTGRES_MIGRATIONS = [ - 'postgres_only_migration.py', # Main migration from commit 4214e88 onward - 'add_note_sharing.sql', # Add note sharing functionality - 'remove_email_preferences.sql', # Remove unused email preference columns - 'add_time_preferences.sql', # Add time formatting and rounding preferences -] - - -def load_migration_state(): - """Load the migration state from file""" - if os.path.exists(MIGRATION_STATE_FILE): - try: - with open(MIGRATION_STATE_FILE, 'r') as f: - return json.load(f) - except: - return {} - return {} - - -def save_migration_state(state): - """Save the migration state to file""" - os.makedirs(os.path.dirname(MIGRATION_STATE_FILE), exist_ok=True) - with open(MIGRATION_STATE_FILE, 'w') as f: - json.dump(state, f, indent=2) - - -def run_migration(migration_file): - """Run a single migration script""" - script_path = os.path.join(os.path.dirname(__file__), migration_file) - - if not os.path.exists(script_path): - print(f"⚠️ Migration {migration_file} not found, skipping...") - return False - - print(f"\n🔄 Running migration: {migration_file}") - - try: - # Check if it's a SQL file - if migration_file.endswith('.sql'): - # Run SQL file using psql - # Try to parse DATABASE_URL first, fall back to individual env vars - database_url = os.environ.get('DATABASE_URL') - if database_url: - # Parse DATABASE_URL: postgresql://user:password@host:port/dbname - from urllib.parse import urlparse - parsed = urlparse(database_url) - db_host = parsed.hostname or 'db' - db_port = parsed.port or 5432 - db_name = parsed.path.lstrip('/') or 'timetrack' - db_user = parsed.username or 'timetrack' - db_password = parsed.password or 'timetrack' - else: - db_host = os.environ.get('POSTGRES_HOST', 'db') - db_name = os.environ.get('POSTGRES_DB', 'timetrack') - db_user = os.environ.get('POSTGRES_USER', 'timetrack') - db_password 
= os.environ.get('POSTGRES_PASSWORD', 'timetrack') - - result = subprocess.run( - ['psql', '-h', db_host, '-U', db_user, '-d', db_name, '-f', script_path], - capture_output=True, - text=True, - env={**os.environ, 'PGPASSWORD': db_password} - ) - else: - # Run Python migration script - result = subprocess.run( - [sys.executable, script_path], - capture_output=True, - text=True - ) - - if result.returncode == 0: - print(f"✅ {migration_file} completed successfully") - if result.stdout: - print(result.stdout) - return True - else: - print(f"❌ {migration_file} failed with return code {result.returncode}") - if result.stderr: - print(f"Error output: {result.stderr}") - if result.stdout: - print(f"Standard output: {result.stdout}") - return False - - except Exception as e: - print(f"❌ Error running {migration_file}: {e}") - return False - - -def main(): - """Run all PostgreSQL migrations""" - print("=== PostgreSQL Database Migrations ===") - print(f"Running {len(POSTGRES_MIGRATIONS)} migrations...") - - # Load migration state - state = load_migration_state() - - success_count = 0 - failed_count = 0 - skipped_count = 0 - - for migration in POSTGRES_MIGRATIONS: - # Check if migration has already been run successfully - if state.get(migration, {}).get('status') == 'success': - print(f"\n⏭️ Skipping {migration} (already completed)") - skipped_count += 1 - continue - - # Run the migration - success = run_migration(migration) - - # Update state - state[migration] = { - 'status': 'success' if success else 'failed', - 'timestamp': datetime.now().isoformat(), - 'attempts': state.get(migration, {}).get('attempts', 0) + 1 - } - - if success: - success_count += 1 - else: - failed_count += 1 - - # Save state after each migration - save_migration_state(state) - - # Summary - print("\n" + "="*50) - print("PostgreSQL Migration Summary:") - print(f"✅ Successful: {success_count}") - print(f"❌ Failed: {failed_count}") - print(f"⏭️ Skipped: {skipped_count}") - print(f"📊 Total: {len(POSTGRES_MIGRATIONS)}") - - if failed_count > 0: - print("\n⚠️ Some migrations failed. Check the logs above for details.") - return 1 - else: - print("\n✨ All PostgreSQL migrations completed successfully!") - return 0 - - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/models/enums_as_integers.py b/models/enums_as_integers.py deleted file mode 100644 index 29ad50d..0000000 --- a/models/enums_as_integers.py +++ /dev/null @@ -1,101 +0,0 @@ -""" -Alternative enum implementation using integers instead of PostgreSQL enums. -This avoids all PostgreSQL enum issues by using simple integers with Python-side validation. 
-""" - -import enum - -class IntEnum(enum.IntEnum): - """Base class for integer-based enums.""" - - @classmethod - def choices(cls): - """Return choices for forms.""" - return [(item.value, item.display_name) for item in cls] - - @property - def display_name(self): - """Get display name for the enum value.""" - return self._display_names.get(self, self.name.replace('_', ' ').title()) - - -class TaskStatus(IntEnum): - """Task status using integers.""" - TODO = 1 - IN_PROGRESS = 2 - IN_REVIEW = 3 - DONE = 4 - CANCELLED = 5 - ARCHIVED = 6 - - _display_names = { - TODO: "To Do", - IN_PROGRESS: "In Progress", - IN_REVIEW: "In Review", - DONE: "Done", - CANCELLED: "Cancelled", - ARCHIVED: "Archived" - } - - -class TaskPriority(IntEnum): - """Task priority using integers.""" - LOW = 1 - MEDIUM = 2 - HIGH = 3 - URGENT = 4 - - _display_names = { - LOW: "Low", - MEDIUM: "Medium", - HIGH: "High", - URGENT: "Urgent" - } - - -class Role(IntEnum): - """User roles using integers.""" - TEAM_MEMBER = 1 - TEAM_LEADER = 2 - SUPERVISOR = 3 - ADMIN = 4 - SYSTEM_ADMIN = 5 - - _display_names = { - TEAM_MEMBER: "Team Member", - TEAM_LEADER: "Team Leader", - SUPERVISOR: "Supervisor", - ADMIN: "Administrator", - SYSTEM_ADMIN: "System Administrator" - } - - -# Example model usage: -""" -from sqlalchemy import Integer, CheckConstraint -from models.enums_as_integers import TaskStatus, TaskPriority - -class Task(db.Model): - # Instead of: status = db.Column(db.Enum(TaskStatus)) - status = db.Column(db.Integer, default=TaskStatus.TODO) - priority = db.Column(db.Integer, default=TaskPriority.MEDIUM) - - __table_args__ = ( - CheckConstraint( - status.in_([s.value for s in TaskStatus]), - name='check_task_status' - ), - CheckConstraint( - priority.in_([p.value for p in TaskPriority]), - name='check_task_priority' - ), - ) - - @property - def status_display(self): - return TaskStatus(self.status).display_name if self.status else None - - @property - def priority_display(self): - return TaskPriority(self.priority).display_name if self.priority else None -""" \ No newline at end of file diff --git a/models_old.py b/models_old.py deleted file mode 100644 index 65fc813..0000000 --- a/models_old.py +++ /dev/null @@ -1,1508 +0,0 @@ -from flask_sqlalchemy import SQLAlchemy -from werkzeug.security import generate_password_hash, check_password_hash -from datetime import datetime, timedelta -import secrets -import enum - -db = SQLAlchemy() - -# Define Role as an Enum for better type safety -class Role(enum.Enum): - TEAM_MEMBER = "Team Member" - TEAM_LEADER = "Team Leader" - SUPERVISOR = "Supervisor" - ADMIN = "Administrator" # Company-level admin - SYSTEM_ADMIN = "System Administrator" # System-wide admin - -# Define Account Type for freelancer support -class AccountType(enum.Enum): - COMPANY_USER = "Company User" - FREELANCER = "Freelancer" - -# Company model for multi-tenancy -class Company(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100), nullable=False, unique=True) - slug = db.Column(db.String(50), unique=True, nullable=False) # URL-friendly identifier - description = db.Column(db.Text) - created_at = db.Column(db.DateTime, default=datetime.now) - - # Freelancer support - is_personal = db.Column(db.Boolean, default=False) # True for auto-created freelancer companies - - # Company settings - is_active = db.Column(db.Boolean, default=True) - max_users = db.Column(db.Integer, default=100) # Optional user limit - - # Relationships - users = db.relationship('User', backref='company', 
lazy=True) - teams = db.relationship('Team', backref='company', lazy=True) - projects = db.relationship('Project', backref='company', lazy=True) - - def __repr__(self): - return f'' - - def generate_slug(self): - """Generate URL-friendly slug from company name""" - import re - slug = re.sub(r'[^\w\s-]', '', self.name.lower()) - slug = re.sub(r'[-\s]+', '-', slug) - return slug.strip('-') - -# Create Team model -class Team(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100), nullable=False) - description = db.Column(db.String(255)) - created_at = db.Column(db.DateTime, default=datetime.now) - - # Company association for multi-tenancy - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Relationship with users (one team has many users) - users = db.relationship('User', backref='team', lazy=True) - - # Unique constraint per company - __table_args__ = (db.UniqueConstraint('company_id', 'name', name='uq_team_name_per_company'),) - - def __repr__(self): - return f'' - -class Project(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100), nullable=False) - description = db.Column(db.Text, nullable=True) - code = db.Column(db.String(20), nullable=False) # Project code (e.g., PRJ001) - is_active = db.Column(db.Boolean, default=True) - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - - # Company association for multi-tenancy - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Foreign key to user who created the project (Admin/Supervisor) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Optional team assignment - if set, only team members can log time to this project - team_id = db.Column(db.Integer, db.ForeignKey('team.id'), nullable=True) - - # Project categorization - category_id = db.Column(db.Integer, db.ForeignKey('project_category.id'), nullable=True) - - # Project dates - start_date = db.Column(db.Date, nullable=True) - end_date = db.Column(db.Date, nullable=True) - - # Relationships - created_by = db.relationship('User', foreign_keys=[created_by_id], backref='created_projects') - team = db.relationship('Team', backref='projects') - time_entries = db.relationship('TimeEntry', backref='project', lazy=True) - category = db.relationship('ProjectCategory', back_populates='projects') - - # Unique constraint per company - __table_args__ = (db.UniqueConstraint('company_id', 'code', name='uq_project_code_per_company'),) - - def __repr__(self): - return f'' - - def is_user_allowed(self, user): - """Check if a user is allowed to log time to this project""" - if not self.is_active: - return False - - # Must be in same company - if self.company_id != user.company_id: - return False - - # Admins and Supervisors can log time to any project in their company - if user.role in [Role.ADMIN, Role.SUPERVISOR]: - return True - - # If project is team-specific, only team members can log time - if self.team_id: - return user.team_id == self.team_id - - # If no team restriction, any user in the company can log time - return True - -# Update User model to include role and team relationship -class User(db.Model): - id = db.Column(db.Integer, primary_key=True) - username = db.Column(db.String(80), nullable=False) - email = db.Column(db.String(120), nullable=True) - password_hash = db.Column(db.String(128)) - created_at = db.Column(db.DateTime, 
default=datetime.utcnow) - - # Company association for multi-tenancy - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Email verification fields - is_verified = db.Column(db.Boolean, default=False) - verification_token = db.Column(db.String(100), unique=True, nullable=True) - token_expiry = db.Column(db.DateTime, nullable=True) - - # New field for blocking users - is_blocked = db.Column(db.Boolean, default=False) - - # New fields for role and team - role = db.Column(db.Enum(Role, values_callable=lambda obj: [e.value for e in obj]), default=Role.TEAM_MEMBER) - team_id = db.Column(db.Integer, db.ForeignKey('team.id'), nullable=True) - - # Freelancer support - account_type = db.Column(db.Enum(AccountType, values_callable=lambda obj: [e.value for e in obj]), default=AccountType.COMPANY_USER) - business_name = db.Column(db.String(100), nullable=True) # Optional business name for freelancers - - # Unique constraints per company - __table_args__ = ( - db.UniqueConstraint('company_id', 'username', name='uq_user_username_per_company'), - db.UniqueConstraint('company_id', 'email', name='uq_user_email_per_company'), - ) - - # Two-Factor Authentication fields - two_factor_enabled = db.Column(db.Boolean, default=False) - two_factor_secret = db.Column(db.String(32), nullable=True) # Base32 encoded secret - - # Avatar field - avatar_url = db.Column(db.String(255), nullable=True) # URL to user's avatar image - - # Relationships - time_entries = db.relationship('TimeEntry', backref='user', lazy=True) - work_config = db.relationship('WorkConfig', backref='user', lazy=True, uselist=False) - - def set_password(self, password): - self.password_hash = generate_password_hash(password) - - def check_password(self, password): - return check_password_hash(self.password_hash, password) - - def generate_verification_token(self): - """Generate a verification token that expires in 24 hours""" - self.verification_token = secrets.token_urlsafe(32) - self.token_expiry = datetime.utcnow() + timedelta(hours=24) - return self.verification_token - - def verify_token(self, token): - """Verify the token and mark user as verified if valid""" - if token == self.verification_token and self.token_expiry > datetime.utcnow(): - self.is_verified = True - self.verification_token = None - self.token_expiry = None - return True - return False - - def generate_2fa_secret(self): - """Generate a new 2FA secret""" - import pyotp - self.two_factor_secret = pyotp.random_base32() - return self.two_factor_secret - - def get_2fa_uri(self, issuer_name=None): - """Get the provisioning URI for QR code generation""" - if not self.two_factor_secret: - return None - import pyotp - totp = pyotp.TOTP(self.two_factor_secret) - if issuer_name is None: - issuer_name = "Time Tracker" # Default fallback - return totp.provisioning_uri( - name=self.email, - issuer_name=issuer_name - ) - - def verify_2fa_token(self, token, allow_setup=False): - """Verify a 2FA token""" - if not self.two_factor_secret: - return False - # During setup, allow verification even if 2FA isn't enabled yet - if not allow_setup and not self.two_factor_enabled: - return False - import pyotp - totp = pyotp.TOTP(self.two_factor_secret) - return totp.verify(token, valid_window=1) # Allow 1 window tolerance - - def get_avatar_url(self, size=40): - """Get user's avatar URL or generate a default one""" - if self.avatar_url: - return self.avatar_url - - # Generate a default avatar using DiceBear Avatars (similar to GitHub's identicons) - # Using initials 
style for a clean, professional look - import hashlib - - # Create a hash from username for consistent colors - hash_input = f"{self.username}_{self.id}".encode('utf-8') - hash_hex = hashlib.md5(hash_input).hexdigest() - - # Use DiceBear API for avatar generation - # For initials style, we need to provide the actual initials - initials = self.get_initials() - - # Generate avatar URL with initials - # Using a color based on the hash for consistency - bg_colors = ['0ea5e9', '8b5cf6', 'ec4899', 'f59e0b', '10b981', 'ef4444', '3b82f6', '6366f1'] - color_index = int(hash_hex[:2], 16) % len(bg_colors) - bg_color = bg_colors[color_index] - - avatar_url = f"https://api.dicebear.com/7.x/initials/svg?seed={initials}&size={size}&backgroundColor={bg_color}&fontSize=50" - - return avatar_url - - def get_initials(self): - """Get user initials for avatar display""" - parts = self.username.split() - if len(parts) >= 2: - return f"{parts[0][0]}{parts[-1][0]}".upper() - elif self.username: - return self.username[:2].upper() - return "??" - - def __repr__(self): - return f'' - -class SystemSettings(db.Model): - id = db.Column(db.Integer, primary_key=True) - key = db.Column(db.String(50), unique=True, nullable=False) - value = db.Column(db.String(255), nullable=False) - description = db.Column(db.String(255)) - updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) - - def __repr__(self): - return f'' - -class BrandingSettings(db.Model): - id = db.Column(db.Integer, primary_key=True) - app_name = db.Column(db.String(100), nullable=False, default='Time Tracker') - logo_filename = db.Column(db.String(255), nullable=True) # Filename of uploaded logo - logo_alt_text = db.Column(db.String(255), nullable=True, default='Logo') - favicon_filename = db.Column(db.String(255), nullable=True) # Filename of uploaded favicon - primary_color = db.Column(db.String(7), nullable=True, default='#007bff') # Hex color - - # Imprint/Legal page settings - imprint_enabled = db.Column(db.Boolean, default=False) # Enable/disable imprint page - imprint_title = db.Column(db.String(200), nullable=True, default='Imprint') # Page title - imprint_content = db.Column(db.Text, nullable=True) # HTML content for imprint page - - # Meta fields - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - updated_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Relationships - updated_by = db.relationship('User', foreign_keys=[updated_by_id]) - - def __repr__(self): - return f'' - - @staticmethod - def get_current(): - """Get current branding settings or create defaults""" - settings = BrandingSettings.query.first() - if not settings: - settings = BrandingSettings( - app_name='Time Tracker', - logo_alt_text='Application Logo' - ) - db.session.add(settings) - db.session.commit() - return settings - -class TimeEntry(db.Model): - id = db.Column(db.Integer, primary_key=True) - arrival_time = db.Column(db.DateTime, nullable=False) - departure_time = db.Column(db.DateTime, nullable=True) - duration = db.Column(db.Integer, nullable=True) # Duration in seconds - is_paused = db.Column(db.Boolean, default=False) - pause_start_time = db.Column(db.DateTime, nullable=True) - total_break_duration = db.Column(db.Integer, default=0) # Total break duration in seconds - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Project association - nullable for backward compatibility - project_id = 
db.Column(db.Integer, db.ForeignKey('project.id'), nullable=True) - - # Task/SubTask associations - nullable for backward compatibility - task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=True) - subtask_id = db.Column(db.Integer, db.ForeignKey('sub_task.id'), nullable=True) - - # Optional notes/description for the time entry - notes = db.Column(db.Text, nullable=True) - - def __repr__(self): - project_info = f" (Project: {self.project.code})" if self.project else "" - return f'' - -class WorkConfig(db.Model): - id = db.Column(db.Integer, primary_key=True) - work_hours_per_day = db.Column(db.Float, default=8.0) # Default 8 hours - mandatory_break_minutes = db.Column(db.Integer, default=30) # Default 30 minutes - break_threshold_hours = db.Column(db.Float, default=6.0) # Work hours that trigger mandatory break - additional_break_minutes = db.Column(db.Integer, default=15) # Default 15 minutes for additional break - additional_break_threshold_hours = db.Column(db.Float, default=9.0) # Work hours that trigger additional break - - # Time rounding settings - time_rounding_minutes = db.Column(db.Integer, default=0) # 0 = no rounding, 15 = 15 min, 30 = 30 min - round_to_nearest = db.Column(db.Boolean, default=True) # True = round to nearest, False = round up - - # Date/time format settings - time_format_24h = db.Column(db.Boolean, default=True) # True = 24h, False = 12h (AM/PM) - date_format = db.Column(db.String(20), default='ISO') # ISO, US, EU, etc. - - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - def __repr__(self): - return f'' - -# Define regional presets as an Enum -class WorkRegion(enum.Enum): - GERMANY = "DE" - UNITED_STATES = "US" - UNITED_KINGDOM = "UK" - FRANCE = "FR" - EUROPEAN_UNION = "EU" - CUSTOM = "CUSTOM" - -# Company Work Configuration (Admin-only policies) -class CompanyWorkConfig(db.Model): - id = db.Column(db.Integer, primary_key=True) - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Work policy settings (legal requirements) - work_hours_per_day = db.Column(db.Float, default=8.0) # Standard work hours per day - mandatory_break_minutes = db.Column(db.Integer, default=30) # Required break duration - break_threshold_hours = db.Column(db.Float, default=6.0) # Hours that trigger mandatory break - additional_break_minutes = db.Column(db.Integer, default=15) # Additional break duration - additional_break_threshold_hours = db.Column(db.Float, default=9.0) # Hours that trigger additional break - - # Regional compliance - region = db.Column(db.Enum(WorkRegion), default=WorkRegion.GERMANY) - region_name = db.Column(db.String(50), default='Germany') - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Relationships - company = db.relationship('Company', backref='work_config') - created_by = db.relationship('User', foreign_keys=[created_by_id]) - - # Unique constraint - one config per company - __table_args__ = (db.UniqueConstraint('company_id', name='uq_company_work_config'),) - - def __repr__(self): - return f'' - - @classmethod - def get_regional_preset(cls, region): - """Get regional preset configuration.""" - presets = { - WorkRegion.GERMANY: { - 'work_hours_per_day': 
8.0, - 'mandatory_break_minutes': 30, - 'break_threshold_hours': 6.0, - 'additional_break_minutes': 15, - 'additional_break_threshold_hours': 9.0, - 'region_name': 'Germany' - }, - WorkRegion.UNITED_STATES: { - 'work_hours_per_day': 8.0, - 'mandatory_break_minutes': 0, # No federal requirement - 'break_threshold_hours': 999.0, # Effectively disabled - 'additional_break_minutes': 0, - 'additional_break_threshold_hours': 999.0, - 'region_name': 'United States' - }, - WorkRegion.UNITED_KINGDOM: { - 'work_hours_per_day': 8.0, - 'mandatory_break_minutes': 20, - 'break_threshold_hours': 6.0, - 'additional_break_minutes': 0, - 'additional_break_threshold_hours': 999.0, - 'region_name': 'United Kingdom' - }, - WorkRegion.FRANCE: { - 'work_hours_per_day': 7.0, # 35-hour work week - 'mandatory_break_minutes': 20, - 'break_threshold_hours': 6.0, - 'additional_break_minutes': 0, - 'additional_break_threshold_hours': 999.0, - 'region_name': 'France' - }, - WorkRegion.EUROPEAN_UNION: { - 'work_hours_per_day': 8.0, - 'mandatory_break_minutes': 20, - 'break_threshold_hours': 6.0, - 'additional_break_minutes': 0, - 'additional_break_threshold_hours': 999.0, - 'region_name': 'European Union (General)' - } - } - return presets.get(region, presets[WorkRegion.GERMANY]) - -# Comment visibility enumeration -class CommentVisibility(enum.Enum): - TEAM = "Team" # Only visible to team members - COMPANY = "Company" # Visible to all company members - -# Company Settings (General company preferences) -class CompanySettings(db.Model): - id = db.Column(db.Integer, primary_key=True) - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Comment settings - default_comment_visibility = db.Column(db.Enum(CommentVisibility), default=CommentVisibility.COMPANY) - allow_team_visibility_comments = db.Column(db.Boolean, default=True) # Allow users to set comments as team-only - - # Task settings - require_task_assignment = db.Column(db.Boolean, default=False) # Tasks must be assigned before work can begin - allow_task_creation_by_members = db.Column(db.Boolean, default=True) # Team members can create tasks - - # Project settings - restrict_project_access_by_team = db.Column(db.Boolean, default=False) # Only team members can access team projects - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Relationships - company = db.relationship('Company', backref=db.backref('settings', uselist=False)) - created_by = db.relationship('User', foreign_keys=[created_by_id]) - - # Unique constraint - one settings per company - __table_args__ = (db.UniqueConstraint('company_id', name='uq_company_settings'),) - - def __repr__(self): - return f'' - -# User Preferences (User-configurable display settings) -class UserPreferences(db.Model): - id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Display format preferences - time_format_24h = db.Column(db.Boolean, default=True) # True = 24h, False = 12h (AM/PM) - date_format = db.Column(db.String(20), default='ISO') # ISO, US, EU, etc. 
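Note that `time_format_24h` and `date_format` only store codes; rendering is a lookup the application layer performs. A sketch, assuming the ISO/US/EU codes noted in the column comment map to the usual strftime patterns (the mapping itself is not defined in this model):

```python
from datetime import datetime

# Assumed mapping from the stored date_format codes to strftime patterns;
# the model only persists the code, not the pattern.
DATE_PATTERNS = {"ISO": "%Y-%m-%d", "US": "%m/%d/%Y", "EU": "%d.%m.%Y"}


def format_for_user(dt: datetime, prefs) -> str:
    """Render a timestamp according to a UserPreferences-like object."""
    date_part = dt.strftime(DATE_PATTERNS.get(prefs.date_format, "%Y-%m-%d"))
    time_part = dt.strftime("%H:%M" if prefs.time_format_24h else "%I:%M %p")
    return f"{date_part} {time_part}"

# e.g. with date_format='ISO', time_format_24h=True:
# format_for_user(datetime(2025, 7, 9, 19, 6), prefs) -> "2025-07-09 19:06"
```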
- - # Time rounding preferences - time_rounding_minutes = db.Column(db.Integer, default=0) # 0 = no rounding, 15 = 15 min, 30 = 30 min - round_to_nearest = db.Column(db.Boolean, default=True) # True = round to nearest, False = round up - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - - # Relationships - user = db.relationship('User', backref=db.backref('preferences', uselist=False)) - - # Unique constraint - one preferences per user - __table_args__ = (db.UniqueConstraint('user_id', name='uq_user_preferences'),) - - def __repr__(self): - return f'' - -# Project Category model for organizing projects -class ProjectCategory(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100), nullable=False) - description = db.Column(db.Text, nullable=True) - color = db.Column(db.String(7), default='#007bff') # Hex color for UI - icon = db.Column(db.String(50), nullable=True) # Icon name/emoji - - # Company association for multi-tenancy - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - company = db.relationship('Company', backref='project_categories') - created_by = db.relationship('User', foreign_keys=[created_by_id]) - projects = db.relationship('Project', back_populates='category', lazy=True) - - # Unique constraint per company - __table_args__ = (db.UniqueConstraint('company_id', 'name', name='uq_category_name_per_company'),) - - def __repr__(self): - return f'' - -# Task status enumeration -class TaskStatus(enum.Enum): - NOT_STARTED = "Not Started" - IN_PROGRESS = "In Progress" - ON_HOLD = "On Hold" - COMPLETED = "Completed" - ARCHIVED = "Archived" - CANCELLED = "Cancelled" - -# Task priority enumeration -class TaskPriority(enum.Enum): - LOW = "Low" - MEDIUM = "Medium" - HIGH = "High" - URGENT = "Urgent" - -# Task model for project breakdown -class Task(db.Model): - id = db.Column(db.Integer, primary_key=True) - task_number = db.Column(db.String(20), nullable=False, unique=True) # e.g., "TSK-001", "TSK-002" - name = db.Column(db.String(200), nullable=False) - description = db.Column(db.Text, nullable=True) - - # Task properties - status = db.Column(db.Enum(TaskStatus), default=TaskStatus.NOT_STARTED) - priority = db.Column(db.Enum(TaskPriority), default=TaskPriority.MEDIUM) - estimated_hours = db.Column(db.Float, nullable=True) # Estimated time to complete - - # Project association - project_id = db.Column(db.Integer, db.ForeignKey('project.id'), nullable=False) - - # Sprint association (optional) - sprint_id = db.Column(db.Integer, db.ForeignKey('sprint.id'), nullable=True) - - # Task assignment - assigned_to_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Task dates - start_date = db.Column(db.Date, nullable=True) - due_date = db.Column(db.Date, nullable=True) - completed_date = db.Column(db.Date, nullable=True) - archived_date = db.Column(db.Date, nullable=True) - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - project = 
db.relationship('Project', backref='tasks') - assigned_to = db.relationship('User', foreign_keys=[assigned_to_id], backref='assigned_tasks') - created_by = db.relationship('User', foreign_keys=[created_by_id]) - subtasks = db.relationship('SubTask', backref='parent_task', lazy=True, cascade='all, delete-orphan') - time_entries = db.relationship('TimeEntry', backref='task', lazy=True) - - def __repr__(self): - return f'' - - @property - def progress_percentage(self): - """Calculate task progress based on subtasks completion""" - if not self.subtasks: - return 100 if self.status == TaskStatus.COMPLETED else 0 - - completed_subtasks = sum(1 for subtask in self.subtasks if subtask.status == TaskStatus.COMPLETED) - return int((completed_subtasks / len(self.subtasks)) * 100) - - @property - def total_time_logged(self): - """Calculate total time logged to this task (in seconds)""" - return sum(entry.duration or 0 for entry in self.time_entries if entry.duration) - - def can_user_access(self, user): - """Check if a user can access this task""" - return self.project.is_user_allowed(user) - - @classmethod - def generate_task_number(cls, company_id): - """Generate next task number for the company""" - # Get the highest task number for this company - last_task = cls.query.join(Project).filter( - Project.company_id == company_id, - cls.task_number.like('TSK-%') - ).order_by(cls.task_number.desc()).first() - - if last_task and last_task.task_number: - try: - # Extract number from TSK-XXX format - last_num = int(last_task.task_number.split('-')[1]) - return f"TSK-{last_num + 1:03d}" - except (IndexError, ValueError): - pass - - return "TSK-001" - - @property - def blocked_by_tasks(self): - """Get tasks that are blocking this task""" - return [dep.blocking_task for dep in self.blocked_by_dependencies] - - @property - def blocking_tasks(self): - """Get tasks that this task is blocking""" - return [dep.blocked_task for dep in self.blocking_dependencies] - -# Task Dependencies model for tracking blocking relationships -class TaskDependency(db.Model): - id = db.Column(db.Integer, primary_key=True) - - # The task that is blocked (cannot start until blocking task is done) - blocked_task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=False) - - # The task that is blocking (must be completed before blocked task can start) - blocking_task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=False) - - # Dependency type (for future extension) - dependency_type = db.Column(db.String(50), default='blocks', nullable=False) # 'blocks', 'subtask', etc. 
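With the `blocked_by_tasks` property above and the backrefs declared just below, a "may this task start?" check reduces to a scan of its blockers. A minimal sketch using this module's `TaskStatus`:

```python
def can_start(task) -> bool:
    """A task may start once every task blocking it is completed.

    Sketch only: relies on the blocked_by_tasks property defined above
    and this module's TaskStatus enum.
    """
    return all(
        blocker.status == TaskStatus.COMPLETED
        for blocker in task.blocked_by_tasks
    )
```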
- - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - blocked_task = db.relationship('Task', foreign_keys=[blocked_task_id], - backref=db.backref('blocked_by_dependencies', cascade='all, delete-orphan')) - blocking_task = db.relationship('Task', foreign_keys=[blocking_task_id], - backref=db.backref('blocking_dependencies', cascade='all, delete-orphan')) - created_by = db.relationship('User', foreign_keys=[created_by_id]) - - # Ensure a task doesn't block itself and prevent duplicate dependencies - __table_args__ = ( - db.CheckConstraint('blocked_task_id != blocking_task_id', name='no_self_blocking'), - db.UniqueConstraint('blocked_task_id', 'blocking_task_id', name='unique_dependency'), - ) - - def __repr__(self): - return f'' - -# SubTask model for task breakdown -class SubTask(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(200), nullable=False) - description = db.Column(db.Text, nullable=True) - - # SubTask properties - status = db.Column(db.Enum(TaskStatus), default=TaskStatus.NOT_STARTED) - priority = db.Column(db.Enum(TaskPriority), default=TaskPriority.MEDIUM) - estimated_hours = db.Column(db.Float, nullable=True) - - # Parent task association - task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=False) - - # Assignment - assigned_to_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - - # Dates - start_date = db.Column(db.Date, nullable=True) - due_date = db.Column(db.Date, nullable=True) - completed_date = db.Column(db.Date, nullable=True) - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - assigned_to = db.relationship('User', foreign_keys=[assigned_to_id], backref='assigned_subtasks') - created_by = db.relationship('User', foreign_keys=[created_by_id]) - time_entries = db.relationship('TimeEntry', backref='subtask', lazy=True) - - def __repr__(self): - return f'' - - @property - def total_time_logged(self): - """Calculate total time logged to this subtask (in seconds)""" - return sum(entry.duration or 0 for entry in self.time_entries if entry.duration) - - def can_user_access(self, user): - """Check if a user can access this subtask""" - return self.parent_task.can_user_access(user) - -# Comment model for task discussions -class Comment(db.Model): - id = db.Column(db.Integer, primary_key=True) - content = db.Column(db.Text, nullable=False) - - # Task association - task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=False) - - # Parent comment for thread support - parent_comment_id = db.Column(db.Integer, db.ForeignKey('comment.id'), nullable=True) - - # Visibility setting - visibility = db.Column(db.Enum(CommentVisibility), default=CommentVisibility.COMPANY) - - # Edit tracking - is_edited = db.Column(db.Boolean, default=False) - edited_at = db.Column(db.DateTime, nullable=True) - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - task = db.relationship('Task', backref=db.backref('comments', lazy='dynamic', cascade='all, delete-orphan')) - created_by = db.relationship('User', foreign_keys=[created_by_id], backref='comments') - replies = 
db.relationship('Comment', backref=db.backref('parent_comment', remote_side=[id])) - - def __repr__(self): - return f'' - - def can_user_view(self, user): - """Check if a user can view this comment based on visibility settings""" - # First check if user can access the task - if not self.task.can_user_access(user): - return False - - # Then check visibility settings - if self.visibility == CommentVisibility.TEAM: - # Check if user is in the same team as the task's project - if self.task.project.team_id: - return user.team_id == self.task.project.team_id - # If no team assigned to project, fall back to company visibility - return user.company_id == self.task.project.company_id - elif self.visibility == CommentVisibility.COMPANY: - # Check if user is in the same company - return user.company_id == self.task.project.company_id - - return False - - def can_user_edit(self, user): - """Check if a user can edit this comment""" - # Only the comment creator can edit their own comments - return user.id == self.created_by_id - - def can_user_delete(self, user): - """Check if a user can delete this comment""" - # Comment creator can delete their own comments - if user.id == self.created_by_id: - return True - - # Admins and supervisors can delete any comment in their company - if user.role in [Role.ADMIN, Role.SUPERVISOR]: - return user.company_id == self.task.project.company_id - - return False - -# Announcement model for system-wide announcements -class Announcement(db.Model): - id = db.Column(db.Integer, primary_key=True) - title = db.Column(db.String(200), nullable=False) - content = db.Column(db.Text, nullable=False) - - # Announcement properties - is_active = db.Column(db.Boolean, default=True) - is_urgent = db.Column(db.Boolean, default=False) # For urgent announcements with different styling - announcement_type = db.Column(db.String(20), default='info') # info, warning, success, danger - - # Scheduling - start_date = db.Column(db.DateTime, nullable=True) # When to start showing - end_date = db.Column(db.DateTime, nullable=True) # When to stop showing - - # Targeting - target_all_users = db.Column(db.Boolean, default=True) - target_roles = db.Column(db.Text, nullable=True) # JSON string of roles if not all users - target_companies = db.Column(db.Text, nullable=True) # JSON string of company IDs if not all companies - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - created_by = db.relationship('User', foreign_keys=[created_by_id]) - - def __repr__(self): - return f'' - - def is_visible_now(self): - """Check if announcement should be visible at current time""" - if not self.is_active: - return False - - now = datetime.now() - - # Check start date - if self.start_date and now < self.start_date: - return False - - # Check end date - if self.end_date and now > self.end_date: - return False - - return True - - def is_visible_to_user(self, user): - """Check if announcement should be visible to specific user""" - if not self.is_visible_now(): - return False - - # If targeting all users, show to everyone - if self.target_all_users: - return True - - # Check role targeting - if self.target_roles: - import json - try: - target_roles = json.loads(self.target_roles) - if user.role.value not in target_roles: - return False - except (json.JSONDecodeError, AttributeError): - pass - - # Check company 
targeting - if self.target_companies: - import json - try: - target_companies = json.loads(self.target_companies) - if user.company_id not in target_companies: - return False - except (json.JSONDecodeError, AttributeError): - pass - - return True - - @staticmethod - def get_active_announcements_for_user(user): - """Get all active announcements visible to a specific user""" - announcements = Announcement.query.filter_by(is_active=True).all() - return [ann for ann in announcements if ann.is_visible_to_user(user)] - -# System Event model for logging system activities -class SystemEvent(db.Model): - id = db.Column(db.Integer, primary_key=True) - event_type = db.Column(db.String(50), nullable=False) # e.g., 'login', 'logout', 'user_created', 'system_error' - event_category = db.Column(db.String(30), nullable=False) # e.g., 'auth', 'user_management', 'system', 'error' - description = db.Column(db.Text, nullable=False) - severity = db.Column(db.String(20), default='info') # 'info', 'warning', 'error', 'critical' - timestamp = db.Column(db.DateTime, default=datetime.now, nullable=False) - - # Optional associations - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=True) - - # Additional metadata (JSON string) - event_metadata = db.Column(db.Text, nullable=True) # Store additional event data as JSON - - # IP address and user agent for security tracking - ip_address = db.Column(db.String(45), nullable=True) # IPv6 compatible - user_agent = db.Column(db.Text, nullable=True) - - # Relationships - user = db.relationship('User', backref='system_events') - company = db.relationship('Company', backref='system_events') - - def __repr__(self): - return f'' - - @staticmethod - def log_event(event_type, description, event_category='system', severity='info', - user_id=None, company_id=None, event_metadata=None, ip_address=None, user_agent=None): - """Helper method to log system events""" - event = SystemEvent( - event_type=event_type, - event_category=event_category, - description=description, - severity=severity, - user_id=user_id, - company_id=company_id, - event_metadata=event_metadata, - ip_address=ip_address, - user_agent=user_agent - ) - db.session.add(event) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - # Log to application logger if DB logging fails - import logging - logging.error(f"Failed to log system event: {e}") - - @staticmethod - def get_recent_events(days=7, limit=100): - """Get recent system events from the last N days""" - from datetime import datetime, timedelta - since = datetime.now() - timedelta(days=days) - return SystemEvent.query.filter( - SystemEvent.timestamp >= since - ).order_by(SystemEvent.timestamp.desc()).limit(limit).all() - - @staticmethod - def get_events_by_severity(severity, days=7, limit=50): - """Get events by severity level""" - from datetime import datetime, timedelta - since = datetime.now() - timedelta(days=days) - return SystemEvent.query.filter( - SystemEvent.timestamp >= since, - SystemEvent.severity == severity - ).order_by(SystemEvent.timestamp.desc()).limit(limit).all() - - @staticmethod - def get_system_health_summary(): - """Get a summary of system health based on recent events""" - from datetime import datetime, timedelta - from sqlalchemy import func - - now = datetime.now() - last_24h = now - timedelta(hours=24) - last_week = now - timedelta(days=7) - - # Count events by severity in last 24h - recent_errors = 
SystemEvent.query.filter( - SystemEvent.timestamp >= last_24h, - SystemEvent.severity.in_(['error', 'critical']) - ).count() - - recent_warnings = SystemEvent.query.filter( - SystemEvent.timestamp >= last_24h, - SystemEvent.severity == 'warning' - ).count() - - # Count total events in last week - weekly_events = SystemEvent.query.filter( - SystemEvent.timestamp >= last_week - ).count() - - # Get most recent error - last_error = SystemEvent.query.filter( - SystemEvent.severity.in_(['error', 'critical']) - ).order_by(SystemEvent.timestamp.desc()).first() - - return { - 'errors_24h': recent_errors, - 'warnings_24h': recent_warnings, - 'total_events_week': weekly_events, - 'last_error': last_error, - 'health_status': 'healthy' if recent_errors == 0 else 'issues' if recent_errors < 5 else 'critical' - } - - -# Sprint Management System -class SprintStatus(enum.Enum): - PLANNING = "Planning" - ACTIVE = "Active" - COMPLETED = "Completed" - CANCELLED = "Cancelled" - -class Sprint(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(200), nullable=False) - description = db.Column(db.Text, nullable=True) - - # Sprint status - status = db.Column(db.Enum(SprintStatus), nullable=False, default=SprintStatus.PLANNING) - - # Company association - sprints are company-scoped - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Optional project association - can be project-specific or company-wide - project_id = db.Column(db.Integer, db.ForeignKey('project.id'), nullable=True) - - # Sprint timeline - start_date = db.Column(db.Date, nullable=False) - end_date = db.Column(db.Date, nullable=False) - - # Sprint goals and metrics - goal = db.Column(db.Text, nullable=True) # Sprint goal description - capacity_hours = db.Column(db.Integer, nullable=True) # Planned capacity in hours - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships - company = db.relationship('Company', backref='sprints') - project = db.relationship('Project', backref='sprints') - created_by = db.relationship('User', foreign_keys=[created_by_id]) - tasks = db.relationship('Task', backref='sprint', lazy=True) - - def __repr__(self): - return f'' - - @property - def is_current(self): - """Check if this sprint is currently active""" - from datetime import date - today = date.today() - return (self.status == SprintStatus.ACTIVE and - self.start_date <= today <= self.end_date) - - @property - def duration_days(self): - """Get sprint duration in days""" - return (self.end_date - self.start_date).days + 1 - - @property - def days_remaining(self): - """Get remaining days in sprint""" - from datetime import date - today = date.today() - if self.end_date < today: - return 0 - elif self.start_date > today: - return self.duration_days - else: - return (self.end_date - today).days + 1 - - @property - def progress_percentage(self): - """Calculate sprint progress percentage based on dates""" - from datetime import date - today = date.today() - - if today < self.start_date: - return 0 - elif today > self.end_date: - return 100 - else: - total_days = self.duration_days - elapsed_days = (today - self.start_date).days + 1 - return min(100, int((elapsed_days / total_days) * 100)) - - def get_task_summary(self): - """Get summary of tasks in this sprint""" - total_tasks = len(self.tasks) - 
completed_tasks = len([t for t in self.tasks if t.status == TaskStatus.COMPLETED]) - in_progress_tasks = len([t for t in self.tasks if t.status == TaskStatus.IN_PROGRESS]) - - return { - 'total': total_tasks, - 'completed': completed_tasks, - 'in_progress': in_progress_tasks, - 'not_started': total_tasks - completed_tasks - in_progress_tasks, - 'completion_percentage': int((completed_tasks / total_tasks) * 100) if total_tasks > 0 else 0 - } - - def can_user_access(self, user): - """Check if user can access this sprint""" - # Must be in same company - if self.company_id != user.company_id: - return False - - # If sprint is project-specific, check project access - if self.project_id: - return self.project.is_user_allowed(user) - - # Company-wide sprints are accessible to all company members - return True - -# Dashboard Widget System -class WidgetType(enum.Enum): - # Time Tracking Widgets - CURRENT_TIMER = "current_timer" - DAILY_SUMMARY = "daily_summary" - WEEKLY_CHART = "weekly_chart" - BREAK_REMINDER = "break_reminder" - - # Project Management Widgets - ACTIVE_PROJECTS = "active_projects" - PROJECT_PROGRESS = "project_progress" - PROJECT_ACTIVITY = "project_activity" - PROJECT_DEADLINES = "project_deadlines" - - # Task Management Widgets - ASSIGNED_TASKS = "assigned_tasks" - TASK_PRIORITY = "task_priority" - TASK_TRENDS = "task_trends" - - # Analytics Widgets - PRODUCTIVITY_METRICS = "productivity_metrics" - TIME_DISTRIBUTION = "time_distribution" - GOAL_PROGRESS = "goal_progress" - PERFORMANCE_COMPARISON = "performance_comparison" - - # Team Widgets (Role-based) - TEAM_OVERVIEW = "team_overview" - RESOURCE_ALLOCATION = "resource_allocation" - TEAM_PERFORMANCE = "team_performance" - COMPANY_METRICS = "company_metrics" - - # Quick Action Widgets - QUICK_TIMER = "quick_timer" - FAVORITE_PROJECTS = "favorite_projects" - RECENT_ACTIONS = "recent_actions" - SHORTCUTS_PANEL = "shortcuts_panel" - -class WidgetSize(enum.Enum): - SMALL = "1x1" # 1 grid unit - MEDIUM = "2x1" # 2 grid units wide, 1 high - LARGE = "2x2" # 2x2 grid units - WIDE = "3x1" # 3 grid units wide, 1 high - TALL = "1x2" # 1 grid unit wide, 2 high - EXTRA_LARGE = "3x2" # 3x2 grid units - -class UserDashboard(db.Model): - id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - name = db.Column(db.String(100), default='My Dashboard') - is_default = db.Column(db.Boolean, default=True) - layout_config = db.Column(db.Text) # JSON string for grid layout configuration - - # Dashboard settings - grid_columns = db.Column(db.Integer, default=6) # Number of grid columns - theme = db.Column(db.String(20), default='light') # light, dark, auto - auto_refresh = db.Column(db.Integer, default=300) # Auto-refresh interval in seconds - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - - # Relationships - user = db.relationship('User', backref='dashboards') - widgets = db.relationship('DashboardWidget', backref='dashboard', lazy=True, cascade='all, delete-orphan') - - # Unique constraint - one default dashboard per user - __table_args__ = (db.Index('idx_user_default_dashboard', 'user_id', 'is_default'),) - - def __repr__(self): - return f'' - -class DashboardWidget(db.Model): - id = db.Column(db.Integer, primary_key=True) - dashboard_id = db.Column(db.Integer, db.ForeignKey('user_dashboard.id'), nullable=False) - widget_type = db.Column(db.Enum(WidgetType), nullable=False) - - 
# Grid position and size - grid_x = db.Column(db.Integer, nullable=False, default=0) # X position in grid - grid_y = db.Column(db.Integer, nullable=False, default=0) # Y position in grid - grid_width = db.Column(db.Integer, nullable=False, default=1) # Width in grid units - grid_height = db.Column(db.Integer, nullable=False, default=1) # Height in grid units - - # Widget configuration - title = db.Column(db.String(100)) # Custom widget title - config = db.Column(db.Text) # JSON string for widget-specific configuration - refresh_interval = db.Column(db.Integer, default=60) # Refresh interval in seconds - - # Widget state - is_visible = db.Column(db.Boolean, default=True) - is_minimized = db.Column(db.Boolean, default=False) - z_index = db.Column(db.Integer, default=1) # Stacking order - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) - - def __repr__(self): - return f'' - - @property - def config_dict(self): - """Parse widget configuration JSON""" - if self.config: - import json - try: - return json.loads(self.config) - except: - return {} - return {} - - @config_dict.setter - def config_dict(self, value): - """Set widget configuration as JSON""" - import json - self.config = json.dumps(value) if value else None - -class WidgetTemplate(db.Model): - """Pre-defined widget templates for easy dashboard setup""" - id = db.Column(db.Integer, primary_key=True) - widget_type = db.Column(db.Enum(WidgetType), nullable=False) - name = db.Column(db.String(100), nullable=False) - description = db.Column(db.Text) - icon = db.Column(db.String(50)) # Icon name or emoji - - # Default configuration - default_width = db.Column(db.Integer, default=1) - default_height = db.Column(db.Integer, default=1) - default_config = db.Column(db.Text) # JSON string for default widget configuration - - # Access control - required_role = db.Column(db.Enum(Role), default=Role.TEAM_MEMBER) - is_active = db.Column(db.Boolean, default=True) - - # Categories for organization - category = db.Column(db.String(50), default='General') # Time, Projects, Tasks, Analytics, Team, Actions - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - - def __repr__(self): - return f'' - - def can_user_access(self, user): - """Check if user has required role to use this widget""" - if not self.is_active: - return False - - # Define role hierarchy - role_hierarchy = { - Role.TEAM_MEMBER: 1, - Role.TEAM_LEADER: 2, - Role.SUPERVISOR: 3, - Role.ADMIN: 4, - Role.SYSTEM_ADMIN: 5 - } - - user_level = role_hierarchy.get(user.role, 0) - required_level = role_hierarchy.get(self.required_role, 0) - - return user_level >= required_level - - -# Note Sharing Visibility -class NoteVisibility(enum.Enum): - PRIVATE = "Private" - TEAM = "Team" - COMPANY = "Company" - - -class Note(db.Model): - """Markdown notes with sharing capabilities""" - id = db.Column(db.Integer, primary_key=True) - title = db.Column(db.String(200), nullable=False) - content = db.Column(db.Text, nullable=False) # Markdown content - slug = db.Column(db.String(100), nullable=False) # URL-friendly identifier - - # Visibility and sharing - visibility = db.Column(db.Enum(NoteVisibility), nullable=False, default=NoteVisibility.PRIVATE) - - # Folder organization - folder = db.Column(db.String(100), nullable=True) # Folder path like "Work/Projects" or "Personal" - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - updated_at = db.Column(db.DateTime, 
default=datetime.now, onupdate=datetime.now) - - # Associations - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Optional associations - team_id = db.Column(db.Integer, db.ForeignKey('team.id'), nullable=True) # For team-specific notes - project_id = db.Column(db.Integer, db.ForeignKey('project.id'), nullable=True) # Link to project - task_id = db.Column(db.Integer, db.ForeignKey('task.id'), nullable=True) # Link to task - - # Tags for organization - tags = db.Column(db.String(500)) # Comma-separated tags - - # Pin important notes - is_pinned = db.Column(db.Boolean, default=False) - - # Soft delete - is_archived = db.Column(db.Boolean, default=False) - archived_at = db.Column(db.DateTime, nullable=True) - - # Relationships - created_by = db.relationship('User', foreign_keys=[created_by_id], backref='notes') - company = db.relationship('Company', backref='notes') - team = db.relationship('Team', backref='notes') - project = db.relationship('Project', backref='notes') - task = db.relationship('Task', backref='notes') - - # Unique constraint on slug per company - __table_args__ = (db.UniqueConstraint('company_id', 'slug', name='uq_note_slug_per_company'),) - - def __repr__(self): - return f'' - - def generate_slug(self): - """Generate URL-friendly slug from title""" - import re - # Remove special characters and convert to lowercase - slug = re.sub(r'[^\w\s-]', '', self.title.lower()) - # Replace spaces with hyphens - slug = re.sub(r'[-\s]+', '-', slug) - # Remove leading/trailing hyphens - slug = slug.strip('-') - - # Ensure uniqueness within company - base_slug = slug - counter = 1 - while Note.query.filter_by(company_id=self.company_id, slug=slug).filter(Note.id != self.id).first(): - slug = f"{base_slug}-{counter}" - counter += 1 - - return slug - - def can_user_view(self, user): - """Check if user can view this note""" - # Creator can always view - if user.id == self.created_by_id: - return True - - # Check company match - if user.company_id != self.company_id: - return False - - # Check visibility - if self.visibility == NoteVisibility.COMPANY: - return True - elif self.visibility == NoteVisibility.TEAM: - # Check if user is in the same team - if self.team_id and user.team_id == self.team_id: - return True - # Admins can view all team notes - if user.role in [Role.ADMIN, Role.SYSTEM_ADMIN]: - return True - - return False - - def can_user_edit(self, user): - """Check if user can edit this note""" - # Creator can always edit - if user.id == self.created_by_id: - return True - - # Admins can edit company notes - if user.role in [Role.ADMIN, Role.SYSTEM_ADMIN] and user.company_id == self.company_id: - return True - - return False - - def get_tags_list(self): - """Get tags as a list""" - if not self.tags: - return [] - return [tag.strip() for tag in self.tags.split(',') if tag.strip()] - - def set_tags_list(self, tags_list): - """Set tags from a list""" - self.tags = ','.join(tags_list) if tags_list else None - - def get_preview(self, length=200): - """Get a plain text preview of the note content""" - # Strip markdown formatting for preview - import re - from frontmatter_utils import parse_frontmatter - - # Extract body content without frontmatter - _, body = parse_frontmatter(self.content) - text = body - - # Remove headers - text = re.sub(r'^#+\s+', '', text, flags=re.MULTILINE) - # Remove emphasis - text = re.sub(r'\*{1,2}([^\*]+)\*{1,2}', r'\1', text) - text = 
re.sub(r'_{1,2}([^_]+)_{1,2}', r'\1', text) - # Remove links - text = re.sub(r'\[([^\]]+)\]\([^\)]+\)', r'\1', text) - # Remove code blocks - text = re.sub(r'```[^`]*```', '', text, flags=re.DOTALL) - text = re.sub(r'`([^`]+)`', r'\1', text) - # Clean up whitespace - text = ' '.join(text.split()) - - if len(text) > length: - return text[:length] + '...' - return text - - def render_html(self): - """Render markdown content to HTML""" - try: - import markdown - from frontmatter_utils import parse_frontmatter - # Extract body content without frontmatter - _, body = parse_frontmatter(self.content) - # Use extensions for better markdown support - html = markdown.markdown(body, extensions=['extra', 'codehilite', 'toc']) - return html - except ImportError: - # Fallback if markdown not installed - return f'<pre>{self.content}</pre>' - - def get_frontmatter(self): - """Get frontmatter metadata from content""" - from frontmatter_utils import parse_frontmatter - metadata, _ = parse_frontmatter(self.content) - return metadata - - def update_frontmatter(self): - """Update content with current metadata as frontmatter""" - from frontmatter_utils import update_frontmatter - metadata = { - 'title': self.title, - 'visibility': self.visibility.value.lower(), - 'folder': self.folder, - 'tags': self.get_tags_list() if self.tags else None, - 'project': self.project.code if self.project else None, - 'task_id': self.task_id, - 'pinned': self.is_pinned if self.is_pinned else None, - 'created': self.created_at.isoformat() if self.created_at else None, - 'updated': self.updated_at.isoformat() if self.updated_at else None, - 'author': self.created_by.username if self.created_by else None - } - # Remove None values - metadata = {k: v for k, v in metadata.items() if v is not None} - self.content = update_frontmatter(self.content, metadata) - - def sync_from_frontmatter(self): - """Update model fields from frontmatter in content""" - from frontmatter_utils import parse_frontmatter - metadata, _ = parse_frontmatter(self.content) - - if metadata: - # Update fields from frontmatter - if 'title' in metadata: - self.title = metadata['title'] - if 'visibility' in metadata: - try: - self.visibility = NoteVisibility[metadata['visibility'].upper()] - except KeyError: - pass - if 'folder' in metadata: - self.folder = metadata['folder'] - if 'tags' in metadata: - if isinstance(metadata['tags'], list): - self.set_tags_list(metadata['tags']) - elif isinstance(metadata['tags'], str): - self.tags = metadata['tags'] - if 'pinned' in metadata: - self.is_pinned = bool(metadata['pinned']) - - -class NoteLink(db.Model): - """Links between notes for creating relationships""" - id = db.Column(db.Integer, primary_key=True) - - # Source and target notes with cascade deletion - source_note_id = db.Column(db.Integer, db.ForeignKey('note.id', ondelete='CASCADE'), nullable=False) - target_note_id = db.Column(db.Integer, db.ForeignKey('note.id', ondelete='CASCADE'), nullable=False) - - # Link metadata - link_type = db.Column(db.String(50), default='related') # related, parent, child, etc. 
- created_at = db.Column(db.DateTime, default=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - - # Relationships with cascade deletion - source_note = db.relationship('Note', foreign_keys=[source_note_id], - backref=db.backref('outgoing_links', cascade='all, delete-orphan')) - target_note = db.relationship('Note', foreign_keys=[target_note_id], - backref=db.backref('incoming_links', cascade='all, delete-orphan')) - created_by = db.relationship('User', foreign_keys=[created_by_id]) - - # Unique constraint to prevent duplicate links - __table_args__ = (db.UniqueConstraint('source_note_id', 'target_note_id', name='uq_note_link'),) - - def __repr__(self): - return f'<NoteLink {self.source_note_id} -> {self.target_note_id}>' - - -class NoteFolder(db.Model): - """Represents a folder for organizing notes""" - id = db.Column(db.Integer, primary_key=True) - - # Folder properties - name = db.Column(db.String(100), nullable=False) - path = db.Column(db.String(500), nullable=False) # Full path like "Work/Projects/Q1" - parent_path = db.Column(db.String(500), nullable=True) # Parent folder path - description = db.Column(db.Text, nullable=True) - - # Metadata - created_at = db.Column(db.DateTime, default=datetime.now) - created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) - company_id = db.Column(db.Integer, db.ForeignKey('company.id'), nullable=False) - - # Relationships - created_by = db.relationship('User', foreign_keys=[created_by_id]) - company = db.relationship('Company', foreign_keys=[company_id]) - - # Unique constraint to prevent duplicate paths within a company - __table_args__ = (db.UniqueConstraint('path', 'company_id', name='uq_folder_path_company'),) - - def __repr__(self): - return f'' \ No newline at end of file diff --git a/quick_enum_fix.sql b/quick_enum_fix.sql deleted file mode 100644 index f51eaf3..0000000 --- a/quick_enum_fix.sql +++ /dev/null @@ -1,27 +0,0 @@ --- Quick fix for enum value mismatches --- Run this directly in PostgreSQL to fix the immediate issue - --- TaskStatus: Add enum NAMES as valid values -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'TODO'; -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_PROGRESS'; -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'IN_REVIEW'; -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'DONE'; -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'CANCELLED'; -ALTER TYPE taskstatus ADD VALUE IF NOT EXISTS 'ARCHIVED'; - --- TaskPriority: Add enum NAMES as valid values -ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'LOW'; -ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'MEDIUM'; -ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'HIGH'; -ALTER TYPE taskpriority ADD VALUE IF NOT EXISTS 'URGENT'; - --- Role: Add enum NAMES as valid values (if used as enum) --- ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_MEMBER'; --- ALTER TYPE role ADD VALUE IF NOT EXISTS 'TEAM_LEADER'; --- ALTER TYPE role ADD VALUE IF NOT EXISTS 'SUPERVISOR'; --- ALTER TYPE role ADD VALUE IF NOT EXISTS 'ADMIN'; --- ALTER TYPE role ADD VALUE IF NOT EXISTS 'SYSTEM_ADMIN'; - --- To check what values are in each enum: --- SELECT enum_range(NULL::taskstatus); --- SELECT enum_range(NULL::taskpriority); \ No newline at end of file diff --git a/quick_fix_revision.sh b/quick_fix_revision.sh deleted file mode 100755 index 6432881..0000000 --- a/quick_fix_revision.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash -# Quick fix for revision mismatch error - -echo "=== Quick Fix for Revision 838055206ef5 Error ===" -echo "" -echo "This error occurs when 
the database references a migration that doesn't exist." -echo "We'll fix this by resetting to the current migration files." -echo "" - -# Set Flask app -export FLASK_APP=app.py - -# Show current situation -echo "Current migration files:" -ls -la migrations/versions/*.py 2>/dev/null || echo "No migration files found!" - -echo "" -echo "Attempting to get current database state:" -flask db current 2>&1 || true - -echo "" -echo "Available options:" -echo "1. Reset to latest migration file (safest)" -echo "2. Clear migration history and start fresh" -echo "3. Cancel and investigate manually" -echo "" -read -p "Choose option (1-3): " choice - -case $choice in - 1) - echo "" - echo "Finding latest migration..." - # Get the latest migration revision - latest_revision=$(ls -t migrations/versions/*.py 2>/dev/null | head -1 | xargs grep "^revision = " | cut -d"'" -f2) - - if [ -z "$latest_revision" ]; then - echo "❌ No migration files found!" - echo "Run: python establish_baseline_4214e88.py" - exit 1 - fi - - echo "Latest revision: $latest_revision" - echo "Stamping database to this revision..." - - flask db stamp $latest_revision - - if [ $? -eq 0 ]; then - echo "✅ Success! Database stamped to $latest_revision" - echo "" - echo "Next steps:" - echo "1. Run: flask db upgrade" - echo "2. Then you can create new migrations" - else - echo "❌ Stamping failed. Try option 2." - fi - ;; - - 2) - echo "" - echo "⚠️ This will clear all migration history!" - read -p "Are you sure? (y/N): " confirm - - if [ "$confirm" = "y" ]; then - echo "Clearing alembic_version table..." - python -c " -from app import app, db -with app.app_context(): - try: - db.engine.execute('DELETE FROM alembic_version') - print('✅ Cleared alembic_version table') - except Exception as e: - print(f'❌ Error: {e}') -" - - echo "" - echo "Now re-establishing baseline..." - python establish_baseline_4214e88.py - - if [ $? -eq 0 ]; then - flask db stamp head - echo "✅ Migration state reset successfully!" - fi - else - echo "Cancelled." - fi - ;; - - 3) - echo "Cancelled. Run 'python fix_revision_mismatch.py' for detailed diagnostics." - ;; - - *) - echo "Invalid option" - ;; -esac \ No newline at end of file diff --git a/reset_migrations.sh b/reset_migrations.sh deleted file mode 100755 index e559efc..0000000 --- a/reset_migrations.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash -# Quick reset script for migration issues - -echo "=== Migration Reset Script ===" -echo "" -echo "This will completely reset your Flask-Migrate setup." -echo "Your data will NOT be affected, only migration tracking." -echo "" -read -p "Continue? (y/N): " response - -if [ "$response" != "y" ]; then - echo "Aborting..." - exit 0 -fi - -export FLASK_APP=app.py - -echo "" -echo "Step 1: Clearing database migration history..." -python -c " -from app import app, db -with app.app_context(): - try: - db.engine.execute('DELETE FROM alembic_version') - print('✓ Cleared alembic_version table') - except Exception as e: - print(f'⚠️ Could not clear alembic_version: {e}') - print(' (This is OK if the table does not exist)') -" - -echo "" -echo "Step 2: Removing migrations directory..." -rm -rf migrations -echo "✓ Removed migrations directory" - -echo "" -echo "Step 3: Re-initializing migrations..." -flask db init -if [ $? -ne 0 ]; then - echo "❌ Failed to initialize migrations" - exit 1 -fi -echo "✓ Initialized Flask-Migrate" - -echo "" -echo "Step 4: Creating baseline migration..." -flask db migrate -m "Reset baseline migration $(date +%Y%m%d_%H%M%S)" -if [ $? 
-ne 0 ]; then - echo "❌ Failed to create migration" - exit 1 -fi -echo "✓ Created baseline migration" - -echo "" -echo "Step 5: Marking database as current..." -flask db stamp head -if [ $? -ne 0 ]; then - echo "❌ Failed to stamp database" - exit 1 -fi -echo "✓ Database marked as current" - -echo "" -echo "✨ Migration reset complete!" -echo "" -echo "Next steps:" -echo "1. Review the generated migration in migrations/versions/" -echo "2. Create new migrations: flask db migrate -m 'Your changes'" -echo "3. Apply migrations: flask db upgrade" \ No newline at end of file diff --git a/simple_baseline_4214e88.py b/simple_baseline_4214e88.py deleted file mode 100755 index c3af910..0000000 --- a/simple_baseline_4214e88.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python3 -""" -Simplified baseline establishment for commit 4214e88. -Handles the models.py (monolithic) to models/ (modular) transition properly. -""" - -import os -import sys -import subprocess -import shutil - -def run_command(cmd, description, check=True): - """Run a command and handle errors.""" - print(f"\n➜ {description}") - print(f" Command: {cmd}") - result = subprocess.run(cmd, shell=True) - if result.returncode != 0 and check: - print(f"❌ Command failed!") - sys.exit(1) - return result.returncode == 0 - -def main(): - """Main function.""" - print("=== Simplified Baseline Setup for Commit 4214e88 ===") - print("\nThis script will:") - print("1. Extract models.py from commit 4214e88") - print("2. Create a baseline migration") - print("3. Restore your current models structure") - - response = input("\nContinue? (y/N): ") - if response.lower() != 'y': - print("Aborting...") - return 1 - - # Set environment - os.environ['FLASK_APP'] = 'app.py' - BASELINE_COMMIT = "4214e88d18fce7a9c75927753b8d4e9222771e14" - - # Step 1: Clean up - if os.path.exists('migrations'): - print("\n⚠️ Removing existing migrations directory...") - shutil.rmtree('migrations') - - # Step 2: Backup current structure - print("\nBacking up current models...") - if os.path.exists('models'): - shutil.move('models', 'models_backup') - print("✓ Backed up models/ to models_backup/") - - if os.path.exists('models.py'): - shutil.move('models.py', 'models.py.backup') - print("✓ Backed up models.py to models.py.backup") - - try: - # Step 3: Get models.py from baseline commit - print(f"\nExtracting models.py from commit {BASELINE_COMMIT[:8]}...") - result = subprocess.run( - f"git show {BASELINE_COMMIT}:models.py > models.py", - shell=True, - capture_output=True, - text=True - ) - - if result.returncode != 0: - print("❌ Failed to extract models.py from baseline commit!") - print("Error:", result.stderr) - return 1 - - print("✓ Extracted models.py") - - # Step 4: Initialize Flask-Migrate - print("\nInitializing Flask-Migrate...") - run_command("flask db init", "Creating migrations directory") - - # Step 5: Create baseline migration - print("\nCreating baseline migration...") - run_command( - 'flask db migrate -m "Baseline schema from commit 4214e88"', - "Generating migration" - ) - - print("✅ Baseline migration created!") - - finally: - # Step 6: Always restore original structure - print("\nRestoring original models structure...") - - if os.path.exists('models.py'): - os.remove('models.py') - print("✓ Removed temporary models.py") - - if os.path.exists('models.py.backup'): - shutil.move('models.py.backup', 'models.py') - print("✓ Restored models.py.backup") - - if os.path.exists('models_backup'): - shutil.move('models_backup', 'models') - print("✓ Restored models/ 
directory") - - # Step 7: Add note to migration - print("\nFinalizing migration...") - try: - import glob - migration_files = glob.glob('migrations/versions/*.py') - if migration_files: - latest = max(migration_files, key=os.path.getctime) - - with open(latest, 'r') as f: - content = f.read() - - note = '''"""BASELINE MIGRATION FROM COMMIT 4214e88 - -This represents the database schema from the monolithic models.py file. -DO NOT MODIFY THIS MIGRATION. - -For existing databases: flask db stamp head -For new databases: flask db upgrade -""" - -''' - with open(latest, 'w') as f: - f.write(note + content) - - print(f"✓ Added baseline note to {os.path.basename(latest)}") - except Exception as e: - print(f"⚠️ Could not add note to migration: {e}") - - # Step 8: Summary - print("\n" + "="*60) - print("✨ SUCCESS!") - print("="*60) - print("\nBaseline migration created from commit 4214e88") - print("\nNext steps:") - print("1. For existing database: flask db stamp head") - print("2. For new database: flask db upgrade") - print("3. Create new migrations: flask db migrate -m 'Your changes'") - print("\nIMPORTANT: Review the migration in migrations/versions/ before applying!") - - return 0 - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/startup_postgres_safe.sh b/startup_postgres_safe.sh deleted file mode 100755 index 58baf0d..0000000 --- a/startup_postgres_safe.sh +++ /dev/null @@ -1,124 +0,0 @@ -#!/bin/bash -set -e - -echo "Starting TimeTrack application (PostgreSQL-only mode)..." - -# Check for debug/bypass mode -if [ "$SKIP_MIGRATIONS" = "true" ]; then - echo "⚠️ SKIP_MIGRATIONS=true - Skipping all migration steps!" -else - # Wait for PostgreSQL to be ready - echo "Waiting for PostgreSQL to be ready..." - while ! pg_isready -h db -p 5432 -U "$POSTGRES_USER" > /dev/null 2>&1; do - echo "PostgreSQL is not ready yet. Waiting..." - sleep 2 - done - echo "PostgreSQL is ready!" - - # Run Flask-Migrate migrations - echo "" - echo "=== Running Database Migrations ===" - export FLASK_APP=app.py - - # Check if migrations directory exists - if [ -d "migrations" ]; then - echo "Applying database migrations..." - flask db upgrade - if [ $? -ne 0 ]; then - echo "❌ Migration failed! Check the logs above." - - # Don't exit in debug mode - if [ "$DEBUG_MODE" = "true" ]; then - echo "⚠️ DEBUG_MODE=true - Continuing despite migration failure..." - echo "⚠️ The application may not work correctly!" - echo "" - echo "To debug, you can:" - echo " 1. docker exec -it bash" - echo " 2. python diagnose_migrations.py" - echo " 3. flask db current" - echo "" - else - echo "To bypass migrations for debugging, restart with:" - echo " SKIP_MIGRATIONS=true docker-compose up" - echo "Or:" - echo " DEBUG_MODE=true docker-compose up" - exit 1 - fi - else - echo "✅ Database migrations completed successfully" - fi - else - echo "⚠️ No migrations directory found. Initializing Flask-Migrate..." - - # Try to initialize, but don't exit if it fails - python docker_migrate_init.py - if [ $? -ne 0 ]; then - echo "❌ Migration initialization failed!" - - if [ "$DEBUG_MODE" = "true" ]; then - echo "⚠️ DEBUG_MODE=true - Continuing without migrations..." - echo "⚠️ The database may not be properly initialized!" - else - echo "To debug the issue:" - echo " 1. Set DEBUG_MODE=true and restart" - echo " 2. docker exec -it bash" - echo " 3. 
python docker_migrate_init.py" - exit 1 - fi - else - # Check if database has existing tables - python -c " -from app import app, db -with app.app_context(): - inspector = db.inspect(db.engine) - tables = [t for t in inspector.get_table_names() if t != 'alembic_version'] - if tables: - print('has_tables') -" > /tmp/db_check.txt 2>/dev/null || echo "db_check_failed" > /tmp/db_check.txt - - if grep -q "has_tables" /tmp/db_check.txt 2>/dev/null; then - echo "📊 Existing database detected. Marking as current..." - flask db stamp head - echo "✅ Database marked as current" - elif grep -q "db_check_failed" /tmp/db_check.txt 2>/dev/null; then - echo "⚠️ Could not check database tables" - if [ "$DEBUG_MODE" != "true" ]; then - exit 1 - fi - else - echo "🆕 Empty database detected. Creating tables..." - flask db upgrade - if [ $? -ne 0 ]; then - echo "❌ Failed to create database tables!" - if [ "$DEBUG_MODE" != "true" ]; then - exit 1 - fi - else - echo "✅ Database tables created" - fi - fi - - rm -f /tmp/db_check.txt - fi - fi - - # Legacy migration support (can be removed after full transition) - if [ -f "migrations_old/run_postgres_migrations.py" ]; then - echo "" - echo "=== Checking Legacy Migrations ===" - echo "Found old migration system. Consider removing after confirming Flask-Migrate is working." - fi -fi - -# Start the Flask application with gunicorn -echo "" -echo "=== Starting Application ===" -echo "Starting Flask application with gunicorn..." - -# In debug mode, start with more verbose logging -if [ "$DEBUG_MODE" = "true" ]; then - echo "🐛 Running in DEBUG MODE with verbose logging" - exec gunicorn --bind 0.0.0.0:5000 --workers 1 --threads 2 --timeout 30 --log-level debug --access-logfile - --error-logfile - app:app -else - exec gunicorn --bind 0.0.0.0:5000 --workers 4 --threads 2 --timeout 30 app:app -fi \ No newline at end of file diff --git a/uwsgi.ini b/uwsgi.ini deleted file mode 100644 index 8bff069..0000000 --- a/uwsgi.ini +++ /dev/null @@ -1,55 +0,0 @@ -[uwsgi] -# Application module -wsgi-file = app.py -callable = app -pythonpath = /app -chdir = /app - -# Process management -master = true -processes = 4 -threads = 2 -max-requests = 1000 -harakiri = 30 -thunder-lock = true - -# UNIX Domain Socket configuration for nginx -socket = /host/shared/uwsgi.sock -chmod-socket = 666 -chown-socket = www-data:www-data - -# HTTP socket for direct access -http-socket = :5000 - -vacuum = true - -# Logging -logto = /var/log/uwsgi/timetrack.log -log-maxsize = 50000000 -disable-logging = false - -# Memory and CPU optimization -memory-report = true -cpu-affinity = 1 -reload-on-rss = 512 -worker-reload-mercy = 60 - -# Security -no-site = true -strict = true - -# Hot reload in development -py-autoreload = 1 - -# Buffer size -buffer-size = 32768 - -# Enable stats server (optional) -stats = 127.0.0.1:9191 -stats-http = true - -# Die on term signal -die-on-term = true - -# Lazy apps for better memory usage -lazy-apps = true \ No newline at end of file From 2402b8a570786d8d9fa7d6945d122309be20291c Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Wed, 9 Jul 2025 21:35:25 +0200 Subject: [PATCH 12/14] Fix Project deletion. 
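
The diff below fixes a foreign-key ordering bug: time entries reference tasks, so deleting tasks before the time entries that point at them fails. As a minimal sketch of the ordering this patch establishes (assuming the TimeEntry, Comment, Task, and Sprint models and the db session are importable from this app's models package; the helper name is hypothetical):

```python
# Hypothetical helper mirroring the delete order this patch enforces.
from models import db, Task, TimeEntry, Comment, Sprint  # assumed import path

def delete_project_children(project_id):
    # Task ids for this project, used as a subquery in the IN clauses below
    task_ids = db.session.query(Task.id).filter(Task.project_id == project_id)

    # 1. Remove rows that reference tasks before the tasks themselves
    TimeEntry.query.filter_by(project_id=project_id).delete()
    TimeEntry.query.filter(TimeEntry.task_id.in_(task_ids)).delete(synchronize_session=False)
    Comment.query.filter(Comment.task_id.in_(task_ids)).delete(synchronize_session=False)

    # 2. Tasks and sprints can now be deleted without FK violations
    Task.query.filter_by(project_id=project_id).delete()
    Sprint.query.filter_by(project_id=project_id).delete()
    db.session.commit()
```

An alternative would be `ondelete='CASCADE'` on the foreign keys (as NoteLink already uses), but that requires a schema migration; the explicit ordering keeps the fix in application code.
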
--- routes/projects.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/routes/projects.py b/routes/projects.py index a5221c0..12286d0 100644 --- a/routes/projects.py +++ b/routes/projects.py @@ -163,6 +163,15 @@ def delete_project(project_id): # Delete all related data in the correct order + # Delete time entries first (they reference tasks) + # Delete by project_id + TimeEntry.query.filter_by(project_id=project_id).delete() + + # Also delete time entries that reference tasks in this project + TimeEntry.query.filter(TimeEntry.task_id.in_( + db.session.query(Task.id).filter(Task.project_id == project_id) + )).delete(synchronize_session=False) + # Delete comments on tasks in this project Comment.query.filter(Comment.task_id.in_( db.session.query(Task.id).filter(Task.project_id == project_id) @@ -182,15 +191,12 @@ def delete_project(project_id): ) ).delete(synchronize_session=False) - # Delete tasks + # Delete tasks (after all references are removed) Task.query.filter_by(project_id=project_id).delete() # Delete sprints Sprint.query.filter_by(project_id=project_id).delete() - # Delete time entries - TimeEntry.query.filter_by(project_id=project_id).delete() - # Finally, delete the project project_repo.delete(project) db.session.commit() From baa8f8ff531b098840643a9869956a4118e70110 Mon Sep 17 00:00:00 2001 From: Jens Luedicke Date: Thu, 10 Jul 2025 08:50:02 +0200 Subject: [PATCH 13/14] Add initial db migrations for Flask Migrate. --- apply_migration.py | 16 ++ check_migration_state.py | 65 +++++ clean_migration_state.py | 96 +++++++ create_migration.py | 23 ++ docker-compose.yml | 1 - init_db.py | 27 ++ migrations/README | 1 + migrations/alembic.ini | 50 ++++ migrations/env.py | 91 +++++++ migrations/script.py.mako | 24 ++ .../c72667903a91_initial_migration.py | 251 ++++++++++++++++++ test_migrate.py | 19 ++ 12 files changed, 663 insertions(+), 1 deletion(-) create mode 100644 apply_migration.py create mode 100644 check_migration_state.py create mode 100644 clean_migration_state.py create mode 100644 create_migration.py create mode 100644 init_db.py create mode 100644 migrations/README create mode 100644 migrations/alembic.ini create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/c72667903a91_initial_migration.py create mode 100644 test_migrate.py diff --git a/apply_migration.py b/apply_migration.py new file mode 100644 index 0000000..cd0408b --- /dev/null +++ b/apply_migration.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +"""Apply database migrations with Flask-Migrate""" + +from flask_migrate import upgrade +from app import app, db + +if __name__ == '__main__': + with app.app_context(): + print("Applying migrations...") + try: + upgrade() + print("Migrations applied successfully!") + except Exception as e: + print(f"Error applying migrations: {e}") + import traceback + traceback.print_exc() \ No newline at end of file diff --git a/check_migration_state.py b/check_migration_state.py new file mode 100644 index 0000000..796ea1f --- /dev/null +++ b/check_migration_state.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +"""Check and fix migration state in the database""" + +from app import app, db +from sqlalchemy import text + +def check_alembic_version(): + """Check the current alembic version in the database""" + with app.app_context(): + try: + # Check if alembic_version table exists + result = db.session.execute(text( + "SELECT table_name FROM information_schema.tables " + "WHERE table_schema = 
'public' AND table_name = 'alembic_version'" + )) + + if result.rowcount == 0: + print("No alembic_version table found. This is a fresh database.") + return None + + # Get current version + result = db.session.execute(text("SELECT version_num FROM alembic_version")) + row = result.fetchone() + + if row: + print(f"Current migration version in database: {row[0]}") + return row[0] + else: + print("alembic_version table exists but is empty") + return None + + except Exception as e: + print(f"Error checking migration state: {e}") + return None + +def clean_migration_state(): + """Clean up the migration state""" + with app.app_context(): + try: + print("\nCleaning migration state...") + # Drop the alembic_version table + db.session.execute(text("DROP TABLE IF EXISTS alembic_version")) + db.session.commit() + print("Migration state cleaned successfully!") + return True + except Exception as e: + print(f"Error cleaning migration state: {e}") + db.session.rollback() + return False + +if __name__ == '__main__': + print("Checking migration state...") + version = check_alembic_version() + + if version: + print(f"\nThe database references migration '{version}' which doesn't exist in files.") + response = input("Do you want to clean the migration state? (yes/no): ") + + if response.lower() == 'yes': + if clean_migration_state(): + print("\nYou can now create a fresh initial migration.") + else: + print("\nFailed to clean migration state.") + else: + print("\nNo migration issues found. You can create a fresh initial migration.") \ No newline at end of file diff --git a/clean_migration_state.py b/clean_migration_state.py new file mode 100644 index 0000000..23712f1 --- /dev/null +++ b/clean_migration_state.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +"""Clean migration state and handle orphaned tables""" + +from app import app, db +from sqlalchemy import text + +def get_all_tables(): + """Get all tables in the database""" + with app.app_context(): + result = db.session.execute(text( + "SELECT table_name FROM information_schema.tables " + "WHERE table_schema = 'public' AND table_type = 'BASE TABLE'" + )) + return [row[0] for row in result] + +def check_migration_state(): + """Check current migration state""" + with app.app_context(): + try: + result = db.session.execute(text("SELECT version_num FROM alembic_version")) + row = result.fetchone() + if row: + print(f"Current migration version: {row[0]}") + return row[0] + except: + print("No alembic_version table found") + return None + +def clean_migration_only(): + """Clean only the migration state, keep all other tables""" + with app.app_context(): + try: + print("Cleaning migration state only...") + db.session.execute(text("DELETE FROM alembic_version")) + db.session.commit() + print("Migration state cleaned successfully!") + return True + except Exception as e: + print(f"Error: {e}") + db.session.rollback() + return False + +def list_orphaned_tables(): + """List tables that exist in DB but not in models""" + with app.app_context(): + all_tables = get_all_tables() + + # Get tables from current models + model_tables = set() + for table in db.metadata.tables.values(): + model_tables.add(table.name) + + # Find orphaned tables + orphaned = [] + for table in all_tables: + if table not in model_tables and table != 'alembic_version': + orphaned.append(table) + + return orphaned + +if __name__ == '__main__': + print("=== Migration State Check ===") + + # Check current state + version = check_migration_state() + + # List all tables + print("\n=== Database Tables 
===") + tables = get_all_tables() + for table in sorted(tables): + print(f" - {table}") + + # Check for orphaned tables + orphaned = list_orphaned_tables() + if orphaned: + print("\n=== Orphaned Tables (not in current models) ===") + for table in sorted(orphaned): + print(f" - {table}") + print("\nThese tables exist in the database but are not defined in your current models.") + print("They might be from old features or previous schema versions.") + + if version: + print(f"\n=== Action Required ===") + print(f"The database has migration '{version}' but no migration files exist.") + print("\nOptions:") + print("1. Clean migration state only (keeps all tables)") + print("2. Cancel and handle manually") + + choice = input("\nEnter your choice (1 or 2): ") + + if choice == '1': + if clean_migration_only(): + print("\n✓ Migration state cleaned!") + print("You can now run: python create_migration.py") + else: + print("\nCancelled. No changes made.") \ No newline at end of file diff --git a/create_migration.py b/create_migration.py new file mode 100644 index 0000000..e40c921 --- /dev/null +++ b/create_migration.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +"""Create a new migration with Flask-Migrate""" + +import os +import sys +from flask_migrate import migrate as _migrate +from app import app, db + +if __name__ == '__main__': + with app.app_context(): + print("Creating migration...") + try: + # Get migration message from command line or use default + message = sys.argv[1] if len(sys.argv) > 1 else "Initial migration" + + # Create the migration + _migrate(message=message) + print(f"Migration '{message}' created successfully!") + print("Review the migration file in migrations/versions/") + print("To apply the migration, run: python apply_migration.py") + except Exception as e: + print(f"Error creating migration: {e}") + sys.exit(1) \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 259bc97..777fa96 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -48,7 +48,6 @@ services: condition: service_healthy volumes: - ${DATA_PATH:-./data}:/data - - shared_socket:/host/shared volumes: postgres_data: diff --git a/init_db.py b/init_db.py new file mode 100644 index 0000000..5ab7a44 --- /dev/null +++ b/init_db.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +"""Initialize the database migrations manually""" + +import os +import sys +from flask import Flask +from flask_sqlalchemy import SQLAlchemy +from flask_migrate import Migrate, init + +# Create a minimal Flask app +app = Flask(__name__) +app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:////data/timetrack.db') +app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + +# Create db and migrate instances +db = SQLAlchemy(app) +migrate = Migrate(app, db) + +if __name__ == '__main__': + with app.app_context(): + print("Initializing migration repository...") + try: + init() + print("Migration repository initialized successfully!") + except Exception as e: + print(f"Error: {e}") + sys.exit(1) \ No newline at end of file diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..0e04844 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..ec9d45c --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..68feded --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,91 @@ +from __future__ import with_statement + +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option( + 'sqlalchemy.url', + str(current_app.extensions['migrate'].db.get_engine().url).replace( + '%', '%%')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = current_app.extensions['migrate'].db.get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/c72667903a91_initial_migration.py b/migrations/versions/c72667903a91_initial_migration.py new file mode 100644 index 0000000..6629e7a --- /dev/null +++ b/migrations/versions/c72667903a91_initial_migration.py @@ -0,0 +1,251 @@ +"""Initial migration + +Revision ID: c72667903a91 +Revises: +Create Date: 2025-07-10 08:35:55.412151 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'c72667903a91' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('kanban_card') + op.drop_table('kanban_column') + op.drop_table('kanban_board') + op.drop_index(op.f('idx_invitation_company'), table_name='company_invitation') + op.drop_index(op.f('idx_invitation_email'), table_name='company_invitation') + op.drop_index(op.f('idx_invitation_expires'), table_name='company_invitation') + op.drop_index(op.f('idx_invitation_token'), table_name='company_invitation') + op.drop_constraint(op.f('company_settings_created_by_id_fkey'), 'company_settings', type_='foreignkey') + op.drop_column('company_settings', 'allow_team_visibility_comments') + op.drop_column('company_settings', 'round_time_to') + op.drop_column('company_settings', 'time_tracking_mode') + op.drop_column('company_settings', 'enable_projects') + op.drop_column('company_settings', 'default_comment_visibility') + op.drop_column('company_settings', 'require_task_assignment') + op.drop_column('company_settings', 'enable_teams') + op.drop_column('company_settings', 'require_project_selection') + op.drop_column('company_settings', 'restrict_project_access_by_team') + op.drop_column('company_settings', 'enable_invoicing') + op.drop_column('company_settings', 'auto_break_deduction') + op.drop_column('company_settings', 'allow_task_creation_by_members') + op.drop_column('company_settings', 'created_by_id') + op.drop_column('company_settings', 'enable_reports') + op.drop_column('company_settings', 'require_daily_notes') + op.drop_column('company_settings', 'min_hours_per_entry') + op.drop_column('company_settings', 'default_currency') + op.drop_column('company_settings', 'allow_manual_time') + op.drop_constraint(op.f('uq_company_work_config'), 'company_work_config', type_='unique') + op.drop_index(op.f('idx_note_folder'), table_name='note') + op.drop_index(op.f('idx_note_folder_company'), table_name='note_folder') + op.drop_index(op.f('idx_note_folder_created_by'), table_name='note_folder') + op.drop_index(op.f('idx_note_folder_parent_path'), table_name='note_folder') + op.drop_constraint(op.f('note_link_target_note_id_fkey'), 'note_link', type_='foreignkey') + op.drop_constraint(op.f('note_link_source_note_id_fkey'), 'note_link', type_='foreignkey') + op.create_foreign_key(None, 'note_link', 'note', ['target_note_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key(None, 'note_link', 'note', ['source_note_id'], ['id'], ondelete='CASCADE') + op.drop_index(op.f('idx_note_share_created_by'), table_name='note_share') + op.drop_index(op.f('idx_note_share_note_id'), table_name='note_share') + op.drop_index(op.f('idx_note_share_token'), table_name='note_share') + op.drop_table_comment( + 'note_share', + existing_comment='Public sharing links for notes with optional password protection and view limits', + schema=None + ) + op.alter_column('project_category', 'name', + existing_type=sa.VARCHAR(length=100), + type_=sa.String(length=50), + existing_nullable=False) + op.alter_column('project_category', 'description', + existing_type=sa.TEXT(), + type_=sa.String(length=255), + existing_nullable=True) + op.drop_constraint(op.f('project_category_created_by_id_fkey'), 'project_category', type_='foreignkey') + op.drop_column('project_category', 'icon') + op.drop_column('project_category', 'created_by_id') + op.drop_column('project_category', 'updated_at') + op.alter_column('sub_task', 'status', + existing_type=postgresql.ENUM('TODO', 'IN_PROGRESS', 'IN_REVIEW', 'DONE', 'CANCELLED', 'ARCHIVED', 'In Progress', 'To Do', 'Cancelled', 'In Review', 'Archived', 'Done', name='taskstatus'), + nullable=True, 
+        existing_server_default=sa.text("'TODO'::taskstatus"))
+    op.alter_column('task', 'status',
+        existing_type=postgresql.ENUM('TODO', 'IN_PROGRESS', 'IN_REVIEW', 'DONE', 'CANCELLED', 'ARCHIVED', 'In Progress', 'To Do', 'Cancelled', 'In Review', 'Archived', 'Done', name='taskstatus'),
+        nullable=True,
+        existing_server_default=sa.text("'TODO'::taskstatus"))
+    op.drop_index(op.f('idx_user_default_dashboard'), table_name='user_dashboard')
+    op.drop_constraint(op.f('user_preferences_default_project_id_fkey'), 'user_preferences', type_='foreignkey')
+    op.drop_column('user_preferences', 'daily_summary')
+    op.drop_column('user_preferences', 'mention_notifications')
+    op.drop_column('user_preferences', 'pomodoro_duration')
+    op.drop_column('user_preferences', 'keyboard_shortcuts')
+    op.drop_column('user_preferences', 'week_start')
+    op.drop_column('user_preferences', 'default_project_id')
+    op.drop_column('user_preferences', 'sound_enabled')
+    op.drop_column('user_preferences', 'email_notifications')
+    op.drop_column('user_preferences', 'task_assigned_notifications')
+    op.drop_column('user_preferences', 'pomodoro_enabled')
+    op.drop_column('user_preferences', 'email_daily_summary')
+    op.drop_column('user_preferences', 'email_weekly_summary')
+    op.drop_column('user_preferences', 'task_reminders')
+    op.drop_column('user_preferences', 'auto_start_timer')
+    op.drop_column('user_preferences', 'weekly_report')
+    op.drop_column('user_preferences', 'push_notifications')
+    op.drop_column('user_preferences', 'compact_mode')
+    op.drop_column('user_preferences', 'pomodoro_break')
+    op.drop_column('user_preferences', 'idle_time_detection')
+    op.drop_column('user_preferences', 'task_completed_notifications')
+    op.drop_column('user_preferences', 'show_weekends')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('user_preferences', sa.Column('show_weekends', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('task_completed_notifications', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('idle_time_detection', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('pomodoro_break', sa.INTEGER(), server_default=sa.text('5'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('compact_mode', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('push_notifications', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('weekly_report', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('auto_start_timer', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('task_reminders', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('email_weekly_summary', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('email_daily_summary', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('pomodoro_enabled', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('task_assigned_notifications', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('email_notifications', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('sound_enabled', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('default_project_id', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('week_start', sa.INTEGER(), server_default=sa.text('1'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('keyboard_shortcuts', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('pomodoro_duration', sa.INTEGER(), server_default=sa.text('25'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('mention_notifications', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('user_preferences', sa.Column('daily_summary', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.create_foreign_key(op.f('user_preferences_default_project_id_fkey'), 'user_preferences', 'project', ['default_project_id'], ['id'])
+    op.create_index(op.f('idx_user_default_dashboard'), 'user_dashboard', ['user_id', 'is_default'], unique=False)
+    op.alter_column('task', 'status',
+        existing_type=postgresql.ENUM('TODO', 'IN_PROGRESS', 'IN_REVIEW', 'DONE', 'CANCELLED', 'ARCHIVED', 'In Progress', 'To Do', 'Cancelled', 'In Review', 'Archived', 'Done', name='taskstatus'),
+        nullable=False,
+        existing_server_default=sa.text("'TODO'::taskstatus"))
+    op.alter_column('sub_task', 'status',
+        existing_type=postgresql.ENUM('TODO', 'IN_PROGRESS', 'IN_REVIEW', 'DONE', 'CANCELLED', 'ARCHIVED', 'In Progress', 'To Do', 'Cancelled', 'In Review', 'Archived', 'Done', name='taskstatus'),
+        nullable=False,
+        existing_server_default=sa.text("'TODO'::taskstatus"))
+    op.add_column('project_category', sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
+    op.add_column('project_category', sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=False))
+    op.add_column('project_category', sa.Column('icon', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
+    op.create_foreign_key(op.f('project_category_created_by_id_fkey'), 'project_category', 'user', ['created_by_id'], ['id'])
+    op.alter_column('project_category', 'description',
+        existing_type=sa.String(length=255),
+        type_=sa.TEXT(),
+        existing_nullable=True)
+    op.alter_column('project_category', 'name',
+        existing_type=sa.String(length=50),
+        type_=sa.VARCHAR(length=100),
+        existing_nullable=False)
+    op.create_table_comment(
+        'note_share',
+        'Public sharing links for notes with optional password protection and view limits',
+        existing_comment=None,
+        schema=None
+    )
+    op.create_index(op.f('idx_note_share_token'), 'note_share', ['token'], unique=False)
+    op.create_index(op.f('idx_note_share_note_id'), 'note_share', ['note_id'], unique=False)
+    op.create_index(op.f('idx_note_share_created_by'), 'note_share', ['created_by_id'], unique=False)
+    # NOTE: autogenerate emitted None for the two constraint names below, which
+    # op.drop_constraint() cannot execute. The PostgreSQL default FK names are
+    # assumed here -- verify them against the live schema before downgrading.
+    op.drop_constraint('note_link_source_note_id_fkey', 'note_link', type_='foreignkey')
+    op.drop_constraint('note_link_target_note_id_fkey', 'note_link', type_='foreignkey')
+    op.create_foreign_key(op.f('note_link_source_note_id_fkey'), 'note_link', 'note', ['source_note_id'], ['id'])
+    op.create_foreign_key(op.f('note_link_target_note_id_fkey'), 'note_link', 'note', ['target_note_id'], ['id'])
+    op.create_index(op.f('idx_note_folder_parent_path'), 'note_folder', ['parent_path'], unique=False)
+    op.create_index(op.f('idx_note_folder_created_by'), 'note_folder', ['created_by_id'], unique=False)
+    op.create_index(op.f('idx_note_folder_company'), 'note_folder', ['company_id'], unique=False)
+    op.create_index(op.f('idx_note_folder'), 'note', ['folder'], unique=False)
+    op.create_unique_constraint(op.f('uq_company_work_config'), 'company_work_config', ['company_id'])
+    op.add_column('company_settings', sa.Column('allow_manual_time', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('default_currency', sa.VARCHAR(length=3), server_default=sa.text("'USD'::character varying"), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('min_hours_per_entry', postgresql.DOUBLE_PRECISION(precision=53), server_default=sa.text('0.0'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('require_daily_notes', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('enable_reports', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('allow_task_creation_by_members', sa.BOOLEAN(), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('auto_break_deduction', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('enable_invoicing', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('restrict_project_access_by_team', sa.BOOLEAN(), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('require_project_selection', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('enable_teams', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('require_task_assignment', sa.BOOLEAN(), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('default_comment_visibility', postgresql.ENUM('TEAM', 'COMPANY', name='commentvisibility'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('enable_projects', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('time_tracking_mode', sa.VARCHAR(length=20), server_default=sa.text("'flexible'::character varying"), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('round_time_to', sa.INTEGER(), server_default=sa.text('1'), autoincrement=False, nullable=True))
+    op.add_column('company_settings', sa.Column('allow_team_visibility_comments', sa.BOOLEAN(), autoincrement=False, nullable=True))
+    op.create_foreign_key(op.f('company_settings_created_by_id_fkey'), 'company_settings', 'user', ['created_by_id'], ['id'])
+    op.create_index(op.f('idx_invitation_token'), 'company_invitation', ['token'], unique=False)
+    op.create_index(op.f('idx_invitation_expires'), 'company_invitation', ['expires_at'], unique=False)
+    op.create_index(op.f('idx_invitation_email'), 'company_invitation', ['email'], unique=False)
+    op.create_index(op.f('idx_invitation_company'), 'company_invitation', ['company_id'], unique=False)
+    op.create_table('kanban_board',
+        sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('kanban_board_id_seq'::regclass)"), autoincrement=True, nullable=False),
+        sa.Column('name', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
+        sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+        sa.Column('company_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=True),
+        sa.Column('is_default', sa.BOOLEAN(), autoincrement=False, nullable=True),
+        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.ForeignKeyConstraint(['company_id'], ['company.id'], name='kanban_board_company_id_fkey'),
+        sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], name='kanban_board_created_by_id_fkey'),
+        sa.PrimaryKeyConstraint('id', name='kanban_board_pkey'),
+        sa.UniqueConstraint('company_id', 'name', name='uq_kanban_board_name_per_company'),
+        postgresql_ignore_search_path=False
+    )
+    op.create_table('kanban_column',
+        sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('kanban_column_id_seq'::regclass)"), autoincrement=True, nullable=False),
+        sa.Column('name', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
+        sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+        sa.Column('position', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
+        sa.Column('wip_limit', sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=True),
+        sa.Column('board_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.ForeignKeyConstraint(['board_id'], ['kanban_board.id'], name='kanban_column_board_id_fkey'),
+        sa.PrimaryKeyConstraint('id', name='kanban_column_pkey'),
+        sa.UniqueConstraint('board_id', 'name', name='uq_kanban_column_name_per_board'),
+        postgresql_ignore_search_path=False
+    )
+    op.create_table('kanban_card',
+        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
+        sa.Column('title', sa.VARCHAR(length=200), autoincrement=False, nullable=False),
+        sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
+        sa.Column('position', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('color', sa.VARCHAR(length=7), autoincrement=False, nullable=True),
+        sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=True),
+        sa.Column('column_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.Column('project_id', sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column('task_id', sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column('assigned_to_id', sa.INTEGER(), autoincrement=False, nullable=True),
+        sa.Column('due_date', sa.DATE(), autoincrement=False, nullable=True),
+        sa.Column('completed_date', sa.DATE(), autoincrement=False, nullable=True),
+        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+        sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=False),
+        sa.ForeignKeyConstraint(['assigned_to_id'], ['user.id'], name=op.f('kanban_card_assigned_to_id_fkey')),
+        sa.ForeignKeyConstraint(['column_id'], ['kanban_column.id'], name=op.f('kanban_card_column_id_fkey')),
+        sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], name=op.f('kanban_card_created_by_id_fkey')),
+        sa.ForeignKeyConstraint(['project_id'], ['project.id'], name=op.f('kanban_card_project_id_fkey')),
+        sa.ForeignKeyConstraint(['task_id'], ['task.id'], name=op.f('kanban_card_task_id_fkey')),
+        sa.PrimaryKeyConstraint('id', name=op.f('kanban_card_pkey'))
+    )
+    # ### end Alembic commands ###
diff --git a/test_migrate.py b/test_migrate.py
new file mode 100644
index 0000000..c3c135f
--- /dev/null
+++ b/test_migrate.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+"""Test script to verify Flask-Migrate setup"""
+
+from app import app, db, migrate
+from flask_migrate import init, migrate as _migrate, upgrade
+
+with app.app_context():
+    print("Flask app created successfully")
+    print(f"Database URI: {app.config['SQLALCHEMY_DATABASE_URI']}")
+    print(f"Migrate instance: {migrate}")
+    print(f"Available commands: {app.cli.commands}")
+
+    # Check if 'db' command is registered
+    if 'db' in app.cli.commands:
+        print("'db' command is registered!")
+        print(f"Subcommands: {list(app.cli.commands['db'].commands.keys())}")
+    else:
+        print("ERROR: 'db' command is NOT registered!")
+        print(f"Available commands: {list(app.cli.commands.keys())}")
\ No newline at end of file

From 5f2a634ac832e7cf9e3411be9b517988cb69e375 Mon Sep 17 00:00:00 2001
From: Jens Luedicke
Date: Thu, 10 Jul 2025 08:57:18 +0200
Subject: [PATCH 14/14] Fix for HTTPS handling in Flask.

---
 app.py              | 19 +++++++++++++++++++
 security_headers.py | 40 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 59 insertions(+)
 create mode 100644 security_headers.py

diff --git a/app.py b/app.py
index 9ccc23f..e485a0c 100644
--- a/app.py
+++ b/app.py
@@ -48,6 +48,7 @@
 # Import utility functions
 from utils.auth import is_system_admin, can_access_system_settings
+from security_headers import init_security
 from utils.settings import get_system_setting
 
 # Import analytics data function from export module
@@ -66,6 +67,24 @@
 app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'dev_key_for_timetrack')
 app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)  # Session lasts for 7 days
 
+# Fix for HTTPS behind a proxy (nginx, load balancer, etc.)
+# This ensures forms use https:// URLs when behind a reverse proxy
+from werkzeug.middleware.proxy_fix import ProxyFix
+app.wsgi_app = ProxyFix(
+    app.wsgi_app,
+    x_for=1,     # Trust X-Forwarded-For
+    x_proto=1,   # Trust X-Forwarded-Proto
+    x_host=1,    # Trust X-Forwarded-Host
+    x_prefix=1   # Trust X-Forwarded-Prefix
+)
+
+# Force HTTPS URL scheme in production
+if not app.debug and os.environ.get('FORCE_HTTPS', 'false').lower() in ['true', '1', 'yes']:
+    app.config['PREFERRED_URL_SCHEME'] = 'https'
+
+# Initialize security headers
+init_security(app)
+
 # Configure Flask-Mail
 app.config['MAIL_SERVER'] = os.environ.get('MAIL_SERVER', 'smtp.example.com')
 app.config['MAIL_PORT'] = int(os.environ.get('MAIL_PORT') or 587)
diff --git a/security_headers.py b/security_headers.py
new file mode 100644
index 0000000..8e977ea
--- /dev/null
+++ b/security_headers.py
@@ -0,0 +1,40 @@
+"""
+Security headers middleware for Flask.
+Add this to ensure secure form submission and prevent security warnings.
+"""
+
+from flask import current_app, request
+
+def add_security_headers(response):
+    """Add security headers to all responses."""
+
+    # Force HTTPS for all resources (current_app.debug, since flask.request has no app attribute)
+    if request.is_secure or not current_app.debug:
+        # Strict Transport Security - force HTTPS for 1 year
+        response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
+
+        # Content Security Policy - allow forms to submit only over HTTPS
+        # Adjust this based on your needs
+        csp = (
+            "default-src 'self' https:; "
+            "script-src 'self' 'unsafe-inline' 'unsafe-eval' https:; "
+            "style-src 'self' 'unsafe-inline' https:; "
+            "img-src 'self' data: https:; "
+            "font-src 'self' data: https:; "
+            "form-action 'self' https:; "  # Forms can only submit to HTTPS
+            "upgrade-insecure-requests; "  # Upgrade any HTTP requests to HTTPS
+        )
+        response.headers['Content-Security-Policy'] = csp
+
+    # Other security headers
+    response.headers['X-Content-Type-Options'] = 'nosniff'
+    response.headers['X-Frame-Options'] = 'SAMEORIGIN'
+    response.headers['X-XSS-Protection'] = '1; mode=block'
+    response.headers['Referrer-Policy'] = 'strict-origin-when-cross-origin'
+
+    return response
+
+
+def init_security(app):
+    """Initialize security headers for the Flask app."""
+    app.after_request(add_security_headers)
\ No newline at end of file