Initial commit
Some checks failed
CI/CD Pipeline / Code Quality & Linting (push) Has been cancelled
CI/CD Pipeline / Policy Validation (push) Has been cancelled
CI/CD Pipeline / Test Suite (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-coverage) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-extract) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-firm-connectors) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-forms) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-hmrc) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-ingestion) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-kg) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-normalize-map) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-ocr) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-rag-indexer) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-rag-retriever) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-reason) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (svc-rpa) (push) Has been cancelled
CI/CD Pipeline / Build Docker Images (ui-review) (push) Has been cancelled
CI/CD Pipeline / Security Scanning (svc-coverage) (push) Has been cancelled
CI/CD Pipeline / Security Scanning (svc-extract) (push) Has been cancelled
CI/CD Pipeline / Security Scanning (svc-kg) (push) Has been cancelled
CI/CD Pipeline / Security Scanning (svc-rag-retriever) (push) Has been cancelled
CI/CD Pipeline / Security Scanning (ui-review) (push) Has been cancelled
CI/CD Pipeline / Generate SBOM (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / Notifications (push) Has been cancelled
apps/svc_coverage/alembic/env.py (new file, 92 lines)
@@ -0,0 +1,92 @@
"""Alembic environment configuration for coverage service."""

import os
import sys
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

# Add the parent directory to the path so we can import our models
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", ".."))

# Import your models here
from apps.svc_coverage.models import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_url():
    """Get database URL from environment or config."""
    return os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url"))


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    configuration = config.get_section(config.config_ini_section)
    configuration["sqlalchemy.url"] = get_url()

    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
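
Because get_url() prefers the DATABASE_URL environment variable over the ini file's sqlalchemy.url, the same env.py serves local runs and containerised deployments. A minimal sketch of driving these migrations programmatically, assuming an alembic.ini whose script_location points at apps/svc_coverage/alembic (the ini path and connection string below are illustrative, not part of this commit):

"""Sketch: apply the coverage-service migrations from Python."""
import os

from alembic import command
from alembic.config import Config

# env.py reads DATABASE_URL first, so set it before invoking Alembic.
os.environ.setdefault("DATABASE_URL", "postgresql://coverage:coverage@localhost:5432/coverage")

cfg = Config("apps/svc_coverage/alembic.ini")  # hypothetical ini location
command.upgrade(cfg, "head")  # runs run_migrations_online() via env.py

The equivalent CLI invocation is alembic -c apps/svc_coverage/alembic.ini upgrade head; the offline path (run_migrations_offline) is reached by adding --sql to emit a SQL script instead of connecting.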
apps/svc_coverage/alembic/script.py.mako (new file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
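
Every generated revision file is rendered from this Mako template, with the ${upgrades} and ${downgrades} slots filled in by hand or by autogenerate. A minimal sketch of producing a new revision against the Base.metadata wired up in env.py (the ini path and message are illustrative assumptions); the 0001 migration that follows shows the resulting file layout:

"""Sketch: render a new revision file from script.py.mako."""
from alembic import command
from alembic.config import Config

cfg = Config("apps/svc_coverage/alembic.ini")  # hypothetical ini location

# Autogenerate diffs Base.metadata against the live database and fills
# the upgrades/downgrades template slots with the detected changes.
command.revision(cfg, message="describe the schema change", autogenerate=True)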
@@ -0,0 +1,76 @@
"""Initial coverage tables

Revision ID: 0001
Revises:
Create Date: 2024-09-14 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create coverage_versions table
    op.create_table(
        'coverage_versions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('version', sa.String(length=50), nullable=False),
        sa.Column('jurisdiction', sa.String(length=10), nullable=False),
        sa.Column('tax_year', sa.String(length=10), nullable=False),
        sa.Column('tenant_id', sa.String(length=100), nullable=True),
        sa.Column('source_files', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('compiled_at', sa.DateTime(), nullable=False),
        sa.Column('hash', sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for coverage_versions
    op.create_index('ix_coverage_versions_version', 'coverage_versions', ['version'])
    op.create_index('ix_coverage_versions_jurisdiction_tax_year', 'coverage_versions', ['jurisdiction', 'tax_year'])
    op.create_index('ix_coverage_versions_tenant_id', 'coverage_versions', ['tenant_id'])
    op.create_index('ix_coverage_versions_hash', 'coverage_versions', ['hash'])

    # Create coverage_audit table
    op.create_table(
        'coverage_audit',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('taxpayer_id', sa.String(length=100), nullable=False),
        sa.Column('tax_year', sa.String(length=10), nullable=False),
        sa.Column('policy_version', sa.String(length=50), nullable=False),
        sa.Column('overall_status', sa.String(length=20), nullable=False),
        sa.Column('blocking_items', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('trace_id', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for coverage_audit
    op.create_index('ix_coverage_audit_taxpayer_id', 'coverage_audit', ['taxpayer_id'])
    op.create_index('ix_coverage_audit_tax_year', 'coverage_audit', ['tax_year'])
    op.create_index('ix_coverage_audit_taxpayer_tax_year', 'coverage_audit', ['taxpayer_id', 'tax_year'])
    op.create_index('ix_coverage_audit_created_at', 'coverage_audit', ['created_at'])
    op.create_index('ix_coverage_audit_trace_id', 'coverage_audit', ['trace_id'])


def downgrade() -> None:
    # Drop coverage_audit table and indexes
    op.drop_index('ix_coverage_audit_trace_id', table_name='coverage_audit')
    op.drop_index('ix_coverage_audit_created_at', table_name='coverage_audit')
    op.drop_index('ix_coverage_audit_taxpayer_tax_year', table_name='coverage_audit')
    op.drop_index('ix_coverage_audit_tax_year', table_name='coverage_audit')
    op.drop_index('ix_coverage_audit_taxpayer_id', table_name='coverage_audit')
    op.drop_table('coverage_audit')

    # Drop coverage_versions table and indexes
    op.drop_index('ix_coverage_versions_hash', table_name='coverage_versions')
    op.drop_index('ix_coverage_versions_tenant_id', table_name='coverage_versions')
    op.drop_index('ix_coverage_versions_jurisdiction_tax_year', table_name='coverage_versions')
    op.drop_index('ix_coverage_versions_version', table_name='coverage_versions')
    op.drop_table('coverage_versions')
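
env.py imports Base from apps.svc_coverage.models, a module that is not part of this diff. A minimal sketch of declarative models consistent with the tables and indexes created by revision 0001 (class and attribute names are assumptions inferred from the migration, not the real module):

"""Sketch: ORM models that would back Base.metadata for autogenerate.

apps/svc_coverage/models.py is not shown in this commit; everything
below is inferred from the 0001 migration and may differ from it.
"""
from sqlalchemy import Column, DateTime, Index, Integer, String
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class CoverageVersion(Base):
    __tablename__ = "coverage_versions"
    # Composite index from the migration; single-column indexes use index=True.
    __table_args__ = (
        Index("ix_coverage_versions_jurisdiction_tax_year", "jurisdiction", "tax_year"),
    )

    id = Column(Integer, primary_key=True, autoincrement=True)
    version = Column(String(50), nullable=False, index=True)
    jurisdiction = Column(String(10), nullable=False)
    tax_year = Column(String(10), nullable=False)
    tenant_id = Column(String(100), index=True)
    source_files = Column(JSON, nullable=False)
    compiled_at = Column(DateTime, nullable=False)
    hash = Column(String(64), nullable=False, index=True)


class CoverageAudit(Base):
    __tablename__ = "coverage_audit"
    __table_args__ = (
        Index("ix_coverage_audit_taxpayer_tax_year", "taxpayer_id", "tax_year"),
    )

    id = Column(Integer, primary_key=True, autoincrement=True)
    taxpayer_id = Column(String(100), nullable=False, index=True)
    tax_year = Column(String(10), nullable=False, index=True)
    policy_version = Column(String(50), nullable=False)
    overall_status = Column(String(20), nullable=False)
    blocking_items = Column(JSON, nullable=False)
    created_at = Column(DateTime, nullable=False, index=True)
    trace_id = Column(String(100), index=True)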