endpoint setup

This commit is contained in:
exolonConfidental
2026-02-08 11:18:47 +05:30
parent 8fb3b7cf67
commit a77788fc47
33 changed files with 932 additions and 352 deletions

View File

@@ -21,12 +21,35 @@ from app.db.models.base import Base
from app.db.models.location import Location
from app.db.models.owner import Owner
from app.db.models.property import Property
from app.db.models.location import Location
from app.db.models.insurance_details import InsuranceDetails
target_metadata = Base.metadata
# ---- IMPORTANT: IGNORE POSTGIS TABLES ----
# Tables installed by the PostGIS / topology / tiger extensions; Alembic
# autogenerate must never try to create or drop them. Hoisted to module
# level so the sets are built once, not on every hook invocation.
_POSTGIS_TABLES = frozenset({
    "spatial_ref_sys",
    "layer",
    "topology",
    "geography_columns",
    "geometry_columns",
    "raster_columns",
    "raster_overviews",
})

# Schemas owned entirely by PostGIS extensions.
_POSTGIS_SCHEMAS = frozenset({"topology", "tiger", "tiger_data"})


def include_object(object, name, type_, reflected, compare_to):
    """Alembic ``include_object`` hook: filter out PostGIS-owned objects.

    Returns False for tables belonging to the PostGIS extension (matched
    by name, or by living in an extension-owned schema) so autogenerate
    does not emit drop/create operations for them; True for everything else.
    """
    # Ignore PostGIS internal tables by name.
    if type_ == "table" and name in _POSTGIS_TABLES:
        return False
    # Ignore anything living in an extension-owned schema.
    if getattr(object, "schema", None) in _POSTGIS_SCHEMAS:
        return False
    return True
# ---- OFFLINE MIGRATIONS ----
def run_migrations_offline():
url = config.get_main_option("sqlalchemy.url")
@@ -36,12 +59,15 @@ def run_migrations_offline():
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
include_object=include_object, # ✅ applied here
compare_type=True,
)
with context.begin_transaction():
context.run_migrations()
# ---- ONLINE MIGRATIONS (ASYNC) ----
async def run_migrations_online():
connectable = async_engine_from_config(
@@ -56,6 +82,8 @@ async def run_migrations_online():
context.configure(
connection=connection,
target_metadata=target_metadata,
include_object=include_object, # ✅ THIS WAS MISSING
compare_type=True,
)
with context.begin_transaction():
@@ -65,6 +93,8 @@ async def run_migrations_online():
await connectable.dispose()
# ---- ENTRYPOINT ----
def run():
if context.is_offline_mode():
@@ -74,4 +104,4 @@ def run():
asyncio.run(run_migrations_online())
run()
run()

View File

@@ -0,0 +1,60 @@
"""add insurance details table
Revision ID: 471bdc3c5b51
Revises: b7538fce8343
Create Date: 2026-02-07 23:47:44.253489
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '471bdc3c5b51'
down_revision: Union[str, Sequence[str], None] = 'b7538fce8343'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: create the ``insurance_details`` table.

    One row per insurance claim filed for a property; rows are removed
    automatically with their parent property (ON DELETE CASCADE), and
    ``claim_number`` is unique table-wide.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('insurance_details',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('property_id', sa.Integer(), nullable=False),
        # Core claim identification — required fields.
        sa.Column('insurance_company', sa.String(length=150), nullable=False),
        sa.Column('claim_number', sa.String(length=100), nullable=False),
        sa.Column('date_of_loss', sa.Date(), nullable=False),
        # Adjuster contact details (optional).
        sa.Column('adjuster_name', sa.String(length=150), nullable=True),
        sa.Column('adjuster_phone', sa.String(length=30), nullable=True),
        sa.Column('adjuster_email', sa.String(length=150), nullable=True),
        sa.Column('claim_filed', sa.Boolean(), nullable=False),
        sa.Column('claim_approved', sa.Boolean(), nullable=False),
        sa.Column('policy_number', sa.String(length=100), nullable=True),
        sa.Column('coverage_type', sa.String(length=50), nullable=True),
        sa.Column('claim_type', sa.String(length=50), nullable=True),
        # NOTE(review): monetary amounts are stored as Integer — presumably
        # whole currency units or cents; confirm the intended precision.
        sa.Column('deductible_amount', sa.Integer(), nullable=True),
        sa.Column('claim_amount', sa.Integer(), nullable=True),
        sa.Column('approved_amount', sa.Integer(), nullable=True),
        sa.Column('payment_status', sa.String(length=50), nullable=True),
        sa.Column('date_claim_filed', sa.Date(), nullable=True),
        sa.Column('date_claim_closed', sa.Date(), nullable=True),
        # Agent contact details (optional).
        sa.Column('insurance_agent_name', sa.String(length=150), nullable=True),
        sa.Column('insurance_agent_phone', sa.String(length=30), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        # Cascade delete: claims do not outlive their property.
        sa.ForeignKeyConstraint(['property_id'], ['properties.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('claim_number')
    )
    # Non-unique index for the frequent lookup "all claims of a property".
    op.create_index(op.f('ix_insurance_details_property_id'), 'insurance_details', ['property_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: drop the ``insurance_details`` table and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the index first, then the table that owns it.
    op.drop_index(op.f('ix_insurance_details_property_id'), table_name='insurance_details')
    op.drop_table('insurance_details')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,73 @@
"""add geom column
Revision ID: b7538fce8343
Revises: 6cd12cae8c96
Create Date: 2026-02-07 14:33:51.832269
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from geoalchemy2 import Geography
# revision identifiers, used by Alembic.
revision: str = 'b7538fce8343'
down_revision: Union[str, Sequence[str], None] = '6cd12cae8c96'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add a geography point column to ``locations``.

    Adds ``locations.geom`` (POINT, WGS84 / SRID 4326), a GiST index on it,
    and a uniqueness constraint over the (osm_type, osm_id) pair.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Autogenerate proposed dropping PostGIS-owned tables; intentionally
    # disabled — the extension manages these tables itself.
    # op.drop_table('layer')
    # op.drop_table('spatial_ref_sys')
    # op.drop_table('topology')

    # NOTE(review): adding a NOT NULL column with no server default fails if
    # `locations` already holds rows — confirm the table is empty at this
    # point in the migration history, or backfill first.
    # The redundant nullable=False formerly passed to Geography() itself was
    # removed: the Column-level nullable flag below governs the emitted DDL.
    op.add_column(
        'locations',
        sa.Column(
            'geom',
            Geography(
                geometry_type='POINT',
                srid=4326,
                dimension=2,
                from_text='ST_GeogFromText',
                name='geography',
            ),
            nullable=False,
        ),
    )
    # NOTE(review): geoalchemy2's spatial_index default may also create a
    # spatial index for this column; IF NOT EXISTS keeps the two from
    # colliding — confirm against the installed geoalchemy2 version.
    op.execute(
        "CREATE INDEX IF NOT EXISTS idx_locations_geom ON locations USING gist (geom);"
    )
    op.create_unique_constraint('uq_osm_location', 'locations', ['osm_type', 'osm_id'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema: undo the geom migration on ``locations``.

    Removes the (osm_type, osm_id) uniqueness constraint, the GiST index,
    and the ``geom`` column, in reverse order of their creation.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('uq_osm_location', 'locations', type_='unique')
    op.drop_index('idx_locations_geom', table_name='locations', postgresql_using='gist')
    op.drop_column('locations', 'geom')
    # The recreate statements below were autogenerated for PostGIS-owned
    # tables and are intentionally left disabled — the extension manages
    # these tables itself (they are never dropped by upgrade()).
    # op.create_table('topology',
    # sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
    # sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=False),
    # sa.Column('srid', sa.INTEGER(), autoincrement=False, nullable=False),
    # sa.Column('precision', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
    # sa.Column('hasz', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
    # sa.Column('useslargeids', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
    # sa.PrimaryKeyConstraint('id', name=op.f('topology_pkey')),
    # sa.UniqueConstraint('name', name=op.f('topology_name_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    # )
    # op.create_table('spatial_ref_sys',
    # sa.Column('srid', sa.INTEGER(), autoincrement=False, nullable=False),
    # sa.Column('auth_name', sa.VARCHAR(length=256), autoincrement=False, nullable=True),
    # sa.Column('auth_srid', sa.INTEGER(), autoincrement=False, nullable=True),
    # sa.Column('srtext', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
    # sa.Column('proj4text', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
    # sa.CheckConstraint('srid > 0 AND srid <= 998999', name=op.f('spatial_ref_sys_srid_check')),
    # sa.PrimaryKeyConstraint('srid', name=op.f('spatial_ref_sys_pkey'))
    # )
    # op.create_table('layer',
    # sa.Column('topology_id', sa.INTEGER(), autoincrement=False, nullable=False),
    # sa.Column('layer_id', sa.INTEGER(), autoincrement=False, nullable=False),
    # sa.Column('schema_name', sa.VARCHAR(), autoincrement=False, nullable=False),
    # sa.Column('table_name', sa.VARCHAR(), autoincrement=False, nullable=False),
    # sa.Column('feature_column', sa.VARCHAR(), autoincrement=False, nullable=False),
    # sa.Column('feature_type', sa.INTEGER(), autoincrement=False, nullable=False),
    # sa.Column('level', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=False),
    # sa.Column('child_id', sa.INTEGER(), autoincrement=False, nullable=True),
    # sa.ForeignKeyConstraint(['topology_id'], ['topology.id'], name=op.f('layer_topology_id_fkey')),
    # sa.PrimaryKeyConstraint('topology_id', 'layer_id', name=op.f('layer_pkey')),
    # sa.UniqueConstraint('schema_name', 'table_name', 'feature_column', name=op.f('layer_schema_name_table_name_feature_column_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    # )
    # ### end Alembic commands ###