Major Claude-assisted changes: add Alembic/Flask-Migrate scaffolding (alembic.ini, env.py, script.py.mako) and an initial baseline migration (b43a39b1cf25).

This commit is contained in:
2026-01-28 21:55:10 -05:00
parent 3f311980db
commit 2dbd3ea53f
41 changed files with 1235 additions and 278 deletions

1
migrations/README Normal file
View File

@@ -0,0 +1 @@
Single-database configuration for Flask.

50
migrations/alembic.ini Normal file
View File

@@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

113
migrations/env.py Normal file
View File

@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig
from flask import current_app
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use (alembic.ini).
config = context.config
# Interpret the config file for Python logging; this installs the
# handlers/formatters declared in alembic.ini's [loggers] sections.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
def get_engine():
    """Return the SQLAlchemy engine registered with Flask-Migrate."""
    db = current_app.extensions['migrate'].db
    try:
        # Flask-SQLAlchemy<3 and Alchemical expose an explicit getter.
        return db.get_engine()
    except (TypeError, AttributeError):
        # Flask-SQLAlchemy>=3 exposes the engine as a plain attribute.
        return db.engine
def get_engine_url():
    """Render the database URL with '%' escaped for ConfigParser interpolation."""
    url = get_engine().url
    try:
        rendered = url.render_as_string(hide_password=False)
    except AttributeError:
        # Older SQLAlchemy URL objects have no render_as_string().
        rendered = str(url)
    return rendered.replace('%', '%%')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Point Alembic at the Flask app's configured database URL.
config.set_main_option('sqlalchemy.url', get_engine_url())
# Cache the Flask-Migrate db object; get_metadata() reads it below.
target_db = current_app.extensions['migrate'].db
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def get_metadata():
    """Return the MetaData that autogenerate should compare against."""
    # Flask-SQLAlchemy 3 keeps one MetaData per bind key; None is the default bind.
    if not hasattr(target_db, 'metadatas'):
        return target_db.metadata
    return target_db.metadatas[None]
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL rather than an Engine, so no
    DBAPI needs to be available. Calls to context.execute() emit the
    given string to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=get_metadata(),
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the
    Alembic context before running the migrations.
    """

    def process_revision_directives(context, revision, directives):
        # Prevent 'revision --autogenerate' from writing an empty migration
        # file when no schema changes are detected.
        # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
        if not getattr(config.cmd_opts, 'autogenerate', False):
            return
        script = directives[0]
        if script.upgrade_ops.is_empty():
            directives[:] = []
            logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    # Only install our callback if the app did not supply its own.
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    with get_engine().connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline mode when invoked with --sql.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

24
migrations/script.py.mako Normal file
View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,185 @@
"""Initial baseline
Revision ID: b43a39b1cf25
Revises:
Create Date: 2026-01-21 02:25:55.179218
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'b43a39b1cf25'  # unique id of this migration
down_revision = None  # baseline migration: no parent revision
branch_labels = None
depends_on = None
def upgrade():
    """Apply the baseline schema changes.

    Drops six legacy tables plus ``street_reference`` (and its indexes),
    widens the auto-delivery gallon estimates from INTEGER to DECIMAL,
    adds a unique index on the customer's Authorize.net profile id,
    drops ``customer_customer.verified_at``, and normalizes the service
    date/cost columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('printer_jobs')
    op.drop_table('query_town_ist')
    op.drop_table('taxes_pricing')
    op.drop_table('pricing_service_general')
    op.drop_table('portal_user')
    op.drop_table('delivery_payment')
    with op.batch_alter_table('street_reference', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_public_street_reference_osm_id'))
        batch_op.drop_index(batch_op.f('ix_public_street_reference_street_name_normalized'))
        batch_op.drop_index(batch_op.f('ix_street_ref_name_town'))
        batch_op.drop_index(batch_op.f('ix_street_ref_town_state'))
    op.drop_table('street_reference')
    with op.batch_alter_table('auth_users', schema=None) as batch_op:
        # NOTE(review): unnamed constraint -- the matching downgrade cannot
        # drop it by name; consider giving it an explicit name here.
        batch_op.create_unique_constraint(None, ['id'])
    with op.batch_alter_table('auto_delivery', schema=None) as batch_op:
        batch_op.alter_column('estimated_gallons_left',
                              existing_type=sa.INTEGER(),
                              type_=sa.DECIMAL(precision=6, scale=2),
                              existing_nullable=True)
        batch_op.alter_column('estimated_gallons_left_prev_day',
                              existing_type=sa.INTEGER(),
                              type_=sa.DECIMAL(precision=6, scale=2),
                              existing_nullable=True)
    with op.batch_alter_table('customer_customer', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_public_customer_customer_auth_net_profile_id'), ['auth_net_profile_id'], unique=True)
        batch_op.drop_column('verified_at')
    with op.batch_alter_table('service_service', schema=None) as batch_op:
        batch_op.alter_column('when_ordered',
                              existing_type=sa.DATE(),
                              type_=sa.DATETIME(),
                              existing_nullable=True)
        # Bug fix: autogenerate emitted a bare TIMESTAMP() here, which is
        # never imported and raised NameError; qualify it with sa.
        batch_op.alter_column('scheduled_date',
                              existing_type=postgresql.DOMAIN('time_stamp', sa.TIMESTAMP()),
                              type_=sa.DATETIME(),
                              existing_nullable=True,
                              existing_server_default=sa.text('CURRENT_TIMESTAMP(2)'))
        batch_op.alter_column('service_cost',
                              existing_type=sa.NUMERIC(precision=10, scale=2),
                              nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the baseline schema changes.

    Restores the dropped legacy tables and columns, reverts the
    auto-delivery gallon estimates to INTEGER, and restores the original
    service date/cost column types.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('service_service', schema=None) as batch_op:
        batch_op.alter_column('service_cost',
                              existing_type=sa.NUMERIC(precision=10, scale=2),
                              nullable=True)
        # Bug fix: autogenerate emitted a bare TIMESTAMP() here, which is
        # never imported and raised NameError; qualify it with sa.
        batch_op.alter_column('scheduled_date',
                              existing_type=sa.DATETIME(),
                              type_=postgresql.DOMAIN('time_stamp', sa.TIMESTAMP()),
                              existing_nullable=True,
                              existing_server_default=sa.text('CURRENT_TIMESTAMP(2)'))
        batch_op.alter_column('when_ordered',
                              existing_type=sa.DATETIME(),
                              type_=sa.DATE(),
                              existing_nullable=True)
    with op.batch_alter_table('customer_customer', schema=None) as batch_op:
        batch_op.add_column(sa.Column('verified_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
        batch_op.drop_index(batch_op.f('ix_public_customer_customer_auth_net_profile_id'))
    with op.batch_alter_table('auto_delivery', schema=None) as batch_op:
        batch_op.alter_column('estimated_gallons_left_prev_day',
                              existing_type=sa.DECIMAL(precision=6, scale=2),
                              type_=sa.INTEGER(),
                              existing_nullable=True)
        batch_op.alter_column('estimated_gallons_left',
                              existing_type=sa.DECIMAL(precision=6, scale=2),
                              type_=sa.INTEGER(),
                              existing_nullable=True)
    with op.batch_alter_table('auth_users', schema=None) as batch_op:
        # NOTE(review): drop_constraint(None, ...) raises at runtime --
        # Alembic requires a constraint name here. Fill in the name the
        # database generated for the unique constraint (presumably
        # 'auth_users_id_key' on PostgreSQL -- verify) before running.
        batch_op.drop_constraint(None, type_='unique')
    op.create_table('street_reference',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('street_name', sa.VARCHAR(length=500), autoincrement=False, nullable=False),
        sa.Column('street_name_normalized', sa.VARCHAR(length=500), autoincrement=False, nullable=False),
        sa.Column('street_number_low', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('street_number_high', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('town', sa.VARCHAR(length=140), autoincrement=False, nullable=False),
        sa.Column('town_normalized', sa.VARCHAR(length=140), autoincrement=False, nullable=False),
        sa.Column('state', sa.VARCHAR(length=2), autoincrement=False, nullable=False),
        sa.Column('zip_codes', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
        sa.Column('osm_id', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('street_reference_pkey'))
    )
    with op.batch_alter_table('street_reference', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_street_ref_town_state'), ['town_normalized', 'state'], unique=False)
        batch_op.create_index(batch_op.f('ix_street_ref_name_town'), ['street_name_normalized', 'town_normalized'], unique=False)
        batch_op.create_index(batch_op.f('ix_public_street_reference_street_name_normalized'), ['street_name_normalized'], unique=False)
        batch_op.create_index(batch_op.f('ix_public_street_reference_osm_id'), ['osm_id'], unique=False)
    op.create_table('delivery_payment',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('delivery_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('time_added', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('total_amount_oil', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('total_amount_emergency', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('total_amount_prime', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('total_amount_fee', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('total_amount', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('delivery_payment_pkey'))
    )
    op.create_table('portal_user',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('username', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
        sa.Column('account_number', sa.VARCHAR(length=32), autoincrement=False, nullable=True),
        sa.Column('house_number', sa.VARCHAR(length=32), autoincrement=False, nullable=True),
        sa.Column('email', sa.VARCHAR(length=350), autoincrement=False, nullable=True),
        sa.Column('password_hash', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('member_since', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('last_seen', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('admin', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('admin_role', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('confirmed', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('active', sa.INTEGER(), server_default=sa.text('1'), autoincrement=False, nullable=True),
        sa.Column('password_reset_expires', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('password_reset_token', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('confirmation_token', sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column('confirmation_sent_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('confirmed_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('portal_user_pkey'))
    )
    op.create_table('pricing_service_general',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('price_service_hour', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('price_emergency_service_hour', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('price_emergency_call', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('price_out_of_oil', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('price_prime', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('price_cleaning', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pricing_service_general_pkey'))
    )
    op.create_table('taxes_pricing',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('state_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('taxes_oil', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.Column('taxes_other', sa.NUMERIC(precision=50, scale=2), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('taxes_pricing_pkey'))
    )
    op.create_table('query_town_ist',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('value', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('text', sa.VARCHAR(length=240), autoincrement=False, nullable=True)
    )
    op.create_table('printer_jobs',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('delivery_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('date_added', sa.DATE(), autoincrement=False, nullable=True),
        sa.Column('date_completed', sa.DATE(), autoincrement=False, nullable=True),
        sa.Column('employee_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('status', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('printer_jobs_pkey'))
    )
    # ### end Alembic commands ###