Auto-update: Wed Jul 24 16:16:20 PDT 2024
parent 7865486079
commit 9fa83cc36f
3 changed files with 82 additions and 50 deletions

sijapi
@@ -372,6 +372,7 @@ class APIConfig(BaseModel):
         await self.apply_schema_changes(pool_entry, source_schema, target_schema)
         info(f"Synced schema to {pool_entry['ts_ip']}")
 
+
     async def get_schema(self, pool_entry: Dict[str, Any]):
         async with self.get_connection(pool_entry) as conn:
             tables = await conn.fetch("""
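The body of the conn.fetch("""...""") call is cut off by the hunk boundary. Judging from the keys apply_schema_changes reads off each row (table_name, column_name, data_type, character_maximum_length, is_nullable, column_default), the query plausibly selects from information_schema.columns; the sketch below is a hypothetical reconstruction of that shape, not the repo's actual query:

    # Hypothetical reconstruction of the truncated query: these are exactly the
    # per-column fields that apply_schema_changes() consumes downstream.
    TABLES_QUERY = """
        SELECT table_name, column_name, data_type,
               character_maximum_length, is_nullable, column_default
        FROM information_schema.columns
        WHERE table_schema = 'public'
        ORDER BY table_name, ordinal_position
    """
    # tables = await conn.fetch(TABLES_QUERY)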
@@ -403,64 +404,59 @@ class APIConfig(BaseModel):
 
     async def apply_schema_changes(self, pool_entry: Dict[str, Any], source_schema, target_schema):
         async with self.get_connection(pool_entry) as conn:
-            # Compare and update tables and columns
-            source_tables = {(t['table_name'], t['column_name']): t for t in source_schema['tables']}
-            target_tables = {(t['table_name'], t['column_name']): t for t in target_schema['tables']}
+            source_tables = {t['table_name']: t for t in source_schema['tables']}
+            target_tables = {t['table_name']: t for t in target_schema['tables']}
 
-            for (table_name, column_name), source_column in source_tables.items():
-                if (table_name, column_name) not in target_tables:
-                    await conn.execute(f"""
-                        ALTER TABLE {table_name}
-                        ADD COLUMN {column_name} {source_column['data_type']}
-                        {'' if source_column['is_nullable'] == 'YES' else 'NOT NULL'}
-                        {f"DEFAULT {source_column['column_default']}" if source_column['column_default'] else ''}
-                    """)
-                else:
-                    target_column = target_tables[(table_name, column_name)]
-                    if source_column != target_column:
-                        await conn.execute(f"""
-                            ALTER TABLE {table_name}
-                            ALTER COLUMN {column_name} TYPE {source_column['data_type']},
-                            ALTER COLUMN {column_name} {'' if source_column['is_nullable'] == 'YES' else 'SET NOT NULL'},
-                            ALTER COLUMN {column_name} {f"SET DEFAULT {source_column['column_default']}" if source_column['column_default'] else 'DROP DEFAULT'}
-                        """)
-
-            for (table_name, column_name) in target_tables.keys():
-                if (table_name, column_name) not in source_tables:
-                    await conn.execute(f"ALTER TABLE {table_name} DROP COLUMN {column_name}")
-
-            # Compare and update indexes
+            for table_name, source_table in source_tables.items():
+                if table_name not in target_tables:
+                    columns = [f"\"{t['column_name']}\" {t['data_type']}" +
+                               (f"({t['character_maximum_length']})" if t['character_maximum_length'] else "") +
+                               (" NOT NULL" if t['is_nullable'] == 'NO' else "") +
+                               (f" DEFAULT {t['column_default']}" if t['column_default'] else "")
+                               for t in source_schema['tables'] if t['table_name'] == table_name]
+                    await conn.execute(f'CREATE TABLE "{table_name}" ({", ".join(columns)})')
+                else:
+                    target_table = target_tables[table_name]
+                    source_columns = {t['column_name']: t for t in source_schema['tables'] if t['table_name'] == table_name}
+                    target_columns = {t['column_name']: t for t in target_schema['tables'] if t['table_name'] == table_name}
+
+                    for col_name, source_col in source_columns.items():
+                        if col_name not in target_columns:
+                            col_def = f"\"{col_name}\" {source_col['data_type']}" + \
+                                      (f"({source_col['character_maximum_length']})" if source_col['character_maximum_length'] else "") + \
+                                      (" NOT NULL" if source_col['is_nullable'] == 'NO' else "") + \
+                                      (f" DEFAULT {source_col['column_default']}" if source_col['column_default'] else "")
+                            await conn.execute(f'ALTER TABLE "{table_name}" ADD COLUMN {col_def}')
+                        else:
+                            target_col = target_columns[col_name]
+                            if source_col != target_col:
+                                await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" TYPE {source_col["data_type"]}')
+                                if source_col['is_nullable'] != target_col['is_nullable']:
+                                    null_constraint = "DROP NOT NULL" if source_col['is_nullable'] == 'YES' else "SET NOT NULL"
+                                    await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" {null_constraint}')
+                                if source_col['column_default'] != target_col['column_default']:
+                                    default_clause = f"SET DEFAULT {source_col['column_default']}" if source_col['column_default'] else "DROP DEFAULT"
+                                    await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" {default_clause}')
 
             source_indexes = {idx['indexname']: idx['indexdef'] for idx in source_schema['indexes']}
             target_indexes = {idx['indexname']: idx['indexdef'] for idx in target_schema['indexes']}
 
-            for index_name, index_def in source_indexes.items():
-                if index_name not in target_indexes:
-                    await conn.execute(index_def)
-                elif index_def != target_indexes[index_name]:
-                    await conn.execute(f"DROP INDEX {index_name}")
-                    await conn.execute(index_def)
-
-            for index_name in target_indexes.keys():
-                if index_name not in source_indexes:
-                    await conn.execute(f"DROP INDEX {index_name}")
-
-            # Compare and update constraints
+            for idx_name, idx_def in source_indexes.items():
+                if idx_name not in target_indexes:
+                    await conn.execute(idx_def)
+                elif idx_def != target_indexes[idx_name]:
+                    await conn.execute(f'DROP INDEX "{idx_name}"')
+                    await conn.execute(idx_def)
 
             source_constraints = {con['conname']: con for con in source_schema['constraints']}
             target_constraints = {con['conname']: con for con in target_schema['constraints']}
 
             for con_name, source_con in source_constraints.items():
                 if con_name not in target_constraints:
-                    await conn.execute(f"ALTER TABLE {source_con['table_name']} ADD CONSTRAINT {con_name} {source_con['definition']}")
+                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" ADD CONSTRAINT "{con_name}" {source_con["definition"]}')
                 elif source_con != target_constraints[con_name]:
-                    await conn.execute(f"ALTER TABLE {source_con['table_name']} DROP CONSTRAINT {con_name}")
-                    await conn.execute(f"ALTER TABLE {source_con['table_name']} ADD CONSTRAINT {con_name} {source_con['definition']}")
-
-            for con_name, target_con in target_constraints.items():
-                if con_name not in source_constraints:
-                    await conn.execute(f"ALTER TABLE {target_con['table_name']} DROP CONSTRAINT {con_name}")
+                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" DROP CONSTRAINT "{con_name}"')
+                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" ADD CONSTRAINT "{con_name}" {source_con["definition"]}')
 
 
 class Location(BaseModel):
     latitude: float
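For a concrete sense of what the rewritten column-add path emits, here is a self-contained sketch of the new col_def construction applied to one sample record of the shape the schema dicts hold; the table name and values are made up for illustration (ts_ip borrows its name from the pool_entry key above):

    # Illustrative only: one column record of the shape get_schema() returns.
    source_col = {
        'column_name': 'ts_ip',
        'data_type': 'character varying',
        'character_maximum_length': 45,
        'is_nullable': 'NO',
        'column_default': None,
    }

    # Same concatenation as the diff: type, optional length, NOT NULL, DEFAULT.
    col_def = f"\"{source_col['column_name']}\" {source_col['data_type']}" + \
              (f"({source_col['character_maximum_length']})" if source_col['character_maximum_length'] else "") + \
              (" NOT NULL" if source_col['is_nullable'] == 'NO' else "") + \
              (f" DEFAULT {source_col['column_default']}" if source_col['column_default'] else "")

    # Prints: ALTER TABLE "some_table" ADD COLUMN "ts_ip" character varying(45) NOT NULL
    print(f'ALTER TABLE "some_table" ADD COLUMN {col_def}')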
sijapi/helpers/db/docker-compose.yaml (new file, 25 additions)

@@ -0,0 +1,25 @@
+version: '3.8'
+
+services:
+  db:
+    image: postgis/postgis:16-3.4
+    container_name: sij_postgres
+    environment:
+      POSTGRES_DB: sij
+      POSTGRES_USER: sij
+      POSTGRES_PASSWORD: Synchr0!
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
+    ports:
+      - "5432:5432"
+    networks:
+      - sij_network
+
+networks:
+  sij_network:
+    driver: bridge
+
+volumes:
+  postgres_data:
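After docker compose up -d in sijapi/helpers/db/, a quick connectivity check against the service defined above might look like this; asyncpg is an assumption here, suggested by the conn.fetch/conn.execute style in the Python diff, and the credentials and port come straight from the compose file:

    import asyncio

    import asyncpg  # assumed driver, implied by the conn.fetch()/conn.execute() calls above


    async def main():
        # Credentials, database name, and port taken from docker-compose.yaml.
        conn = await asyncpg.connect(
            host="localhost", port=5432,
            user="sij", password="Synchr0!", database="sij",
        )
        # PostGIS_Version() exists once init-db.sh has created the extension.
        print(await conn.fetchval("SELECT PostGIS_Version()"))
        await conn.close()


    asyncio.run(main())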
sijapi/helpers/db/init-db.sh (new executable file, 11 additions)

@@ -0,0 +1,11 @@
+#!/bin/bash
+set -e
+
+psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+    CREATE EXTENSION IF NOT EXISTS postgis;
+    CREATE EXTENSION IF NOT EXISTS postgis_topology;
+EOSQL
+
+# Modify pg_hba.conf to allow connections from Tailscale network
+echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf
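The 100.64.64.0/24 subnet sits inside 100.64.0.0/10, the CGNAT range Tailscale allocates node addresses from, and trust means hosts in that range authenticate with no password at all. A first-boot sanity check that both extensions landed, using the same assumed asyncpg driver as the sketch above:

    import asyncio

    import asyncpg  # assumed driver, as in the previous sketch


    async def main():
        conn = await asyncpg.connect(host="localhost", port=5432,
                                     user="sij", password="Synchr0!", database="sij")
        # init-db.sh should have installed both extensions on first boot.
        rows = await conn.fetch("SELECT extname FROM pg_extension WHERE extname LIKE 'postgis%'")
        print([r["extname"] for r in rows])  # expect ['postgis', 'postgis_topology']
        await conn.close()


    asyncio.run(main())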