Auto-update: Wed Jul 24 16:16:20 PDT 2024
parent 668addea75
commit fb0dd4ece8
4 changed files with 82 additions and 119 deletions
@@ -1,69 +0,0 @@
[
    {
        "Land Owner": "SIERRA PACIFIC INDUSTRIES",
        "Location": "HBM: T2N R3E S17 ; HBM: \nT2N R3E S18 ; HBM: T2N \nR3E S7 ; HBM: T2N R3E S8",
        "PLSS Coordinates": [
            "HBM: T2N R3E S17",
            "HBM: T2N R3E S8"
        ]
    },
    {
        "Land Owner": "1/29/2024\n2/8/2024\n \n7/12/2024\n \n515.00\nBALLARD RESERVOIR\n(5526.510202);GRAVEN \nRESERVOIR\n(5526.510301);RALSTON \nGULCH(5526.510201)\nMDBM: T41N R10E S25 ; \nMDBM: T41N R10E S26 ; \nMDBM: T41N R10E S27 ; \nMDBM: T41N R10E S28 ; \nMDBM: T41N R10E S33 ; \nMDBM: T41N R10E S34 ; \nMDBM: T41N R10E S35 ; \nMDBM: T41N R10E S36 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "2/16/2024\n2/22/2024\n \n7/26/2024\n \n520.00\nARMENTROUT FLAT \n(5526.620003)\n(5526.620003);JIMMERSON \nSPRING\n(5526.610104);MOSQUITO \nLAKE(5526.420403)\nMDBM: T40N R5E S13 ; \nMDBM: T40N R5E S14 ; \nMDBM: T40N R5E S22 ; \nMDBM: T40N R5E S23 ; \nMDBM: T40N R5E S24 ; \nMDBM: T40N R5E S25 ; \nMDBM: T40N R5E S26 ; \nMDBM: T40N R5E S36 ; \nMDBM: T40N R6E S19 ; \nMDBM: T40N R6E S30 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY \nPage: \n7\n of \n10\n\nTimber Harvesting Plans\nHarvest Document\nReceived\nFiled\nApproval\nTentative \nEnd of \nPublic \nComments\nPublic \nComment \nPeriod \nClosed\nTotal\nAcres\nWatershed\nLocation\nLand Owner(s)",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "3/5/2024\n3/14/2024\n6/25/2024\n6/21/2024\n6/21/2024\n968.00\nLOWER BUTTE CREEK\n(5526.360103);POISON LAKE\n(5526.360201)\nMDBM: T33N R7E S16 ; \nMDBM: T33N R7E S17 ; \nMDBM: T33N R7E S18 ; \nMDBM: T33N R7E S19 ; \nMDBM: T33N R7E S20 ; \nMDBM: T33N R7E S33 ; \nMDBM: T33N R7E S4 ; \nMDBM: T33N R7E S5 ; \nMDBM: T33N R7E S8 ; \nMDBM: T33N R7E S9 ; \nMDBM: T34N R7E S27 ; \nMDBM: T34N R7E S33 ; \nMDBM: T34N R7E S34 ; \nMDBM: T34N R7E S35 ; \nMDBM: T34N R7E S36 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "5/13/2024\n5/23/2024\n \n7/29/2024\n \n351.00\nCEDAR CREEK (1106.400710)\n(1106.400710);COPPER \nCREEK (1106.400704)\n(1106.400704);SQUIRREL \nGULCH(1106.400701)\nMDBM: T36N R7W S3 ; \nMDBM: T37N R7W S21 ; \nMDBM: T37N R7W S22 ; \nMDBM: T37N R7W S27 ; \nMDBM: T37N R7W S33 ; \nMDBM: T37N R7W S35 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "4/10/2024\n4/18/2024\n \n5/13/2024\n \n362.00\nCHASE CREEK (8638.000201)\n(8638.000201)\nMDBM: T34N R12E S21 ; \nMDBM: T34N R12E S22 ; \nMDBM: T34N R12E S27 ; \nMDBM: T34N R12E S28 ; \nMDBM: T34N R12E S33 ; \nMDBM: T34N R12E S34 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY \nPage: \n8\n of \n10\n\nTimber Harvesting Plans\nHarvest Document\nReceived\nFiled\nApproval\nTentative \nEnd of \nPublic \nComments\nPublic \nComment \nPeriod \nClosed\nTotal\nAcres\nWatershed\nLocation\nLand Owner(s)",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "6/28/2024\n7/3/2024\n \n7/29/2024\n \n500.00\nMCCARTY CREEK\n(5509.630203);PANTHER \nSPRING\n(5509.630202);REFUGE\n(5509.630201)\nMDBM: T27N R2E S1 ; \nMDBM: T27N R2E S10 ; \nMDBM: T27N R2E S11 ; \nMDBM: T27N R2E S2 ; \nMDBM: T27N R2E S3 ; \nMDBM: T27N R3E S5 ; \nMDBM: T27N R3E S6 ; \nMDBM: T28N R2E S34 ; \nMDBM: T28N R2E S35 ; \nMDBM: T28N R2E S36 ; \nMDBM: T28N R3E S31 ; \nMDBM: T28N R3E S32 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    },
    {
        "Land Owner": "SIERRA PACIFIC INDUSTRIES",
        "Location": "MDBM: T32N R1E S12 ; \nMDBM: T32N R1E S13 ; \nMDBM: T32N R1E S14 ; \nMDBM: T32N R1E S23 ; \nMDBM: T32N R1E S24 ; \nMDBM: T32N R2E S18 ; \nMDBM: T32N R2E S19 ; \nMDBM: T32N R2E S7",
        "PLSS Coordinates": [
            "MDBM: T32N R1E S12",
            "MDBM: T32N R1E S13",
            "MDBM: T32N R1E S14",
            "MDBM: T32N R1E S23",
            "MDBM: T32N R1E S24",
            "MDBM: T32N R2E S18",
            "MDBM: T32N R2E S19",
            "MDBM: T32N R2E S7"
        ]
    },
    {
        "Land Owner": "7/19/2024\n \n \n8/5/2024\n \n562.00\nBILLIE GULCH\n(1106.400703);CLAWTON \nGULCH\n(1106.400804);HATCHET \nCREEK(1106.400705);HAY \nGULCH\n(1106.400808);NELSON \nCREEK (1106.400702)\n(1106.400702)\nMDBM: T36N R7W S13 ; \nMDBM: T36N R7W S23 ; \nMDBM: T36N R7W S25 ; \nMDBM: T36N R7W S27 ; \nMDBM: T36N R7W S33 ; \nMDBM: T36N R7W S34 ; \nMDBM: T36N R7W S35 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY \n2-24NTMP-00004-SHA\n7/19/2024\n \n \n9/2/2024\n \n480.00\nLOWER SODA CREEK\n(5525.210202)\nMDBM: T38N R4W S11 ; \nMDBM: T38N R4W S12 ; \nMDBM: T38N R4W S14 ; \nMDBM: T38N R4W S2 \nCASTLE CRAGS LLC",
        "Location": "MDBM: T38N R4W S11 ; \nMDBM: T38N R4W S12 ; \nMDBM: T38N R4W S14 ; \nMDBM: T38N R4W S2 \nCASTLE CRAGS LLC",
        "PLSS Coordinates": [
            "MDBM: T38N R4W S11",
            "MDBM: T38N R4W S12",
            "MDBM: T38N R4W S14",
            "MDBM: T38N R4W S2"
        ]
    },
    {
        "Land Owner": "5/31/2024\n6/6/2024\n \n7/19/2024\n \n497.00\nLITTLE SILVER CREEK\n(5514.330206);PEAVINE \nCREEK(5514.330101);UNION \nVALLEY RESERVOIR\n(5514.340301)\nMDBM: T12N R14E S28 ; \nMDBM: T12N R14E S29 ; \nMDBM: T12N R14E S32 \nSIERRA PACIFIC LAND & TIMBER \nCOMPANY \n81\n7/22/2024 12:18:13 PM\nPage: \n10\n of \n10",
        "Location": "MDBM:",
        "PLSS Coordinates": []
    }
]
@@ -372,6 +372,7 @@ class APIConfig(BaseModel):
        await self.apply_schema_changes(pool_entry, source_schema, target_schema)
        info(f"Synced schema to {pool_entry['ts_ip']}")


    async def get_schema(self, pool_entry: Dict[str, Any]):
        async with self.get_connection(pool_entry) as conn:
            tables = await conn.fetch("""
@@ -403,64 +404,59 @@ class APIConfig(BaseModel):

    async def apply_schema_changes(self, pool_entry: Dict[str, Any], source_schema, target_schema):
        async with self.get_connection(pool_entry) as conn:
            # Compare and update tables and columns
            source_tables = {(t['table_name'], t['column_name']): t for t in source_schema['tables']}
            target_tables = {(t['table_name'], t['column_name']): t for t in target_schema['tables']}

            for (table_name, column_name), source_column in source_tables.items():
                if (table_name, column_name) not in target_tables:
                    await conn.execute(f"""
                        ALTER TABLE {table_name}
                        ADD COLUMN {column_name} {source_column['data_type']}
                        {'' if source_column['is_nullable'] == 'YES' else 'NOT NULL'}
                        {f"DEFAULT {source_column['column_default']}" if source_column['column_default'] else ''}
                    """)
            source_tables = {t['table_name']: t for t in source_schema['tables']}
            target_tables = {t['table_name']: t for t in target_schema['tables']}

            for table_name, source_table in source_tables.items():
                if table_name not in target_tables:
                    columns = [f"\"{t['column_name']}\" {t['data_type']}" +
                               (f"({t['character_maximum_length']})" if t['character_maximum_length'] else "") +
                               (" NOT NULL" if t['is_nullable'] == 'NO' else "") +
                               (f" DEFAULT {t['column_default']}" if t['column_default'] else "")
                               for t in source_schema['tables'] if t['table_name'] == table_name]
                    await conn.execute(f'CREATE TABLE "{table_name}" ({", ".join(columns)})')
                else:
                    target_column = target_tables[(table_name, column_name)]
                    if source_column != target_column:
                        await conn.execute(f"""
                            ALTER TABLE {table_name}
                            ALTER COLUMN {column_name} TYPE {source_column['data_type']},
                            ALTER COLUMN {column_name} {'' if source_column['is_nullable'] == 'YES' else 'SET NOT NULL'},
                            ALTER COLUMN {column_name} {f"SET DEFAULT {source_column['column_default']}" if source_column['column_default'] else 'DROP DEFAULT'}
                        """)

            for (table_name, column_name) in target_tables.keys():
                if (table_name, column_name) not in source_tables:
                    await conn.execute(f"ALTER TABLE {table_name} DROP COLUMN {column_name}")

            # Compare and update indexes
                    target_table = target_tables[table_name]
                    source_columns = {t['column_name']: t for t in source_schema['tables'] if t['table_name'] == table_name}
                    target_columns = {t['column_name']: t for t in target_schema['tables'] if t['table_name'] == table_name}

                    for col_name, source_col in source_columns.items():
                        if col_name not in target_columns:
                            col_def = f"\"{col_name}\" {source_col['data_type']}" + \
                                      (f"({source_col['character_maximum_length']})" if source_col['character_maximum_length'] else "") + \
                                      (" NOT NULL" if source_col['is_nullable'] == 'NO' else "") + \
                                      (f" DEFAULT {source_col['column_default']}" if source_col['column_default'] else "")
                            await conn.execute(f'ALTER TABLE "{table_name}" ADD COLUMN {col_def}')
                        else:
                            target_col = target_columns[col_name]
                            if source_col != target_col:
                                await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" TYPE {source_col["data_type"]}')
                                if source_col['is_nullable'] != target_col['is_nullable']:
                                    null_constraint = "DROP NOT NULL" if source_col['is_nullable'] == 'YES' else "SET NOT NULL"
                                    await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" {null_constraint}')
                                if source_col['column_default'] != target_col['column_default']:
                                    default_clause = f"SET DEFAULT {source_col['column_default']}" if source_col['column_default'] else "DROP DEFAULT"
                                    await conn.execute(f'ALTER TABLE "{table_name}" ALTER COLUMN "{col_name}" {default_clause}')

            source_indexes = {idx['indexname']: idx['indexdef'] for idx in source_schema['indexes']}
            target_indexes = {idx['indexname']: idx['indexdef'] for idx in target_schema['indexes']}

            for index_name, index_def in source_indexes.items():
                if index_name not in target_indexes:
                    await conn.execute(index_def)
                elif index_def != target_indexes[index_name]:
                    await conn.execute(f"DROP INDEX {index_name}")
                    await conn.execute(index_def)

            for index_name in target_indexes.keys():
                if index_name not in source_indexes:
                    await conn.execute(f"DROP INDEX {index_name}")

            # Compare and update constraints

            for idx_name, idx_def in source_indexes.items():
                if idx_name not in target_indexes:
                    await conn.execute(idx_def)
                elif idx_def != target_indexes[idx_name]:
                    await conn.execute(f'DROP INDEX "{idx_name}"')
                    await conn.execute(idx_def)

            source_constraints = {con['conname']: con for con in source_schema['constraints']}
            target_constraints = {con['conname']: con for con in target_schema['constraints']}

            for con_name, source_con in source_constraints.items():
                if con_name not in target_constraints:
                    await conn.execute(f"ALTER TABLE {source_con['table_name']} ADD CONSTRAINT {con_name} {source_con['definition']}")
                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" ADD CONSTRAINT "{con_name}" {source_con["definition"]}')
                elif source_con != target_constraints[con_name]:
                    await conn.execute(f"ALTER TABLE {source_con['table_name']} DROP CONSTRAINT {con_name}")
                    await conn.execute(f"ALTER TABLE {source_con['table_name']} ADD CONSTRAINT {con_name} {source_con['definition']}")

            for con_name, target_con in target_constraints.items():
                if con_name not in source_constraints:
                    await conn.execute(f"ALTER TABLE {target_con['table_name']} DROP CONSTRAINT {con_name}")

                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" DROP CONSTRAINT "{con_name}"')
                    await conn.execute(f'ALTER TABLE "{source_con["table_name"]}" ADD CONSTRAINT "{con_name}" {source_con["definition"]}')

class Location(BaseModel):
    latitude: float
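Note: the body of the get_schema() query is truncated at the hunk boundary above, so the exact shape of source_schema and target_schema is not visible in this diff. For orientation only, a payload consistent with the keys apply_schema_changes() reads ('table_name', 'column_name', 'data_type', 'character_maximum_length', 'is_nullable', 'column_default', 'indexname', 'indexdef', 'conname', 'definition') might look like the sketch below; the example values and catalog queries are illustrative assumptions, not code shipped in this commit.

# Illustrative sketch only: the kind of payload apply_schema_changes() appears
# to expect. Example values and query text are assumptions, not project code.
example_schema = {
    "tables": [
        {
            "table_name": "locations",
            "column_name": "latitude",
            "data_type": "double precision",
            "character_maximum_length": None,
            "is_nullable": "NO",
            "column_default": None,
        },
    ],
    "indexes": [
        {
            "indexname": "locations_latitude_idx",
            "indexdef": "CREATE INDEX locations_latitude_idx ON public.locations USING btree (latitude)",
        },
    ],
    "constraints": [
        {
            "conname": "locations_pkey",
            "table_name": "locations",
            "definition": "PRIMARY KEY (id)",
        },
    ],
}

# Hypothetical catalog queries that would produce rows of this shape.
TABLES_SQL = """
    SELECT table_name, column_name, data_type, character_maximum_length,
           is_nullable, column_default
    FROM information_schema.columns
    WHERE table_schema = 'public'
"""
INDEXES_SQL = "SELECT indexname, indexdef FROM pg_indexes WHERE schemaname = 'public'"
CONSTRAINTS_SQL = """
    SELECT conname, conrelid::regclass::text AS table_name,
           pg_get_constraintdef(oid) AS definition
    FROM pg_constraint
    WHERE connamespace = 'public'::regnamespace
"""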
25  sijapi/helpers/db/docker-compose.yaml  Normal file
@@ -0,0 +1,25 @@
version: '3.8'

services:
  db:
    image: postgis/postgis:16-3.4
    container_name: sij_postgres
    environment:
      POSTGRES_DB: sij
      POSTGRES_USER: sij
      POSTGRES_PASSWORD: Synchr0!
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
    ports:
      - "5432:5432"
    networks:
      - sij_network

networks:
  sij_network:
    driver: bridge

volumes:
  postgres_data:
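Bringing this stack up with `docker compose up -d` from sijapi/helpers/db publishes the database on localhost:5432 with the credentials above. A minimal connectivity check, assuming asyncpg as the driver (consistent with the conn.fetch / conn.execute calls in the API diff, though the driver itself is not shown here), might look like:

# Smoke test for the composed database; asyncpg and the localhost port mapping
# are assumptions based on the compose file and the API code style above.
import asyncio

import asyncpg


async def main() -> None:
    conn = await asyncpg.connect(
        host="127.0.0.1",
        port=5432,
        user="sij",
        password="Synchr0!",
        database="sij",
    )
    try:
        # The postgis/postgis image plus init-db.sh should leave PostGIS installed.
        print(await conn.fetchval("SELECT postgis_full_version()"))
    finally:
        await conn.close()


if __name__ == "__main__":
    asyncio.run(main())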
11  sijapi/helpers/db/init-db.sh  Executable file
@@ -0,0 +1,11 @@
#!/bin/bash
set -e

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
    CREATE EXTENSION IF NOT EXISTS postgis;
    CREATE EXTENSION IF NOT EXISTS postgis_topology;
EOSQL

# Modify pg_hba.conf to allow connections from Tailscale network
echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf