Auto-update: Wed Jul 24 23:55:42 PDT 2024

This commit is contained in:
sanj 2024-07-24 23:55:42 -07:00
parent a72829673f
commit 9c05a250a3

View file

@ -338,53 +338,53 @@ class APIConfig(BaseModel):
return None return None
async def pull_changes(self, source_pool_entry: Dict[str, Any] = None):
    """Pull every row of every public-schema table from a source server and
    upsert them into the local database.

    Args:
        source_pool_entry: Pool-entry dict describing the source server
            (expected to contain at least 'ts_ip' — used only for logging).
            When None, the default source from ``get_default_source()`` is
            used; if no source is available, the pull is aborted with an
            error log and no exception.

    Notes:
        Tables without a primary key are skipped (no conflict target for
        the upsert). Identifiers are double-quoted so mixed-case or
        unusual table/column names round-trip correctly.
    """
    def _qi(name: str) -> str:
        # Quote a SQL identifier ("" escapes an embedded double quote).
        return '"' + name.replace('"', '""') + '"'

    if source_pool_entry is None:
        source_pool_entry = await self.get_default_source()
    if source_pool_entry is None:
        err("No available source for pulling changes")
        return

    async with self.get_connection(source_pool_entry) as source_conn:
        async with self.get_connection() as dest_conn:
            tables = await source_conn.fetch(
                "SELECT tablename FROM pg_tables WHERE schemaname = 'public'"
            )
            for table in tables:
                table_name = table['tablename']
                info(f"Processing table: {table_name}")

                # Get primary key column(s) for the conflict target.
                pk_columns = await source_conn.fetch("""
                    SELECT a.attname
                    FROM pg_index i
                    JOIN pg_attribute a ON a.attrelid = i.indrelid
                        AND a.attnum = ANY(i.indkey)
                    WHERE i.indrelid = $1::regclass
                    AND i.indisprimary;
                """, table_name)
                pk_cols = [col['attname'] for col in pk_columns]
                if not pk_cols:
                    warn(f"No primary key found for table {table_name}. Skipping.")
                    continue

                # Fetch all rows from the source table.
                rows = await source_conn.fetch(f"SELECT * FROM {_qi(table_name)}")
                if rows:
                    columns = list(rows[0].keys())
                    col_list = ', '.join(_qi(c) for c in columns)
                    placeholders = ', '.join(f'${i+1}' for i in range(len(columns)))
                    conflict_cols = ', '.join(_qi(c) for c in pk_cols)
                    non_pk = [c for c in columns if c not in pk_cols]
                    if non_pk:
                        conflict_action = "DO UPDATE SET " + ', '.join(
                            f"{_qi(c)} = EXCLUDED.{_qi(c)}" for c in non_pk
                        )
                    else:
                        # Every column is part of the PK: an empty SET list
                        # would be a syntax error, and there is nothing to
                        # update on conflict anyway.
                        conflict_action = "DO NOTHING"

                    # Upsert records to the destination table.
                    await dest_conn.executemany(f"""
                        INSERT INTO {_qi(table_name)} ({col_list})
                        VALUES ({placeholders})
                        ON CONFLICT ({conflict_cols}) {conflict_action}
                    """, [tuple(row[col] for col in columns) for row in rows])

                info(f"Completed processing table: {table_name}")

    info(f"Successfully pulled changes from {source_pool_entry['ts_ip']}")