Auto-update: Thu Jul 25 00:16:07 PDT 2024
Parent: 03ccf529c9
Commit: 8775c05927
1 changed file with 60 additions and 45 deletions
@@ -5,6 +5,7 @@ import yaml
 import math
 import os
 import re
+import traceback
 import aiofiles
 import aiohttp
 import asyncpg
@@ -339,6 +340,7 @@ class APIConfig(BaseModel):


     async def pull_changes(self, source_pool_entry: Dict[str, Any] = None):
+        try:
             if source_pool_entry is None:
                 source_pool_entry = await self.get_default_source()

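The hunk above wraps the body of `pull_changes` in a `try` block so any failure is caught and logged with a full traceback (hence the new `import traceback`). Below is a minimal, runnable sketch of that control flow only, not the file's actual code: the `info`/`err` helpers, `get_default_source`, and the returned pool entry are placeholder stand-ins for names used elsewhere in the module.

```python
# Sketch of the error-handling flow added in this hunk (placeholders, not the real module).
import asyncio
import traceback
from typing import Any, Dict, Optional


def info(msg: str) -> None:
    print(f"INFO: {msg}")  # stand-in for the module's logging helper


def err(msg: str) -> None:
    print(f"ERR: {msg}")  # stand-in for the module's logging helper


async def get_default_source() -> Dict[str, Any]:
    # Placeholder for APIConfig.get_default_source(); returns a pool entry dict.
    return {"ts_ip": "192.0.2.1"}  # placeholder address


async def pull_changes(source_pool_entry: Optional[Dict[str, Any]] = None) -> None:
    try:
        if source_pool_entry is None:
            # Fall back to the configured default replication source.
            source_pool_entry = await get_default_source()
        # ... per-table sync would happen here (see the upsert sketch further below) ...
        info(f"Successfully pulled changes from {source_pool_entry['ts_ip']}")
    except Exception as e:
        err(f"Unexpected error in pull_changes: {str(e)}")
        err(f"Traceback: {traceback.format_exc()}")


if __name__ == "__main__":
    asyncio.run(pull_changes())
```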
@@ -366,26 +368,39 @@ class APIConfig(BaseModel):
                     """, table_name)

                     pk_cols = [col['attname'] for col in pk_columns]
+                    info(f"Primary key columns for {table_name}: {pk_cols}")
                     if not pk_cols:
                         warn(f"No primary key found for table {table_name}. Skipping.")
                         continue

                     # Fetch all rows from the source table
                     rows = await source_conn.fetch(f"SELECT * FROM {table_name}")
+                    info(f"Fetched {len(rows)} rows from {table_name}")
                     if rows:
-                        columns = rows[0].keys()
+                        columns = list(rows[0].keys())
+                        info(f"Columns for {table_name}: {columns}")
                         # Upsert records to the destination table
                         for row in rows:
-                            await dest_conn.execute(f"""
+                            try:
+                                query = f"""
                                 INSERT INTO {table_name} ({', '.join(columns)})
                                 VALUES ({', '.join(f'${i+1}' for i in range(len(columns)))})
                                 ON CONFLICT ({', '.join(pk_cols)}) DO UPDATE SET
                                 {', '.join(f"{col} = EXCLUDED.{col}" for col in columns if col not in pk_cols)}
-                            """, *[row[col] for col in columns])
+                                """
+                                info(f"Executing query: {query}")
+                                info(f"With values: {[row[col] for col in columns]}")
+                                await dest_conn.execute(query, *[row[col] for col in columns])
+                            except Exception as e:
+                                err(f"Error processing row in {table_name}: {str(e)}")
+                                err(f"Problematic row: {row}")

                     info(f"Completed processing table: {table_name}")

             info(f"Successfully pulled changes from {source_pool_entry['ts_ip']}")
+        except Exception as e:
+            err(f"Unexpected error in pull_changes: {str(e)}")
+            err(f"Traceback: {traceback.format_exc()}")



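The core of this hunk is a per-row upsert: an INSERT ... ON CONFLICT (pk) DO UPDATE statement is assembled from the table's column list and primary-key columns, then executed with asyncpg-style positional parameters ($1, $2, ...), with failures logged per row instead of aborting the table. The sketch below isolates that pattern under those assumptions; it is not the file's code, `dest_conn` is assumed to be an asyncpg connection, and error reporting is reduced to print().

```python
# Self-contained sketch of the dynamic upsert pattern used in the diff above.
from typing import Any, Dict, List, Sequence


def build_upsert_query(table_name: str, columns: Sequence[str], pk_cols: Sequence[str]) -> str:
    """Assemble an asyncpg-style ($1, $2, ...) INSERT ... ON CONFLICT DO UPDATE statement."""
    col_list = ", ".join(columns)
    placeholders = ", ".join(f"${i + 1}" for i in range(len(columns)))
    updates = ", ".join(f"{col} = EXCLUDED.{col}" for col in columns if col not in pk_cols)
    return (
        f"INSERT INTO {table_name} ({col_list}) "
        f"VALUES ({placeholders}) "
        f"ON CONFLICT ({', '.join(pk_cols)}) DO UPDATE SET {updates}"
    )


async def upsert_rows(dest_conn, table_name: str,
                      rows: List[Dict[str, Any]], pk_cols: Sequence[str]) -> None:
    """Upsert each row individually so one bad row does not abort the whole table."""
    if not rows:
        return
    columns = list(rows[0].keys())
    query = build_upsert_query(table_name, columns, pk_cols)
    for row in rows:
        try:
            await dest_conn.execute(query, *[row[col] for col in columns])
        except Exception as e:
            print(f"Error processing row in {table_name}: {e}")
            print(f"Problematic row: {row}")
```

Note that, as in the diff, a table whose every column is part of the primary key would leave the DO UPDATE SET clause empty, which PostgreSQL rejects; such tables would need ON CONFLICT ... DO NOTHING instead.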