Auto-update: Tue Jul 30 15:31:26 PDT 2024

sanj 2024-07-30 15:31:26 -07:00
parent d2e54009e0
commit 1fe99ec75d


@@ -300,38 +300,37 @@ class APIConfig(BaseModel):
         if pool_entry is None:
             pool_entry = self.local_db
-        pool_key = f"{pool_entry['ts_ip']}:{pool_entry['db_port']}"
-        if pool_key not in self._db_pools:
+        info(f"Attempting to connect to database: {pool_entry['ts_ip']}:{pool_entry['db_port']}")
         try:
-            self._db_pools[pool_key] = await asyncpg.create_pool(
+            conn = await asyncpg.connect(
                 host=pool_entry['ts_ip'],
                 port=pool_entry['db_port'],
                 user=pool_entry['db_user'],
                 password=pool_entry['db_pass'],
                 database=pool_entry['db_name'],
-                min_size=1,
-                max_size=10,  # adjust as needed
-                timeout=5  # connection timeout in seconds
+                timeout=5  # Add a timeout to prevent hanging
             )
-        except Exception as e:
-            err(f"Failed to create connection pool for {pool_key}: {str(e)}")
-            raise
+            info(f"Successfully connected to {pool_entry['ts_ip']}:{pool_entry['db_port']}")
             try:
-            async with self._db_pools[pool_key].acquire() as conn:
                 yield conn
+            finally:
+                await conn.close()
+                info(f"Closed connection to {pool_entry['ts_ip']}:{pool_entry['db_port']}")
         except asyncpg.exceptions.ConnectionDoesNotExistError:
-            err(f"Failed to acquire connection from pool for {pool_key}: Connection does not exist")
+            err(f"Failed to connect to database: {pool_entry['ts_ip']}:{pool_entry['db_port']} - Connection does not exist")
             raise
         except asyncpg.exceptions.ConnectionFailureError:
-            err(f"Failed to acquire connection from pool for {pool_key}: Connection failure")
+            err(f"Failed to connect to database: {pool_entry['ts_ip']}:{pool_entry['db_port']} - Connection failure")
+            raise
+        except asyncpg.exceptions.PostgresError as e:
+            err(f"PostgreSQL error when connecting to {pool_entry['ts_ip']}:{pool_entry['db_port']}: {str(e)}")
             raise
         except Exception as e:
-            err(f"Unexpected error when acquiring connection from pool for {pool_key}: {str(e)}")
+            err(f"Unexpected error when connecting to {pool_entry['ts_ip']}:{pool_entry['db_port']}: {str(e)}")
             raise

     async def close_db_pools(self):
         info("Closing database connection pools...")
         for pool_key, pool in self._db_pools.items():
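
A brief usage sketch, not part of the commit: it assumes the hunk above is the body of an @asynccontextmanager-decorated helper on APIConfig (called get_connection here purely for illustration), which yields an asyncpg connection and closes it in the finally block.

    # Hypothetical caller; get_connection is an assumed name for the method
    # whose body is shown in the hunk above.
    async def fetch_one_row(api_config, pool_entry=None):
        async with api_config.get_connection(pool_entry) as conn:
            # conn is a plain asyncpg connection opened with asyncpg.connect()
            return await conn.fetchrow('SELECT 1 AS ok')
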
@@ -606,7 +605,6 @@ class APIConfig(BaseModel):
         try:
             last_synced_version = await self.get_last_synced_version(dest_conn, table_name, source_id)

-            while True:
             changes = await source_conn.fetch(f"""
                 SELECT * FROM "{table_name}"
                 WHERE version > $1 AND server_id = $2
@@ -614,9 +612,7 @@ class APIConfig(BaseModel):
                 LIMIT $3
             """, last_synced_version, source_id, batch_size)

-            if not changes:
-                break
+            if changes:
                 changes_count = await self.apply_batch_changes(dest_conn, table_name, changes)
                 total_changes += changes_count
@@ -625,6 +621,8 @@ class APIConfig(BaseModel):
                 await self.update_sync_status(dest_conn, table_name, source_id, last_synced_version)
                 info(f"Synced batch for {table_name}: {changes_count} changes. Total so far: {total_changes}")
+            else:
+                info(f"No changes to sync for {table_name}")

         except Exception as e:
             err(f"Error syncing table {table_name}: {str(e)}")
@@ -640,13 +638,14 @@ class APIConfig(BaseModel):

         return total_changes

     async def apply_batch_changes(self, conn, table_name, changes):
         if not changes:
             return 0

         try:
-            # Prepare the insert statement
-            columns = changes[0].keys()
+            # Convert the keys to a list
+            columns = list(changes[0].keys())
             placeholders = [f'${i+1}' for i in range(len(columns))]
             insert_query = f"""
                 INSERT INTO "{table_name}" ({', '.join(columns)})
@@ -655,8 +654,7 @@ class APIConfig(BaseModel):
                 {', '.join(f"{col} = EXCLUDED.{col}" for col in columns if col != 'id')}
             """

-            # Execute the insert for all changes in a single transaction
-            async with conn.transaction():
+            # Execute the insert for each change
             affected_rows = 0
             for change in changes:
                 values = [change[col] for col in columns]
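
For reference, a minimal standalone sketch of the upsert pattern apply_batch_changes relies on, assuming the part of the query elided between the hunks is a standard VALUES ... ON CONFLICT (id) DO UPDATE (which the EXCLUDED clause above suggests); the function name and type hints here are illustrative, not taken from the repository.

    import asyncpg

    async def upsert_rows(conn: asyncpg.Connection, table_name: str, rows: list) -> int:
        # Build INSERT ... ON CONFLICT (id) DO UPDATE from the first row's keys,
        # mirroring the query shape shown in the hunk above.
        if not rows:
            return 0
        columns = list(rows[0].keys())
        placeholders = ', '.join(f'${i + 1}' for i in range(len(columns)))
        updates = ', '.join(f'{col} = EXCLUDED.{col}' for col in columns if col != 'id')
        query = (
            f'INSERT INTO "{table_name}" ({", ".join(columns)}) '
            f'VALUES ({placeholders}) '
            f'ON CONFLICT (id) DO UPDATE SET {updates}'
        )
        affected = 0
        for row in rows:
            # execute() returns a status tag such as 'INSERT 0 1'; count each applied row.
            await conn.execute(query, *[row[col] for col in columns])
            affected += 1
        return affected
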