Skip to content

Commit

Permalink
restore: On download timeout, retry with a longer max_stale_seconds timeout
Browse files Browse the repository at this point in the history
When a basebackup download times out, double the stale-progress timeout
(max_stale_seconds) before the next retry attempt.
  • Loading branch information
RommelLayco committed Oct 21, 2024
1 parent 69d1115 commit 80dc11b
Showing 1 changed file with 15 additions and 1 deletion.
16 changes: 15 additions & 1 deletion pghoard/restore.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import errno
import io
import logging
import math
import multiprocessing
import multiprocessing.pool
import os
Expand Down Expand Up @@ -657,7 +658,17 @@ def run(self, args=None):


class BasebackupFetcher:
def __init__(self, *, app_config, debug, site, pgdata, tablespaces, data_files: List[FileInfo], status_output_file=None):
def __init__(
self,
*,
app_config,
debug,
site,
pgdata,
tablespaces,
data_files: List[FileInfo],
status_output_file=None,
):
self.log = logging.getLogger(self.__class__.__name__)
self.completed_jobs: Set[str] = set()
self.config = app_config
Expand Down Expand Up @@ -698,6 +709,9 @@ def fetch_all(self):
except TimeoutError:
self.pending_jobs.clear()
self.last_progress_ts = time.monotonic()

# Increase the timeout and retry
self.max_stale_seconds *= 2
if self.errors:
break

Expand Down

0 comments on commit 80dc11b

Please sign in to comment.