Add timeout parameter to requests.Session.get to prevent hangs #54

Closed · wants to merge 3 commits · Changes from 2 commits
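Background for reviewers: without a timeout, requests waits on an unresponsive server indefinitely, which is exactly the hang this PR addresses. A minimal sketch of the failure mode and the fix (the URL is a placeholder, not a real mirror):

    import requests

    session = requests.Session()

    # Without timeout=, session.get() can block forever on a stalled host.
    # timeout=10 bounds the connection attempt and each read of the response
    # body at 10 seconds each; it is not a cap on total download time.
    try:
        ret = session.get(
            "https://repo.example.invalid/linux-64/repodata.json",  # placeholder
            stream=True,
            timeout=10,
        )
    except requests.exceptions.Timeout:
        # A stalled request now fails fast instead of hanging the mirror run.
        print("request timed out")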
conda_mirror/conda_mirror.py: 18 changes (17 additions, 1 deletion)
@@ -382,6 +382,16 @@ def _make_arg_parser():
         default=100,
         dest="max_retries",
     )
+    ap.add_argument(
+        "--timeout",
+        help=(
+            "Connection and read timeout in seconds, "
+            "defaults to 10"
+        ),
+        type=int,
+        default=10,
+        dest="con_timeout",
+    )
     ap.add_argument(
         "--no-progress",
         action="store_false",
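As a sanity check, a self-contained sketch of how the new flag parses; the add_argument call mirrors the diff, and everything around it is elided:

    import argparse

    ap = argparse.ArgumentParser()
    ap.add_argument(
        "--timeout",
        help="Connection and read timeout in seconds, defaults to 10",
        type=int,
        default=10,
        dest="con_timeout",
    )

    args = ap.parse_args(["--timeout", "30"])
    assert args.con_timeout == 30  # stored as args.con_timeout, not args.timeout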
@@ -502,6 +512,7 @@ def pdb_hook(exctype, value, traceback):
         "proxies": proxies,
         "ssl_verify": args.ssl_verify,
         "max_retries": args.max_retries,
+        "con_timeout": args.con_timeout,
         "show_progress": args.show_progress,
     }
 
@@ -636,6 +647,7 @@ def _download(
     ssl_verify=None,
     chunk_size: int = DEFAULT_CHUNK_SIZE,
     show_progress=False,
+    con_timeout=10,
 ):
     """Download `url` to `target_directory`
 
@@ -666,7 +678,7 @@
     download_filename = os.path.join(target_directory, target_filename)
     logger.debug("downloading to %s", download_filename)
     with open(download_filename, "w+b") as tf:
-        ret = session.get(url, stream=True, proxies=proxies, verify=ssl_verify)
+        ret = session.get(url, stream=True, proxies=proxies, verify=ssl_verify, timeout=con_timeout)
         size = int(ret.headers.get("Content-Length", 0))
         progress = tqdm(
             desc=target_filename,
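Note on the timeout=con_timeout form: requests also accepts a (connect, read) tuple, so passing a single integer is shorthand for applying the same limit to both phases. A sketch with a placeholder URL:

    import requests

    session = requests.Session()
    url = "https://repo.example.invalid/linux-64/repodata.json"  # placeholder

    # Equivalent calls: a bare number is applied separately to the connect
    # phase and to each read of the response body.
    ret = session.get(url, stream=True, timeout=10)
    ret = session.get(url, stream=True, timeout=(10, 10))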
Expand Down Expand Up @@ -694,6 +706,7 @@ def _download_backoff_retry(
chunk_size: int = DEFAULT_CHUNK_SIZE,
max_retries: int = 100,
show_progress=True,
con_timeout = 10
xhochy marked this conversation as resolved.
Show resolved Hide resolved
):
"""Download `url` to `target_directory` with exponential backoff in the
event of failure.
Expand Down Expand Up @@ -738,6 +751,7 @@ def _download_backoff_retry(
ssl_verify=ssl_verify,
chunk_size=chunk_size,
show_progress=show_progress,
con_timeout=con_timeout
)
break
except Exception:
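Because _download_backoff_retry catches bare Exception, a requests.exceptions.Timeout raised via con_timeout is retried like any other download failure. A simplified sketch of that control flow; the function name and backoff constants here are illustrative, not the module's:

    import time

    def download_with_backoff_sketch(download, max_retries=100):
        # Any error, including a Timeout, triggers an exponentially
        # backed-off retry, mirroring the loop in _download_backoff_retry.
        for attempt in range(max_retries):
            try:
                download()
                break
            except Exception:
                time.sleep(min(2 ** attempt, 60))  # assumed backoff shape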
@@ -906,6 +920,7 @@ def main(
     ssl_verify=None,
     chunk_size: int = DEFAULT_CHUNK_SIZE,
     max_retries=100,
+    con_timeout=10,
     show_progress: bool = True,
 ):
     """
@@ -1140,6 +1155,7 @@ def main(
             chunk_size=chunk_size,
             max_retries=max_retries,
             show_progress=show_progress,
+            con_timeout=con_timeout,
         )
 
     # make sure we have enough free disk space in the target folder to meet threshold
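End to end, the new keyword flows from the CLI (or a direct main() call) through _download_backoff_retry into every session.get. A usage sketch: upstream_channel, target_directory, and platform are assumed parameter names not shown in this diff, and all values are placeholders:

    from conda_mirror.conda_mirror import main

    # con_timeout is threaded down to session.get(..., timeout=...),
    # so no single request can hang the mirror indefinitely.
    main(
        upstream_channel="conda-forge",   # placeholder channel
        target_directory="/tmp/mirror",   # placeholder path
        platform="linux-64",              # placeholder platform
        con_timeout=30,
    )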