Skip to content
This repository has been archived by the owner on May 8, 2020. It is now read-only.

Added proxy support #249

Open
wants to merge 1 commit into
base: dev
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 26 additions & 18 deletions pyppeteer/chromium_downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@
# Chromium revision to download; can be pinned with the
# PYPPETEER_CHROMIUM_REVISION environment variable, otherwise the default
# comes from ``__chromium_revision__`` (defined elsewhere in the module).
REVISION = os.environ.get(
'PYPPETEER_CHROMIUM_REVISION', __chromium_revision__)

# Proxy configuration picked up from the conventional environment variables.
# HTTPS_PROXY falls back to HTTP_PROXY when only the latter is set.
HTTP_PROXY = os.environ.get('HTTP_PROXY', '')
HTTPS_PROXY = os.environ.get('HTTPS_PROXY', HTTP_PROXY)

# True when the user opted out of the download progress bar.
# Only '1' or 'true' (case-insensitive) count as opting out; every other
# value — including '0' and 'false' — leaves the bar enabled.  (The previous
# code left any other non-empty string as a truthy str, so e.g.
# PYPPETEER_NO_PROGRESS_BAR=0 silently disabled the bar, and the variable
# type-switched from str to bool, requiring a ``# type: ignore``.)
NO_PROGRESS_BAR = os.environ.get(
    'PYPPETEER_NO_PROGRESS_BAR', '').lower() in ('1', 'true')
def download_zip(url: str) -> BytesIO:
    """Download the chromium archive at *url* into an in-memory buffer.

    Routes the request through the module-level ``HTTPS_PROXY`` when it is
    set, streams the body in chunks while updating a progress bar (silenced
    when ``NO_PROGRESS_BAR`` is truthy), and returns the whole payload.

    Returns:
        BytesIO: buffer holding the complete downloaded zip archive.
    """
    # NOTE(review): lines elided from the diff view between the signature and
    # the comment below (e.g. an initial "start download" log line) are not
    # reproduced here — confirm against the full file.
    # see https://urllib3.readthedocs.io/en/latest/advanced-usage.html for more
    urllib3.disable_warnings()

    if HTTPS_PROXY:
        logger.warning(f'Using proxy: {HTTPS_PROXY}')
        http = urllib3.ProxyManager(HTTPS_PROXY)
    else:
        http = urllib3.PoolManager()

    # The pre-proxy code wrapped the pool in ``with urllib3.PoolManager()``;
    # keep the context manager so the pool is closed even if the download
    # fails (ProxyManager supports it too).
    with http:
        # preload_content=False means the body is streamed later instead of
        # being buffered entirely by urllib3.
        data = http.request('GET', url, preload_content=False)

        try:
            total_length = int(data.headers['content-length'])
        except (KeyError, ValueError, AttributeError):
            total_length = 0  # size unknown; tqdm falls back to a counter

        process_bar = tqdm(
            total=total_length,
            file=os.devnull if NO_PROGRESS_BAR else None,
        )

        # Stream the body in 10 KiB (10 * 1024 byte) chunks.
        _data = BytesIO()
        try:
            for chunk in data.stream(10240):
                _data.write(chunk)
                process_bar.update(len(chunk))
        finally:
            process_bar.close()
            data.release_conn()  # return the connection to the pool

    logger.warning('\nchromium download done.')
    return _data
Expand Down