Commit fa4e887
fixed+enabled recently disabled tests for download + compressed bodies
Ousret committed Jun 28, 2024
1 parent d02b882 commit fa4e887
Showing 3 changed files with 68 additions and 68 deletions.
26 changes: 11 additions & 15 deletions httpie/downloads.py
@@ -211,12 +211,6 @@ def pre_request(self, request_headers: dict):
Might alter `request_headers`.
"""
# Ask the server not to encode the content so that we can resume, etc.
# TODO: Reconsider this once the underlying library can report raw download size (i.e., not decoded).
# Then it might still be needed when resuming. But in the default case, it probably won’t be necessary.
# <https://github.com/jawah/niquests/issues/127>
request_headers['Accept-Encoding'] = 'identity'

if self._resume:
bytes_have = os.path.getsize(self._output_file.name)
if bytes_have:
@@ -301,11 +295,11 @@ def failed(self):
def is_interrupted(self) -> bool:
return self.status.is_interrupted

def chunk_downloaded(self, chunk_or_new_total: Union[bytes, int]):
def chunk_downloaded(self, chunk_or_new_total: Union[bytes, int]) -> None:
"""
A download progress callback.
:param chunk: A chunk of response body data that has just
:param chunk_or_new_total: A chunk of response body data that has just
been downloaded and written to the output.
"""
@@ -333,8 +327,7 @@ def _get_output_file_from_response(
return open(unique_filename, buffering=0, mode='a+b')


DECODED_FROM_SUFFIX = ' - decoded from {encodings}'
DECODED_SIZE_NOTE_SUFFIX = ' - decoded size'
DECODED_FROM_SUFFIX = ' - decoded using {encodings}'


class DownloadStatus:
@@ -365,8 +358,14 @@ def start_display(self, output_file):
ProgressDisplayFull
)
message = f'Downloading to {output_file.name}'
message_suffix = ''
summary_suffix = ''

if self.decoded_from:
encodings = ', '.join(f'`{enc}`' for enc in self.decoded_from)
message_suffix = DECODED_FROM_SUFFIX.format(encodings=encodings)
else:
message_suffix = ''

if not self.env.show_displays:
progress_display_class = DummyProgressDisplay
else:
@@ -375,11 +374,8 @@
if has_reliable_total:
progress_display_class = ProgressDisplayFull
else:
if self.decoded_from:
encodings = ', '.join(f'`{enc}`' for enc in self.decoded_from)
message_suffix = DECODED_FROM_SUFFIX.format(encodings=encodings)
summary_suffix = DECODED_SIZE_NOTE_SUFFIX
progress_display_class = ProgressDisplayNoTotal

self.display = progress_display_class(
env=self.env,
total_size=self.total_size,
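
Note: the start_display change above only decides when to append the decoded-from note to the progress message. As a minimal standalone sketch (the build_download_message helper is hypothetical, not part of HTTPie), the message composition amounts to:

DECODED_FROM_SUFFIX = ' - decoded using {encodings}'

def build_download_message(output_name, decoded_from):
    # Base message, mirroring `Downloading to {output_file.name}` in start_display.
    message = f'Downloading to {output_name}'
    if decoded_from:
        # e.g. ('gzip', 'br') -> '`gzip`, `br`'
        encodings = ', '.join(f'`{enc}`' for enc in decoded_from)
        message += DECODED_FROM_SUFFIX.format(encodings=encodings)
    return message

# build_download_message('file.bin', ['gzip', 'br'])
# -> 'Downloading to file.bin - decoded using `gzip`, `br`'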
7 changes: 7 additions & 0 deletions httpie/output/streams.py
@@ -79,8 +79,15 @@ def __iter__(self) -> Iterable[bytes]:
# Useful when the remote compresses the body. We use the "untouched" amount of data to determine
# the download speed.
if hasattr(self.msg, "_orig") and hasattr(self.msg._orig, "download_progress") and self.msg._orig.download_progress:
# This is plan A: using public interfaces!
self.on_body_chunk_downloaded(self.msg._orig.download_progress.total)
elif hasattr(self.msg, "_orig") and hasattr(self.msg._orig, "raw") and hasattr(self.msg._orig.raw, "_fp_bytes_read"):
# Plan B: falling back on a private property that may disappear from urllib3-future...
# This case is mandatory due to how the mocking library works: it does not use any "socket" but
# rather a simple io.BytesIO.
self.on_body_chunk_downloaded(self.msg._orig.raw._fp_bytes_read)
else:
# well. this case will certainly cause issues if the body is compressed.
self.on_body_chunk_downloaded(chunk)
except DataSuppressedError as e:
if self.output_options.headers:
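
Note: the attribute lookups above form a three-step fallback for reporting raw download progress. The sketch below is an illustration, not the exact HTTPie code; download_progress.total is the public niquests counter and _fp_bytes_read is a private urllib3-future attribute that may change.

def raw_bytes_downloaded(response, chunk):
    # Plan A: public interface reporting the raw (pre-decode) byte count.
    progress = getattr(response, 'download_progress', None)
    if progress is not None:
        return progress.total
    # Plan B: private urllib3-future counter; needed when the mocking library
    # serves the body from an io.BytesIO instead of a real socket.
    raw = getattr(response, 'raw', None)
    if raw is not None and hasattr(raw, '_fp_bytes_read'):
        return raw._fp_bytes_read
    # Plan C: decoded chunk length, which misreports progress when the body
    # is compressed.
    return len(chunk)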
103 changes: 50 additions & 53 deletions tests/test_downloads.py
@@ -1,6 +1,7 @@
import os
import tempfile
import time
import zlib
from unittest import mock
from urllib.request import urlopen

@@ -15,7 +16,7 @@
ContentRangeError,
Downloader,
PARTIAL_CONTENT,
DECODED_SIZE_NOTE_SUFFIX,
DECODED_FROM_SUFFIX,
)
from niquests.structures import CaseInsensitiveDict
from .utils import http, MockEnvironment, cd_clean_tmp_dir, DUMMY_URL
@@ -264,7 +265,7 @@ def test_download_gzip_content_encoding(self, httpbin):
@responses.activate
def test_incomplete_response(self):
# We have incompleteness checks in the downloader, but they might not be needed, as this is built into (ni|req)uests.
error_msg = 'peer closed connection without sending complete message body (received 2 bytes, expected 1 more)'
error_msg = 'IncompleteRead(2 bytes read, 1 more expected)'
responses.add(
method=responses.GET,
url=DUMMY_URL,
@@ -281,55 +282,53 @@ def test_incomplete_response(self):
class TestDecodedDownloads:
"""Test downloading responses with `Content-Encoding`"""

# todo: find an appropriate way to mock compressed bodies within those tests.
# @responses.activate
# def test_decoded_response_no_content_length(self):
# responses.add(
# method=responses.GET,
# url=DUMMY_URL,
# headers={
# 'Content-Encoding': 'gzip, br',
# },
# body='123',
# )
# with cd_clean_tmp_dir():
# r = http('--download', '--headers', DUMMY_URL)
# print(r.stderr)
# assert DECODED_FROM_SUFFIX.format(encodings='`gzip`, `br`') in r.stderr
# assert DECODED_SIZE_NOTE_SUFFIX in r.stderr
#
# @responses.activate
# def test_decoded_response_with_content_length(self):
# responses.add(
# method=responses.GET,
# url=DUMMY_URL,
# headers={
# 'Content-Encoding': 'gzip, br',
# 'Content-Length': '3',
# },
# body='123',
# )
# with cd_clean_tmp_dir():
# r = http('--download', DUMMY_URL)
# print(r.stderr)
# assert DECODED_FROM_SUFFIX.format(encodings='`gzip`, `br`') in r.stderr
# assert DECODED_SIZE_NOTE_SUFFIX in r.stderr
#
# @responses.activate
# def test_decoded_response_without_content_length(self):
# responses.add(
# method=responses.GET,
# url=DUMMY_URL,
# headers={
# 'Content-Encoding': 'gzip, br',
# },
# body='123',
# )
# with cd_clean_tmp_dir():
# r = http('--download', DUMMY_URL)
# print(r.stderr)
# assert DECODED_FROM_SUFFIX.format(encodings='`gzip`, `br`') in r.stderr
# assert DECODED_SIZE_NOTE_SUFFIX in r.stderr
@responses.activate
def test_decoded_response_no_content_length(self):
responses.add(
method=responses.GET,
url=DUMMY_URL,
headers={
'Content-Encoding': 'deflate',
},
body=zlib.compress(b"foobar"),
)
with cd_clean_tmp_dir():
r = http('--download', '--headers', DUMMY_URL)
print(r.stderr)
assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr

@responses.activate
def test_decoded_response_with_content_length(self):
payload = zlib.compress(b"foobar")

responses.add(
method=responses.GET,
url=DUMMY_URL,
headers={
'Content-Encoding': 'deflate',
'Content-Length': str(len(payload)),
},
body=payload,
)
with cd_clean_tmp_dir():
r = http('--download', DUMMY_URL)
print(r.stderr)
assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr

@responses.activate
def test_decoded_response_without_content_length(self):
responses.add(
method=responses.GET,
url=DUMMY_URL,
headers={
'Content-Encoding': 'deflate',
},
body=zlib.compress(b'foobar'),
)
with cd_clean_tmp_dir():
r = http('--download', DUMMY_URL)
print(r.stderr)
assert DECODED_FROM_SUFFIX.format(encodings='`deflate`') in r.stderr

@responses.activate
def test_non_decoded_response_without_content_length(self):
@@ -344,7 +343,6 @@ def test_non_decoded_response_without_content_length(self):
with cd_clean_tmp_dir():
r = http('--download', DUMMY_URL)
print(r.stderr)
assert DECODED_SIZE_NOTE_SUFFIX not in r.stderr

@responses.activate
def test_non_decoded_response_with_content_length(self):
@@ -358,4 +356,3 @@ def test_non_decoded_response_with_content_length(self):
with cd_clean_tmp_dir():
r = http('--download', DUMMY_URL)
print(r.stderr)
assert DECODED_SIZE_NOTE_SUFFIX not in r.stderr
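
Note: the re-enabled tests mock a deflate-encoded body with zlib.compress(). A quick standard-library round-trip (not part of the test suite) shows why that payload is a valid stand-in for `Content-Encoding: deflate` and why its raw size differs from the decoded size:

import zlib

payload = zlib.compress(b'foobar')            # zlib-wrapped DEFLATE stream, as used by the 'deflate' coding
assert zlib.decompress(payload) == b'foobar'  # decodes back to the original body
assert len(payload) != len(b'foobar')         # raw (downloaded) size != decoded size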
