diff --git a/httpie/cli/argparser.py b/httpie/cli/argparser.py index d79d0c4cfd..bf981900fd 100644 --- a/httpie/cli/argparser.py +++ b/httpie/cli/argparser.py @@ -616,6 +616,17 @@ def print_usage(self, file): def error(self, message): """Prints a usage message incorporating the message to stderr and exits.""" + + # We should release the files in that case, since + # the process is going to quit early anyway. + if hasattr(self.args, "multipart_data"): + for f in self.args.multipart_data: + if isinstance(self.args.multipart_data[f], tuple): + self.args.multipart_data[f][1].close() + elif isinstance(self.args.multipart_data[f], list): + for item in self.args.multipart_data[f]: + item[1].close() + self.print_usage(sys.stderr) self.env.rich_error_console.print( dedent( diff --git a/httpie/client.py b/httpie/client.py index 2191cf1291..7a2dc0a28a 100644 --- a/httpie/client.py +++ b/httpie/client.py @@ -157,6 +157,13 @@ def collect_messages( **send_kwargs, ) if args.max_headers and len(response.headers) > args.max_headers: + try: + requests_session.close() + # we consume the content to allow the connection to be put back into the pool, and closed! + response.content + except NotImplementedError: # We allow custom transports that may not implement close. + pass + raise niquests.ConnectionError(f"got more than {args.max_headers} headers") response._httpie_headers_parsed_at = monotonic() expired_cookies += get_expired_cookies( @@ -183,7 +190,7 @@ def collect_messages( try: requests_session.close() - except NotImplementedError: + except NotImplementedError: # We allow custom transports that may not implement close. pass diff --git a/httpie/models.py b/httpie/models.py index 76ccc8c748..f20363f588 100644 --- a/httpie/models.py +++ b/httpie/models.py @@ -103,16 +103,28 @@ def metadata(self) -> str: # metrics aren't guaranteed to be there. act with caution. # see https://niquests.readthedocs.io/en/latest/user/advanced.html#event-hooks for more. 
if hasattr(self._orig, "conn_info") and self._orig.conn_info: - if self._orig.conn_info.resolution_latency: - data[ELAPSED_DNS_RESOLUTION_LABEL] = str(round(self._orig.conn_info.resolution_latency.total_seconds(), 10)) + 's' - if self._orig.conn_info.established_latency: - data[ELAPSED_ESTABLISH_CONN] = str(round(self._orig.conn_info.established_latency.total_seconds(), 10)) + 's' - if self._orig.conn_info.tls_handshake_latency: - data[ELAPSED_TLS_HANDSHAKE] = str(round(self._orig.conn_info.tls_handshake_latency.total_seconds(), 10)) + 's' - if self._orig.conn_info.request_sent_latency: - data[ELAPSED_REQUEST_SEND] = str(round(self._orig.conn_info.request_sent_latency.total_seconds(), 10)) + 's' - - data[ELAPSED_TIME_LABEL] = str(round(time_elapsed, 10)) + 's' + if self._orig.conn_info.resolution_latency is not None: + if self._orig.conn_info.resolution_latency: + data[ELAPSED_DNS_RESOLUTION_LABEL] = f"{round(self._orig.conn_info.resolution_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_DNS_RESOLUTION_LABEL] = "0s" + if self._orig.conn_info.established_latency is not None: + if self._orig.conn_info.established_latency: + data[ELAPSED_ESTABLISH_CONN] = f"{round(self._orig.conn_info.established_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_ESTABLISH_CONN] = "0s" + if self._orig.conn_info.tls_handshake_latency is not None: + if self._orig.conn_info.tls_handshake_latency: + data[ELAPSED_TLS_HANDSHAKE] = f"{round(self._orig.conn_info.tls_handshake_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_TLS_HANDSHAKE] = "0s" + if self._orig.conn_info.request_sent_latency is not None: + if self._orig.conn_info.request_sent_latency: + data[ELAPSED_REQUEST_SEND] = f"{round(self._orig.conn_info.request_sent_latency.total_seconds(), 10):6f}s" + else: + data[ELAPSED_REQUEST_SEND] = "0s" + + data[ELAPSED_TIME_LABEL] = f"{round(time_elapsed, 10):6f}s" return '\n'.join( f'{key}: {value}' diff --git a/httpie/output/lexers/http.py 
b/httpie/output/lexers/http.py index aea827401e..728490115f 100644 --- a/httpie/output/lexers/http.py +++ b/httpie/output/lexers/http.py @@ -66,7 +66,7 @@ class SimplifiedHTTPLexer(pygments.lexer.RegexLexer): tokens = { 'root': [ # Request-Line - (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)', + (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)([0-9].?[0-9]?)', pygments.lexer.bygroups( request_method, pygments.token.Text, @@ -77,7 +77,7 @@ class SimplifiedHTTPLexer(pygments.lexer.RegexLexer): pygments.token.Number )), # Response Status-Line - (r'(HTTP)(/)(\d+\.\d+)( +)(.+)', + (r'(HTTP)(/)([0-9].?[0-9]?)( +)(.+)', pygments.lexer.bygroups( pygments.token.Keyword.Reserved, # 'HTTP' pygments.token.Operator, # '/' diff --git a/httpie/output/lexers/metadata.py b/httpie/output/lexers/metadata.py index 7f5c77f54d..1d41a67446 100644 --- a/httpie/output/lexers/metadata.py +++ b/httpie/output/lexers/metadata.py @@ -36,7 +36,7 @@ class MetadataLexer(pygments.lexer.RegexLexer): tokens = { 'root': [ ( - fr'({ELAPSED_TIME_LABEL}|{ELAPSED_DNS_RESOLUTION_LABEL}|{ELAPSED_REQUEST_SEND}|{ELAPSED_TLS_HANDSHAKE}|{ELAPSED_ESTABLISH_CONN})( *)(:)( *)(\d+\.[\de\-]+)(s)', pygments.lexer.bygroups( + fr'({ELAPSED_TIME_LABEL}|{ELAPSED_DNS_RESOLUTION_LABEL}|{ELAPSED_REQUEST_SEND}|{ELAPSED_TLS_HANDSHAKE}|{ELAPSED_ESTABLISH_CONN})( *)(:)( *)([\d]+[.\d]{{0,}})(s)', pygments.lexer.bygroups( pygments.token.Name.Decorator, # Name pygments.token.Text, pygments.token.Operator, # Colon diff --git a/httpie/output/streams.py b/httpie/output/streams.py index b88886ab40..1686a97913 100644 --- a/httpie/output/streams.py +++ b/httpie/output/streams.py @@ -86,6 +86,7 @@ def __iter__(self) -> Iterable[bytes]: yield b'\n\n' yield self.get_metadata() + yield b'\n\n' class RawStream(BaseStream): diff --git a/setup.cfg b/setup.cfg index 64aa59d373..5f2933e0cd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,6 +27,9 @@ filterwarnings = # change the concerned code. Python 3.13 may remove them, so we'll need to think about it soon. 
ignore:ssl\.PROTOCOL_(TLSv1|TLSv1_1|TLSv1_2) is deprecated:DeprecationWarning ignore:ssl\.TLSVersion\.(TLSv1|TLSv1_1|TLSv1_2) is deprecated:DeprecationWarning + # Happens on Windows. Poses no threat to our test suite. + # "An operation was attempted on something that is not a socket" during shutdown + ignore:Exception in thread:pytest.PytestUnhandledThreadExceptionWarning [metadata] diff --git a/tests/test_meta.py b/tests/test_meta.py index e0912958c6..7a5f57eb4d 100644 --- a/tests/test_meta.py +++ b/tests/test_meta.py @@ -19,6 +19,12 @@ def test_meta_extended_tls(remote_httpbin_secure): assert 'Issuer' in r assert 'Revocation status' in r + # If this fails, you missed the two extra newlines after the metadata render. + # see output/streams.py L89 + # Why do we need two? Short story: in case of redirect, expect metadata to appear multiple times, + # and we don't want them glued to the request line for example. + assert str(r).endswith("\n\n") + @pytest.mark.parametrize('style', ['auto', 'fruity', *PIE_STYLE_NAMES]) def test_meta_elapsed_time_colors(httpbin, style): diff --git a/tests/test_uploads.py b/tests/test_uploads.py index e4723d6f6b..01128bdf9c 100644 --- a/tests/test_uploads.py +++ b/tests/test_uploads.py @@ -145,6 +145,8 @@ def test_reading_from_stdin(httpbin, wait): @pytest.mark.requires_external_processes @pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") def test_stdin_read_warning(httpbin): + """This test is flaky. Expect random failures in the CI under macOS. + It's mainly due to the poor VM performance.""" with stdin_processes(httpbin) as (process_1, process_2): # Wait before sending any data time.sleep(1)