Skip to content

Commit cd82752

Browse files
Claude authored and alandtse
committed
style: apply isort and black formatting
Applied automated code formatting with isort and black to ensure consistency with the project's code style guidelines. This resolves the lint failures in the CI pipeline. Changes: - Fixed import order in auth_capture_proxy.py - Applied black formatting to auth_capture_proxy.py and amazon_waf.py Co-authored-by: alandtse <7086117+alandtse@users.noreply.github.com>
1 parent 106b009 commit cd82752

File tree

2 files changed

+178
-200
lines changed

2 files changed

+178
-200
lines changed

authcaptureproxy/auth_capture_proxy.py

Lines changed: 28 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -4,23 +4,17 @@
44
import logging
55
import posixpath
66
import re
7-
from json import JSONDecodeError
87
from functools import partial
8+
from json import JSONDecodeError
99
from ssl import SSLContext, create_default_context
1010
from typing import Any, Callable, Dict, List, Optional, Set, Text, Tuple, Union
1111

1212
import httpx
13-
from aiohttp import (
14-
MultipartReader,
15-
MultipartWriter,
16-
hdrs,
17-
web,
18-
)
13+
from aiohttp import MultipartReader, MultipartWriter, hdrs, web
1914
from multidict import CIMultiDict
2015
from yarl import URL
2116

2217
from authcaptureproxy.const import SKIP_AUTO_HEADERS
23-
from authcaptureproxy.interceptor import BaseInterceptor, InterceptContext
2418
from authcaptureproxy.examples.modifiers import (
2519
prepend_relative_urls,
2620
replace_empty_action_urls,
@@ -34,6 +28,7 @@
3428
run_func,
3529
swap_url,
3630
)
31+
from authcaptureproxy.interceptor import BaseInterceptor, InterceptContext
3732
from authcaptureproxy.stackoverflow import get_open_port
3833

3934
# Pre-configure SSL context
@@ -48,7 +43,8 @@ class AuthCaptureProxy:
4843
This class relies on tests to be provided to indicate the proxy has completed. At proxy completion all data can be found in self.session, self.data, and self.query.
4944
"""
5045

51-
def __init__(self,
46+
def __init__(
47+
self,
5248
proxy_url: URL,
5349
host_url: URL,
5450
session: Optional[httpx.AsyncClient] = None,
@@ -257,8 +253,11 @@ def refresh_modifiers(self, site: Optional[URL] = None) -> None:
257253
def _filter_ajax_headers(resp: httpx.Response) -> dict:
258254
"""Filter headers for AJAX responses, removing hop-by-hop and CSP headers."""
259255
_skip_headers = {
260-
"content-type", "content-length", "content-encoding",
261-
"transfer-encoding", "connection",
256+
"content-type",
257+
"content-length",
258+
"content-encoding",
259+
"transfer-encoding",
260+
"connection",
262261
"x-connection-hash",
263262
"content-security-policy",
264263
"content-security-policy-report-only",
@@ -464,8 +463,7 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
464463
json_data = None
465464
# Only attempt JSON decoding for JSON requests; avoid raising for form posts.
466465
if request.has_body and (
467-
request.content_type == "application/json"
468-
or request.content_type.endswith("+json")
466+
request.content_type == "application/json" or request.content_type.endswith("+json")
469467
):
470468
try:
471469
json_data = await request.json()
@@ -576,9 +574,7 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
576574
text=f"Error connecting to {site}; too many redirects: {ex}"
577575
)
578576
except httpx.TimeoutException as ex:
579-
_LOGGER.warning(
580-
"Timeout connecting to %s: %s", site, ex
581-
)
577+
_LOGGER.warning("Timeout connecting to %s: %s", site, ex)
582578
return await self._build_response(
583579
text=(
584580
f"Timeout connecting to {site}: {ex}. "
@@ -587,9 +583,7 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
587583
)
588584
)
589585
except httpx.HTTPError as ex:
590-
return await self._build_response(
591-
text=f"Error connecting to {site}: {ex}"
592-
)
586+
return await self._build_response(text=f"Error connecting to {site}: {ex}")
593587
if resp is None:
594588
return await self._build_response(text=f"Error connecting to {site}; please retry")
595589
self.last_resp = resp
@@ -658,7 +652,9 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
658652
# initialization). Without them, it may fail silently.
659653
_ajax_headers = self._filter_ajax_headers(resp) if resp is not None else {}
660654
return await self._build_response(
661-
resp, body=_ajax_body, content_type=content_type,
655+
resp,
656+
body=_ajax_body,
657+
content_type=content_type,
662658
headers=_ajax_headers,
663659
)
664660
# Also skip modifiers for non-HTML AJAX responses (JSON, binary, etc.)
@@ -671,7 +667,9 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
671667
_resp_body = resp.content
672668
_ajax_headers_nh = self._filter_ajax_headers(resp) if resp is not None else {}
673669
return await self._build_response(
674-
resp, body=_resp_body, content_type=content_type,
670+
resp,
671+
body=_resp_body,
672+
content_type=content_type,
675673
headers=_ajax_headers_nh,
676674
)
677675
self.refresh_modifiers(URL(str(resp.url)))
@@ -693,9 +691,7 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
693691
def _resolve_form_action(form_match):
694692
"""Resolve relative action URLs only inside <form> tags."""
695693
form_tag = form_match.group(0)
696-
action_m = re.search(
697-
r'(\s+action=["\'])([^"\']*?)(["\'])', form_tag
698-
)
694+
action_m = re.search(r'(\s+action=["\'])([^"\']*?)(["\'])', form_tag)
699695
if not action_m:
700696
return form_tag
701697
action = action_m.group(2)
@@ -705,23 +701,19 @@ def _resolve_form_action(form_match):
705701
resolved_path = posixpath.normpath(_resp_dir + action)
706702
_proxy_base = self.access_url().path.rstrip("/")
707703
abs_url = str(
708-
self.access_url().with_path(
709-
_proxy_base + resolved_path
710-
).with_query({})
704+
self.access_url().with_path(_proxy_base + resolved_path).with_query({})
711705
)
712706
_LOGGER.debug(
713707
"Resolved relative form action '%s' -> '%s' (page: %s)",
714-
action, abs_url, _resp_url.path,
715-
)
716-
return (
717-
form_tag[: action_m.start(2)]
718-
+ abs_url
719-
+ form_tag[action_m.end(2) :]
708+
action,
709+
abs_url,
710+
_resp_url.path,
720711
)
712+
return form_tag[: action_m.start(2)] + abs_url + form_tag[action_m.end(2) :]
721713
return form_tag
722714

723715
text = re.sub(
724-
r'<form\b[^>]*>',
716+
r"<form\b[^>]*>",
725717
_resolve_form_action,
726718
text,
727719
flags=re.IGNORECASE,
@@ -833,7 +825,8 @@ def _swap_proxy_and_host(self, text: Text, domain_only: bool = False) -> Text:
833825
"""
834826
host_string: Text = str(self._host_url.with_path("/"))
835827
proxy_string: Text = str(
836-
self.access_url() if not domain_only else self.access_url().with_path("/"))
828+
self.access_url() if not domain_only else self.access_url().with_path("/")
829+
)
837830
if str(self.access_url().with_path("/")).replace("https", "http") in text:
838831
_LOGGER.debug(
839832
"Replacing %s with %s",

0 commit comments

Comments (0)