📝 (parse.py): Update parse_context function to use the new ParsedArgs namedtuple for better code organization and clarity
This commit is contained in:
parent
4236d22098
commit
bed6cd86dc
1 changed file with 90 additions and 10 deletions
|
|
@ -15,10 +15,25 @@ import shlex
|
|||
from collections import OrderedDict, namedtuple
|
||||
from http.cookies import SimpleCookie
|
||||
|
||||
from uncurl.api import parser # type: ignore
|
||||
|
||||
# Extend uncurl's shared argparse parser with proxy support so the token
# stream handed to parse_context() can carry -x/--proxy and -U/--proxy-user.
# NOTE(review): default={} for --proxy is odd for a string-valued flag —
# presumably downstream code only checks it for truthiness; confirm against callers.
parser.add_argument("-x", "--proxy", default={})
parser.add_argument("-U", "--proxy-user", default="")
# Result record for parse_curl_command(): one attribute per supported curl flag.
# FIX: the typename previously read "ParsedContext" (copy-pasted from the legacy
# tuple below); it now matches the variable name so repr() and pickling are coherent.
ParsedArgs = namedtuple(
    "ParsedArgs",
    [
        "command",
        "url",
        "data",
        "data_binary",
        "method",
        "headers",
        "compressed",
        "insecure",
        "user",
        "include",
        "silent",
        "proxy",
        "proxy_user",
        "cookies",
    ],
)
|
||||
# Legacy result record returned by parse_context(): the request pieces in a
# requests-friendly shape (verify flag, auth tuple, proxy mapping, ...).
ParsedContext = namedtuple(
    "ParsedContext",
    "method url data headers cookies verify auth proxy",
)
|
def normalize_newlines(multiline_text):
    """Join shell-style line continuations back into a single line.

    A curl command copied from a terminal often ends lines with " \\" followed
    by a newline; each such continuation is collapsed into a single space so
    the whole command can be tokenised with shlex in one pass.
    """
    continuation = " \\\n"
    return multiline_text.replace(continuation, " ")
||||
def parse_curl_command(curl_command):
    """Parse a ``curl`` command line into a :class:`ParsedArgs` record.

    Only the subset of curl flags enumerated below is understood; any other
    ``-``/``--`` flag is silently ignored, and bare tokens are taken as the
    executable name first and the URL second.

    Args:
        curl_command: the full curl invocation, possibly spanning several
            lines joined with " \\"-style shell continuations.

    Returns:
        ParsedArgs: one attribute per recognised option.

    Raises:
        ValueError: if the command does not start with ``curl``, or a flag
            that requires a value is the last token.
    """
    tokens = shlex.split(normalize_newlines(curl_command))
    tokens = [token for token in tokens if token and token != " "]
    # Guard the empty case too: tokens[0] on an empty list would raise IndexError.
    if not tokens or "curl" not in tokens[0]:
        raise ValueError("Invalid curl command")

    args = {
        "command": None,
        "url": None,
        "data": None,
        "data_binary": None,
        "method": "get",
        "headers": [],
        "compressed": False,
        "insecure": False,
        "user": (),
        "include": False,
        "silent": False,
        "proxy": None,
        "proxy_user": None,
        "cookies": {},
    }

    i = 0

    def _next_value(flag):
        # Consume the token following *flag*; fail with a clear error instead
        # of an IndexError when the flag is the last token on the line.
        nonlocal i
        i += 1
        if i >= len(tokens):
            raise ValueError(f"{flag} requires a value")
        return tokens[i]

    while i < len(tokens):
        token = tokens[i]
        if token in ("-X", "--request"):
            # FIX: also accept curl's long form --request.
            args["method"] = _next_value(token).lower()
        elif token in ("-d", "--data"):
            args["data"] = _next_value(token)
            args["method"] = "post"
        elif token in ("--data-binary", "--data-raw"):
            args["data_binary"] = _next_value(token)
            args["method"] = "post"
        elif token in ("-b", "--cookie"):
            # FIX: "-b" is curl's cookie flag; it was previously treated as a
            # --data-binary alias and the "cookies" dict was never populated.
            cookie = SimpleCookie()
            cookie.load(_next_value(token))
            args["cookies"].update({name: morsel.value for name, morsel in cookie.items()})
        elif token in ("-H", "--header"):
            args["headers"].append(_next_value(token))
        elif token == "--compressed":
            args["compressed"] = True
        elif token in ("-k", "--insecure"):
            args["insecure"] = True
        elif token in ("-u", "--user"):
            # FIX: split on the first ":" only, so passwords may contain
            # colons (this matches curl's user:password handling).
            args["user"] = tuple(_next_value(token).split(":", 1))
        elif token in ("-I", "--include"):
            args["include"] = True
        elif token in ("-s", "--silent"):
            args["silent"] = True
        elif token in ("-x", "--proxy"):
            args["proxy"] = _next_value(token)
        elif token in ("-U", "--proxy-user"):
            args["proxy_user"] = _next_value(token)
        elif not token.startswith("-"):
            # First bare token is the executable ("curl"), the next is the URL.
            if args["command"] is None:
                args["command"] = token
            else:
                args["url"] = token
        i += 1

    return ParsedArgs(**args)
||||
def parse_context(curl_command):
|
||||
method = "get"
|
||||
|
||||
tokens = shlex.split(normalize_newlines(curl_command))
|
||||
tokens = [token for token in tokens if token and token != " "]
|
||||
parsed_args = parser.parse_args(tokens)
|
||||
parsed_args: ParsedArgs = parse_curl_command(curl_command)
|
||||
|
||||
post_data = parsed_args.data or parsed_args.data_binary
|
||||
if post_data:
|
||||
method = "post"
|
||||
|
||||
if parsed_args.X:
|
||||
method = parsed_args.X.lower()
|
||||
if parsed_args.method:
|
||||
method = parsed_args.method.lower()
|
||||
|
||||
cookie_dict = OrderedDict()
|
||||
quoted_headers = OrderedDict()
|
||||
|
||||
for curl_header in parsed_args.header:
|
||||
for curl_header in parsed_args.headers:
|
||||
if curl_header.startswith(":"):
|
||||
occurrence = [m.start() for m in re.finditer(":", curl_header)]
|
||||
header_key, header_value = curl_header[: occurrence[1]], curl_header[occurrence[1] + 1 :]
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue