path: root/day7/utils.py
author     Andrew <saintruler@gmail.com>  2019-05-25 18:12:29 +0400
committer  Andrew <saintruler@gmail.com>  2019-05-25 18:12:29 +0400
commit     7f8d38bcbde018590cccf532f1d96bd6c2d62e44
tree       792d938f31242695fa6dbfda3a1bb09b093e19b2
parent     15690f0f1ae23c9956046187d6deb24cdfd19931
Changed the parameter format of some functions.
Extended the logging functions. Changed the URL decoder.
Diffstat (limited to 'day7/utils.py')
-rw-r--r--  day7/utils.py  48
1 file changed, 30 insertions, 18 deletions
diff --git a/day7/utils.py b/day7/utils.py
index dfd2b5f..c58c014 100644
--- a/day7/utils.py
+++ b/day7/utils.py
@@ -1,23 +1,19 @@
import re
from time import strftime, gmtime
-
-_URI_RESERVED = {
-    '21': '!', '23': '#', '24': '$', '26': '&',
-    '27': '\'', '28': '(', '29': ')', '2A': '*',
-    '2B': '+', '2C': ',', '2F': '/', '3A': ':',
-    '3B': ';', '3D': '=', '3F': '?', '40': '@',
-    '5B': '[', '5D': ']'
-}
+from string import ascii_letters
BAD_REQUEST = 'HTTP/1.1 400 Bad Request'
NOT_FOUND = 'HTTP/1.1 404 Not Found'
SUCCESS = 'HTTP/1.1 200 OK'
METHOD_NOT_ALLOWED = 'HTTP/1.1 405 Method Not Allowed'
-URL_REGEX_PATTERN = re.compile(r'/((.*?/?)+)?')
+
HTTP_METHODS = ['GET', 'POST', 'OPTIONS', 'HEAD', 'PUT', 'PATCH', 'DELETE', 'TRACE', 'CONNECT']
+URL_REGEX_PATTERN = re.compile(r'/((.*?/?)+)?')
+FIRST_LINE_PATTERN = re.compile(rf'({"|".join(HTTP_METHODS)}) {URL_REGEX_PATTERN.pattern} HTTP/1\.[01]')
+
-def add_headers(status, html: str):
+def add_text_headers(status, html: str):
    return '\r\n'.join([
        status,
        f'Date: {strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())}',
@@ -33,9 +29,7 @@ def validate_url(url):
def validate_first_line(line):
-    methods_groups = f"({'|'.join(HTTP_METHODS)})"
-    first_line_pattern = re.compile(rf'{methods_groups} {URL_REGEX_PATTERN.pattern} HTTP/1\.[01]')
-    return bool(first_line_pattern.fullmatch(line))
+    return bool(FIRST_LINE_PATTERN.fullmatch(line))
def parse_cookies(cookies_line: str):
@@ -51,6 +45,14 @@ def parse_cookies(cookies_line: str):
    return d
+def format_cookies(cookies: dict):
+    pairs = []
+    for key, value in cookies.items():
+        pairs.append(f'{key}={value}')
+
+    return ';'.join(pairs)
+
+
def parse_headers(request_line: str):
    request = request_line.split('\r\n')
@@ -98,12 +100,8 @@ def url_decoder(url_line: str):
    while i < len(url_line):
        if url_line[i] == '%':
            hex_value = url_line[i + 1: i + 3]
-            if hex_value in _URI_RESERVED:
-                integer = ord(_URI_RESERVED[hex_value])
-            else:
-                integer = int(hex_value, 16)
-            encoded += bytes([integer])
+            encoded += bytes([int(hex_value, 16)])
            i += 3
            continue
        else:
@@ -111,3 +109,17 @@ def url_decoder(url_line: str):
        i += 1
    return encoded.decode()
+
+
+def url_encoder(line: str):
+    s = ''
+    for char in line:
+        if char == ' ':
+            s += '+'
+        elif char not in ascii_letters:
+            for byte in char.encode():
+                s += f'%{byte:02X}'
+        else:
+            s += char
+
+    return s
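
Not part of the commit: a minimal usage sketch of the helpers touched above, assuming it is run from the day7 directory so that utils.py is importable as `utils`. Expected values are shown in the trailing comments.

from utils import (format_cookies, url_decoder, url_encoder,
                   validate_first_line)

# Request-line validation now reuses the precompiled FIRST_LINE_PATTERN.
assert validate_first_line('GET /index.html HTTP/1.1')
assert not validate_first_line('FETCH /index.html HTTP/1.1')

# format_cookies is the inverse of parse_cookies: dict -> "key=value;key=value".
print(format_cookies({'session': 'abc123', 'theme': 'dark'}))
# -> session=abc123;theme=dark

# url_encoder percent-encodes every byte that is not an ASCII letter
# (a space becomes '+'); url_decoder turns %XX escapes back into bytes
# and decodes the result as UTF-8.
print(url_encoder('день7'))                       # -> %D0%B4%D0%B5%D0%BD%D1%8C%37
print(url_decoder('%D0%B4%D0%B5%D0%BD%D1%8C7'))   # -> день7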