about summary refs log tree commit diff
diff options
context:
space:
mode:
author UltraQbik <no1skill@yandex.ru> 2024-08-27 01:26:35 +0300
committer UltraQbik <no1skill@yandex.ru> 2024-08-27 01:26:35 +0300
commit 817b9b5131f665390e8340941817169b0615f441 (patch)
tree 554332d6700c5adb0f5d06dc1be483ae596b21c4
parent fae7cca5091c69083f9f2943310ee5e9c5016520 (diff)
download httpy-817b9b5131f665390e8340941817169b0615f441.tar.gz
download httpy-817b9b5131f665390e8340941817169b0615f441.zip
Add compression
-rw-r--r-- main.py 23
-rw-r--r-- src/file_man.py 52
2 files changed, 57 insertions(+), 18 deletions(-)
diff --git a/main.py b/main.py
index ad1e2fe..88674af 100644
--- a/main.py
+++ b/main.py
@@ -156,7 +156,8 @@ class HTTPServer:
         if request.path in self.path_map:  # assume browser
             filepath = self.path_map[request.path]["path"]
             filedata = self.fetch_file(filepath)
-            response = Response(b'', STATUS_CODE_OK, data_stream=filedata)
+            headers = self.fetch_file_headers(filepath)
+            response = Response(b'', STATUS_CODE_OK, data_stream=filedata, headers=headers)
 
             return response
         elif len(split_path) >= 2 and split_path[0] in API_VERSIONS:  # assume script
@@ -211,22 +212,32 @@ class HTTPServer:
                 break
         return None
 
-    def fetch_file(self, path: str) -> tuple[Generator[bytes, None, None], dict[str, str]] | None:
+    def fetch_file_headers(self, path: str) -> dict[str, Any] | None:
+        """
+        Fetches file header data
+        :param path: filepath
+        :return: headers
+        """
+
+        if path in self.path_map:
+            return self.path_map[path]["headers"]
+        return None
+
+    def fetch_file(self, path: str) -> Generator[bytes, None, None] | None:
         """
         Fetches file
         :param path: filepath
-        :return: data stream, headers
+        :return: data stream
         """
 
         if path in self.path_map:
             filepath = self.path_map[path]["path"]
-            headers = self.path_map[path]["headers"]
             with open(filepath, "rb") as file:
-                yield file.read(BUFFER_LENGTH), headers
+                yield file.read(BUFFER_LENGTH)
+        yield None
 
 
 def main():
-
     server = HTTPServer(port=13700, enable_ssl=False)
     server.start()
 
diff --git a/src/file_man.py b/src/file_man.py
index bfd2fd4..58b4015 100644
--- a/src/file_man.py
+++ b/src/file_man.py
@@ -1,6 +1,6 @@
 import os
 from typing import Any
-from src.config import FILE_MAN_PATH_MAP, FILE_MAN_VERBOSE
+from src.config import FILE_MAN_PATH_MAP, FILE_MAN_VERBOSE, FILE_MAN_COMPRESS
 
 
 def list_path(path) -> list[str]:
@@ -43,14 +43,18 @@ def generate_path_map() -> dict[str, dict[str, Any]]:
                 headers["Content-Type"] = "text/css"
             case ".txt":
                 headers["Content-Type"] = "text/plain"
+            case ".js":
+                headers["Content-Type"] = "text/javascript"
             case ".png":
                 headers["Content-Type"] = "image/png"
             case ".webp":
                 headers["Content-Type"] = "image/avif"
             case ".jpg" | ".jpeg":
                 headers["Content-Type"] = "image/jpeg"
-        headers["Content-Length"] = os.path.getsize(val["path"])
+            case _:
+                headers["Content-Type"] = "*/*"
         val["headers"] = headers
+        headers["Content-Length"] = os.path.getsize(val["path"])
 
     # print list of paths
     if FILE_MAN_VERBOSE:
@@ -68,31 +72,55 @@ def generate_path_map() -> dict[str, dict[str, Any]]:
 
 def compress_path_map(path_map: dict[str, dict[str, Any]], path_prefix: str = "compress", regen: bool = False):
     """
-    Compresses all files using gzip
+    Compresses all files using brotli
     """
 
-    import gzip
+    import brotli
+    from src.minimizer import minimize_html
     if not os.path.exists(path_prefix):
         os.mkdir(path_prefix)
     for val in path_map.values():
         filepath = f"{path_prefix}/{val["path"]}"
+        if val["headers"]["Content-Type"].split("/")[0] == "image":  # ignore images
+            continue
         if not os.path.exists((dirs := os.path.dirname(filepath))):  # add missing folders
             os.makedirs(dirs)
         if not os.path.exists(filepath) or regen:
-            with gzip.open(filepath, "wb") as comp:  # compress
-                with open(val["path"], "rb") as file:
-                    comp.writelines(file)
+            # brotli compress
+            if val["headers"]["Content-Type"] == "text/html":
+                with open(filepath, "wb") as comp:
+                    with open(val["path"], "rb") as file:
+                        comp.write(
+                            brotli.compress(minimize_html(file.read()))
+                        )
+            else:
+                with open(filepath, "wb") as comp:
+                    br = brotli.Compressor()
+                    with open(val["path"], "rb") as file:
+                        while (chunk := file.read(65536)):
+                            br.process(chunk)
+                            comp.write(br.flush())
+
         val["path"] = filepath
         val["headers"]["Content-Length"] = os.path.getsize(filepath)
+        val["headers"]["Content-Encoding"] = "br"
 
     if FILE_MAN_VERBOSE:
-        print("COMPRESSED PATH MAP:")
-        max_len = max([len(x["path"]) for x in path_map.values()])
-        for val in path_map.values():
-            print(f"\t'{val['path']: <{max_len}}' {val['headers']['Content-Length']} bytes")
+        print("COMPRESSED PATH:")
+        max_key_len = max([len(x) for x in path_map.keys()])
+        max_val_len = max([len(x["path"]) for x in path_map.values()])
+        max_size_len = max([len(x["headers"]["Content-Length"].__repr__()) for x in path_map.values()])
+        print(f"\t{'web': ^{max_key_len}} | {'path': ^{max_val_len}} | {'size': ^{max_size_len}}\n"
+              f"\t{'=' * max_key_len}=#={'=' * max_val_len}=#={'=' * max_size_len}")
+        for key, val in path_map.items():
+            print(f"\t{key: <{max_key_len}} | "
+                  f"{val['path']: <{max_val_len}} | "
+                  f"{val['headers']['Content-Length']: <{max_size_len}}")
         print("END OF LIST.", len(path_map), end="\n\n")
 
     return path_map
 
 
-PATH_MAP = compress_path_map(generate_path_map())
+PATH_MAP = generate_path_map()
+if FILE_MAN_COMPRESS:
+    PATH_MAP = compress_path_map(PATH_MAP, regen=True)