import os
from typing import Any
from src.config import FILE_MAN_PATH_MAP, FILE_MAN_VERBOSE, FILE_MAN_COMPRESS


def list_path(path: str) -> list[str]:
    """Recursively collect every file path under the given file or directory."""
    if os.path.isfile(path):
        return [path]
    paths = []
    for file in os.listdir(path):
        paths += list_path(f"{path}/{file}")
    return paths


def generate_path_map() -> dict[str, dict[str, Any]]:
    """
    Generate a full path map for the HTTP server
    """

    # generate basic path map
    path_map = {}
    for key, entry in FILE_MAN_PATH_MAP.items():
        if not (os.path.exists(entry["path"]) or os.path.exists(entry["path"][:-2])):
            if FILE_MAN_VERBOSE:
                print(f"Undefined path for '{key}' ({entry['path']})")
            continue
        if key.endswith("*"):
            # expand a wildcard entry into one map entry per file on disk
            keypath = entry["path"][:-2]
            for path in list_path(keypath):
                webpath = f"{key[:-1]}{path.replace(keypath + '/', '')}"
                path_map[webpath] = {
                    "path": path,
                    "compress": entry["compress"]}
        else:
            path_map[key] = {
                "path": entry["path"],
                "compress": entry["compress"]}

    # add headers
    for val in path_map.values():
        extension = os.path.splitext(val["path"])[1]
        headers = {}
        match extension:
            case ".htm" | ".html":
                headers["Content-Type"] = "text/html"
            case ".css":
                headers["Content-Type"] = "text/css"
            case ".txt":
                headers["Content-Type"] = "text/plain"
            case ".js":
                headers["Content-Type"] = "text/javascript"
            case ".png":
                headers["Content-Type"] = "image/png"
            case ".webp":
                headers["Content-Type"] = "image/webp"
            case ".jpg" | ".jpeg":
                headers["Content-Type"] = "image/jpeg"
            case _:
                headers["Content-Type"] = "*/*"
        headers["Content-Length"] = os.path.getsize(val["path"])
        val["headers"] = headers

    # print list of paths
    if FILE_MAN_VERBOSE:
        print("LIST OF ALLOWED PATHS:")
        max_key_len = max([len(x) for x in path_map.keys()])
        max_val_len = max([len(x["path"]) for x in path_map.values()])
        print(f"\t{'web': ^{max_key_len}} | {'path': ^{max_val_len}}\n"
              f"\t{'='*max_key_len}=#={'='*max_val_len}")
        for key, val in path_map.items():
            print(f"\t{key: <{max_key_len}} | {val['path']}")
        print("END OF LIST.", len(path_map), end="\n\n")

    return path_map


def compress_path_map(path_map: dict[str, dict[str, Any]], path_prefix: str = "compress", regen: bool = False) -> dict[str, dict[str, Any]]:
    """
    Compress every file marked for compression with brotli (minifying HTML first)
    """

    import brotli
    import htmlmin
    if not os.path.exists(path_prefix):
        os.mkdir(path_prefix)
    for val in path_map.values():
        filepath = f"{path_prefix}/{val["path"]}"
        if not val["compress"]:
            continue
        if not os.path.exists((dirs := os.path.dirname(filepath))):  # add missing folders
            os.makedirs(dirs)
        if not os.path.exists(filepath) or regen:
            # brotli compress
            if val["headers"]["Content-Type"] == "text/html":
                with open(filepath, "wb") as comp:
                    with open(val["path"], "rb") as file:
                        comp.write(
                            brotli.compress(htmlmin.minify(
                                file.read().decode("utf-8"),
                                remove_comments=True,
                                remove_empty_space=True,
                                remove_all_empty_space=True,
                                reduce_boolean_attributes=True).encode("utf-8")))
            else:
                # stream any other file through the compressor in 64 KiB chunks,
                # writing the output of each process() call and finishing the
                # stream at the end
                with open(filepath, "wb") as comp:
                    br = brotli.Compressor()
                    with open(val["path"], "rb") as file:
                        while (chunk := file.read(65536)):
                            comp.write(br.process(chunk))
                        comp.write(br.finish())

        val["path"] = filepath
        val["headers"]["Content-Length"] = os.path.getsize(filepath)
        val["headers"]["Content-Encoding"] = "br"

    if FILE_MAN_VERBOSE:
        print("COMPRESSED PATH:")
        max_key_len = max([len(x) for x in path_map.keys()])
        max_val_len = max([len(x["path"]) for x in path_map.values()])
        max_size_len = max([len(str(x["headers"]["Content-Length"])) for x in path_map.values()])
        print(f"\t{'web': ^{max_key_len}} | {'path': ^{max_val_len}} | {'size': ^{max_size_len}}\n"
              f"\t{'=' * max_key_len}=#={'=' * max_val_len}=#={'=' * max_size_len}")
        for key, val in path_map.items():
            print(f"\t{key: <{max_key_len}} | "
                  f"{val['path']: <{max_val_len}} | "
                  f"{val['headers']['Content-Length']: <{max_size_len}}")
        print("END OF LIST.", len(path_map), end="\n\n")

    return path_map


PATH_MAP = generate_path_map()
if FILE_MAN_COMPRESS:
    PATH_MAP = compress_path_map(PATH_MAP, regen=True)
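

if __name__ == "__main__":
    # Minimal self-check sketch: look up an illustrative request path in the
    # generated map and show what would be served for it. The "/" key is an
    # assumption about the config in src/config.py, not a guarantee.
    entry = PATH_MAP.get("/")
    if entry is None:
        print("no entry for '/'; known web paths:", ", ".join(PATH_MAP))
    else:
        print("serving", entry["path"], "with headers", entry["headers"])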