about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorUltraQbik <no1skill@yandex.ru>2024-08-24 17:54:07 +0300
committerUltraQbik <no1skill@yandex.ru>2024-08-24 17:54:07 +0300
commit9c78cc1783af4202313a58f922b9c1cad900d0c3 (patch)
tree7c05ee402b5883c2ba192746030776cf0cfb1d6d /src
parent5af071d2e8a745a70fa238230995a83766767cb9 (diff)
downloadhttpy-9c78cc1783af4202313a58f922b9c1cad900d0c3.tar.gz
httpy-9c78cc1783af4202313a58f922b9c1cad900d0c3.zip
Update to data streams
Instead of sending full files — and therefore storing them entirely in RAM, which is not always possible — send data in chunks of 64 KiB
Diffstat (limited to 'src')
-rw-r--r--src/APIv1.py22
-rw-r--r--src/config.py2
-rw-r--r--src/request.py21
3 files changed, 33 insertions, 12 deletions
diff --git a/src/APIv1.py b/src/APIv1.py
index f163c5b..c396825 100644
--- a/src/APIv1.py
+++ b/src/APIv1.py
@@ -1,4 +1,5 @@
 import random
+from ssl import SSLSocket
 from src.request import *
 from src.status_code import *
 
@@ -7,20 +8,19 @@ API_FILE_RANDOM_MIN_SIZE_LIMIT = 1
 API_FILE_RANDOM_MAX_SIZE_LIMIT = 2**30 * 2
 
 
-def random_data_gen(size: int) -> bytes:
+def random_data_gen(size: int, chunk_size: int = 65536) -> bytes:
     """
-    Generates SIZE bytes of random data in 64kib chunks
+    Generates SIZE bytes of random data in CHUNK_SIZE byte chunks
     :param size: bytes to generate
+    :param chunk_size: size of each chunk (bytes)
     :return: random bytes
     """
 
-    data = bytearray()
-    int_size = size // 65536
+    int_size = size // chunk_size
     for _ in range(int_size):
-        data += random.randbytes(65536)
-    data += random.randbytes((int_size * 65536) - size)
-
-    return data
+        yield random.randbytes(chunk_size)
+    if (final_size := (int_size * chunk_size) - size) > 0:
+        yield random.randbytes(final_size)
 
 
 def decode_size(size: str) -> int:
@@ -78,7 +78,11 @@ def api_call(client: SSLSocket, request: Request) -> Response:
             if size < API_FILE_RANDOM_MIN_SIZE_LIMIT or size > API_FILE_RANDOM_MAX_SIZE_LIMIT:
                 return Response(b'', STATUS_CODE_BAD_REQUEST)
 
-            return Response(random_data_gen(size), STATUS_CODE_OK, compress=False)
+            return Response(
+                b'',
+                STATUS_CODE_OK,
+                headers={"Content-Length": size},
+                data_stream=random_data_gen(size))
         else:
             return Response(b'', STATUS_CODE_BAD_REQUEST)
     else:
diff --git a/src/config.py b/src/config.py
new file mode 100644
index 0000000..e39d18c
--- /dev/null
+++ b/src/config.py
@@ -0,0 +1,2 @@
+BUFFER_LENGTH = 65536
+
diff --git a/src/request.py b/src/request.py
index 003783a..a197649 100644
--- a/src/request.py
+++ b/src/request.py
@@ -1,5 +1,5 @@
-from typing import Any
-from ssl import SSLSocket
+from typing import Any, Generator
+from src.config import BUFFER_LENGTH
 from src.status_code import StatusCode
 
 
@@ -72,10 +72,25 @@ class Response:
         :param data: response data
         :param status: response status code
         :param headers: headers to include
-        :param kwarg: compress - whether to compress data or not
+        :key compress: compress data or not
+        :key data_stream: stream of data
         """
 
         self.data: bytes = data
+        self.data_stream: Generator[bytes, None, None] | None = kwargs.get("data_stream")
         self.status: StatusCode = status
         self.headers: dict[str, Any] = headers if headers is not None else dict()
         self.compress: bool = kwargs.get("compress", True)
+
+        # check for content-length when using data_stream
+        if self.data_stream is not None and self.headers.get("Content-Length") is None:
+            raise Exception("Undefined length for data stream")
+
+    def get_data_stream(self):
+        if self.data_stream is None:
+            def generator() -> bytes:
+                for i in range(0, len(self.data), BUFFER_LENGTH):
+                    yield self.data[i:i+BUFFER_LENGTH]
+            return generator()
+        else:
+            return self.data_stream