author     UltraQbik <no1skill@yandex.ru>  2024-08-24 17:54:07 +0300
committer  UltraQbik <no1skill@yandex.ru>  2024-08-24 17:54:07 +0300
commit     9c78cc1783af4202313a58f922b9c1cad900d0c3 (patch)
tree       7c05ee402b5883c2ba192746030776cf0cfb1d6d /src/request.py
parent     5af071d2e8a745a70fa238230995a83766767cb9 (diff)
download   httpy-9c78cc1783af4202313a58f922b9c1cad900d0c3.tar.gz
           httpy-9c78cc1783af4202313a58f922b9c1cad900d0c3.zip
Update to data streams
Instead of sending full files, and therefore storing them entirely in RAM (which is not always possible), send data in chunks of 64 KiB.
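
The change in practice, as a minimal sketch: a handler builds a generator that reads a file in fixed-size chunks and hands it to Response through the new data_stream keyword, with Content-Length set explicitly. The helper name stream_file, the CHUNK_SIZE constant, and the example Response call are illustrative assumptions, not code from this commit.

from typing import Generator

CHUNK_SIZE = 64 * 1024  # 64 KiB, matching the chunk size described above (assumed constant name)


def stream_file(path: str) -> Generator[bytes, None, None]:
    """Yield a file in fixed-size chunks so it never has to be loaded fully into RAM."""
    with open(path, "rb") as file:
        while chunk := file.read(CHUNK_SIZE):
            yield chunk


# Hypothetical handler usage (the call shape is assumed from the docstring in the diff below):
#   Response(b"", status, {"Content-Length": os.path.getsize(path)},
#            data_stream=stream_file(path))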
Diffstat (limited to 'src/request.py')
-rw-r--r--  src/request.py  21
1 file changed, 18 insertions(+), 3 deletions(-)
diff --git a/src/request.py b/src/request.py
index 003783a..a197649 100644
--- a/src/request.py
+++ b/src/request.py
@@ -1,5 +1,5 @@
-from typing import Any
-from ssl import SSLSocket
+from typing import Any, Generator
+from src.config import BUFFER_LENGTH
 from src.status_code import StatusCode
 
 
@@ -72,10 +72,25 @@ class Response:
         :param data: response data
         :param status: response status code
         :param headers: headers to include
-        :param kwarg: compress - whether to compress data or not
+        :key compress: whether to compress the data (defaults to True)
+        :key data_stream: generator yielding the response data in chunks
         """
 
         self.data: bytes = data
+        self.data_stream: Generator[bytes, None, None] | None = kwargs.get("data_stream")
         self.status: StatusCode = status
         self.headers: dict[str, Any] = headers if headers is not None else dict()
         self.compress: bool = kwargs.get("compress", True)
+
+        # check for content-length when using data_stream
+        if self.data_stream is not None and self.headers.get("Content-Length") is None:
+            raise Exception("Undefined length for data stream")
+
+    def get_data_stream(self):
+        if self.data_stream is None:
+            def generator() -> Generator[bytes, None, None]:
+                for i in range(0, len(self.data), BUFFER_LENGTH):
+                    yield self.data[i:i+BUFFER_LENGTH]
+            return generator()
+        else:
+            return self.data_stream
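
On the sending side, both in-memory data and a user-supplied stream now go through get_data_stream(). A minimal consumer sketch, assuming a hypothetical send_response helper and a connected socket (the server's write loop is not part of this diff):

import socket


def send_response(client_socket: socket.socket, response) -> None:
    # Assumed helper: iterate the stream and write one chunk at a time,
    # so large files never have to be held in memory all at once.
    for chunk in response.get_data_stream():
        client_socket.sendall(chunk)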