From 2e6ccd7a72130d39f6899e38cb43b166a57bd1d6 Mon Sep 17 00:00:00 2001
From: mrazimi99
Date: Tue, 22 Dec 2020 19:01:17 +0330
Subject: [PATCH 1/3] Add chunked gzip encoded data

---
 httpbin/core.py                  | 124 ++++++++++++++++++++++++++++++-
 httpbin/templates/httpbin.1.html |   1 +
 2 files changed, 123 insertions(+), 2 deletions(-)

diff --git a/httpbin/core.py b/httpbin/core.py
index 305c9882..a9705217 100644
--- a/httpbin/core.py
+++ b/httpbin/core.py
@@ -698,7 +698,6 @@ def absolute_redirect_n_times(n):
     return _redirect("absolute", n, True)
 
-
 @app.route("/stream/<int:n>")
 def stream_n_messages(n):
     """Stream n JSON responses
@@ -725,7 +724,6 @@ def generate_stream():
 
     return Response(generate_stream(), headers={"Content-Type": "application/json"})
 
-
 @app.route(
     "/status/<codes>", methods=["GET", "POST", "PUT", "DELETE", "PATCH", "TRACE"]
 )
@@ -1777,6 +1775,128 @@ def a_json_endpoint():
         }
     )
 
+import gzip as gzip2
+from six import BytesIO
+import zlib
+
+@app.route("/stream/gzip/<int:n>")
+def stream_n_gzip_messages(n):
+    """Stream n JSON responses
+    ---
+    tags:
+      - Dynamic data
+    parameters:
+      - in: path
+        name: n
+        type: int
+    produces:
+      - application/json
+    responses:
+      200:
+        description: Streamed JSON responses.
+    """
+    response = get_dict("url", "args", "headers", "origin")
+    n = min(n, 100)
+    plain_data = []
+
+    for j in range(n):
+        response["id"] = j
+        content = json.dumps(response) + "\n"
+        plain_data.append(content)
+
+    complete_data = ''.join(plain_data)
+    gzip_buffer = BytesIO()
+    gzip_file = gzip2.GzipFile(
+        mode='wb',
+        compresslevel=4,
+        fileobj=gzip_buffer
+    )
+    gzip_file.write(complete_data.encode('ascii'))
+    gzip_file.close()
+
+    gzip_data = gzip_buffer.getvalue()
+    chunks, chunk_size = len(gzip_data), int(len(gzip_data) / n)
+    gzipped_array = [ gzip_data[i:i+chunk_size] for i in range(0, chunks, chunk_size) ]
+
+    def generate_stream():
+        for i in range(n):
+            content = gzipped_array[i]
+            yield (content)
+
+    return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "gzip"})
+
+@app.route("/stream/deflate/<int:n>")
+def stream_n_deflated_messages(n):
+    """Stream n JSON responses
+    ---
+    tags:
+      - Dynamic data
+    parameters:
+      - in: path
+        name: n
+        type: int
+    produces:
+      - application/json
+    responses:
+      200:
+        description: Streamed JSON responses.
+    """
+    response = get_dict("url", "args", "headers", "origin")
+    n = min(n, 100)
+
+    def generate_stream():
+        for i in range(n):
+            response["id"] = i
+            content = json.dumps(response) + "\n"
+            deflater = zlib.compressobj()
+            deflated_data = deflater.compress(content.encode('ascii'))
+            deflated_data += deflater.flush()
+
+            yield (deflated_data)
+
+    return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "deflate"})
+
+@app.route("/stream/brotli/<int:n>")
+def stream_n_brotli_messages(n):
+    """Stream n JSON responses
+    ---
+    tags:
+      - Dynamic data
+    parameters:
+      - in: path
+        name: n
+        type: int
+    produces:
+      - application/json
+    responses:
+      200:
+        description: Streamed JSON responses.
+ """ + response = get_dict("url", "args", "headers", "origin") + n = min(n, 100) + + def generate_stream(): + for i in range(n): + response["id"] = i + content = json.dumps(response) + "\n" + # gzip_buffer = BytesIO() + # gzip_file = gzip2.GzipFile( + # mode='wb', + # compresslevel=4, + # fileobj=gzip_buffer + # ) + # gzip_file.write(content.encode('ascii')) + # gzip_file.close() + + # gzip_data = gzip_buffer.getvalue() + deflater = zlib.compressobj() + deflated_data = deflater.compress(content.encode('ascii')) + deflated_data += deflater.flush() + + yield (deflated_data) + + return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "br"}) + if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/httpbin/templates/httpbin.1.html b/httpbin/templates/httpbin.1.html index 0d0c8386..4753feaf 100644 --- a/httpbin/templates/httpbin.1.html +++ b/httpbin/templates/httpbin.1.html @@ -43,6 +43,7 @@

ENDPOINTS

  • /digest-auth/:qop/:user/:passwd/:algorithm Challenges HTTP Digest Auth.
  • /digest-auth/:qop/:user/:passwd Challenges HTTP Digest Auth.
  • /stream/:n Streams min(n, 100) lines.
+ • /stream/gzip/:n Streams min(n, 100) lines.
  • /delay/:n Delays responding for min(n, 10) seconds.
  • /drip?numbytes=n&duration=s&delay=s&code=code Drips data over a duration after an optional initial delay, then (optionally) returns with the given status code.
  • /range/1024?duration=s&chunk_size=code Streams n bytes, and allows specifying a Range header to select a subset of the data. Accepts a chunk_size and request duration parameter.
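
A note on consuming the endpoint added in PATCH 1/3: /stream/gzip/:n compresses the entire payload once and then slices the compressed bytes, so the individual chunks are not standalone gzip documents. A client must feed every chunk through a single streaming decompressor. The following minimal sketch is not part of the patch; it assumes a local instance listening on localhost:5000 and uses only the standard library:

import json
import urllib.request
import zlib

# Hypothetical local instance of the patched service.
URL = "http://localhost:5000/stream/gzip/10"

with urllib.request.urlopen(URL) as resp:
    # wbits = 16 + MAX_WBITS tells zlib to expect a gzip header and trailer.
    decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
    buffer = b""
    while True:
        chunk = resp.read(256)  # read raw bytes off the chunked body
        if not chunk:
            break
        buffer += decoder.decompress(chunk)
        while b"\n" in buffer:  # each complete line is one JSON message
            line, _, buffer = buffer.partition(b"\n")
            print(json.loads(line)["id"])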
From fa2d9de1a8781646dde88cc480e75f7f487567e6 Mon Sep 17 00:00:00 2001
From: mrazimi99
Date: Tue, 22 Dec 2020 20:06:08 +0330
Subject: [PATCH 2/3] Add chunked deflate and brotli encoded data.

---
 httpbin/core.py                  | 89 +++++++++++++++-----------------
 httpbin/templates/httpbin.1.html |  2 +
 2 files changed, 43 insertions(+), 48 deletions(-)

diff --git a/httpbin/core.py b/httpbin/core.py
index a9705217..492a42f3 100644
--- a/httpbin/core.py
+++ b/httpbin/core.py
@@ -1778,10 +1778,28 @@ def a_json_endpoint():
 import gzip as gzip2
 from six import BytesIO
 import zlib
+import brotli as _brotli
+
+def split_data_into_chunks(data, n):
+    data_size, chunk_size = len(data), int(len(data) / n)
+    chunks = [ data[i : i + chunk_size] for i in range(0, data_size, chunk_size) ]
+    # chunks[-1] = data[(n - 1) * chunk_size : -1]
+    return chunks
+
+def create_data_for_streaming(n):
+    response = get_dict("url", "args", "headers", "origin")
+    data = []
+
+    for i in range(n):
+        response["id"] = i
+        data.append(json.dumps(response) + "\n")
+
+    complete_data = ''.join(data)
+    return complete_data
 
 @app.route("/stream/gzip/<int:n>")
 def stream_n_gzip_messages(n):
-    """Stream n JSON responses
+    """Stream n GZip-encoded JSON responses
     ---
     tags:
       - Dynamic data
@@ -1793,18 +1811,11 @@ def stream_n_gzip_messages(n):
       - application/json
     responses:
       200:
-        description: Streamed JSON responses.
+        description: Streamed GZip-encoded JSON responses.
     """
-    response = get_dict("url", "args", "headers", "origin")
     n = min(n, 100)
-    plain_data = []
-
-    for j in range(n):
-        response["id"] = j
-        content = json.dumps(response) + "\n"
-        plain_data.append(content)
+    complete_data = create_data_for_streaming(n)
 
-    complete_data = ''.join(plain_data)
     gzip_buffer = BytesIO()
     gzip_file = gzip2.GzipFile(
         mode='wb',
@@ -1815,19 +1826,17 @@ def stream_n_gzip_messages(n):
     gzip_file.close()
 
     gzip_data = gzip_buffer.getvalue()
-    chunks, chunk_size = len(gzip_data), int(len(gzip_data) / n)
-    gzipped_array = [ gzip_data[i:i+chunk_size] for i in range(0, chunks, chunk_size) ]
+    gzipped_array = split_data_into_chunks(gzip_data, n)
 
     def generate_stream():
-        for i in range(n):
-            content = gzipped_array[i]
-            yield (content)
+        for i in range(len(gzipped_array)):
+            yield (gzipped_array[i])
 
     return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "gzip"})
 
 @app.route("/stream/deflate/<int:n>")
-def stream_n_deflated_messages(n):
-    """Stream n JSON responses
+def stream_n_deflate_messages(n):
+    """Stream n Deflate-encoded JSON responses
     ---
     tags:
       - Dynamic data
@@ -1839,26 +1848,24 @@
       - application/json
     responses:
       200:
-        description: Streamed JSON responses.
+        description: Streamed Deflate-encoded JSON responses.
""" - response = get_dict("url", "args", "headers", "origin") n = min(n, 100) + complete_data = create_data_for_streaming(n) + deflater = zlib.compressobj() + deflated_data = deflater.compress(complete_data.encode('ascii')) + deflated_data += deflater.flush() + deflated_array = split_data_into_chunkes(deflated_data, n) def generate_stream(): - for i in range(n): - response["id"] = i - content = json.dumps(response) + "\n" - deflater = zlib.compressobj() - deflated_data = deflater.compress(content.encode('ascii')) - deflated_data += deflater.flush() - - yield (deflated_data) + for i in range(len(deflated_array)): + yield (deflated_array[i]) return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "deflate"}) @app.route("/stream/brotli/") def stream_n_brotli_messages(n): - """Stream n JSON responses + """Stream n Brotli-encoded JSON responses --- tags: - Dynamic data @@ -1870,30 +1877,16 @@ def stream_n_brotli_messages(n): - application/json responses: 200: - description: Streamed JSON responses. + description: Streamed Brotli-encoded JSON responses. """ - response = get_dict("url", "args", "headers", "origin") n = min(n, 100) + complete_data = create_data_for_streaming(n) + deflated_data = _brotli.compress(complete_data.encode('ascii')) + deflated_array = split_data_into_chunkes(deflated_data, n) def generate_stream(): - for i in range(n): - response["id"] = i - content = json.dumps(response) + "\n" - # gzip_buffer = BytesIO() - # gzip_file = gzip2.GzipFile( - # mode='wb', - # compresslevel=4, - # fileobj=gzip_buffer - # ) - # gzip_file.write(content.encode('ascii')) - # gzip_file.close() - - # gzip_data = gzip_buffer.getvalue() - deflater = zlib.compressobj() - deflated_data = deflater.compress(content.encode('ascii')) - deflated_data += deflater.flush() - - yield (deflated_data) + for i in range(len(deflated_array)): + yield (deflated_array[i]) return Response(generate_stream(), headers={"Content-Type": "application/json", "Content-Encoding": "br"}) diff --git a/httpbin/templates/httpbin.1.html b/httpbin/templates/httpbin.1.html index 4753feaf..973c93c3 100644 --- a/httpbin/templates/httpbin.1.html +++ b/httpbin/templates/httpbin.1.html @@ -44,6 +44,8 @@

    ENDPOINTS

  • /digest-auth/:qop/:user/:passwd Challenges HTTP Digest Auth.
  • /stream/:n Streams min(n, 100) lines.
  • /stream/gzip/:n Streams min(n, 100) lines.
+ • /stream/deflate/:n Streams min(n, 100) lines.
+ • /stream/brotli/:n Streams min(n, 100) lines.
  • /delay/:n Delays responding for min(n, 10) seconds.
  • /drip?numbytes=n&duration=s&delay=s&code=code Drips data over a duration after an optional initial delay, then (optionally) returns with the given status code.
  • /range/1024?duration=s&chunk_size=code Streams n bytes, and allows specifying a Range header to select a subset of the data. Accepts a chunk_size and request duration parameter.
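
One edge case in the split_data_into_chunks helper introduced in this patch, hinted at by its commented-out last line: with chunk_size = int(len(data) / n), the slice loop emits more than n chunks whenever len(data) is not a multiple of n, and range() raises ValueError when the compressed payload is shorter than n bytes (chunk_size == 0). A sketch of a splitter that always returns at most n chunks and tolerates short payloads, offered as an illustration rather than as part of the patch:

def split_into_n_chunks(data, n):
    # Never ask for more chunks than there are bytes (and at least one).
    n = max(1, min(n, len(data)))
    chunk_size, remainder = divmod(len(data), n)
    chunks, start = [], 0
    for i in range(n):
        # Spread the remainder over the first chunks so sizes differ by at most 1.
        end = start + chunk_size + (1 if i < remainder else 0)
        chunks.append(data[start:end])
        start = end
    return chunks

assert b"".join(split_into_n_chunks(b"abcdefg", 3)) == b"abcdefg"
assert len(split_into_n_chunks(b"abcdefg", 3)) == 3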
From f373c01e3e2ede51e1b3e5114cd843c82e6cdc26 Mon Sep 17 00:00:00 2001
From: mrazimi99
Date: Tue, 22 Dec 2020 20:29:21 +0330
Subject: [PATCH 3/3] Fix some imports.

---
 httpbin/core.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/httpbin/core.py b/httpbin/core.py
index 492a42f3..5c012b30 100644
--- a/httpbin/core.py
+++ b/httpbin/core.py
@@ -15,6 +15,11 @@
 import uuid
 import argparse
 
+import gzip as gzip2
+from six import BytesIO
+import zlib
+import brotli as _brotli
+
 from flask import (
     Flask,
     Response,
@@ -1775,11 +1780,6 @@ def a_json_endpoint():
         }
     )
 
-import gzip as gzip2
-from six import BytesIO
-import zlib
-import brotli as _brotli
-
 def split_data_into_chunks(data, n):
     data_size, chunk_size = len(data), int(len(data) / n)
     chunks = [ data[i : i + chunk_size] for i in range(0, data_size, chunk_size) ]
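
To check the series end to end, a small verification sketch using Flask's test client; it is not part of the patches and assumes the patched app is importable as httpbin.core.app and that the brotli package is installed. Note that zlib.compressobj() produces zlib-wrapped deflate (RFC 1950), which is what most HTTP clients expect for Content-Encoding: deflate and what zlib.decompress reads directly:

import json
import zlib

import brotli  # pip install brotli

from httpbin.core import app  # assumes the patched module is on the path

client = app.test_client()

# The test client drains the response generator, so resp.data is all chunks joined.
resp = client.get("/stream/deflate/5")
lines = zlib.decompress(resp.data).splitlines()
assert [json.loads(line)["id"] for line in lines] == [0, 1, 2, 3, 4]

resp = client.get("/stream/brotli/5")
lines = brotli.decompress(resp.data).splitlines()
assert [json.loads(line)["id"] for line in lines] == [0, 1, 2, 3, 4]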