I use the requests module in Python 2.7 to POST a large chunk of data to a service I can't change. Since the data is mostly text, it is large but would compress quite well.
I needed my POSTs to be chunked, since I had several very large files uploading in parallel (a sketch of the parallel part follows at the end). Here is the solution I came up with:
import requests
import zlib

def chunked_read_and_compress(file_to_send, zlib_obj, chunk_size):
    """Generator that reads a file in chunks and compresses them on the fly."""
    compression_incomplete = True
    with open(file_to_send, 'rb') as f:
        # zlib buffers input internally, so a compress() call might not give
        # us any data back; in that case there is nothing to yield, and we
        # just run another loop iteration until we get data to yield. The
        # flush() at EOF hands us whatever is left in the buffer.
        while compression_incomplete:
            plain_data = f.read(chunk_size)
            if plain_data:
                compressed_data = zlib_obj.compress(plain_data)
            else:
                compressed_data = zlib_obj.flush()
                compression_incomplete = False
            if compressed_data:
                yield compressed_data
"""Post a file to a url that is content-encoded gzipped compressed and chunked (for large files)"""
def post_file_gzipped(url, file_to_send, chunk_size=5*1024*1024, compress_level=6, headers={}, requests_kwargs={}):
headers_to_send = {'Content-Encoding': 'gzip'}
headers_to_send.update(headers)
zlib_obj = zlib.compressobj(compress_level, zlib.DEFLATED, 31)
return requests.post(url, data=chunked_read_and_compress(file_to_send, zlib_obj, chunk_size), headers=headers_to_send, **requests_kwargs)
resp = post_file_gzipped('http://httpbin.org/post', 'somefile')
resp.raise_for_status()
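
To sanity-check the generator on its own, a quick round trip works: join what it yields and decompress it with the matching wbits value. This is illustrative only and assumes 'somefile' exists locally; it is not part of the upload path.

# Illustrative check: the joined generator output should decompress back to
# the original file contents.
check_obj = zlib.compressobj(6, zlib.DEFLATED, 31)
compressed = b''.join(chunked_read_and_compress('somefile', check_obj, 1024))
with open('somefile', 'rb') as f:
    assert zlib.decompress(compressed, 31) == f.read()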
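
Since the whole point was several large uploads running at once, here is a minimal sketch of the parallel part, assuming a hypothetical list of file paths. concurrent.futures is in the standard library on Python 3; on 2.7 it needs the futures backport.

from concurrent.futures import ThreadPoolExecutor

def upload(path):
    # Each call to post_file_gzipped builds its own compressobj, so the
    # threads share no zlib state.
    resp = post_file_gzipped('http://httpbin.org/post', path)
    resp.raise_for_status()
    return path, resp.status_code

files = ['first_big_file', 'second_big_file']  # hypothetical paths
with ThreadPoolExecutor(max_workers=4) as pool:
    for path, status in pool.map(upload, files):
        print('%s -> %d' % (path, status))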