from __future__ import print_function  # Python 2/3 compatibility
import boto3
import json
import time

# kinesis = boto3.resource('kinesis', region_name='eu-west-1')
client = boto3.client('kinesis')


def putdatatokinesis(RecordKinesis):
    """Send one batch of records to the Kinesis stream and report the time taken."""
    start = time.time()
    response = client.put_records(Records=RecordKinesis, StreamName='LoadtestKinesis')
    print("Time taken to process {} records: {}".format(len(RecordKinesis), time.time() - start))
    return response


with open("questions.json") as json_file:
    questions = json.load(json_file)

RecordKinesis = []
count = 0
for question in questions:
    value1 = question['value']
    if value1 is None:
        value1 = '0'
    # Serialise the question as the record payload; its value becomes the partition key
    recordkinesis = {'Data': json.dumps(question).encode('utf-8'), 'PartitionKey': value1}
    RecordKinesis.append(recordkinesis)
    count += 1
    if count == 500:  # put_records accepts at most 500 records per call
        putdatatokinesis(RecordKinesis)
        RecordKinesis = []
        count = 0

if RecordKinesis:  # flush any records left over from the last partial batch
    putdatatokinesis(RecordKinesis)
This worked. The idea is to pass the records as the Records keyword argument.
When passing multiple records, you need to encapsulate them in a list of record objects and then add the stream identifier.
The request format looks like this:
{
    "Records": [
        {
            "Data": blob,
            "ExplicitHashKey": "string",
            "PartitionKey": "string"
        },
        {
            "Data": "another record",
            "ExplicitHashKey": "string",
            "PartitionKey": "string"
        }
    ],
    "StreamName": "string"
}
See the Kinesis docs for more info.
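For example, a minimal boto3 call following this shape might look like the sketch below (the stream name ExampleStream, the partition keys, and the payloads are placeholder values, not from the original post; ExplicitHashKey is optional and omitted here):

import boto3

client = boto3.client('kinesis')

# Records is the list of record objects; StreamName identifies the target stream.
response = client.put_records(
    Records=[
        {'Data': b'first record', 'PartitionKey': 'key-1'},
        {'Data': b'second record', 'PartitionKey': 'key-2'},
    ],
    StreamName='ExampleStream',  # placeholder stream name
)

# FailedRecordCount in the response tells you how many records were rejected.
print(response['FailedRecordCount'])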