Formatting DynamoDB data to normal JSON in AWS Lambda
Solution 1:
Node.js
Use the unmarshall function from the AWS JavaScript SDK (a sample conversion is sketched after the snippet):
const AWS = require("aws-sdk");

exports.handler = function(event, context, callback) {
  const newImages = event.Records.map(
    (record) => AWS.DynamoDB.Converter.unmarshall(record.dynamodb.NewImage)
  );
  console.log('Converted records', newImages);
  callback(null, `Success`);
};
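For reference, here is a minimal sketch of what the conversion does, using a made-up NewImage (the attribute names and values are only illustrative):

const AWS = require("aws-sdk");

// Hypothetical NewImage as it arrives in a stream record (DynamoDB-typed JSON)
const newImage = {
  Id: { N: "101" },
  Title: { S: "Book 101 Title" },
  InPublication: { BOOL: true }
};

// unmarshall strips the type descriptors and returns a plain object:
// { Id: 101, Title: 'Book 101 Title', InPublication: true }
console.log(AWS.DynamoDB.Converter.unmarshall(newImage));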
Python
Use TypeDeserializer.deserialize from boto3.dynamodb.types:
import json
from boto3.dynamodb.types import TypeDeserializer

def ddb_deserialize(r, type_deserializer=TypeDeserializer()):
    # A NewImage is a map of attributes, so wrap it in an "M" (map) attribute value
    return type_deserializer.deserialize({"M": r})

def lambda_handler(event, context):
    new_images = [ddb_deserialize(r["dynamodb"]["NewImage"]) for r in event["Records"]]
    # TypeDeserializer returns DynamoDB numbers as Decimal, which json.dumps cannot
    # serialize by default, so pass default=str
    print("Converted records", json.dumps(new_images, indent=2, default=str))
Solution 2:
I know this is a bit old, but I had the same problem processing stream data from DynamoDB in a Node.js Lambda function, and I used the approach proposed by @churro.
Import the SDK and the output converter:
var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;
Use the parse function with a small hack: Converter.output expects a single attribute value, so the whole NewImage is wrapped as an "M" (map) value (see the note after the snippet):
exports.handler = function(event, context, callback) {
  event.Records.forEach((record) => {
    console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record:', parse({ "M": record.dynamodb.NewImage }));
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
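As far as I can tell, wrapping the image in { "M": ... } and passing it to output appears to be equivalent to calling Converter.unmarshall on the image directly, so either works. A quick sketch with a made-up image (the attribute names are only examples):

var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;

// Hypothetical NewImage in DynamoDB-typed form
var image = { Id: { N: "101" }, Title: { S: "Book 101 Title" } };

// Both calls should log the same plain object: { Id: 101, Title: 'Book 101 Title' }
console.log(parse({ "M": image }));
console.log(AWS.DynamoDB.Converter.unmarshall(image));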
Hope it helps