I'm writing the backend for my current iOS App in JavaScript using node.js, AWS Lambda along with DynamoDB.
My AWS Lambda code is mostly AWS Lambda agnostic except for the initial handler methods; this makes it fairly testable outside of AWS. However, it depends on DynamoDB. It's quite easy to write Unit Tests that run against a live version of DynamoDB, but I wanted to run against a local instance — ideally an in-memory instance — so that it would be quick (not that running against a real instance is that slow) and so that I could have a clean dB each time.
NOTES
- As these tests are running against a dB, it might be more accurate to call them Integration Tests implemented using a Unit Testing framework, but I'll refer to them as Unit Tests (UTs).
- This is running on MacOS.
- I don't Unit Test the actual AWS Lambda function. Instead, I export the underlying functions & objects that the AWS Lambda uses and Unit Test these.
- I'm a JavaScript, Node, AWS n00b, so if you spot something wrong or bad, please comment.
- I don't like the callback pyramid so I use Promises where I can. I would use
async
and await
but the latest version of Node that AWS Lambda supports doesn't support them :-(
In order to run this locally, you'll need:
My goal was to have a clean dB for each individual Unit Test. The simplest way to achieve this, I thought, was to create an in-memory instance of DynamoDB and destroy it after each Unit Test.
'use strict';
const assert = require("assert");
const sinon = require("sinon");
const aws = require("aws-sdk");
const spawn = require("child_process").spawn;
aws.config.update({region: "us-east-1" });
// Per-test lifecycle: spawn a fresh in-memory DynamoDB before each test and
// kill it afterwards, so every test starts with a completely clean dB.
describe("awsLambdaToTest", function()
{
let dynamoInstance;
let db;
beforeEach(function()
{
// Spawn the local DynamoDB process; '-inMemory' means nothing is persisted,
// so killing the process wipes all state.
dynamoInstance = spawn('/usr/local/bin/dynamodb-local', ['-inMemory']);
// Point the SDK client at the local instance rather than the AWS endpoint.
db = new aws.DynamoDB({ endpoint: "http://localhost:8000" });
});
afterEach(() => dynamoInstance.kill());
}); // <- the original snippet was missing the closing ')' of the describe() call
This uses the child_process npm
package to create an instance before each test, store the handle to the process in local-ish variable and following the tasks just kill it. The important points here are that the '-inMemory
' option is used, meaning that when the dB instance is killed and another re-started, everything is effectively wiped without having to do anything.
The problem I had with this approach is that in addition to creating the dB, each time I also needed to create a table. Whilst the documentation for local dynamoDB
says that one of the differences between the AWS hosted & the local versions is that CreateTable
completes immediately, it seems that although the function does indeed complete immediately, the table isn't immediately available. This meant the UT using the table often failed with:
1) awsLambdaToTest The querying an empty db for a user Returns{}:
ResourceNotFoundException: Cannot do operations on a non-existent table
I'm going to jump ahead and show the completed Unit Test file and explain the various things I had to do in order to get it working. This shows the tests.
'use strict';
const assert = require("assert");
const sinon = require("sinon");
const aws = require("aws-sdk");
const spawn = require("child_process").spawn;
aws.config.update({region: "us-east-1" });
// Top-level Mocha suite. Lifecycle strategy:
//   - ONE local DynamoDB process per test FILE   (before / after)
//   - ONE fresh "TestTable" per individual TEST  (beforeEach / afterEach)
// All hooks and tests return Promises so Mocha waits for them to settle.
describe("AWSLambdaToTest", function()
{
// Spawning dynamodb-local and polling waitFor can exceed Mocha's default
// 2000ms timeout, so allow up to 5s per hook/test.
this.timeout(5000);
// The code under test: the file exports its internals via `_test` so the
// underlying functions can be exercised without going through the Lambda handler.
const testLambdaFns = require("../src/TestLambda.js");
let dynamoInstance;
let db;
before(function()
{
// Sleep BEFORE spawning: a previous test file's dynamodb-local process may
// not have fully died yet even though kill() reported success; connecting to
// a dying instance makes subsequent operations hang until Mocha times out.
require("sleep").sleep(1);
// '-inMemory' => no persistence, so killing the process wipes all state.
dynamoInstance = spawn('/usr/local/bin/dynamodb-local', ['-inMemory']);
// Point the SDK at the local instance instead of the real AWS endpoint.
db = new aws.DynamoDB({ endpoint: "http://localhost:8000" });
});
// Kill the local dB when this file's tests are done.
after(() => dynamoInstance.kill());
// Creates the single-attribute table used by the functions under test.
// Returns the AWS.Request promise so callers can chain on completion.
const createTable = function()
{
var params =
{
TableName: "TestTable",
KeySchema: [{ AttributeName: "emailAddress", KeyType: "HASH" }],
AttributeDefinitions:
[
{ AttributeName: "emailAddress", AttributeType: "S" },
],
// Required by the API even though throughput is meaningless for the
// local/in-memory implementation.
ProvisionedThroughput:
{
ReadCapacityUnits: 10,
WriteCapacityUnits: 10
}
};
return db.createTable(params).promise();
}
const dropTable = function()
{
return db.deleteTable({ TableName: "TestTable"}).promise();
}
beforeEach(function()
{
// waitFor polls until the table is actually usable: despite the local-dB
// docs saying CreateTable completes immediately, operations issued straight
// after createTable() sometimes failed with ResourceNotFoundException.
// NOTE: the `return` here is essential — without it Mocha assumes the hook
// finished and starts the test before the table exists.
return createTable()
.then(() => db.waitFor("tableExists", { TableName: "TestTable"}));
});
afterEach(function()
{
// Symmetric teardown: drop the table and wait until it is really gone so
// the next test's createTable() cannot race against the delete.
return dropTable()
.then(() => db.waitFor("tableNotExists", { TableName: "TestTable"}))
});
const ARBITRARY_EMAIL_ADDRESS = "fred@bloggs.com";
describe("When querying an empty db for a user", function()
{
it("Returns {}", function()
{
// DocumentClient.get() resolves with {} (no Item key) when nothing matches.
return testLambdaFns._test.findUserByEmailAddress(ARBITRARY_EMAIL_ADDRESS)
.then(res =>
{
assert(JSON.stringify(res) === "{}");
})
});
});
describe("When adding a user to an empty db", function()
{
it("Doesn't error - it can return nothing", function()
{
// Returning the promise is the whole test: Mocha fails it on rejection.
return testLambdaFns._test.addUser(ARBITRARY_EMAIL_ADDRESS);
});
});
describe("When finding a user", function()
{
it("Can be updated", function()
{
return testLambdaFns._test.addUser(ARBITRARY_EMAIL_ADDRESS)
.then(() => testLambdaFns._test.findUserByEmailAddress(ARBITRARY_EMAIL_ADDRESS))
.then(res =>
{
const info =
{
Name: "Fred",
FavouriteProgrammingBook: "The C Programming Language"
}
// res is the raw get() response; the user record itself is res.Item.
return testLambdaFns._test.updateUserInfo(res.Item, info)
})
.then(updatedUser =>
{
// addUser stores version 0; updateUserInfo bumps it by one.
assert(updatedUser.version == 1);
});
});
});
describe("When valid request sent", function()
{
describe("ONE", function()
{
it("ONE", function() { assert(true); });
});
describe("for non-added user", function()
{
it("succeeds", function()
{
// Exercises the full updateOrAddUser flow: user absent -> added -> updated.
const req =
{
rpcVersion:1,
emailAddress:ARBITRARY_EMAIL_ADDRESS,
info:
{
Name:"Fred Blogs",
FavouriteProgrammingBook:"Modern C++ Design"
}
};
return testLambdaFns._test.updateOrAddUser(req)
.then(res =>
{
assert(res.emailAddress == ARBITRARY_EMAIL_ADDRESS);
});
});
});
describe("TWO", function()
{
it("TWO", function() { });
});
});
});
before()/after() - Creating/Destroying dynamoDB
Rather than attempting to create & destroy the dB for each Unit Test, I settled with creating it once per Unit Test file. This is handled in the before()
& after()
functions. Here, the local instance of dynamoDB
is spawned using the child_process
package and reference to the process retained. This is then used to kill it afterwards. The important point to note here is the use of the sleep package & function.
I found when I had multiple test files, each with their own before()
& after()
functions that did the same as these, even though kill had purported to have killed the process (I checked the killed flag), it seemed the process hadn't died immediately. This meant the before()
function in the next set of tests would successfully connect to the dying instance of dynamoDB
. Then later, when any operation was performed, it would just hang until Mocha timed-out the Unit Test/before handler. I tried various ways to detect that the process was really dead but none worked so settled for a sleep.
beforeEach()/afterEach() - Creating/Destroying the Table
Where possible, I use Promises. Mocha handles promises quite simply for both hooks (the before*/after* functions) and Unit Tests. The key is to make sure to return the final promise (or pass in the done parameter & call it - though I don't use this mechanism).
Looking at the beforeEach()
function, createTable()
is called which returns a promise (from the AWS.Request
type that aws-sdk.DynamoDB.createTable()
returns). This promise is then chained to by the waitFor
method. This polls the dB for state of the table. The returned promise will not complete until the table has been created and waitFor
has completed.
I am not convinced that waitFor
is needed. According to the AWS vs Local DynamoDB guide for local instances, tables are created immediately. I added this check as occasionally I was getting resources errors like the one earlier. However, I think the cause for that was because I forgot the return
statement before the call to createTable()
meaning no Promise was returned to Mocha so it thought the beforeEach()
function had completed. I have removed this since in my real Unit Tests and they all seem to work.
Unit Tests
That's really it. The hard part wasn't writing the UTs but getting a local instance of DynamoDB
running with the table that the functions to test used in the correct state. Again, due to the functions being tested usually returning promises themselves, it is necessary to return Promise. The assertion(s) are made synchronously in a then
continuation chained to Promise returned from the function being tested and Promise from the whole chain returned.
If an assertion returns false
, then even though it's within a continuation, Mocha detects this and the test fails. If the function under test throws then Mocha also catches this and the test fails.
The Actual Code Being Tested
'use strict';
const AWS = require("aws-sdk");
const errors = require("./Errors.js");
// Looks up a user record by email address (the table's hash key).
// Returns the promise from DocumentClient.get(): it resolves with the RAW
// response object — {} when no user exists, { Item: {...} } when one does.
// Callers rely on that shape (they unwrap .Item themselves), so the Item is
// deliberately not unwrapped here.
const findUserByEmailAddress = function(emailAddress)
{
// NOTE(review): endpoint is pinned to the local test instance — TODO make
// this configurable before this code runs inside a real Lambda.
AWS.config.dynamodb = { endpoint: "http://localhost:8000" };
const docClient = new AWS.DynamoDB.DocumentClient();
const query =
{
TableName: "TestTable",
Key:
{
emailAddress: emailAddress
}
};
// .promise() takes no arguments. The original passed a node-style callback
// here that referenced undefined resolve/reject — it was silently ignored
// by the SDK, so removing it does not change behavior.
return docClient.get(query).promise();
}
// Inserts a brand-new user record with version 0 and empty info.
// Resolves with the stored item (put() itself returns no attributes).
const addUser = function(emailAddress)
{
const newItem =
{
TableName: "TestTable",
Item:
{
version: 0,
emailAddress : emailAddress,
info: {}
}
}
// NOTE(review): endpoint pinned to the local test instance — TODO configure.
AWS.config.dynamodb = { endpoint: "http://localhost:8000" };
const docClient = new AWS.DynamoDB.DocumentClient();
// put() already returns a promise via .promise(); wrapping it in
// `new Promise(...)` (as the original did) is the explicit-construction
// anti-pattern — chain on it directly instead.
return docClient.put(newItem).promise()
.then(() => newItem.Item);
}
// Replaces a user's info and bumps the version, using optimistic locking:
// the ConditionExpression makes the update fail (ConditionalCheckFailed)
// if someone else changed the record since `user` was read.
// Resolves with the full updated record (ReturnValues: "ALL_NEW").
const updateUserInfo = function(user, info)
{
const currentVersion = user.version;
const updatedVersion = currentVersion + 1;
const updateRequest =
{
TableName: "TestTable",
Key: { emailAddress: user.emailAddress },
ReturnValues: "ALL_NEW",
UpdateExpression: "SET info = :info, version = :updatedVersion",
// Placeholder was previously misspelt ":currenVersion" (consistently, so
// it worked); renamed consistently here — behavior is unchanged.
ConditionExpression: "version = :currentVersion",
ExpressionAttributeValues :
{
":currentVersion": currentVersion,
":updatedVersion": updatedVersion,
":info": info
}
}
const docClient = new AWS.DynamoDB.DocumentClient();
// update() already yields a promise; no need for the original's
// `new Promise` wrapper (which also shadowed `updateRequest` in its .then).
return docClient.update(updateRequest).promise()
.then(response => response.Attributes);
}
// True when the request uses the (only) supported RPC protocol version.
// Strict equality means a missing/undefined rpcVersion is invalid.
const isValidVersion = function(event)
{
// `=== 1` already yields a boolean; the original's `|| false` was redundant.
return event.rpcVersion === 1;
}
// Top-level RPC operation: validates the request, then either adds the user
// (when absent) or loads the existing record, and finally applies the info
// update. Resolves with the updated record; rejects with { Error: -1 } for
// an unsupported rpcVersion (callers/tests depend on that shape).
const updateOrAddUser = function(request)
{
if (isValidVersion(request) === false)
{
return Promise.reject({ Error: -1 });
}
// findUserByEmailAddress already returns a promise — chain on it directly
// rather than wrapping the chain in `new Promise` as the original did.
return findUserByEmailAddress(request.emailAddress)
.then(response =>
{
// get() resolves with {} when no record matches.
if (JSON.stringify(response) === "{}")
return addUser(request.emailAddress);
// BUG FIX: get() resolves with { Item: {...} }; the user record itself is
// in .Item. The original returned the whole response, so updateUserInfo
// read undefined for user.version and user.emailAddress on this path.
return response.Item;
})
.then(user => updateUserInfo(user, request.info));
}
// AWS Lambda entry point: thin adapter that bridges the promise-based
// updateOrAddUser onto Lambda's node-style callback.
exports.handler = (event, context, callback) =>
{
console.log('Received event:', JSON.stringify(event, null, 2));
const succeed = function(user)
{
console.log("RESULT:", user);
callback(null, user);
};
const fail = function(err)
{
// Serialize the rejection value so Lambda reports it as the error string.
callback(JSON.stringify(err));
};
// .catch (not a second .then argument) so an error thrown inside `succeed`
// is also routed to `fail`, exactly as the original chain behaved.
updateOrAddUser(event)
.then(succeed)
.catch(fail);
};
// Expose the internals for the unit tests, which exercise these functions
// directly instead of going through exports.handler.
exports._test =
{
findUserByEmailAddress,
addUser,
updateUserInfo,
updateOrAddUser
}