Commit cda66df

Apply hexagonal architecture
1 parent b303713 commit cda66df

File tree: 13 files changed, +20867 −1902 lines


README.md

Lines changed: 8 additions & 19 deletions
@@ -20,15 +20,12 @@ This project has the following folder structure:
 ├── package.json
 ├── samconfig.toml           # AWS SAM config file, generated by SAM
 ├── src                      # Source code for all functions
-│   ├── parse-covid-csv      # Function source code
-│   │   ├── events           # Events for local testing
-│   │   │   ├── context.ts
-│   │   │   └── event.json
-│   │   └── lambda.ts        # Function source code
-│   └── tests
-│       └── parse-covid-csv
-│           └── lambda.test.ts
-├── template.yaml            # Main CloudFormation file
+│   └── parse-covid-csv      # Function source code
+│       ├── events           # Events for local testing
+│       │   ├── context.ts
+│       │   └── event.json
+│       └── lambda.ts        # Function source code
+├── template.yaml            # Main CloudFormation file
 ├── tsconfig.json
 ├── webpack.config.js        # Webpack config
 └── yarn.lock
@@ -82,16 +79,8 @@ sam deploy

 ### Testing

-To run the application tests, run the following command:
-
-```bash
-npm test
-```
-
-This will run the jest tests for the sample serverless component, particularly [`/src/tests/parse-covid-csv/lambda.test.ts`](./src/tests/parse-covid-csv/lambda.test.ts).
-
-Note: if you notice error logs during the test run, do not be concerned; the tests also evaluate error cases, and it is recommended to trace the particular error logs.
+This section will be added tomorrow.

 ## License

-MIT, see [LICENSE](LICENSE).
+MIT, see [LICENSE](LICENSE).

WHO-COVID-19-global-data.csv

Lines changed: 20708 additions & 1574 deletions
Large diffs are not rendered by default.

adapters/database-adapter.ts

Lines changed: 24 additions & 0 deletions
import { DocumentClient } from 'aws-sdk/clients/dynamodb'
import { v4 as uuidv4 } from 'uuid'
const documentClient = new DocumentClient()

// This Database Adapter's sole responsibility is handling the communication protocol with AWS DynamoDB.
// 1. It accepts an item (JSON), a tableName so it knows where to store the data, and,
//    as an optional parameter, the database client library, which defaults to DynamoDB's DocumentClient.
//    The reason is to make the adapter easier to test and mock:
//    plain JavaScript objects or spy objects can be passed in without extensively mocking the DocumentClient.
export default {
  saveItem: async function saveItem(item: any, tableName: string, db = documentClient): Promise<void> {
    // 2. Generate the item ID
    item.id = uuidv4()
    try {
      // 3. Try to store the item in our current database
      await db.put({
        TableName: tableName,
        Item: item
      }).promise()
    } catch (e) {
      throw JSON.stringify({ message: `Error saving ${JSON.stringify(e)}` })
    }
  }
}
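
Because the client is an injectable parameter, the adapter can be exercised without touching DynamoDB. A minimal sketch of that idea, assuming a scratch file at the repository root; the stub shape and sample item are illustrative and not part of this commit:

```ts
import databaseAdapter from './adapters/database-adapter'

// Hypothetical stub that mimics the single DocumentClient call the adapter makes:
// put(params) returning an object with a promise() method
const fakeDb = {
  put: (params: { TableName: string, Item: any }) => ({
    promise: async () => {
      console.log('would write', params.Item, 'to table', params.TableName)
      return {}
    }
  })
}

;(async () => {
  // No DynamoDB table or AWS credentials needed: the stub is injected as the third argument
  await databaseAdapter.saveItem({ country: 'XY', cases: 42 }, 'covid-table', fakeDb as any)
})()
```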

adapters/filestorage-adapter.ts

Lines changed: 20 additions & 0 deletions
import { Readable } from 'stream'
import S3 from 'aws-sdk/clients/s3'
const s3 = new S3()

// This File Storage Adapter's sole responsibility is handling the communication protocol with AWS S3.
// 1. It accepts a file key (identifier), the bucket where the file is stored, and,
//    as an optional parameter, the file storage client library, which defaults to the AWS S3 client instance.
//    As with the database adapter, the reason is to make the adapter easier to test and mock:
//    plain JavaScript objects or spy objects can be passed in without extensively mocking the S3 client library.
export default {
  getFileStream: async function getFileStream(key: string, bucket: string, filestorageLibrary = s3): Promise<Readable> {
    console.log('getting file stream', bucket, key)
    // 2. Simply ask the file storage library for the object, based on its file information details
    return filestorageLibrary.getObject({
      Bucket: bucket,
      Key: key
    }).createReadStream()
    // We return the file read stream: there is no need to download the file,
    // the stream can be attached directly to our CSV parser library
  }
}
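
The same injection point exists on the file-storage side; here a Node Readable stands in for a real S3 object stream. Again, the stub, import path, and file details are illustrative:

```ts
import { Readable } from 'stream'
import fileStorageAdapter from './adapters/filestorage-adapter'

// Hypothetical stub that mimics the single S3 call the adapter makes:
// getObject(params) returning an object with a createReadStream() method
const fakeS3 = {
  getObject: (params: { Bucket: string, Key: string }) => {
    console.log('stubbing S3 getObject for', params.Bucket, params.Key)
    return { createReadStream: () => Readable.from(['Country,Cases\nXY,42\n']) }
  }
}

;(async () => {
  const stream = await fileStorageAdapter.getFileStream('data.csv', 'some-bucket', fakeS3 as any)
  // the returned stream can be piped straight into the CSV parser, just like a real S3 read stream
  stream.pipe(process.stdout)
})()
```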

package-lock.json

Lines changed: 3 additions & 21 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 5 additions & 5 deletions
@@ -1,13 +1,13 @@
 {
-  "name": "aws-sam-ts-template",
-  "version": "1.0.0",
+  "name": "sample-testing-app",
+  "version": "2.0.0",
   "description": "",
   "main": "index.js",
   "scripts": {
     "watch": "NODE_ENV=dev webpack --watch --mode=development",
     "build": "NODE_ENV=prod BUNDLE_ANALYZER=false webpack --mode=production --progress",
     "build-analyze": "NODE_ENV=prod BUNDLE_ANALYZER=true webpack --mode=production --progress",
-    "lint": "eslint -c .eslintrc.js --ext ts src",
+    "lint": "eslint -c .eslintrc.js --ext ts functions",
     "pretest": "npm run lint",
     "test": "jest",
     "coverage": "jest --coverage"
@@ -21,17 +21,17 @@
     "@types/uuid": "^8.0.0",
     "aws-lambda": "^1.0.5",
     "aws-sdk": "^2.639.0",
+    "aws-xray-sdk": "^2.5.0",
     "neat-csv": "^5.2.0",
     "source-map-support": "^0.5.16",
     "tslint": "^6.1.0",
-    "uuid": "^8.1.0"
+    "uuid": "^8.2.0"
   },
   "devDependencies": {
     "@types/jest": "^25.1.4",
     "@typescript-eslint/eslint-plugin": "^2.23.0",
     "@typescript-eslint/eslint-plugin-tslint": "^2.23.0",
     "@typescript-eslint/parser": "^2.23.0",
-    "aws-sdk-mock": "^5.1.0",
     "eslint": "^6.8.0",
     "eslint-plugin-prefer-arrow": "^1.1.7",
     "eslint-plugin-prefer-arrow-functions": "^3.0.1",

src/parse-covid-csv/lambda.ts

Lines changed: 27 additions & 46 deletions
@@ -1,51 +1,32 @@
 // Allow CloudWatch to read source maps
 import 'source-map-support/register'
-import { DocumentClient } from 'aws-sdk/clients/dynamodb'
-import { S3Event, S3EventRecord } from 'aws-lambda'
-import S3 from 'aws-sdk/clients/s3'
-import path from 'path'
-import os from 'os'
-import util from 'util'
-import * as stream from 'stream'
-import * as fs from 'fs'
-import neatCsv from 'neat-csv'
-import { v4 as uuidv4 } from 'uuid'
-const pipeline = util.promisify(stream.pipeline)
+import AWSXRay from 'aws-xray-sdk-core'
+import { S3Event } from 'aws-lambda'
+import https from 'https'
+
+// Instead of keeping all of our logic in one file,
+// it is much easier to handle by storing it in the /src folder
+import processCSV from './src/main'
+
+if (process.env.AWS_EXECUTION_ENV) {
+  AWSXRay.captureHTTPsGlobal(https, true)
+}
+
+// Here we are introducing a File Event Port, per hexagonal architecture principles
+import parseFileEvent from './src/parse-file-event'
+
+// Since we are going to reference a File structure, { key, bucket }, quite a lot,
+// it is much better to define a proper interface than to rely on the "any" type
+import IFile from './src/IFile'
+
 const TABLE_NAME = process.env.TABLE_NAME || ''
-const documentClient = new DocumentClient()
-const s3 = new S3()
+
 export async function handler(event: S3Event): Promise<any> {
-  const s3Records = event.Records
-  await Promise.all(s3Records.map(async (record: S3EventRecord) => processCSV(record)))
-}
-async function processCSV(record: S3EventRecord): Promise<any> {
-  const downloadPath = path.join(os.tmpdir(), record.s3.object.key)
-  try {
-    const readable = s3.getObject({
-      Bucket: record.s3.bucket.name,
-      Key: record.s3.object.key,
-    }).createReadStream()
-    const writable = fs.createWriteStream(downloadPath, { encoding: 'utf8' })
-    await pipeline(
-      readable,
-      writable
-    )
-  } catch (e) {
-    console.log(e)
-    throw e
-  }
-  const readCsv = fs.createReadStream(downloadPath)
-  const jsonData = await neatCsv(readCsv)
-  await Promise.all(jsonData.map(async (entry: any) => {
-    entry.id = uuidv4()
-    try {
-      return await documentClient.put({
-        TableName: TABLE_NAME,
-        Item: entry,
-      }).promise()
-    } catch (e) {
-      console.error(e)
-      throw e
-    }
-  }))
+
+  // With it we are parsing an external event into a format more suitable for our business logic
+  const receivedFiles: IFile[] = parseFileEvent(event)
+
+  // Simply, we go through all of the event's files
+  // and pass them to our main business logic for processing
+  return await Promise.all(receivedFiles.map(async (file: IFile) => processCSV(file, TABLE_NAME)))
 }
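
For reference, a sketch of the kind of event the reworked handler receives, trimmed to the fields this code path actually reads; the bucket and key values are placeholders, TABLE_NAME must be set, and with the default adapters this still performs real S3 and DynamoDB calls:

```ts
import { handler } from './src/parse-covid-csv/lambda'

// Minimal S3-shaped event: only Records[].s3.bucket.name, .object.key and .object.size are used
const event: any = {
  Records: [
    {
      s3: {
        bucket: { name: 'my-covid-csv-bucket' },
        object: { key: 'WHO-COVID-19-global-data.csv', size: 1024 }
      }
    }
  ]
}

handler(event)
  .then(() => console.log('done'))
  .catch(console.error)
```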

src/parse-covid-csv/src/IFile.ts

Lines changed: 4 additions & 0 deletions
export default interface IFile {
  bucket: string
  key: string
}

src/parse-covid-csv/src/main.ts

Lines changed: 44 additions & 0 deletions
import IFile from './IFile'

// Importing the adapters to the other external services we are using.
// *** Note! Look at the adapter file names: we aren't referencing S3 or DynamoDB anywhere
import fileStorageAdapter from '../../../adapters/filestorage-adapter'
import databaseAdapter from '../../../adapters/database-adapter'

// This is the NPM library we are going to use to parse a CSV file into a JSON structure
import neatCsv from 'neat-csv'

// This file represents our main business logic: it needs to
// get and process a CSV file and store its data in a database.
// Take note of the parameters passed:
// 1. storedFile - the file location info (where it is stored and under which key)
// 2. tableName - the name of the database table where we want to store the file contents
//
// These two are the ones that matter most for us:
// 3. fileAdapter - the adapter responsible(!) for the implementation protocol with our current file storage service
// 4. dbAdapter - the adapter responsible(!) for the implementation protocol with our current database service
//
// Note!
// The reason for making these adapters optional parameters with default values is
// that we can easily override them with adapters of our choosing, making it easier to:
// - Migrate to another database or file storage service. Say we run a Lambda and want to use Azure Cosmos DB:
//   we just change the implementation of the dbAdapter
// - Mock when testing. We just call the "processCSV" function with plain JavaScript objects or spy objects.

// Now to explain the core business logic within
export default async function processCSV(storedFile: IFile, tableName: string, fileAdapter = fileStorageAdapter, dbAdapter = databaseAdapter): Promise<any> {
  const { key, bucket } = storedFile

  // 1. Retrieve the file from its storage
  const fileStream = await fileAdapter.getFileStream(key, bucket)

  // 2. Invoke the CSV library and parse the file to JSON.
  //    "jsonData" is now an array of JSON objects, each object representing one CSV row
  const jsonData = await neatCsv(fileStream)

  // 3. Go over each parsed CSV row (now in JSON format)
  //    and call the dbAdapter to store it
  return await Promise.all(jsonData.map(async (entry: any) => await dbAdapter.saveItem(entry, tableName)))
}
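
This is where the optional adapter parameters pay off: the full CSV-to-database flow can run entirely in memory. A sketch with illustrative stubs (not part of the commit) standing in for both ports, again assuming a scratch file at the repository root:

```ts
import { Readable } from 'stream'
import processCSV from './src/parse-covid-csv/src/main'

const saved: any[] = []

// Hypothetical in-memory stand-ins; only the methods processCSV calls are stubbed
const fakeFileAdapter = {
  getFileStream: async () => Readable.from(['Country,Cases\nXY,3\nYZ,5\n'])
}
const fakeDbAdapter = {
  saveItem: async (item: any, tableName: string) => { saved.push({ tableName, item }) }
}

;(async () => {
  await processCSV({ bucket: 'unused', key: 'unused.csv' }, 'covid-table', fakeFileAdapter as any, fakeDbAdapter as any)
  console.log(saved.length) // 2 rows parsed and "stored" without S3, DynamoDB, or AWS credentials
})()
```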
src/parse-covid-csv/src/parse-file-event.ts

Lines changed: 17 additions & 0 deletions

import { S3Event, S3EventRecord } from 'aws-lambda'
import IFile from './IFile'

// This File Event Parser is solely responsible for handling the data structure of Amazon S3 events.
// 1. It accepts an S3 event
export default function parseFileEvent(event: S3Event): IFile[] {
  // 2. Extracts the S3 records, which are basically the files the S3 event wants to notify us about
  const s3Records: S3EventRecord[] = event.Records
  // 3. Extracts the file information of the objects in the S3 records,
  //    defined by the bucket they belong to, their key as the identifier, and their size
  const extractObject = (record: S3EventRecord) => {
    const { bucket, object } = record.s3
    return { bucket: bucket.name, key: object.key, size: object.size }
  }
  // 4. Goes through each S3 record and returns an array of the extracted file information objects
  return s3Records.map(extractObject)
}
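
A quick illustration of the mapping this parser performs; the event below is a placeholder, cast to any to keep the snippet short:

```ts
import parseFileEvent from './src/parse-covid-csv/src/parse-file-event'

const files = parseFileEvent({
  Records: [
    {
      s3: {
        bucket: { name: 'my-covid-csv-bucket' },
        object: { key: 'WHO-COVID-19-global-data.csv', size: 2048 }
      }
    }
  ]
} as any)

console.log(files)
// -> [ { bucket: 'my-covid-csv-bucket', key: 'WHO-COVID-19-global-data.csv', size: 2048 } ]
```
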
Lines changed: 1 addition & 0 deletions
// Tests will go here
