From 8f018885fd79e98de95fd8fa55d17bbc27bb8af5 Mon Sep 17 00:00:00 2001 From: Chris Kalafarski Date: Tue, 30 Jan 2024 15:00:52 -0500 Subject: [PATCH] Upgrade to Node.js 20 and ES modules --- .eslintrc | 3 ++- .github/workflows/check-project-std.yml | 6 ++--- .tool-versions | 2 +- Dockerfile | 3 ++- package.json | 3 ++- src/action.js | 32 ++++++++++--------------- src/writer.js | 6 ++--- 7 files changed, 26 insertions(+), 29 deletions(-) diff --git a/.eslintrc b/.eslintrc index abbc84d..38f0380 100644 --- a/.eslintrc +++ b/.eslintrc @@ -9,6 +9,7 @@ "sourceType": "module" }, "rules": { - "no-console": ["off"] + "no-console": ["off"], + "import/prefer-default-export": "off", } } diff --git a/.github/workflows/check-project-std.yml b/.github/workflows/check-project-std.yml index 57ccf86..aa84614 100644 --- a/.github/workflows/check-project-std.yml +++ b/.github/workflows/check-project-std.yml @@ -6,10 +6,10 @@ jobs: check-javascript: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 with: - node-version: 18 + node-version: 20 cache: yarn - run: yarn install - run: npm exec prettier -- --check "**/*.{js,json,yml}" diff --git a/.tool-versions b/.tool-versions index 1d5b302..2ebcb4a 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1 @@ -nodejs 18.15.0 # Should match the AWS Lambda runtime being used +nodejs 20.9.0 # Should match the AWS Lambda runtime being used diff --git a/Dockerfile b/Dockerfile index 19b6986..fcb77ea 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18-alpine +FROM node:20-alpine LABEL maintainer="PRX " LABEL org.prx.spire.publish.s3="LAMBDA_ZIP" @@ -9,6 +9,7 @@ RUN apk add zip RUN mkdir --parents /.prxci +ADD package.json . ADD src/action.js . ADD src/writer.js . 
diff --git a/package.json b/package.json index 498f03c..796085e 100644 --- a/package.json +++ b/package.json @@ -3,8 +3,9 @@ "version": "1.0.0", "license": "AGPL-3.0", "engines": { - "node": ">= 18.0.0" + "node": ">= 20.0.0" }, + "type": "module", "repository": { "type": "git", "url": "git://github.com/PRX/the-count.git" diff --git a/src/action.js b/src/action.js index 3545aab..886269b 100644 --- a/src/action.js +++ b/src/action.js @@ -1,7 +1,7 @@ -const { Kinesis } = require("@aws-sdk/client-kinesis"); -const crypto = require("crypto"); +import { KinesisClient, PutRecordCommand } from "@aws-sdk/client-kinesis"; +import { createHash } from "node:crypto"; -const kinesis = new Kinesis({ apiVersion: "2013-12-02" }); +const kinesis = new KinesisClient({ apiVersion: "2013-12-02" }); // Base64 1x1 transparent GIF const PIXEL = "R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"; @@ -52,11 +52,7 @@ function getUserId(event) { // Create a new user ID if there isn't one yet // The user ID is a hash of the IP and the current timestamp const msg = event.requestContext.http.sourceIp + new Date().getTime(); - return crypto - .createHash("sha1") - .update(msg) - .digest("base64") - .substring(0, 27); + return createHash("sha1").update(msg).digest("base64").substring(0, 27); } function getSessionId(event, userId) { @@ -76,11 +72,7 @@ function getSessionId(event, userId) { new Date().getTime(), event.requestContext.http.sourceIp, ].join(""); - return crypto - .createHash("sha1") - .update(msg) - .digest("base64") - .substring(0, 27); + return createHash("sha1").update(msg).digest("base64").substring(0, 27); } function bakeCookie(cookieName, cookieValue, maxAge) { @@ -96,7 +88,7 @@ function bakeCookie(cookieName, cookieValue, maxAge) { return [cookieName, configuredValue].join("="); } -exports.handler = async (event) => { +export const handler = async (event) => { console.log(JSON.stringify(event)); if (event?.queryStringParameters?.persist === "false") { @@ -112,11 +104,13 
@@ exports.handler = async (event) => { const logData = dataFromEvent(event, userId, sessionId); const logCsv = formatToCSVLine(logData); - await kinesis.putRecord({ - StreamName: process.env.ACTION_LOG_STREAM_NAME, - PartitionKey: crypto.createHash("md5").update(logCsv).digest("hex"), - Data: Buffer.from(logCsv), - }); + await kinesis.send( + new PutRecordCommand({ + StreamName: process.env.ACTION_LOG_STREAM_NAME, + PartitionKey: createHash("md5").update(logCsv).digest("hex"), + Data: Buffer.from(logCsv), + }) + ); return { isBase64Encoded: true, diff --git a/src/writer.js b/src/writer.js index ed5da0f..24bd5c3 100644 --- a/src/writer.js +++ b/src/writer.js @@ -1,6 +1,6 @@ -const fs = require("fs"); +import { writeFileSync } from "node:fs"; -exports.handler = async (event, context) => { +export const handler = async (event, context) => { console.log(JSON.stringify(event)); const logLines = event?.Records?.map((r) => @@ -11,6 +11,6 @@ exports.handler = async (event, context) => { const filename = `actions-${+new Date()}-${context.awsRequestId}.log`; // This directory must match the LocalMountPath of the function's // configuration - fs.writeFileSync(`/mnt/count_files/${filename}`, logLines.join("\n")); + writeFileSync(`/mnt/count_files/${filename}`, logLines.join("\n")); }