I was using Pipedream to store my Zoom recordings in S3, but they changed their pricing and it's no longer free. So I decided this would be a fun project to build myself anyway. ChatGPT wasn't very helpful; eventually this repo got me there. Here's the final code:
import crypto from 'crypto';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import got from 'got';
import { nanoid } from 'nanoid';
import { format } from 'date-fns';

const zoomSecretToken = process.env.ZOOM_SECRET_TOKEN;
const s3 = new S3Client({ region: 'us-east-1' });
const bucketName = "...";

export const handler = async (event) => {
  try {
    const zoomEvent = typeof event.body === 'string' ? JSON.parse(event.body) : event.body;

    // Zoom validates the webhook endpoint by sending a plainToken that must be
    // echoed back along with its HMAC-SHA256 digest, keyed with the secret token.
    if (zoomEvent.event === 'endpoint.url_validation') {
      const encryptedToken = crypto
        .createHmac('sha256', zoomSecretToken)
        .update(zoomEvent.payload.plainToken)
        .digest('hex');
      return {
        statusCode: 200,
        body: JSON.stringify({ plainToken: zoomEvent.payload.plainToken, encryptedToken }),
      };
    } else if (zoomEvent.event !== 'recording.completed') {
      return { statusCode: 422, body: 'Invalid event type' };
    }

    const meetingId = zoomEvent.payload.object.id;
    const datePath = format(new Date(), 'yyyy-MM-dd');

    // Retrieve recording URLs from the event payload, keeping only MP4 video
    // files and skipping audio-only recordings
    const recordingFiles = zoomEvent.payload.object.recording_files;
    const videoRecordings = recordingFiles.filter(
      (file) => file.file_type === 'MP4' && !file.recording_type.includes('audio')
    );
    if (videoRecordings.length === 0) {
      return { statusCode: 409, body: 'No video recordings found' };
    }
    const file = videoRecordings[0];

    // The download_token delivered with the webhook authorizes the download
    const fileUrl = `${file.download_url}?access_token=${zoomEvent.download_token}`;
    const filepath = `videos/${meetingId}/${datePath}/${nanoid(10)}.${file.file_type.toLowerCase()}`;

    // Download the file and upload to S3
    await uploadToS3(fileUrl, bucketName, filepath);

    return {
      statusCode: 200,
      body: 'Zoom recording successfully uploaded to S3',
    };
  } catch (error) {
    return {
      statusCode: 500,
      body: JSON.stringify({ message: 'Error uploading Zoom recording', error: error.message }),
    };
  }
};

// Helper function to pipe a download stream from a URL into an S3 multipart
// upload, so the video never has to fit in the Lambda's memory or /tmp
const uploadToS3 = async (fileUrl, bucketName, key) => {
  const stream = got.stream(fileUrl);
  const upload = new Upload({
    client: s3,
    params: {
      Bucket: bucketName,
      Key: key,
      Body: stream,
      ContentType: 'video/mp4',
      ACL: 'public-read',
    },
  });
  return upload.done();
};
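You can check the URL-validation round trip without touching AWS by invoking the handler locally with a fake validation event. A minimal sketch, assuming the handler above is saved as index.mjs and ZOOM_SECRET_TOKEN is set in your shell (the file name and token value here are made up):

// test-validation.mjs (hypothetical local helper, not part of the deployed function)
import { handler } from './index.mjs';

const res = await handler({
  body: JSON.stringify({
    event: 'endpoint.url_validation',
    payload: { plainToken: 'abc123' },
  }),
});

// Expect a 200 with the plainToken echoed back plus its HMAC-SHA256 hex digest
console.log(res.statusCode, res.body);

Zoom only marks the endpoint as validated when that digest matches what it computes with the same secret token, which is why the handler has to answer this event before any recordings arrive.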
My package.json:
{
  "name": "zoom-to-s3",
  "version": "1.0.0",
  "description": "",
  "main": "index.mjs",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "zip": "zip -r function.zip ."
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "@aws-sdk/client-s3": "^3.659.0",
    "@aws-sdk/lib-storage": "^3.659.0",
    "date-fns": "^4.1.0",
    "got": "^14.4.2",
    "nanoid": "^5.0.7"
  }
}
And my .github/workflows/deploy.yml to deploy it when I push to main:
name: Deploy Lambda

on:
  push:
    branches:
      - main

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'

      - name: Install Dependencies
        run: npm install

      - name: Zip Lambda Function
        run: npm run zip

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1

      - name: Deploy Lambda
        run: |
          aws lambda update-function-code \
            --function-name zoom-recording-to-s3 \
            --zip-file fileb://function.zip
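After a deploy goes through, it's handy to smoke-test the live function before pointing Zoom at it. A sketch using @aws-sdk/client-lambda, assuming you have it installed locally along with AWS credentials that allow lambda:InvokeFunction (the file name is made up; the function name matches the workflow above):

// smoke-test.mjs (hypothetical local helper)
import { LambdaClient, InvokeCommand } from '@aws-sdk/client-lambda';

const lambda = new LambdaClient({ region: 'us-east-1' });

const { Payload } = await lambda.send(new InvokeCommand({
  FunctionName: 'zoom-recording-to-s3',
  Payload: new TextEncoder().encode(JSON.stringify({
    body: JSON.stringify({
      event: 'endpoint.url_validation',
      payload: { plainToken: 'smoke-test' },
    }),
  })),
}));

// A healthy deploy answers with the 200 validation response
console.log(new TextDecoder().decode(Payload));

If that prints the token plus its digest, the function code, the ZOOM_SECRET_TOKEN environment variable, and the deploy pipeline are all wired up.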