-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathindex.mjs
More file actions
102 lines (87 loc) · 3.47 KB
/
index.mjs
File metadata and controls
102 lines (87 loc) · 3.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
// @ts-nocheck
import { S3Client, GetObjectCommand, PutObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3';
import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs';
import archiver from 'archiver';
import unzipper from 'unzipper';
import fs from 'fs';
// S3 client; region and credentials are resolved from the Lambda execution
// environment (no explicit config).
const s3 = new S3Client();
// SQS client explicitly pinned to us-east-2 — presumably the region where
// the updates queue lives; verify against the deployed queue URL.
const sqs = new SQSClient({
region: 'us-east-2',
endpoint: 'https://sqs.us-east-2.amazonaws.com'
});
/**
 * Lambda entry point: merges a set of partial ZIPs (already in S3) into one
 * final ZIP, uploads it, deletes the partials, and posts a status message to
 * the updates SQS queue. On any failure a `zip-failed` message is sent
 * (best-effort) and the original error is rethrown so Lambda records the
 * invocation as failed.
 *
 * @param {object} event
 * @param {string}   event.bucket          S3 bucket holding partials and final ZIP
 * @param {Array<{zipKey?: string}>} event.partKeys  partial-ZIP descriptors
 * @param {string}   event.finalKey        S3 key for the merged ZIP
 * @param {string}   event.updatesQueueUrl SQS queue URL for status messages
 * @param {*}        event.queueId         opaque id echoed back in messages
 * @param {*}        event.totalSize       echoed back in the success message
 * @param {*}        event.fileCount       echoed back in the success message
 * @returns {Promise<{status: string, finalKey: string}>}
 * @throws {Error} on missing parameters, empty zipKeys, or any AWS/stream failure
 */
export const handler = async (event) => {
  const tmpZipPath = `/tmp/final.zip`;
  try {
    const { bucket, partKeys, finalKey, updatesQueueUrl, queueId, totalSize, fileCount } = event;
    if (!bucket || !partKeys || !finalKey || !updatesQueueUrl) {
      throw new Error('Missing required parameters');
    }

    // Keep only the part entries that actually produced a partial ZIP.
    const zipKeys = partKeys.map((item) => item.zipKey).filter((key) => key != null);
    if (zipKeys.length === 0) {
      throw new Error('No valid zipKeys found');
    }

    const output = fs.createWriteStream(tmpZipPath);
    const archive = archiver('zip', { zlib: { level: 9 } });

    // FIX: register completion/error listeners BEFORE finalize() runs. The
    // previous code awaited 'close' only after `await archive.finalize()`
    // resolved, so a 'close' emitted in that gap left the promise pending
    // forever (invocation hangs until Lambda timeout). Also surface archiver
    // and write-stream errors, which were previously unhandled.
    const archiveDone = new Promise((resolve, reject) => {
      output.on('close', resolve);
      output.on('error', reject);
      archive.on('error', reject);
    });
    archive.pipe(output);

    // Sequentially stream every entry of each partial ZIP into the merged
    // archive. Non-file entries (directories, etc.) are drained so the parse
    // stream keeps flowing.
    for (const zipKey of zipKeys) {
      const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: zipKey }));
      await Body.pipe(unzipper.Parse())
        .on('entry', (entry) => {
          if (entry.type === 'File') {
            archive.append(entry, { name: entry.path });
          } else {
            entry.autodrain();
          }
        })
        .promise();
    }

    await archive.finalize();
    await archiveDone; // ZIP is fully flushed to /tmp past this point

    // Upload the merged ZIP. ContentLength lets the SDK issue a plain
    // streaming PUT instead of failing on an unknown-length stream body.
    await s3.send(new PutObjectCommand({
      Bucket: bucket,
      Key: finalKey,
      Body: fs.createReadStream(tmpZipPath),
      ContentLength: fs.statSync(tmpZipPath).size,
      ContentType: 'application/zip',
    }));

    // CLEANUP: Delete all partial ZIPs. Best-effort — a failed delete is
    // logged but must not fail an otherwise successful merge.
    for (const item of partKeys) {
      const partKey = item?.zipKey;
      if (partKey) {
        try {
          await s3.send(new DeleteObjectCommand({ Bucket: bucket, Key: partKey }));
        } catch (err) {
          console.error(`Failed to delete partial ZIP ${partKey}: ${err.message}`);
        }
      }
    }

    // Notify downstream that the merged ZIP is ready.
    await sqs.send(new SendMessageCommand({
      QueueUrl: updatesQueueUrl,
      MessageBody: JSON.stringify({
        queueId,
        zipFileName: finalKey.split('/').pop(),
        status: 'zip-ready',
        function: 'BiospexZipMerger',
        totalSize,
        fileCount,
        zipKey: finalKey
      }),
    }));

    return { status: 'success', finalKey };
  } catch (error) {
    console.error('Merge failed:', error.message);
    // Best-effort failure notification; never mask the original error.
    try {
      const { updatesQueueUrl, queueId } = event;
      if (updatesQueueUrl) {
        await sqs.send(new SendMessageCommand({
          QueueUrl: updatesQueueUrl,
          MessageBody: JSON.stringify({
            queueId,
            status: 'zip-failed',
            error: error.message,
            function: 'BiospexZipMerger'
          }),
        }));
      }
    } catch (sendError) {
      console.error('Failed to send failure notification:', sendError.message);
    }
    throw error;
  } finally {
    // FIX: remove the temp file on success AND failure so /tmp does not fill
    // up across warm invocations (previously it was unlinked only on success).
    try {
      fs.unlinkSync(tmpZipPath);
    } catch {
      // ignore — file may not exist if we failed before creating it
    }
  }
};