
update to v3 of AWS SDK, add mime-type to upload

Branch: pull/1/head
Author: Trent Larson, 7 months ago
Commit: dbcbd0c544
1. README.md (6 lines changed)
2. package.json (3 lines changed)
3. pnpm-lock.yaml (1299 lines changed)
4. server.js (41 lines changed)

README.md (6 lines changed)

@@ -13,3 +13,9 @@ pnpm install
 ```
 node server.js
 ```
+
+## test
+
+```
+curl -X POST -F "image=@./test.png" http://localhost:3000/image
+```
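For reference, the updated /image handler now replies with JSON rather than plain text. The same test can be driven from Node directly; this is a minimal sketch assuming Node 18+ (built-in fetch, FormData, and Blob) and a local test.png:

```
// upload-test.js (sketch): Node equivalent of the curl test above.
const { readFile } = require('fs/promises');

async function main() {
  const data = await readFile('./test.png');
  const form = new FormData();
  // Field name must be "image" to match upload.single('image') in server.js;
  // the Blob type becomes file.mimetype, which the server now forwards to S3 as ContentType.
  form.append('image', new Blob([data], { type: 'image/png' }), 'test.png');

  const res = await fetch('http://localhost:3000/image', { method: 'POST', body: form });
  // A successful response body looks like:
  // {"success":true,"url":"https://gifts-image.s3.amazonaws.com/<sha256>_test.png"}
  console.log(res.status, await res.text());
}

main().catch(console.error);
```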

package.json (3 lines changed)

@@ -4,7 +4,8 @@
   "description": "",
   "license": "UNLICENSED",
   "dependencies": {
-    "aws-sdk": "^2.1564.0",
+    "@aws-sdk/client-s3": "^3.521.0",
+    "@aws-sdk/lib-storage": "^3.521.0",
     "dotenv": "^16.4.5",
     "express": "^4.18.2",
     "multer": "1.4.5-lts.1"

pnpm-lock.yaml (1299 lines changed)

File diff suppressed because it is too large

server.js (41 lines changed)

@@ -1,4 +1,5 @@
-const AWS = require('aws-sdk');
+const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
+const crypto = require('crypto');
 const express = require('express');
 const fs = require('fs');
 const multer = require('multer');
@@ -9,13 +10,14 @@ const app = express();
 const port = 3000;
 
 // Configure AWS
-AWS.config.update({
-  accessKeyId: process.env.AWS_ACCESS_KEY,
-  secretAccessKey: process.env.AWS_SECRET_KEY,
+const s3Client = new S3Client({
   region: process.env.AWS_REGION,
+  credentials: {
+    accessKeyId: process.env.AWS_ACCESS_KEY,
+    secretAccessKey: process.env.AWS_SECRET_KEY
+  }
 });
-const s3 = new AWS.S3();
 
 const upload = multer({ dest: 'uploads/' });
 
 // POST endpoint to upload an image
@@ -23,23 +25,32 @@ app.post('/image', upload.single('image'), (req, res) => {
   const file = req.file;
 
   // Read the file from the temporary location
-  fs.readFile(file.path, (err, data) => {
+  fs.readFile(file.path, async (err, data) => {
     if (err) throw err; // Handle error
 
+    const hashSum = crypto.createHash('sha256');
+    hashSum.update(data);
+    const hashHex = hashSum.digest('hex');
+
+    const bucketName = 'gifts-image';
+    const fileName = `${hashHex}_${file.originalname}`;
     const params = {
-      Bucket: 'gifts-image', // S3 Bucket name
-      Key: `${Date.now()}_${path.basename(file.originalname)}`, // File name to use in S3
       Body: data,
-      //ACL: 'public-read' // Optional: if you want the uploaded file to be publicly accessible
+      Bucket: bucketName, // S3 Bucket name
+      ContentType: file.mimetype, // File content type
+      Key: fileName, // File name to use in S3
     };
 
     // Upload the file to S3
-    s3.upload(params, function(s3Err, data) {
-      if (s3Err) throw s3Err; // Handle upload error
-
-      // Once successfully uploaded to S3, send back the URL of the uploaded file
-      res.send(`File uploaded successfully at ${data.Location}`);
-    });
+    try {
+      const command = new PutObjectCommand(params);
+      const response = await s3Client.send(command);
+
+      const finalUrl = `https://${bucketName}.s3.amazonaws.com/${fileName}`;
+      res.send(JSON.stringify({ success: true, url: finalUrl }));
+    } catch (uploadError) {
+      console.error('Error uploading to S3:', uploadError);
+      res.status(500).send(JSON.stringify({ success: false, message: 'Error uploading file.' }));
+    }
   });
 });
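To confirm the new ContentType actually lands on the stored object, one option is a HeadObjectCommand check; a minimal sketch assuming the same .env variables and gifts-image bucket as server.js, with the object key taken from the URL the endpoint returns:

```
// check-object.js (sketch): verify the ContentType of an uploaded object.
// Usage: node check-object.js <sha256>_test.png
const { S3Client, HeadObjectCommand } = require('@aws-sdk/client-s3');
require('dotenv').config();

const s3Client = new S3Client({
  region: process.env.AWS_REGION,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY,
    secretAccessKey: process.env.AWS_SECRET_KEY
  }
});

async function checkObject(key) {
  const head = await s3Client.send(new HeadObjectCommand({ Bucket: 'gifts-image', Key: key }));
  console.log(`${key}: ContentType=${head.ContentType}, ContentLength=${head.ContentLength}`);
}

checkObject(process.argv[2]).catch(console.error);
```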
