
add DB to record each image upload

pull/1/head
Trent Larson, 7 months ago
commit 5b21723273
Changed files:
1. .gitignore (4)
2. README.md (1)
3. package.json (9)
4. pnpm-lock.yaml (1324)
5. server.js (40)
6. sql/flyway.conf (8)
7. sql/migrations/V1__Create_image_table.sql (8)

.gitignore (4)

@@ -1,4 +1,6 @@
 .aider*
 .env
+.idea
 node_modules
+sqlite-db.sqlite
 uploads

README.md (1)

@@ -7,6 +7,7 @@ sh <(curl https://pkgx.sh) +pnpm sh
 pnpm install
 # create the directory for files that are being uploaded; should stay empty afterward
 mkdir uploads
+pnpm run migrate
 ```
 
 Now manually set these AWS_ variables inside a .env file:

package.json (9)

@@ -9,6 +9,13 @@
     "cors": "^2.8.5",
     "dotenv": "^16.4.5",
     "express": "^4.18.2",
-    "multer": "1.4.5-lts.1"
+    "multer": "1.4.5-lts.1",
+    "sqlite3": "^5.1.7"
+  },
+  "devDependencies": {
+    "flywaydb-cli": "^0.9.0"
+  },
+  "scripts": {
+    "migrate": "flyway -configFiles=sql/flyway.conf migrate"
   }
 }

pnpm-lock.yaml (1324)

File diff suppressed because it is too large

server.js (40)

@@ -5,6 +5,7 @@ const express = require('express');
 const fs = require('fs');
 const multer = require('multer');
 const path = require('path');
+const sqlite3 = require('sqlite3').verbose();
 require('dotenv').config()
 
 const app = express();
@@ -12,6 +13,13 @@ app.use(cors());
 const port = 3000;
 
+// Open a connection to the SQLite database
+const db = new sqlite3.Database('sqlite-db.sqlite', (err) => {
+  if (err) {
+    console.error('Error opening database:', err);
+  }
+});
+
 // Configure AWS
 const s3Client = new S3Client({
   region: process.env.AWS_REGION,
@@ -21,7 +29,8 @@ const s3Client = new S3Client({
   }
 });
 
-const upload = multer({ dest: 'uploads/' });
+const UPLOAD_DIR = 'uploads';
+const upload = multer({ dest: UPLOAD_DIR + '/' });
 
 // POST endpoint to upload an image
 app.post('/image', upload.single('image'), (req, res) => {
@@ -36,7 +45,6 @@ app.post('/image', upload.single('image'), (req, res) => {
     const hashHex = hashSum.digest('hex');
 
     const bucketName = 'gifts-image';
-    console.log(file.originalname, '=>', file.path, '=>', hashHex);
     const fileName = hashHex;
     const params = {
       Body: data,
@@ -51,7 +59,7 @@ app.post('/image', upload.single('image'), (req, res) => {
       const response = await s3Client.send(command);
       if (response.$metadata.httpStatusCode !== 200) {
         const errorTime = new Date().toISOString();
-        console.log(errorTime, "Error uploading to S3 with bad HTTP status. Metadata:", response.$metadata);
+        console.error(errorTime, "Error uploading to S3 with bad HTTP status. Metadata:", response.$metadata);
         res.status(500).send(JSON.stringify({ success: false, message: "Got bad status of " + response.$metadata.httpStatusCode + " from AWS. See server logs at " + errorTime }));
       } else {
         const finalUrl = `https://${bucketName}.s3.amazonaws.com/${fileName}`;
@@ -60,6 +68,21 @@ app.post('/image', upload.single('image'), (req, res) => {
             console.error("Error deleting temp file", file.path, "with error:", err);
           }
         });
+        // Record the upload in the database
+        const currentDate = new Date().toISOString();
+        const localFile = file.path.startsWith(UPLOAD_DIR + '/') ? file.path.substring(UPLOAD_DIR.length + 1) : file.path;
+        db.run('INSERT INTO image (date, did, local_file, size, aws_file, url) VALUES (?, ?, ?, ?, ?, ?)', [
+          currentDate,
+          "UNKNOWN",
+          localFile,
+          file.size,
+          fileName,
+          finalUrl
+        ], (dbErr) => {
+          if (dbErr) {
+            console.error(currentDate, "Error inserting record from", "UNKNOWN", "into database:", dbErr);
+          }
+        });
         res.send(JSON.stringify({ success: true, url: finalUrl }));
       }
     } catch (uploadError) {
@@ -74,3 +97,14 @@ app.listen(port, () => {
   console.log(`Server running at http://localhost:${port}`);
 });
+
+// Close the database connection when the Node.js app ends
+process.on('SIGINT', () => {
+  db.close((err) => {
+    if (err) {
+      return console.error('Error closing DB connection', err);
+    }
+    console.log('Closed DB connection.');
+    process.exit(0);
+  });
+});
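With this change, each successful S3 upload is recorded as one row in the image table of sqlite-db.sqlite. A minimal sketch of reading those rows back with the same sqlite3 package the server already requires (the helper script itself is illustrative and not part of this commit):

```
// list-uploads.js — hypothetical helper, not included in this commit
const sqlite3 = require('sqlite3').verbose();

// Open the same database file that server.js writes to.
const db = new sqlite3.Database('sqlite-db.sqlite', (err) => {
  if (err) {
    console.error('Error opening database:', err);
    process.exit(1);
  }
});

// Show the most recent uploads recorded by the POST /image endpoint.
db.all(
  'SELECT date, did, local_file, size, aws_file, url FROM image ORDER BY date DESC LIMIT 10',
  (err, rows) => {
    if (err) {
      console.error('Error querying image table:', err);
    } else {
      console.table(rows);
    }
    db.close();
  }
);
```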

sql/flyway.conf (8)

@@ -0,0 +1,8 @@
+# env vars are FLYWAY_URL, FLYWAY_USER, FLYWAY_PASSWORD
+# https://documentation.red-gate.com/fd/parameters-224919673.html
+flyway.url=jdbc:sqlite:./sqlite-db.sqlite
+flyway.user=admin
+flyway.password=password
+# also potentially referenced in .env or an environment variable
+flyway.locations=filesystem:./sql/migrations

sql/migrations/V1__Create_image_table.sql (8)

@@ -0,0 +1,8 @@
+CREATE TABLE image (
+  date TEXT NOT NULL,
+  did TEXT NOT NULL,
+  local_file TEXT NOT NULL,
+  size INTEGER NOT NULL,
+  aws_file TEXT NOT NULL,
+  url TEXT NOT NULL
+);
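The server assumes this table already exists, so the migration (pnpm run migrate, per the README and package.json changes above) has to run before the first upload. A quick way to confirm the table was created is to list the tables in SQLite's built-in sqlite_master catalog; a minimal sketch, again with the sqlite3 package (illustrative only, not part of this commit):

```
// verify-migration.js — hypothetical helper, not included in this commit
const sqlite3 = require('sqlite3').verbose();

const db = new sqlite3.Database('sqlite-db.sqlite', (err) => {
  if (err) {
    console.error('Error opening database:', err);
    process.exit(1);
  }
});

// sqlite_master lists every table, including the history table Flyway maintains.
db.all(
  "SELECT name FROM sqlite_master WHERE type = 'table' ORDER BY name",
  (err, rows) => {
    if (err) {
      console.error('Error reading sqlite_master:', err);
    } else {
      console.log('Tables:', rows.map((r) => r.name).join(', '));
    }
    db.close();
  }
);
```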