const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const cors = require('cors');
const crypto = require('crypto');
const didJwt = require('did-jwt');
const { Resolver } = require('did-resolver');
const express = require('express');
const { getResolver } = require('ethr-did-resolver');
const fs = require('fs');
const { DateTime } = require('luxon');
const multer = require('multer');
const path = require('path');
const sqlite3 = require('sqlite3').verbose();

require('dotenv').config();

const app = express();
app.use(cors());

const port = process.env.PORT || 3001;

// file name also referenced in flyway.conf and potentially in .env files or in environment variables
const dbFile = process.env.SQLITE_FILE || './image-db.sqlite';

const bucketName = process.env.AWS_BUCKET_NAME || 'gifts-image-test';

const ethrDidResolver = getResolver;
const resolver = new Resolver({
  ...ethrDidResolver({ infuraProjectId: process.env.INFURA_PROJECT_ID || 'fake-infura-project-id' })
});

// Open a connection to the SQLite database
const db = new sqlite3.Database(dbFile, (err) => {
  if (err) {
    console.error('Error opening database:', err);
  }
});

const endorserApiUrl = process.env.ENDORSER_API_URL || 'http://localhost:3000';

// Configure AWS
const s3Client = new S3Client({
  region: process.env.AWS_REGION,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY,
    secretAccessKey: process.env.AWS_SECRET_KEY
  }
});

const uploadDir = 'uploads';
const uploadMulter = multer({ dest: uploadDir + '/' });

// POST endpoint to upload an image
app.post('/image', uploadMulter.single('image'), async (req, res) => {
  const reqFile = req.file;
  if (reqFile == null) {
    return res.status(400).send(JSON.stringify({ success: false, message: 'No file uploaded.' }));
  }

  // Verify the JWT
  try {
    const auth = req.headers.authorization;
    if (!auth || !auth.startsWith('Bearer ')) {
      return res.status(401).send(JSON.stringify({ success: false, message: 'Missing "Bearer JWT" in Authorization header.' }));
    }
    const jwt = auth.substring('Bearer '.length);
    const verified = await didJwt.verifyJWT(jwt, { resolver });
    if (!verified.verified) {
      const errorTime = new Date().toISOString();
      console.error(errorTime, 'Got invalid JWT in Authorization header:', verified);
      return res.status(401).send(JSON.stringify({ success: false, message: 'Got invalid JWT in Authorization header. See server logs at ' + errorTime }));
    }
    const issuerDid = verified.issuer;

    // Check the user's limits, first from the DB and then from the endorser server
    let limitPerWeek = await new Promise((resolve, reject) => {
      db.get(
        'SELECT per_week FROM user WHERE did = ?',
        [issuerDid],
        (dbErr, row) => {
          if (dbErr) {
            // may not matter, so continue
            console.error('Error getting user record from database (but continuing):', dbErr);
          }
          resolve(row?.per_week);
        }
      );
    });

    if (limitPerWeek == null) {
      const headers = {
        'Authorization': `Bearer ${jwt}`,
        'Content-Type': 'application/json'
      };
      const response = await fetch(endorserApiUrl + '/api/report/rateLimits', { headers });
      if (response.status !== 200) {
        console.error('Got bad response of', response.status, 'when checking rate limits for', issuerDid);
        return res.status(400).send(JSON.stringify({ success: false, message: 'Got bad status of ' + response.status + ' when checking limits with endorser server. Verify that the account exists and that the JWT works for that server.' }));
      } else {
        const body = await response.json();
        limitPerWeek = body.maxClaimsPerWeek;
        await new Promise((resolve, reject) => {
          db.run(
            'INSERT INTO user (did, per_week) VALUES (?, ?)',
            [issuerDid, limitPerWeek],
            (dbErr) => {
              if (dbErr) {
                // we can continue... it just means we'll check the endorser server again next time
                console.error('Error inserting user record for', issuerDid, 'into database (but continuing):', dbErr);
              }
              resolve();
            }
          );
        });
      }
    }

    if (limitPerWeek == null) {
      return res.status(400).send(JSON.stringify({ success: false, message: 'Unable to determine rate limits for this user. Verify that the account exists and that the JWT works for that server.' }));
    }

    // check the user's uploads so far this week
    const startOfWeekDate = DateTime.utc().startOf('week'); // luxon weeks start on Mondays
    const startOfWeekString = startOfWeekDate.toISO();
    let imagesCount = await new Promise((resolve, reject) => {
      db.get(
        'SELECT COUNT(*) AS week_count FROM image WHERE did = ? AND time >= ?',
        [issuerDid, startOfWeekString],
        (dbErr, row) => {
          if (dbErr) {
            // we can continue... worst case this request just isn't checked against the limit
            console.error('Error counting records for', issuerDid, 'in database (but continuing):', dbErr);
          }
          resolve(row?.week_count);
        }
      );
    });
    if (imagesCount >= limitPerWeek) {
      return res.status(400).send(JSON.stringify({ success: false, message: 'You have reached your weekly limit of ' + limitPerWeek + ' images.' }));
    }

    // Read the file from the temporary location
    fs.readFile(reqFile.path, async (err, data) => {
      if (err) {
        const errorTime = new Date().toISOString();
        console.error(errorTime, 'Error reading uploaded file:', err);
        return res.status(500).send(JSON.stringify({ success: false, message: 'Got error reading the uploaded file. See server logs at ' + errorTime }));
      }

      // name the file after its content hash so duplicate uploads map to the same object
      const hashSum = crypto.createHash('sha256');
      hashSum.update(data);
      const hashHex = hashSum.digest('hex');
      const fileName = hashHex + path.extname(reqFile.originalname);

      try {
        // record the upload in the database
        const currentDate = new Date().toISOString();
        const localFile = reqFile.path.startsWith(uploadDir + '/')
          ? reqFile.path.substring(uploadDir.length + 1)
          : reqFile.path;
        const finalUrl = `https://${bucketName}.s3.amazonaws.com/${fileName}`;
        await new Promise((resolve, reject) => {
          db.run(
            'INSERT INTO image (time, did, local_file, size, final_file, url) VALUES (?, ?, ?, ?, ?, ?)',
            [currentDate, issuerDid, localFile, reqFile.size, fileName, finalUrl],
            (dbErr) => {
              if (dbErr) {
                // don't continue because then we'll have storage we cannot track (and potentially limit)
                console.error(currentDate, 'Error inserting record from', issuerDid, 'into database:', dbErr);
                return reject(dbErr);
              }
              resolve();
            }
          );
        });

        // send to AWS
        const params = {
          Body: data,
          Bucket: bucketName, // S3 bucket name
          ContentType: reqFile.mimetype, // file content type
          Key: fileName // file name to use in S3
        };
        const command = new PutObjectCommand(params);
        const response = await s3Client.send(command);
        if (response.$metadata.httpStatusCode !== 200) {
          const errorTime = new Date().toISOString();
          console.error(errorTime, 'Error uploading to S3 with bad HTTP status, with metadata:', response.$metadata);
          res.status(500).send(JSON.stringify({ success: false, message: 'Got bad status of ' + response.$metadata.httpStatusCode + ' from AWS. See server logs at ' + errorTime }));
        } else {
          // clean up the temp file; a failure here doesn't affect the response
          fs.rm(reqFile.path, (err) => {
            if (err) {
              console.error('Error deleting temp file', reqFile.path, 'with error (but continuing):', err);
            }
          });
          res.send(JSON.stringify({ success: true, url: finalUrl }));
        }
      } catch (uploadError) {
        const errorTime = new Date().toISOString();
        console.error(errorTime, 'Error uploading to S3:', uploadError);
        res.status(500).send(JSON.stringify({ success: false, message: 'Got error uploading file. See server logs at ' + errorTime + ' Error Details: ' + uploadError }));
      }
    });
  } catch (error) {
    const errorTime = new Date().toISOString();
    console.error(errorTime, 'Error processing image upload:', error);
    res.status(500).send(JSON.stringify({ success: false, message: 'Got error processing image upload. See server logs at ' + errorTime + ' Error Details: ' + error }));
  }
});

app.listen(port, () => {
  console.log(`Server running at http://localhost:${port}`);
});

// Close the database connection when the Node.js app ends
process.on('SIGINT', () => {
  db.close((err) => {
    if (err) {
      console.error('Error closing DB connection:', err);
      return;
    }
    process.exit(0);
  });
});
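// Example client call for the /image endpoint above: a sketch only, not part of the server.
// The JWT value, the file path, and the port are assumptions; the JWT must be a did-jwt
// signed by an ethr DID that the endorser server recognizes, and FormData/Blob/fetch are
// the Node 18+ globals.
//
//   const form = new FormData();
//   form.append('image', new Blob([await fs.promises.readFile('./photo.jpg')]), 'photo.jpg');
//   const response = await fetch('http://localhost:3001/image', {
//     method: 'POST',
//     headers: { Authorization: `Bearer ${jwt}` },
//     body: form,
//   });
//   console.log(await response.json());
//   // on success: { success: true, url: 'https://<bucket>.s3.amazonaws.com/<sha256-hash>.<ext>' }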