Compare commits

...

19 Commits

Author SHA1 Message Date
Trent Larson bb83982bc7 Revert "Revert "bump version and add "-beta""" 4 months ago
Trent Larson 1e61c55e5b Revert "bump version and add "-beta"" 4 months ago
Trent Larson 7a2551cbae bump version and add "-beta" 4 months ago
Trent Larson 947f307305 bump version to 1.2.3 4 months ago
Trent Larson a23f6d10ad fix the local ETHR resolver code to work in Docker (because it loaded as undefined) 4 months ago
Trent Larson 8228970116 bump version and add "-beta" 4 months ago
Trent Larson 2d5a09de00 fix a problem stopping "pnpm start:dev" 4 months ago
Trent Larson 400a4a1e06 add another test image file 4 months ago
Trent Larson 8750c78897 fix Dockerfile & add a builder element to it 4 months ago
Trent Larson 653a351653 try a build without "--prod" on the install 4 months ago
Trent Larson f4345fe2b9 fix declaration of did-eth-local-resolver & bump version to 1.2.1 4 months ago
Trent Larson deedbefcad bump to version 1.2.0 4 months ago
Trent Larson 2d5fe40971 add test for did:peer JWT 4 months ago
Trent Larson 537add2488 pull in did:peer verification (from endorser-ch) 4 months ago
Trent Larson dcf539eaa0 make into a typescript project 4 months ago
trentlarson 86109cf44b Merge pull request 'replace-image' (#1) from replace-image into master 4 months ago
Trent Larson 2f7d46569e fix multiple-user check, add tests, use local resolver for did:ethr 4 months ago
Trent Larson 127244731d start the feature of replacing an existing image 6 months ago
Trent Larson 6cd4a5a988 add Dockerfile and tweak deployment instructions 7 months ago
  1. .env.sample (9)
  2. CHANGELOG.md (29)
  3. Dockerfile (19)
  4. Makefile.test (103)
  5. README.md (30)
  6. package.json (25)
  7. pnpm-lock.yaml (2360)
  8. sql/migrations/V2__add_is_replacement.sql (2)
  9. src/server.ts (255)
  10. src/vc/did-eth-local-resolver.ts (46)
  11. src/vc/didPeer.ts (139)
  12. src/vc/index.ts (88)
  13. src/vc/passkeyDidPeer.ts (104)
  14. test/Makefile (6)
  15. test/test.sh (147)
  16. test/test0.png (BIN)
  17. test/test1.png (BIN)
  18. test/test2.svg (86)
  19. tsconfig.json (114)

9
.env.sample

@@ -1,3 +1,4 @@
# shellcheck disable=SC2034
# These settings work for American Cloud.
#S3_ACCESS_KEY=???
@@ -19,15 +20,13 @@ S3_SET_ACL=false
#ENDORSER_API_URL=https://test-api.endorser.ch
#ENDORSER_API_URL=https://api.endorser.ch
INFURA_PROJECT_ID=???
# host where the final image can be accessed by the public
# default is https://test-image.timesafari.app
#DOWNLOAD_IMAGE_SERVER=test-image.timesafari.app
# default is 3000
# default is 3002
#PORT=3000
# default is jdbc:sqlite:./sqlite-db.sqlite
# default is ./image.sqlite
# file name also referenced in flyway.conf and in code
#SQLITE_FILE=./image-db.sqlite
#SQLITE_FILE=./image.sqlite

29
CHANGELOG.md

@@ -0,0 +1,29 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Changed in DB or environment
- Nothing
## [1.2.3] - 2024.07.18 - 947f307305d1ad583c5ec7c61fe178fa45490adf
### Added
- Replacement of an existing file
- Local resolver for did:ethr
- Validation of did:peer JWANT
- Testing for file deletion
### Fixed
- Incorrect check for others who recorded same image
### Changed
- Dockerfile uses a builder image
### Changed in DB or environment
- New SQL migration (for the new file replacement feature)
## [1.0.0]
### Added
- All endpoints: image POST & DELETE, image-limits, ping

19
Dockerfile

@@ -0,0 +1,19 @@
# syntax=docker/dockerfile:1
FROM node:22-alpine AS builder
ARG IMAGE_API_VERSION
RUN npm install -g pnpm
RUN apk add git
RUN git clone https://gitea.anomalistdesign.com/log-trade/image-api.git
WORKDIR image-api
RUN git checkout $IMAGE_API_VERSION
# dev dependencies like TypeScript are needed to build
RUN pnpm install
RUN pnpm build
RUN pnpm install --prod
FROM node:22-alpine
COPY --from=builder /image-api/dist /image-api/dist
COPY --from=builder /image-api/node_modules /image-api/node_modules
WORKDIR image-api
CMD node dist/server.js

103
Makefile.test

@@ -0,0 +1,103 @@
# from https://github.com/box/Makefile.test
# `make -C test -j`
# Makefile that has a convenient check target.
# It can be included from another Makefile that only has a TESTS variable
# defined like this
#
# TESTS ?=
#
# Runs the specified test executables. Prepends the test's name to each test's output
# and gives a nice summary at the end of test execution about passed/failed
# tests.
# Only bash is supported
SHELL := /bin/bash
THIS_FILE := $(realpath $(lastword $(MAKEFILE_LIST)))
# The directory where Makefile.test (this file) resides
THIS_FILE_DIR := $(shell dirname $(THIS_FILE))
# FIRST_MAKEFILE may be passed from parent make to child make. If it is
# already set, do not overwrite it.
FIRST_MAKEFILE ?= $(realpath $(firstword $(MAKEFILE_LIST)))
export FIRST_MAKEFILE
# The directory where the Makefile, that is invoked from the command line,
# resides. That makefile would define the TESTS variable. We assume that the
# binaries defined in the TESTS variable also reside in the same directory as
# the Makefile. The generated intermediate files will also go to this directory.
FIRST_MAKEFILE_DIR ?= $(shell dirname $(FIRST_MAKEFILE))
export FIRST_MAKEFILE_DIR
# So that the child makefiles can see the same TESTS variable.
export TESTS
failedTestsName := .makefile_test_failed_tests
executedTestsName := .makefile_test_executed_tests
TEST_TARGETS := $(TESTS:%=TARGET_FOR_%)
export TEST_TARGETS
# If the tests need a different environment one can append to this variable.
TEST_ENVIRONMENT = PYTHONPATH=$(THIS_FILE_DIR):$$PYTHONPATH PATH=$(THIS_FILE_DIR):$$PATH
# TODO: Only write to intermediate files, if they exist already.
# https://unix.stackexchange.com/q/405497/212862
# There is still a race condition here. Maybe we should use sed for appending.
define RUN_ONE_TEST
TARGET_FOR_$(1): $$(FIRST_MAKEFILE_DIR)/$(1)
+@export PATH=$$$$(pwd):$$$$PATH; \
if [ -e $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName) ]; then \
echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName); \
fi; \
$$(TEST_ENVIRONMENT) $$< 2>&1 | sed "s/^/ [$$$$(basename $$<)] /"; test $$$${PIPESTATUS[0]} -eq 0; \
if [ $$$$? -eq 0 ]; then \
echo " PASSED: $$$$(basename $$<)"; \
else \
echo " FAILED: $$$$(basename $$<)"; \
if [ -e $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName) ]; then \
echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName); \
fi; \
fi;
endef
# Build the above rule to run one test, for all tests.
$(foreach currtest,$(TESTS),$(eval $(call RUN_ONE_TEST,$(currtest))))
# execute the tests and look at the generated temp files afterwards.
actualCheck: $(TEST_TARGETS)
+@failed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(failedTestsName) 2> /dev/null | wc -l;); \
executed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(executedTestsName) 2> /dev/null | wc -l;); \
if [ $$failed_tests -ne 0 -a $$executed_tests -ne 0 ]; then \
echo ---------------------------------; \
echo "Failed $$failed_tests out of $$executed_tests tests"; \
echo ---------------------------------; \
elif [ $$failed_tests -eq 0 ]; then \
echo ---------------------------------; \
echo "All $$executed_tests tests passed"; \
echo ---------------------------------; \
fi; \
exit $$failed_tests;
# A commonly used bash command to clean intermediate files. Instead of writing
# it every time re-use this variable.
RM_INTERMEDIATE_FILES := rm -f $(FIRST_MAKEFILE_DIR)/$(failedTestsName) $(FIRST_MAKEFILE_DIR)/$(executedTestsName)
# At the start of the make, we want to start with empty intermediate files.
TRUNCATE_INTERMEDIATE_FILES := cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(failedTestsName) && cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(executedTestsName)
# With trap make sure the clean step is always executed before and after the
# tests run time. Do not leave residual files in the repo.
check:
+@trap "code=\$$?; \
$(RM_INTERMEDIATE_FILES); \
exit \$${code};" EXIT; \
$(TRUNCATE_INTERMEDIATE_FILES); \
$(MAKE) -f $(THIS_FILE) actualCheck;
all: check
.PHONY: all check preCheck actualCheck $(TEST_TARGETS)
.DEFAULT_GOAL := all

30
README.md

@@ -13,39 +13,55 @@ mkdir uploads
pnpm run migrate
```
Now set up an S3 bucket & Infura project, and create a .env file from .env.sample with these important settings:
Now set up an S3 bucket, and create a .env file from .env.sample with these important settings:
```
AWS_ACCESS_KEY=
AWS_SECRET_KEY=
AWS_REGION=
INFURA_PROJECT_ID=
```
## dev
```
node server.js
node src/server.js
```
## test
#### automated
```shell
make -C test -j
```
#### manual
```shell
# run this first command in a directory where `npm install did-jwt` has been run
CODE='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
JWT=`node -e "$CODE"`; curl -X GET -H "Authorization: Bearer $JWT" http://localhost:3001/image-limits
JWT=`node -e "$CODE"`; curl -X POST -H "Authorization: Bearer $JWT" -F "image=@./test.png" http://localhost:3001/image
JWT=`node -e "$CODE"`; curl -X DELETE -H "Authorization: Bearer $JWT" http://localhost:3001/image/https%3A%2F%2Fgifts-image-test.s3.amazonaws.com%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png
```
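If you prefer a standalone script over the inline `node -e` snippet above, the same JWT can be produced with a short TypeScript sketch (assuming `did-jwt` and `ts-node` are installed; the file name is illustrative and the DID/key are the sample values above):
```typescript
// jwt-for-curl.ts (hypothetical file name): create a short-lived JWT for the
// curl commands above, using the sample owner DID and private key.
import { createJWT, SimpleSigner } from "did-jwt";

const OWNER_DID = "did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F";
const OWNER_PRIVATE_KEY_HEX =
  "2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b";

async function main() {
  const now = Math.floor(Date.now() / 1000);
  const jwt = await createJWT(
    { exp: now + 60, iat: now, iss: OWNER_DID },
    { issuer: OWNER_DID, signer: SimpleSigner(OWNER_PRIVATE_KEY_HEX) },
  );
  console.log(jwt); // paste into the "Authorization: Bearer ..." header
}

main();
```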
## deploy to prod first time
* Do the necessary steps from "setup" above.
* Do the necessary steps from "setup" above, or `docker build` it.
* In object storage, set up bucket (and erase any test data).
* In object storage, set up bucket and erase any test data.
* Create a cert for the image delivery & image API servers.
* In haproxy, set a permanent web forward to the correct storage location.
* eg. `http-request redirect code 301 location https://a2-west.americancloud.com/images6618:giftsimagetest%[capture.req.uri] if { hdr_dom(host) -i test-image.timesafari.app }`
## deploy to prod subsequent times
* Add CHANGELOG.md entry. Update version in server.js file.
* Update version in package.json, server.js 'ping' endpoint, and CHANGELOG.md entry.
* Commit, push, and tag.
* Build the docker image.
* Bump version and add "-beta" in package.json & server.js 'ping' endpoint.

25
package.json

@@ -1,25 +1,34 @@
{
"name": "Images for Trade",
"version": "0.0.1",
"version": "1.2.4-beta",
"description": "",
"license": "UNLICENSED",
"dependencies": {
"@aws-sdk/client-s3": "^3.521.0",
"@aws-sdk/lib-storage": "^3.521.0",
"@aws-sdk/client-s3": "^3.614.0",
"@aws-sdk/lib-storage": "^3.614.0",
"@peculiar/asn1-ecc": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"base64url": "^3.0.1",
"cbor-x": "^1.5.9",
"cors": "^2.8.5",
"did-jwt": "^8.0.1",
"did-jwt": "^7.4.7",
"did-resolver": "^4.1.0",
"dotenv": "^16.4.5",
"ethr-did-resolver": "^10.1.5",
"express": "^4.18.2",
"express": "^4.19.2",
"luxon": "^3.4.4",
"multer": "1.4.5-lts.1",
"sqlite3": "^5.1.7"
},
"devDependencies": {
"flywaydb-cli": "^0.9.0"
"flywaydb-cli": "^0.9.0",
"nodemon": "^3.1.4",
"ts-node": "^10.9.2",
"typescript": "^5.5.3"
},
"scripts": {
"migrate": "flyway -configFiles=sql/flyway.conf migrate"
"build": "tsc",
"migrate": "flyway -configFiles=sql/flyway.conf migrate",
"start": "node dist/server.js",
"start:dev": "nodemon --exec ts-node src/server.ts"
}
}

2360
pnpm-lock.yaml

File diff suppressed because it is too large

2
sql/migrations/V2__add_is_replacement.sql

@@ -0,0 +1,2 @@
ALTER TABLE image ADD COLUMN is_replacement BOOLEAN NOT NULL DEFAULT FALSE;

255
server.js → src/server.ts

@@ -1,35 +1,30 @@
//@ts-nocheck
const { DeleteObjectCommand, PutObjectCommand, S3Client } = require('@aws-sdk/client-s3');
const cors = require('cors');
const crypto = require('crypto');
const didJwt = require('did-jwt');
const { Resolver } = require('did-resolver');
const express = require('express');
const { getResolver } = require('ethr-did-resolver');
const fs = require('fs');
const { DateTime } = require('luxon');
const multer = require('multer');
const path = require('path');
const sqlite3 = require('sqlite3').verbose();
import { decodeAndVerifyJwt } from "./vc";
require('dotenv').config()
const app = express();
app.use(cors());
const port = process.env.PORT || 3001;
const port = process.env.PORT || 3002;
// file name also referenced in flyway.conf and potentially in .env files or in environment variables
const dbFile = process.env.SQLITE_FILE || './image-db.sqlite';
const bucketName = process.env.S3_BUCKET_NAME || 'gifts-image-test';
const imageServer = process.env.DOWNLOAD_IMAGE_SERVER || 'test-image.timesafari.app';
const ethrDidResolver = getResolver;
const resolver =
new Resolver({
...ethrDidResolver({
infuraProjectId: process.env.INFURA_PROJECT_ID || 'fake-infura-project-id'
})
})
// Open a connection to the SQLite database
const db = new sqlite3.Database(dbFile, (err) => {
if (err) {
@@ -54,39 +49,52 @@ const uploadDir = 'uploads';
const uploadMulter = multer({ dest: uploadDir + '/' });
app.get('/ping', async (req, res) => {
res.send('pong v1.0.0');
res.send('pong - v 1.2.3-beta'); // version
});
app.get('/image-limits', async (req, res) => {
limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
try {
const limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
}
return res.status(200).send({
success: true,
doneImagesThisWeek: limitsResult.doneImagesThisWeek,
maxImagesPerWeek: limitsResult.maxImagesPerWeek,
nextWeekBeginDateTime: limitsResult.nextWeekBeginDateTime
});
} catch (e) {
console.error('Error getting image limits:', e, ' ... with this string: ' + JSON.stringify(e));
return res.status(500).send({ success: false, message: 'Got this error retrieving limits: ' + JSON.stringify(e) });
}
return res.status(200).send(JSON.stringify({
success: true,
doneImagesThisWeek: limitsResult.doneImagesThisWeek,
maxImagesPerWeek: limitsResult.maxImagesPerWeek,
nextWeekBeginDateTime: limitsResult.nextWeekBeginDateTime
}));
});
// POST endpoint to upload an image
/**
* POST endpoint to upload an image
*
* Send as FormData, with:
* - "image" file Blob
* - "claimType" (optional, eg. "GiveAction", "PlanAction", "profile")
* - "handleId" (optional)
* - "fileName" (optional, if you want to replace an previous image)
*/
app.post('/image', uploadMulter.single('image'), async (req, res) => {
const reqFile = req.file;
if (reqFile == null) {
return res.status(400).send(JSON.stringify({ success: false, message: 'No file uploaded.' }));
}
if (reqFile.size > 10000000) {
fs.rm(reqFile.path, (err) => {
if (err) {
console.error("Error deleting too-large temp file", reqFile.path, "with error (but continuing):", err);
}
});
return res.status(400).send(JSON.stringify({success: false, message: 'File size is too large. Maximum file size is 10MB.'}));
return res.status(400).send({ success: false, message: 'No file uploaded.' });
}
try {
limitsResult = await retrievelimits(req, res);
if (reqFile.size > 10485760) { // 10MB
fs.rm(reqFile.path, (err) => {
if (err) {
console.error("Error deleting too-large temp file", reqFile.path, "with error (but continuing):", err);
}
});
return res.status(400).send({success: false, message: 'File size is too large. Maximum file size is 10MB.'});
}
const limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
}
@@ -95,48 +103,130 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
const issuerDid = limitsResult.issuerDid;
if (doneImagesThisWeek >= maxImagesPerWeek) {
return res.status(400).send(JSON.stringify({ success: false, message: 'You have reached your weekly limit of ' + maxImagesPerWeek + ' images.' }));
return res.status(400).send({ success: false, message: 'You have reached your weekly limit of ' + maxImagesPerWeek + ' images.' });
}
// Read the file from the temporary location
fs.readFile(reqFile.path, async (err, data) => {
if (err) throw err; // Handle error
const hashSum = crypto.createHash('sha256');
hashSum.update(data);
const hashHex = hashSum.digest('hex');
try {
let finalFileName;
if (req.body.fileName) {
// replacement file name given
finalFileName = req.body.fileName;
// check if the file to replace was sent by this user earlier
const didForOriginal = await new Promise((resolve, reject) => {
// For some reason, this prepared-statement SQL gives seg fault: "SELECT did FROM image WHERE did = ? and final_file = ?"
if (issuerDid.indexOf("'") >= 0 || finalFileName.indexOf("'") >= 0) {
console.error("Error: SQL injection attempt with", issuerDid, finalFileName);
return res.status(400).send({ success: false, message: 'SQL injection attempt detected.' });
}
const sql = "SELECT did FROM image WHERE did = '" + issuerDid + "' and final_file = '" + finalFileName + "'";
db.get(
sql,
[],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for user from database:', dbErr)
reject(dbErr);
}
resolve(row?.did);
}
);
});
if (!didForOriginal) {
return res.status(404).send({ success: false, message: 'No image entry found for user ' + issuerDid + ' for file ' + finalFileName });
}
const fileName = hashHex + path.extname(reqFile.originalname);
// check if any other user recorded this image
const othersWhoSentImage = await new Promise((resolve, reject) => {
db.get(
'SELECT did FROM image WHERE final_file = ? and did != ?',
[ finalFileName, issuerDid ],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for other users from database:', dbErr)
reject(dbErr);
}
resolve(row?.did);
}
);
});
if (othersWhoSentImage) {
return res.status(400).send({ success: false, message: 'Other users have also saved this image so it cannot be modified. You will have to replace your own references.' });
}
try {
// remove from S3
const params = {
Bucket: bucketName, // S3 Bucket name
Key: finalFileName, // File name to use in S3
};
const command = new DeleteObjectCommand(params);
const response = await s3Client.send(command);
if (response.$metadata.httpStatusCode !== 200
&& response.$metadata.httpStatusCode !== 202
&& response.$metadata.httpStatusCode !== 204) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error deleting from S3 with bad HTTP status, with metadata:", response.$metadata);
return res.status(500).send({
success: false,
message: "Got bad status of " + response.$metadata.httpStatusCode + " from S3. See server logs at " + errorTime
});
}
// look to see if this image already exists
const imageUrl = await new Promise((resolve, reject) => {
db.get(
'SELECT url FROM image WHERE final_file = ? and did = ?',
[ fileName, issuerDid ],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for user from database:', dbErr)
// continue anyway
// might as well remove from DB and add it all back again later
await new Promise((resolve, reject) => {
db.run(
'DELETE FROM image where did = ? and final_file = ?',
[ issuerDid, finalFileName ],
(dbErr) => {
if (dbErr) {
const currentDate = new Date().toISOString();
console.error(currentDate, "Error deleting record by", issuerDid, "named", finalFileName, "from database:", dbErr);
// don't continue because then we'll have storage we cannot track (and potentially limit)
reject(dbErr);
}
resolve();
}
resolve(row?.url);
}
);
});
if (imageUrl) {
return res.status(201).send(JSON.stringify({ success: true, url: imageUrl, message: 'This image already existed.' }));
);
});
} else {
// no replacement file name given so it's a new file
const hashSum = crypto.createHash('sha256');
hashSum.update(data);
const hashHex = hashSum.digest('hex');
finalFileName = hashHex + path.extname(reqFile.originalname);
// look to see if this image already exists for this user
const imageUrl = await new Promise((resolve, reject) => {
db.get(
'SELECT url FROM image WHERE final_file = ? and did = ?',
[ finalFileName, issuerDid ],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for user from database:', dbErr)
// continue anyway
}
resolve(row?.url);
}
);
});
if (imageUrl) {
return res.status(201).send({ success: true, url: imageUrl, message: 'This image already existed.' });
}
}
// record the upload in the database
const currentDate = new Date().toISOString();
const localFile = reqFile.path.startsWith(uploadDir + '/') ? reqFile.path.substring(uploadDir.length + 1) : reqFile.path;
const finalUrl = `https://${imageServer}/${fileName}`;
const finalUrl = `https://${imageServer}/${finalFileName}`;
const claimType = req.body.claimType;
const handleId = req.body.handleId;
await new Promise((resolve, reject) => {
db.run(
'INSERT INTO image (time, did, claim_type, handle_id, local_file, size, final_file, mime_type, url) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
'INSERT INTO image (time, did, claim_type, handle_id, local_file, size, final_file, mime_type, url, is_replacement) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[
currentDate,
issuerDid,
@@ -144,9 +234,10 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
handleId,
localFile,
reqFile.size,
fileName,
finalFileName,
reqFile.mimetype,
finalUrl
finalUrl,
!!req.body.fileName,
],
(dbErr) => {
if (dbErr) {
@@ -164,7 +255,7 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
Body: data,
Bucket: bucketName, // S3 Bucket name
ContentType: reqFile.mimetype, // File content type
Key: fileName, // File name to use in S3
Key: finalFileName, // File name to use in S3
};
if (process.env.S3_SET_ACL === 'true') {
params.ACL = 'public-read';
@@ -174,10 +265,10 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
if (response.$metadata.httpStatusCode !== 200) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error uploading to S3 with bad HTTP status, with metadata:", response.$metadata);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got bad status of " + response.$metadata.httpStatusCode + " from S3. See server logs at " + errorTime
}));
});
} else {
fs.rm(reqFile.path, (err) => {
if (err) {
@@ -186,24 +277,24 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
});
// AWS URL: https://gifts-image-test.s3.amazonaws.com/gifts-image-test/FILE
// American Cloud URL: https://a2-west.americancloud.com/TENANT:giftsimagetest/FILE
return res.status(200).send(JSON.stringify({success: true, url: finalUrl}));
return res.status(201).send({success: true, url: finalUrl});
}
} catch (uploadError) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error uploading to S3:", uploadError);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got error uploading file. See server logs at " + errorTime + " Error Details: " + uploadError
}));
});
}
})
} catch (error) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error processing image upload:", error);
res.status(500).send(JSON.stringify({
res.status(500).send({
success: false,
message: "Got error processing image upload. See server logs at " + errorTime + " Error Details: " + error
}));
message: "Got error processing image upload. See server logs at " + errorTime + " Error Details: " + JSON.stringify(error)
});
}
});
@@ -240,11 +331,11 @@ app.delete('/image/:url', async (req, res) => {
});
if (!thisUserImageFile) {
console.error('No image entry found for user', issuerDid, '& URL', url, 'so returning 404.');
return res.status(404).send(JSON.stringify({ success: false, message: 'No image entry found for user ' + issuerDid + ' & URL ' + url }));
return res.status(404).send({ success: false, message: 'No image entry found for user ' + issuerDid + ' & URL ' + url });
}
// check if any other user recorded this image
const otherUserImage = await new Promise((resolve, reject) => {
const othersWhoSentImage = await new Promise((resolve, reject) => {
db.get(
'SELECT did FROM image WHERE url = ? and did != ?',
[ url, issuerDid ],
@@ -258,7 +349,7 @@ app.delete('/image/:url', async (req, res) => {
);
});
if (!otherUserImage) {
if (!othersWhoSentImage) {
// remove from S3 since nobody else recorded it
const params = {
Bucket: bucketName, // S3 Bucket name
@@ -271,10 +362,10 @@ app.delete('/image/:url', async (req, res) => {
&& response.$metadata.httpStatusCode !== 204) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error deleting from S3 with bad HTTP status, with metadata:", response.$metadata);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got bad status of " + response.$metadata.httpStatusCode + " from S3. See server logs at " + errorTime
}));
});
}
}
@@ -286,22 +377,22 @@ app.delete('/image/:url', async (req, res) => {
(dbErr) => {
if (dbErr) {
const currentDate = new Date().toISOString();
console.error(currentDate, "Error deleting record from", issuerDid, "into database:", dbErr);
// don't continue because then we'll have storage we cannot track (and potentially limit)
console.error(currentDate, "Error deleting record by", issuerDid, "with URL", url, "from database:", dbErr);
// we'll let them know that it's not all cleaned up so they can try again
reject(dbErr);
}
resolve();
}
);
});
return res.status(204).send(JSON.stringify({ success: true }));
return res.status(204).send({ success: true });
} catch (error) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error processing image delete:", error);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got error processing image delete. See server logs at " + errorTime + " Error Details: " + error
}));
message: "Got error processing image delete. See server logs at " + errorTime + " Error Details: " + JSON.stringify(error)
});
}
});
@@ -337,7 +428,7 @@ async function retrievelimits(req, res) {
console.error("Got bad response of", response.status, "when checking rate limits for", issuerDid);
return {
success: false,
result: res.status(400).send(JSON.stringify({ success: false, message: 'Got bad status of ' + response.status + ' when checking limits with endorser server. Verify that the account exists and that the JWT works for that server.'}))
result: res.status(400).send({ success: false, message: 'Got bad status of ' + response.status + ' when checking limits with endorser server. Verify that the account exists and that the JWT works for that server.'})
};
} else {
const body = await response.json();
@@ -362,7 +453,7 @@ async function retrievelimits(req, res) {
if (maxImagesPerWeek == null) {
return {
success: false,
result: res.status(400).send(JSON.stringify({ success: false, message: 'Unable to determine rate limits for this user. Verify that the account exists and that the JWT works for that server.' }))
result: res.status(400).send({ success: false, message: 'Unable to determine rate limits for this user. Verify that the account exists and that the JWT works for that server.' })
};
}
@@ -406,17 +497,17 @@ async function decodeJwt(req, res) {
if (!auth || !auth.startsWith('Bearer ')) {
return {
success: false,
result: res.status(401).send(JSON.stringify({success: false, message: 'Missing "Bearer JWT" in Authorization header.'}))
result: res.status(401).send({success: false, message: 'Missing "Bearer JWT" in Authorization header.'})
};
}
const jwt = auth.substring('Bearer '.length);
const verified = await didJwt.verifyJWT(jwt, { resolver });
const verified = await decodeAndVerifyJwt(jwt);
if (!verified.verified) {
const errorTime = new Date().toISOString();
console.error(errorTime, 'Got invalid JWT in Authorization header:', verified);
return {
success: false,
result: res.status(401).send(JSON.stringify({ success: false, message: 'Got invalid JWT in Authorization header. See server logs at ' + errorTime }))
result: res.status(401).send({ success: false, message: 'Got invalid JWT in Authorization header. See server logs at ' + errorTime })
};
}
return { success: true, issuerDid: verified.issuer, jwt: jwt };
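To tie the endpoint changes above together, here is a rough client-side sketch of uploading an image and then replacing it by passing `fileName`. This is a sketch only: it assumes Node 18+ globals `fetch`/`FormData`/`Blob`, a JWT created as in the README, and the new default port 3002; names like `HOST` and the claim values are illustrative.
```typescript
// upload-and-replace.ts (hypothetical): exercise POST /image, then replace
// the uploaded file by sending its final file name in the "fileName" field.
import { readFile } from "fs/promises";

const HOST = "http://localhost:3002"; // new default port per this change
const JWT = process.env.JWT ?? "";    // Bearer JWT, e.g. created as in the README

async function postImage(path: string, fileName?: string) {
  const form = new FormData();
  form.append("image", new Blob([await readFile(path)], { type: "image/png" }), path);
  form.append("claimType", "GiveAction");          // optional, per the endpoint docs
  if (fileName) form.append("fileName", fileName); // optional: replace a prior upload
  const res = await fetch(`${HOST}/image`, {
    method: "POST",
    headers: { Authorization: `Bearer ${JWT}` },
    body: form,
  });
  return (await res.json()) as { success: boolean; url?: string; message?: string };
}

async function main() {
  const first = await postImage("./test/test0.png");
  console.log("uploaded:", first.url);
  // Replacing requires the final file name (the last path segment of the URL).
  const replaced = await postImage("./test/test1.png", first.url?.split("/").pop());
  console.log("replaced:", replaced);
}

main();
```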

46
src/vc/did-eth-local-resolver.ts

@@ -0,0 +1,46 @@
import {DIDResolutionResult} from "did-resolver";
/**
* This did:ethr resolver instructs the did-jwt machinery to use the
 * EcdsaSecp256k1RecoveryMethod2020 verification method, which uses the recovery bit added to the
 * signature to recover the DID's public key from that signature.
*
* This effectively hard codes the did:ethr DID resolver to use the address as the public key.
* @param did : string
* @returns {Promise<DIDResolutionResult>}
*
* Similar code resides in endorser-ch
*/
export const didEthLocalResolver = async(did: string): Promise<DIDResolutionResult> => {
const didRegex = /^did:ethr:(0x[0-9a-fA-F]{40})$/;
const match = did.match(didRegex);
if (match) {
const address = match[1]; // Extract eth address: 0x...
const publicKeyHex = address; // Use the address directly as a public key placeholder
return {
didDocumentMetadata: {},
didResolutionMetadata: {
contentType: "application/did+ld+json"
},
didDocument: {
'@context': [
'https://www.w3.org/ns/did/v1',
"https://w3id.org/security/suites/secp256k1recovery-2020/v2"
],
id: did,
verificationMethod: [{
id: `${did}#controller`,
type: 'EcdsaSecp256k1RecoveryMethod2020',
controller: did,
blockchainAccountId: "eip155:1:" + publicKeyHex,
}],
authentication: [`${did}#controller`],
assertionMethod: [`${did}#controller`],
},
};
}
throw new Error(`Unsupported DID format: ${did}`);
};
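For orientation, a small sketch of how this resolver gets used (it mirrors the wiring in `src/vc/index.ts` below; the sample DID is the one from the README and tests):
```typescript
// Register the local resolver for the "ethr" method and inspect the
// DID document it produces; the blockchainAccountId comes directly from
// the address embedded in the DID.
import { Resolver } from "did-resolver";
import { didEthLocalResolver } from "./did-eth-local-resolver";

export const resolver = new Resolver({ ethr: didEthLocalResolver });

didEthLocalResolver("did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F")
  .then((result) => console.log(result.didDocument?.verificationMethod));
```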

139
src/vc/didPeer.ts

@@ -0,0 +1,139 @@
import { AsnParser } from "@peculiar/asn1-schema";
import { ECDSASigValue } from "@peculiar/asn1-ecc";
import crypto from "crypto";
import { decode as cborDecode } from "cbor-x";
/**
*
*
* similar code is in crowd-funder-for-time-pwa libs/crypto/vc/passkeyDidPeer.ts verifyJwtWebCrypto
*
* @returns {Promise<boolean>}
*/
export async function verifyPeerSignature(
payloadBytes: Uint8Array,
publicKeyBytes: Uint8Array,
signatureBytes: Uint8Array
) {
// this simple approach doesn't work
//const verify = crypto.createVerify('sha256')
//verify.update(preimage)
//const result = verify.verify(publicKey, signature)
const finalSignatureBuffer = unwrapEC2Signature(signatureBytes);
const verifyAlgorithm = {
name: "ECDSA",
hash: { name: "SHA-256" },
};
const publicKeyJwk = cborToKeys(publicKeyBytes).publicKeyJwk;
const keyAlgorithm = {
name: "ECDSA",
namedCurve: publicKeyJwk.crv,
};
const publicKeyCryptoKey = await crypto.subtle.importKey(
"jwk",
publicKeyJwk,
keyAlgorithm,
false,
["verify"],
);
const verified = await crypto.subtle.verify(
verifyAlgorithm,
publicKeyCryptoKey,
finalSignatureBuffer,
payloadBytes,
);
return verified;
}
function cborToKeys(publicKeyBytes: Uint8Array) {
const jwkObj = cborDecode(publicKeyBytes);
if (
jwkObj[1] != 2 || // kty "EC"
jwkObj[3] != -7 || // alg "ES256"
jwkObj[-1] != 1 || // crv "P-256"
jwkObj[-2].length != 32 || // x
jwkObj[-3].length != 32 // y
) {
throw new Error("Unable to extract key.");
}
const publicKeyJwk = {
alg: "ES256",
crv: "P-256",
kty: "EC",
x: arrayToBase64Url(jwkObj[-2]),
y: arrayToBase64Url(jwkObj[-3]),
};
const publicKeyBuffer = Buffer.concat([
Buffer.from(jwkObj[-2]),
Buffer.from(jwkObj[-3]),
]);
return { publicKeyJwk, publicKeyBuffer };
}
function toBase64Url(anythingB64: string) {
return anythingB64.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}
function arrayToBase64Url(anything: Uint8Array) {
return toBase64Url(Buffer.from(anything).toString("base64"));
}
/**
 * In WebAuthn, EC2 signatures are wrapped in an ASN.1 structure, so we need to peel r and s apart.
*
* See https://www.w3.org/TR/webauthn-2/#sctn-signature-attestation-types
*
* @return Uint8Array of the signature inside
*/
function unwrapEC2Signature(signature: Uint8Array) {
const parsedSignature = AsnParser.parse(signature, ECDSASigValue);
let rBytes = new Uint8Array(parsedSignature.r);
let sBytes = new Uint8Array(parsedSignature.s);
if (shouldRemoveLeadingZero(rBytes)) {
rBytes = rBytes.slice(1);
}
if (shouldRemoveLeadingZero(sBytes)) {
sBytes = sBytes.slice(1);
}
const finalSignature = isoUint8ArrayConcat([rBytes, sBytes]);
return finalSignature;
}
/**
* Determine if the DER-specific `00` byte at the start of an ECDSA signature byte sequence
* should be removed based on the following logic:
*
* "If the leading byte is 0x0, and the high order bit on the second byte is not set to 0,
* then remove the leading 0x0 byte"
*
* @return true if leading zero should be removed
*/
function shouldRemoveLeadingZero(bytes: Uint8Array) {
return bytes[0] === 0x0 && (bytes[1] & (1 << 7)) !== 0;
}
// from https://github.com/MasterKale/SimpleWebAuthn/blob/master/packages/server/src/helpers/iso/isoUint8Array.ts#L49
/**
* Combine multiple Uint8Arrays into a single Uint8Array
*
* @param arrays - Uint8Array[]
* @return Uint8Array
*/
function isoUint8ArrayConcat(arrays: Uint8Array[]) {
let pointer = 0;
const totalLength = arrays.reduce((prev, curr) => prev + curr.length, 0);
const toReturn = new Uint8Array(totalLength);
arrays.forEach((arr) => {
toReturn.set(arr, pointer);
pointer += arr.length;
});
return toReturn;
}
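As a rough self-check of the verification path above, the sketch below builds the integer-keyed COSE key map that `cborToKeys` expects (kty=EC2, alg=ES256, crv=P-256, x, y) and a DER-encoded ECDSA signature, which `unwrapEC2Signature` reduces to the raw r||s form that WebCrypto verifies. It is only a sketch under assumptions (Node's `generateKeyPairSync`/`sign`, and cbor-x decoding integer-keyed maps to plain objects, as the code above relies on); variable names are illustrative.
```typescript
import crypto from "crypto";
import { encode as cborEncode } from "cbor-x";
import { verifyPeerSignature } from "./didPeer";

// Generate a P-256 key pair and express the public key as a COSE_Key map.
const { publicKey, privateKey } = crypto.generateKeyPairSync("ec", {
  namedCurve: "P-256",
});
const jwk = publicKey.export({ format: "jwk" });
const coseKeyBytes = cborEncode(
  new Map<number, unknown>([
    [1, 2],                                          // kty: EC2
    [3, -7],                                         // alg: ES256
    [-1, 1],                                         // crv: P-256
    [-2, Buffer.from(jwk.x as string, "base64url")], // x coordinate (32 bytes)
    [-3, Buffer.from(jwk.y as string, "base64url")], // y coordinate (32 bytes)
  ]),
);

// Sign some bytes; Node produces an ASN.1/DER ECDSA signature by default,
// which is the format unwrapEC2Signature expects to unwrap.
const payload = new Uint8Array(Buffer.from("hello"));
const derSignature = crypto.sign("sha256", payload, privateKey);

verifyPeerSignature(payload, new Uint8Array(coseKeyBytes), new Uint8Array(derSignature))
  .then((ok) => console.log("verified:", ok)); // expected: true
```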

88
src/vc/index.ts

@@ -0,0 +1,88 @@
/**
* Verifiable Credential & DID functions, specifically for EndorserSearch.org tools
*
* The goal is to make this folder similar across projects, then move it to a library.
* Other projects: endorser-ch, crowd-funder-for-time-pwa
*
*/
import base64url from "base64url";
import didJwt from "did-jwt";
import {Resolver} from "did-resolver";
import {didEthLocalResolver} from "./did-eth-local-resolver";
import {verifyJwt as peerVerifyJwt} from "./passkeyDidPeer";
export const TEST_BYPASS_ENV_VALUE = "test-local";
export const ETHR_DID_PREFIX = 'did:ethr:'
export const PEER_DID_PREFIX = 'did:peer:'
export const JWT_VERIFY_FAILED_CODE = "JWT_VERIFY_FAILED_CODE"
export const UNSUPPORTED_DID_METHOD_CODE = "UNSUPPORTED_DID_METHOD"
const resolver = new Resolver({
'ethr': didEthLocalResolver
});
// return Promise of at least { issuer, payload, verified boolean }
// ... and also if successfully verified by did-jwt (not JWANT): data, doc, signature, signer
export async function decodeAndVerifyJwt(jwt: string) {
const pieces = jwt.split('.')
const header = JSON.parse(base64url.decode(pieces[0]))
const payload = JSON.parse(base64url.decode(pieces[1]))
const issuerDid = payload.iss
if (!issuerDid) {
return Promise.reject({
clientError: {
message: `Missing "iss" field in JWT.`,
}
})
}
if (issuerDid && issuerDid.startsWith(ETHR_DID_PREFIX) && process.env.NODE_ENV === TEST_BYPASS_ENV_VALUE) {
// Error of "Cannot read property 'toString' of undefined" usually means the JWT is malformed
// eg. no "." separators.
let nowEpoch = Math.floor(new Date().getTime() / 1000)
if (payload.exp < nowEpoch) {
console.log("JWT with exp " + payload.exp
+ " has expired but we're in test mode so we'll use a new time."
)
payload.exp = nowEpoch + 100
}
return { issuer: issuerDid, payload, verified: true } // other elements will = undefined
}
if (issuerDid.startsWith(ETHR_DID_PREFIX)) {
try {
let verified = await didJwt.verifyJWT(jwt, {resolver})
return verified
} catch (e) {
return Promise.reject({
clientError: {
message: `JWT failed verification: ` + e,
code: JWT_VERIFY_FAILED_CODE
}
})
}
}
if (issuerDid.startsWith(PEER_DID_PREFIX) && header.typ === "JWANT") {
const { claimPayload, verified } = await peerVerifyJwt(payload, issuerDid, pieces[2])
return { issuer: issuerDid, payload: claimPayload, verified: verified }
}
if (issuerDid.startsWith(PEER_DID_PREFIX)) {
return Promise.reject({
clientError: {
message: `JWT with a PEER DID is currently only supported with typ == JWANT. Contact us for JWT support since it should be straightforward.`
}
})
}
return Promise.reject({
clientError: {
message: `Unsupported DID method ${issuerDid}`,
code: UNSUPPORTED_DID_METHOD_CODE
}
})
}
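A short usage sketch (mirroring how `decodeJwt` in `src/server.ts` uses this function; the helper name is illustrative and assumes the file sits next to the `vc` folder):
```typescript
import { decodeAndVerifyJwt } from "./vc";

// Extract and verify the issuer DID from an Authorization header value.
export async function issuerFromAuthHeader(authHeader: string): Promise<string> {
  const jwt = authHeader.substring("Bearer ".length);
  try {
    const verified = await decodeAndVerifyJwt(jwt);
    if (!verified.verified) {
      throw new Error("JWT did not verify.");
    }
    return verified.issuer; // e.g. "did:ethr:0x..." or "did:peer:0z..."
  } catch (e: any) {
    // Malformed JWTs and unsupported DID methods reject with { clientError: { message, code? } }.
    throw new Error(e?.clientError?.message ?? String(e));
  }
}
```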

104
src/vc/passkeyDidPeer.ts

@@ -0,0 +1,104 @@
import crypto from "crypto";
import didJwt from "did-jwt";
import {PEER_DID_PREFIX, TEST_BYPASS_ENV_VALUE} from "./index";
import {verifyPeerSignature} from "./didPeer";
/**
*
* @param payload
* @param issuerDid
* @param signatureString
* @returns {Promise<{claimPayload: string, verified: boolean}>}
*/
export async function verifyJwt(payload: any, issuerDid: any, signatureString: any) {
if (!payload.iss) {
return Promise.reject({
clientError: {
message: `JWT is missing an "iss" field.`,
}
})
}
let nowEpoch = Math.floor(new Date().getTime() / 1000)
if (!payload.exp) {
return Promise.reject({
clientError: {
message: `JWT is missing an "exp" field.`,
}
})
}
if (payload.exp < nowEpoch && process.env.NODE_ENV !== TEST_BYPASS_ENV_VALUE) {
return Promise.reject({
clientError: {
message: `JWT with exp ${payload.exp} has expired.`,
}
});
}
const authData: string = payload.AuthenticationDataB64URL
const clientData: string = payload.ClientDataJSONB64URL
if (!authData || !clientData) {
return Promise.reject({
clientError: {
message: `JWT with typ == JWANT requires AuthenticationData and ClientDataJSON.`
}
})
}
const decodedAuthDataBuff = Buffer.from(authData, 'base64url')
const decodedClientData = Buffer.from(clientData, 'base64url')
let claimPayload = JSON.parse(decodedClientData.toString())
if (claimPayload.challenge) {
claimPayload = JSON.parse(Buffer.from(claimPayload.challenge, "base64url").toString())
if (!claimPayload.exp) {
claimPayload.exp = payload.exp
}
if (!claimPayload.iat) {
claimPayload.iat = payload.iat
}
if (!claimPayload.iss) {
claimPayload.iss = payload.iss
}
}
if (!claimPayload.exp) {
return Promise.reject({
clientError: {
message: `JWT client data challenge is missing an "exp" field.`,
}
})
}
if (claimPayload.exp < nowEpoch && process.env.NODE_ENV !== TEST_BYPASS_ENV_VALUE) {
return Promise.reject({
clientError: {
message: `JWT client data challenge exp time is past.`,
}
})
}
if (claimPayload.exp !== payload.exp) {
return Promise.reject({
clientError: {
message: `JWT client data challenge "exp" field doesn't match the outside payload "exp".`,
}
})
}
if (claimPayload.iss !== payload.iss) {
return Promise.reject({
clientError: {
message: `JWT client data challenge "iss" field doesn't match the outside payload "iss".`,
}
})
}
const hashedClientDataBuff = crypto.createHash("sha256")
.update(decodedClientData)
.digest()
const preimage = new Uint8Array(Buffer.concat([decodedAuthDataBuff, hashedClientDataBuff]))
const PEER_DID_MULTIBASE_PREFIX = PEER_DID_PREFIX + "0"
// Uint8Array
const publicKey = didJwt.multibaseToBytes(issuerDid.substring(PEER_DID_MULTIBASE_PREFIX.length));
const signature = new Uint8Array(Buffer.from(signatureString, 'base64url'))
const verified = await verifyPeerSignature(preimage, publicKey, signature)
return { claimPayload, verified }
}
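For reference, a sketch of the JWANT payload shape that `verifyJwt` above expects (field values are illustrative, not real credentials):
```typescript
// Outer JWT payload fields checked by verifyJwt.
export interface JwantPayload {
  iss: string;                      // "did:peer:0z...", a multibase-encoded passkey public key
  iat: number;                      // issued-at, seconds since epoch
  exp: number;                      // must match the "exp" inside the challenge below
  AuthenticationDataB64URL: string; // WebAuthn authenticatorData, base64url
  ClientDataJSONB64URL: string;     // WebAuthn clientDataJSON, base64url
}

// The clientDataJSON "challenge" is itself base64url-encoded JSON carrying the
// claim; verifyJwt copies exp/iat/iss into it when absent and requires that its
// "exp" and "iss" match the outer payload before verifying the signature.
```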

6
test/Makefile

@@ -0,0 +1,6 @@
# see ../Makefile.test
TESTS ?= \
test.sh
include ../Makefile.test

147
test/test.sh

@@ -0,0 +1,147 @@
#!/usr/bin/env bash
# Execute from the "test" directory so that the test files are available.
#
# We recommend you have the pkgx.dev tools installed.
# If you want to use your installed curl & jq & node, you can comment out the two "pkgx" commands.
HOST=http://localhost:3002
if ! [[ "$PWD" == */test ]]; then
echo "Error: Run this script in the 'test' directory."
exit 1
fi
# load the tools: curl, jq, node
eval "$(pkgx --shellcode)"
env +curl +jq +node
JWT_CODE_USER_0='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
JWT_CODE_USER_1='OWNER_DID="did:ethr:0x111d15564f824D56C7a07b913aA7aDd03382aA39"; OWNER_PRIVATE_KEY_HEX="be64d297e1c6f3545971cd0bc24c3bf32656f8639a2ae32cb84a1e3c75ad69cd"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
# exit as soon as anything fails
set -e
echo "Upload test0.png by user #0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test0.png" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
echo "User #0 uploaded file."
else
echo "User #0 failed to upload a file.";
exit 1
fi
echo "Download from the URL supplied"
URL0=$(echo $RESULT | jq -r '.url')
# -L to follow redirect because the returned URL is a timesafari.app URL
STATUS_CODE=$(curl -o test0-back.png -w "%{http_code}" -L $URL0);
if [ $STATUS_CODE -ne 200 ]; then
echo "File is not accessible, received status code: $STATUS_CODE";
fi
echo "Check that downloaded file is the same as the original"
if diff "test0.png" "test0-back.png" >/dev/null; then
echo "Got the same file."
else
echo "Did not get the same file."
exit 1
fi
echo "Upload test1.png by user #1"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" "$HOST/image")
echo curl result: $RESULT
URL2=$(echo $RESULT | jq -r '.url')
if [ "$URL0" != "$URL2" ]; then
echo "URLs 0 & 1 are different."
else
echo "URLs 0 & 1 are not different."
exit 1
fi
echo "Now fail to upload a change to the image by user 1"
FILENAME0=$(basename $URL0)
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "false" ]; then
echo "User #1 could not replace existing file."
else
echo "File may have been replaced wrongly.";
exit 1
fi
echo "Now successfully upload a change to the image by user 0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
echo "User #0 did replace file.";
else
echo "User #0 couldn't replace file.";
exit 1
fi
echo "Fail to remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ "$SUCCESS" = "false" ]; then
echo "Test file 0 was not cleaned off server."
else
echo "Test file 0 was cleaned off server.";
exit 1
fi
echo "Remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
echo "Test file 0 was cleaned off server."
else
echo "Test file 0 was not cleaned off server.";
exit 1
fi
echo "Remove test file 1 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F83801e59789f962ddd19dbf99abd65b416e4c6560c28bdb3e663cea045561b07.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
echo "Test file 1 was cleaned off server."
else
echo "Test file 1 was not cleaned off server.";
exit 1
fi
echo "Upload test2.png by did:peer user"
JWT="eyJ0eXAiOiJKV0FOVCIsImFsZyI6IkVTMjU2In0.eyJBdXRoZW50aWNhdGlvbkRhdGFCNjRVUkwiOiJTWllONVlnT2pHaDBOQmNQWkhaZ1c0X2tycm1paGpMSG1Wenp1b01kbDJNRkFBQUFBQSIsIkNsaWVudERhdGFKU09OQjY0VVJMIjoiZXlKMGVYQmxJam9pZDJWaVlYVjBhRzR1WjJWMElpd2lZMmhoYkd4bGJtZGxJam9pWlhsS01sbDVTVFpsZVVwcVkyMVdhMXBYTlRCaFYwWnpWVE5XYVdGdFZtcGtRMGsyWlhsS1FWa3lPWFZrUjFZMFpFTkpOa2x0YURCa1NFSjZUMms0ZG1NeVRtOWFWekZvVEcwNWVWcDVTWE5KYTBJd1pWaENiRWxxYjJsU01td3lXbFZHYW1SSGJIWmlhVWx6U1cxU2JHTXlUbmxoV0VJd1lWYzVkVWxxYjJsalIydzJaVzFGYVdaWU1ITkpiVlkwWTBOSk5rMVVZM2xOUkUwMFQwUlZNazE1ZDJsaFYwWXdTV3B2ZUU1NlNYZE5lbWMwVGxSQmVreERTbkJqTTAxcFQybEthMkZYVVRaalIxWnNZMnB2ZDJWcmRFNVNiWEF5Vmxka1dtTnJNSGhoUm1nelVrZE9kR0pVVWtOWmJtaE1XV3hLVDFWWFJsbFZhM0I1VVRGQ2FGRnNjREJSTWpsd1lVaE9UVlpHUWt0UmJrSnVWbGhTUkU5VmRHdFBXRUo1WldwR2RsWklSalJXTWxaMFVtMWFUMVl3VGs5YU1IaEdVMjVzVVU1RlduWlVSWFF3WWxjMVdHRkdSbmhaVlU1MVVXMVdiVll5T1hSU00wcFVVVlJPTWs1RFNqa2lMQ0p2Y21sbmFXNGlPaUpvZEhSd09pOHZiRzlqWVd4b2IzTjBPamd3T0RBaUxDSmpjbTl6YzA5eWFXZHBiaUk2Wm1Gc2MyVjkiLCJleHAiOjE3MjAzODg1NjMsImlhdCI6MTcyMDM4ODUwMywiaXNzIjoiZGlkOnBlZXI6MHpLTUZqdlVnWXJNMWhYd0RjbW00QmJ4S2JSTlFhWFJKckNQYUJadENvaWhzTFRQSkJwZ1V0QzlLZDlwcnoxb1RxeFdlbUZmTldDTmdMRUp5UDRGb0xLdG1uV2hRcWFDbkJlZldvbUdyU0EzdjQifQ.MEQCIAsMMNUcSjoxn0LZuE6FvZ6dsm-uQROeX3RPWt6QlRyPAiA670XdJXnLw8QFR9a6KCMt-qUyGZg88mMfT-1DtipcwA"
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test2.svg" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
echo "User #2 uploaded SVG file."
else
echo "User #2 failed to upload SVG file. Note that this may be because the server wasn't started with NODE_ENV=test-local which bypasses check of the exp date.";
exit 1
fi

BIN
test/test0.png

Binary file not shown (new file, 3.4 KiB).

BIN
test/test1.png

Binary file not shown (new file, 9.7 KiB).

86
test/test2.svg

@@ -0,0 +1,86 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="512.000000pt" height="512.000000pt" viewBox="0 0 512.000000 512.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,512.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M2480 4005 c-25 -7 -58 -20 -75 -29 -16 -9 -40 -16 -52 -16 -17 0
-24 -7 -28 -27 -3 -16 -14 -45 -24 -65 -21 -41 -13 -55 18 -38 25 13 67 13 92
-1 15 -8 35 -4 87 17 99 39 130 41 197 10 64 -29 77 -31 107 -15 20 11 20 11
-3 35 -12 13 -30 24 -38 24 -24 1 -132 38 -148 51 -8 7 -11 20 -7 32 12 37
-40 47 -126 22z"/>
<path d="M1450 3775 c-7 -8 -18 -15 -24 -15 -7 0 -31 -14 -54 -32 -29 -22 -38
-34 -29 -40 17 -11 77 -10 77 1 0 5 16 16 35 25 60 29 220 19 290 -18 17 -9
33 -16 37 -16 4 0 31 -15 60 -34 108 -70 224 -215 282 -353 30 -71 53 -190 42
-218 -10 -27 -23 -8 -52 75 -30 90 -88 188 -120 202 -13 6 -26 9 -29 6 -3 -2
11 -51 30 -108 28 -83 35 -119 35 -179 0 -120 -22 -127 -54 -17 -11 37 -13 21
-18 -154 -5 -180 -8 -200 -32 -264 -51 -132 -129 -245 -199 -288 -21 -12 -79
-49 -129 -80 -161 -102 -294 -141 -473 -141 -228 0 -384 76 -535 259 -81 99
-118 174 -154 312 -31 121 -35 273 -11 437 19 127 19 125 -4 125 -23 0 -51
-34 -87 -104 -14 -28 -33 -64 -41 -81 -19 -34 -22 -253 -7 -445 9 -106 12
-119 44 -170 19 -30 42 -67 50 -81 64 -113 85 -140 130 -169 28 -18 53 -44 61
-62 8 -20 36 -45 83 -76 62 -39 80 -46 151 -54 44 -5 96 -13 115 -18 78 -20
238 -31 282 -19 24 6 66 8 95 5 76 -9 169 24 319 114 32 19 80 56 106 82 27
26 52 48 58 48 5 0 27 26 50 58 48 66 56 70 132 71 62 1 165 29 238 64 112 55
177 121 239 245 37 76 39 113 10 267 -12 61 -23 131 -26 156 -5 46 -5 47 46
87 92 73 182 70 263 -8 l51 -49 -6 -61 c-4 -34 -13 -85 -21 -113 -28 -103 -30
-161 -4 -228 16 -44 32 -67 55 -83 18 -11 39 -37 47 -58 10 -23 37 -53 73 -81
32 -25 69 -57 82 -71 14 -14 34 -26 47 -26 12 0 37 -7 56 -15 20 -8 66 -17
104 -20 107 -10 110 -11 150 -71 50 -75 157 -177 197 -187 18 -5 53 -24 78
-42 71 -51 176 -82 304 -89 61 -4 127 -12 147 -18 29 -9 45 -8 77 6 23 9 50
16 60 16 31 0 163 46 216 76 28 15 75 46 105 69 30 23 69 49 85 58 17 8 46 31
64 51 19 20 40 36 47 36 18 0 77 70 100 120 32 66 45 108 55 173 5 32 16 71
24 87 43 84 43 376 0 549 -27 105 -43 127 -135 188 -30 21 -65 46 -77 57 -13
11 -23 17 -23 14 0 -3 21 -46 47 -94 79 -151 85 -166 115 -263 25 -83 28 -110
28 -226 0 -144 -17 -221 -75 -335 -39 -77 -208 -244 -304 -299 -451 -263 -975
-67 -1138 426 -23 70 -26 95 -28 254 -1 108 -7 183 -14 196 -6 12 -11 31 -11
43 0 32 31 122 52 149 10 13 18 28 18 34 0 5 25 40 56 78 60 73 172 170 219
190 30 12 30 13 6 17 -15 2 -29 -2 -37 -12 -6 -9 -16 -16 -22 -16 -6 0 -23
-11 -39 -24 -15 -12 -33 -25 -40 -27 -17 -6 -82 -60 -117 -97 -65 -70 -75 -82
-107 -133 -23 -34 -35 -46 -37 -35 -3 16 20 87 44 134 6 12 9 34 6 48 -4 22
-8 25 -31 19 -14 -3 -38 -15 -53 -26 -34 -24 -34 -21 -6 28 65 112 184 206
291 227 15 3 39 9 55 12 l27 6 -24 9 c-90 35 -304 -66 -478 -225 -39 -36 -74
-66 -77 -66 -22 0 18 82 72 148 19 23 32 46 28 49 -4 4 -26 13 -49 19 -73 21
-161 54 -171 64 -6 6 -20 10 -32 10 -21 0 -21 -1 -8 -40 45 -130 8 -247 -93
-299 -25 -13 -31 0 -14 29 15 22 1 33 -22 17 -56 -36 -117 -22 -117 28 0 13
-16 47 -35 76 -22 34 -33 60 -29 73 4 16 -3 26 -26 39 -16 10 -30 21 -30 25 1
18 54 64 87 76 l38 13 -33 5 c-30 4 -115 -18 -154 -42 -13 -7 -20 -5 -27 8 -9
16 -12 16 -53 1 -160 -61 -258 -104 -258 -114 0 -7 10 -20 21 -31 103 -91 217
-297 249 -449 28 -135 41 -237 35 -276 -14 -91 -48 -170 -97 -220 -44 -47 -68
-60 -68 -40 0 6 4 12 8 15 5 3 24 35 42 72 l33 67 -6 141 c-4 103 -11 158 -26
205 -12 35 -21 70 -21 77 0 7 -20 56 -45 108 -82 173 -227 322 -392 401 -67
33 -90 39 -163 42 -108 5 -130 10 -130 28 0 20 -63 20 -80 0z"/>
<path d="M3710 3765 c0 -20 8 -28 39 -41 22 -8 42 -22 45 -30 5 -14 42 -19 70
-8 10 4 -7 21 -58 55 -41 27 -79 49 -85 49 -6 0 -11 -11 -11 -25z"/>
<path d="M3173 3734 c-9 -25 10 -36 35 -18 12 8 22 19 22 25 0 16 -50 10 -57
-7z"/>
<path d="M1982 3728 c6 -16 36 -34 44 -26 3 4 4 14 1 23 -7 17 -51 21 -45 3z"/>
<path d="M1540 3620 c0 -5 7 -10 16 -10 8 0 12 5 9 10 -3 6 -10 10 -16 10 -5
0 -9 -4 -9 -10z"/>
<path d="M4467 3624 c-4 -4 23 -27 60 -50 84 -56 99 -58 67 -9 -28 43 -107 79
-127 59z"/>
<path d="M655 3552 c-11 -2 -26 -9 -33 -14 -7 -6 -27 -18 -45 -27 -36 -18 -58
-64 -39 -83 9 -9 25 1 70 43 53 48 78 78 70 84 -2 1 -12 -1 -23 -3z"/>
<path d="M1015 3460 c-112 -24 -247 -98 -303 -165 -53 -65 -118 -214 -136
-311 -20 -113 -20 -145 -1 -231 20 -88 49 -153 102 -230 79 -113 186 -182 331
-214 108 -24 141 -24 247 1 130 30 202 72 316 181 102 100 153 227 152 384 0
142 -58 293 -150 395 -60 67 -180 145 -261 171 -75 23 -232 34 -297 19z m340
-214 c91 -43 174 -154 175 -234 0 -18 -9 -51 -21 -73 -19 -37 -19 -42 -5 -64
35 -54 12 -121 -48 -142 -22 -7 -47 -19 -55 -27 -9 -8 -41 -27 -71 -42 -50
-26 -64 -29 -155 -29 -111 0 -152 14 -206 68 -49 49 -63 85 -64 162 0 59 4 78
28 118 31 52 96 105 141 114 23 5 33 17 56 68 46 103 121 130 225 81z"/>
<path d="M3985 3464 c-44 -7 -154 -44 -200 -67 -55 -28 -138 -96 -162 -132
-10 -16 -39 -75 -64 -130 l-44 -100 0 -160 0 -160 45 -90 c53 -108 152 -214
245 -264 59 -31 215 -71 281 -71 53 0 206 40 255 67 98 53 203 161 247 253 53
113 74 193 74 280 -1 304 -253 564 -557 575 -49 2 -103 1 -120 -1z m311 -220
c129 -68 202 -209 160 -309 -15 -35 -15 -42 -1 -72 26 -55 -3 -118 -59 -129
-19 -3 -43 -15 -53 -26 -26 -29 -99 -64 -165 -78 -45 -10 -69 -10 -120 -1 -74
15 -113 37 -161 91 -110 120 -50 331 109 385 24 8 44 23 52 39 6 14 18 38 25
53 33 72 127 93 213 47z"/>
<path d="M487 3394 c-21 -12 -27 -21 -25 -40 2 -14 7 -26 12 -27 14 -3 48 48
44 66 -3 14 -6 14 -31 1z"/>
</g>
</svg>

(New file, 5.6 KiB)

114
tsconfig.json

@@ -0,0 +1,114 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "commonjs", /* Specify what module code is generated. */
// "rootDir": "./", /* Specify the root folder within your source files. */
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
"outDir": "./dist", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
// "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
/* Type Checking */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
},
"include": [
"src"
],
"exclude": [
"node_modules"
]
}