
fix multiple-user check, add tests, use local resolver for did:ethr

pull/1/head
Trent Larson 4 months ago
parent
commit
2f7d46569e
  1. .env.sample (3 lines changed)
  2. CHANGELOG.md (21 lines changed)
  3. Dockerfile (4 lines changed)
  4. Makefile.test (103 lines changed)
  5. README.md (17 lines changed)
  6. package.json (15 lines changed)
  7. pnpm-lock.yaml (1927 lines changed)
  8. src/server.js (82 lines changed)
  9. src/vc/did-eth-local-resolver.js (47 lines changed)
  10. src/vc/index.js (7 lines changed)
  11. test/Makefile (6 lines changed)
  12. test/test.sh (123 lines changed)
  13. test/test0.png (binary)
  14. test/test1.png (binary)

3
.env.sample

@@ -1,3 +1,4 @@
# shellcheck disable=SC2034
# These settings work for American Cloud.
#S3_ACCESS_KEY=???
@@ -19,8 +20,6 @@ S3_SET_ACL=false
#ENDORSER_API_URL=https://test-api.endorser.ch
#ENDORSER_API_URL=https://api.endorser.ch
INFURA_PROJECT_ID=???
# host where the final image can be accessed by the public
# default is https://test-image.timesafari.app
#DOWNLOAD_IMAGE_SERVER=test-image.timesafari.app

21
CHANGELOG.md

@@ -0,0 +1,21 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Replacement of an existing file
- Local resolver for did:ethr
- Testing for file deletion
### Fixed
- Incorrect check for others who recorded same image
### Changed in DB or environment
- Nothing
## [1.0.0]
### Added
- All endpoints: image POST & DELETE, image-limits, ping

4
Dockerfile

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1
FROM node:21-alpine
FROM node:22-alpine
ARG IMAGE_API_VERSION
RUN npm install -g pnpm
RUN apk add git
@@ -10,4 +10,4 @@ WORKDIR image-api
RUN git checkout $IMAGE_API_VERSION
RUN pnpm install --prod
CMD node server.js
CMD node src/server.js

103
Makefile.test

@@ -0,0 +1,103 @@
# from https://github.com/box/Makefile.test
# `make -C test -j`
# Makefile that has a convenient check target.
# It can be included from another Makefile that only has a TESTS variable
# defined like this
#
# TESTS ?=
#
# Runs the specified test executables. Prepends the test's name to each test's output
# and gives a nice summary at the end of test execution about passed and failed
# tests.
# Only bash is supported
SHELL := /bin/bash
THIS_FILE := $(realpath $(lastword $(MAKEFILE_LIST)))
# The directory where Makefile.test (this file) resides
THIS_FILE_DIR := $(shell dirname $(THIS_FILE))
# FIRST_MAKEFILE may be passed from parent make to child make. If it is
# already set, do not overwrite it.
FIRST_MAKEFILE ?= $(realpath $(firstword $(MAKEFILE_LIST)))
export FIRST_MAKEFILE
# The directory where the Makefile that is invoked from the command line
# resides. That makefile would define the TESTS variable. We assume that the
# binaries defined in the TESTS variable also reside in the same directory as
# the Makefile. The generated intermediate files will also go to this directory.
FIRST_MAKEFILE_DIR ?= $(shell dirname $(FIRST_MAKEFILE))
export FIRST_MAKEFILE_DIR
# So that the child makefiles can see the same TESTS variable.
export TESTS
failedTestsName := .makefile_test_failed_tests
executedTestsName := .makefile_test_executed_tests
TEST_TARGETS := $(TESTS:%=TARGET_FOR_%)
export TEST_TARGETS
# If the tests need a different environment, one can append to this variable.
TEST_ENVIRONMENT = PYTHONPATH=$(THIS_FILE_DIR):$$PYTHONPATH PATH=$(THIS_FILE_DIR):$$PATH
# TODO: Only write to intermediate files, if they exist already.
# https://unix.stackexchange.com/q/405497/212862
# There is still a race condition here. Maybe we should use sed for appending.
define RUN_ONE_TEST
TARGET_FOR_$(1): $$(FIRST_MAKEFILE_DIR)/$(1)
+@export PATH=$$$$(pwd):$$$$PATH; \
if [ -e $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName) ]; then \
echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName); \
fi; \
$$(TEST_ENVIRONMENT) $$< 2>&1 | sed "s/^/ [$$$$(basename $$<)] /"; test $$$${PIPESTATUS[0]} -eq 0; \
if [ $$$$? -eq 0 ]; then \
echo " PASSED: $$$$(basename $$<)"; \
else \
echo " FAILED: $$$$(basename $$<)"; \
if [ -e $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName) ]; then \
echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName); \
fi; \
fi;
endef
# Build the above rule to run one test, for all tests.
$(foreach currtest,$(TESTS),$(eval $(call RUN_ONE_TEST,$(currtest))))
# execute the tests and look at the generated temp files afterwards.
actualCheck: $(TEST_TARGETS)
+@failed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(failedTestsName) 2> /dev/null | wc -l;); \
executed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(executedTestsName) 2> /dev/null | wc -l;); \
if [ $$failed_tests -ne 0 -a $$executed_tests -ne 0 ]; then \
echo ---------------------------------; \
echo "Failed $$failed_tests out of $$executed_tests tests"; \
echo ---------------------------------; \
elif [ $$failed_tests -eq 0 ]; then \
echo ---------------------------------; \
echo "All $$executed_tests tests passed"; \
echo ---------------------------------; \
fi; \
exit $$failed_tests;
# A commonly used bash command to clean intermediate files. Instead of writing
# it every time re-use this variable.
RM_INTERMEDIATE_FILES := rm -f $(FIRST_MAKEFILE_DIR)/$(failedTestsName) $(FIRST_MAKEFILE_DIR)/$(executedTestsName)
# At the start of the make, we want to start with empty intermediate files.
TRUNCATE_INTERMEDIATE_FILES := cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(failedTestsName) && cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(executedTestsName)
# With trap, make sure the clean step is always executed before and after the
# tests run. Do not leave residual files in the repo.
check:
+@trap "code=\$$?; \
$(RM_INTERMEDIATE_FILES); \
exit \$${code};" EXIT; \
$(TRUNCATE_INTERMEDIATE_FILES); \
$(MAKE) -f $(THIS_FILE) actualCheck;
all: check
.PHONY: all check preCheck actualCheck $(TEST_TARGETS)
.DEFAULT_GOAL := all

17
README.md

@@ -13,23 +13,29 @@ mkdir uploads
pnpm run migrate
```
Now set up an S3 bucket & Infura project, and create a .env file from .env.sample with these important settings:
Now set up an S3 bucket, and create a .env file from .env.sample with these important settings:
```
AWS_ACCESS_KEY=
AWS_SECRET_KEY=
AWS_REGION=
INFURA_PROJECT_ID=
```
## dev
```
node server.js
node src/server.js
```
## test
#### automated
```shell
make -C test -j
```
#### manual
```shell
# run this first command in a directory where `npm install did-jwt` has been run
CODE='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
@@ -37,9 +43,6 @@ JWT=`node -e "$CODE"`; curl -X POST -H "Authorization: Bearer $JWT" -F "image=@.
JWT=`node -e "$CODE"`; curl -X DELETE -H "Authorization: Bearer $JWT" http://localhost:3001/image/https%3A%2F%2Fgifts-image-test.s3.amazonaws.com%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png
```
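For comparison with the curl commands above, here is a hypothetical Node client sketch; the port, field names, and helper name are assumptions taken from these README examples and the multer setup in src/server.js, not part of the service itself. Passing the fileName returned from an earlier upload asks the server to replace that file; omitting it uploads a new one.
```js
// Hypothetical sketch (Node 18+ for global fetch/FormData/Blob), not shipped code.
const fs = require('fs');

async function uploadImage(jwt, filePath, fileName) {
  const form = new FormData();
  form.append('image', new Blob([fs.readFileSync(filePath)]), filePath);
  if (fileName) {
    form.append('fileName', fileName); // replace a file this DID uploaded earlier
  }
  const res = await fetch('http://localhost:3001/image', {
    method: 'POST',
    headers: { Authorization: `Bearer ${jwt}` },
    body: form,
  });
  return res.json(); // e.g. { success: true, url: 'https://...' }
}
```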
https://github.com/box/Makefile.test
`make -C test -j`
## deploy to prod first time
* Do the necessary steps from "setup" above, or `docker build` it.

15
package.json

@@ -1,18 +1,16 @@
{
"name": "Images for Trade",
"version": "0.0.1",
"version": "1.2.0-beta",
"description": "",
"license": "UNLICENSED",
"dependencies": {
"@aws-sdk/client-s3": "^3.521.0",
"@aws-sdk/lib-storage": "^3.521.0",
"@aws-sdk/client-s3": "^3.614.0",
"@aws-sdk/lib-storage": "^3.614.0",
"cors": "^2.8.5",
"did-jwt": "^8.0.1",
"did-jwt": "^8.0.4",
"did-resolver": "^4.1.0",
"dotenv": "^16.4.5",
"ethr-did-resolver": "^10.1.5",
"express": "^4.18.2",
"express": "^4.19.2",
"luxon": "^3.4.4",
"multer": "1.4.5-lts.1",
"sqlite3": "^5.1.7"
@@ -21,6 +19,7 @@
"flywaydb-cli": "^0.9.0"
},
"scripts": {
"migrate": "flyway -configFiles=sql/flyway.conf migrate"
"migrate": "flyway -configFiles=sql/flyway.conf migrate",
"start": "node src/server.js"
}
}

1927
pnpm-lock.yaml

File diff suppressed because it is too large

82
server.js → src/server.js

@@ -4,13 +4,14 @@ const crypto = require('crypto');
const didJwt = require('did-jwt');
const { Resolver } = require('did-resolver');
const express = require('express');
const { getResolver } = require('ethr-did-resolver');
const fs = require('fs');
const { DateTime } = require('luxon');
const multer = require('multer');
const path = require('path');
const sqlite3 = require('sqlite3').verbose();
const { didEthLocalResolver } = require("./vc/did-eth-local-resolver");
require('dotenv').config()
const app = express();
@@ -22,13 +23,7 @@ const dbFile = process.env.SQLITE_FILE || './image-db.sqlite';
const bucketName = process.env.S3_BUCKET_NAME || 'gifts-image-test';
const imageServer = process.env.DOWNLOAD_IMAGE_SERVER || 'test-image.timesafari.app';
const ethrDidResolver = getResolver;
const resolver =
new Resolver({
...ethrDidResolver({
infuraProjectId: process.env.INFURA_PROJECT_ID || 'fake-infura-project-id'
})
})
const resolver = new Resolver({ 'ethr': didEthLocalResolver });
// Open a connection to the SQLite database
const db = new sqlite3.Database(dbFile, (err) => {
@@ -54,20 +49,25 @@ const uploadDir = 'uploads';
const uploadMulter = multer({ dest: uploadDir + '/' });
app.get('/ping', async (req, res) => {
res.send('pong - v 1.1.0'); // version
res.send('pong - v 0.0.1'); // version
});
app.get('/image-limits', async (req, res) => {
limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
try {
limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
}
return res.status(200).send(JSON.stringify({
success: true,
doneImagesThisWeek: limitsResult.doneImagesThisWeek,
maxImagesPerWeek: limitsResult.maxImagesPerWeek,
nextWeekBeginDateTime: limitsResult.nextWeekBeginDateTime
}));
} catch (e) {
console.error('Error getting image limits:', e, ' ... with this string: ' + e);
return res.status(500).send(JSON.stringify({ success: false, message: 'Got this error retrieving limits: ' + e }));
}
return res.status(200).send(JSON.stringify({
success: true,
doneImagesThisWeek: limitsResult.doneImagesThisWeek,
maxImagesPerWeek: limitsResult.maxImagesPerWeek,
nextWeekBeginDateTime: limitsResult.nextWeekBeginDateTime
}));
});
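The reshaped handler above wraps the limit lookup in a try/catch and reports failures as a 500 with a JSON body. A hypothetical client call might look like the following sketch (host and port are assumptions taken from the README dev instructions):
```js
// Sketch only: read the caller's weekly image limits (Node 18+ global fetch).
async function getImageLimits(jwt) {
  const res = await fetch('http://localhost:3001/image-limits', {
    headers: { Authorization: `Bearer ${jwt}` },
  });
  const limits = await res.json();
  // Expected shape on success, per the handler above:
  // { success: true, doneImagesThisWeek, maxImagesPerWeek, nextWeekBeginDateTime }
  return limits;
}
```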
/**
@@ -84,16 +84,16 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
if (reqFile == null) {
return res.status(400).send(JSON.stringify({ success: false, message: 'No file uploaded.' }));
}
if (reqFile.size > 10485760) { // 10MB
fs.rm(reqFile.path, (err) => {
if (err) {
console.error("Error deleting too-large temp file", reqFile.path, "with error (but continuing):", err);
}
});
return res.status(400).send(JSON.stringify({success: false, message: 'File size is too large. Maximum file size is 10MB.'}));
}
try {
if (reqFile.size > 10485760) { // 10MB
fs.rm(reqFile.path, (err) => {
if (err) {
console.error("Error deleting too-large temp file", reqFile.path, "with error (but continuing):", err);
}
});
return res.status(400).send(JSON.stringify({success: false, message: 'File size is too large. Maximum file size is 10MB.'}));
}
limitsResult = await retrievelimits(req, res);
if (!limitsResult.success) {
return limitsResult.result;
@@ -113,13 +113,20 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
try {
let finalFileName;
if (req.body.fileName) {
// replacement file name given
finalFileName = req.body.fileName;
// check if the file to replace was sent by this user earlier
const didForOriginal = await new Promise((resolve, reject) => {
// For some reason, this prepared-statement SQL gives seg fault: "SELECT did FROM image WHERE did = ? and final_file = ?"
if (issuerDid.indexOf("'") >= 0 || finalFileName.indexOf("'") >= 0) {
console.error("Error: SQL injection attempt with", issuerDid, finalFileName);
return res.status(400).send(JSON.stringify({ success: false, message: 'SQL injection attempt detected.' }));
}
const sql = "SELECT did FROM image WHERE did = '" + issuerDid + "' and final_file = '" + finalFileName + "'";
db.get(
'SELECT did FROM image WHERE did = ? and final_file = ?'
[ finalFileName, issuerDid ],
sql,
[],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for user from database:', dbErr)
@@ -137,7 +144,7 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
const othersWhoSentImage = await new Promise((resolve, reject) => {
db.get(
'SELECT did FROM image WHERE final_file = ? and did != ?',
[ url, issuerDid ],
[ finalFileName, issuerDid ],
(dbErr, row) => {
if (dbErr) {
console.error(currentDate, 'Error getting image for other users from database:', dbErr)
@@ -186,6 +193,7 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
);
});
} else {
// no replacement file name given so it's a new file
const hashSum = crypto.createHash('sha256');
hashSum.update(data);
const hashHex = hashSum.digest('hex');
@@ -269,7 +277,7 @@ app.post('/image', uploadMulter.single('image'), async (req, res) => {
});
// AWS URL: https://gifts-image-test.s3.amazonaws.com/gifts-image-test/FILE
// American Cloud URL: https://a2-west.americancloud.com/TENANT:giftsimagetest/FILE
return res.status(201).send(JSON.stringify({success: true, url: finalUrl}));
return res.status(201).send({success: true, url: finalUrl});
}
} catch (uploadError) {
const errorTime = new Date().toISOString();
@@ -323,7 +331,7 @@ app.delete('/image/:url', async (req, res) => {
});
if (!thisUserImageFile) {
console.error('No image entry found for user', issuerDid, '& URL', url, 'so returning 404.');
return res.status(404).send(JSON.stringify({ success: false, message: 'No image entry found for user ' + issuerDid + ' & URL ' + url }));
return res.status(404).send({ success: false, message: 'No image entry found for user ' + issuerDid + ' & URL ' + url });
}
// check if any other user recorded this image
@@ -354,10 +362,10 @@ app.delete('/image/:url', async (req, res) => {
&& response.$metadata.httpStatusCode !== 204) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error deleting from S3 with bad HTTP status, with metadata:", response.$metadata);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got bad status of " + response.$metadata.httpStatusCode + " from S3. See server logs at " + errorTime
}));
});
}
}
@@ -377,14 +385,14 @@ app.delete('/image/:url', async (req, res) => {
}
);
});
return res.status(204).send(JSON.stringify({ success: true }));
return res.status(204).send({ success: true });
} catch (error) {
const errorTime = new Date().toISOString();
console.error(errorTime, "Error processing image delete:", error);
return res.status(500).send(JSON.stringify({
return res.status(500).send({
success: false,
message: "Got error processing image delete. See server logs at " + errorTime + " Error Details: " + error
}));
});
}
});

47
src/vc/did-eth-local-resolver.js

@@ -0,0 +1,47 @@
const { DIDResolutionResult } = require('did-resolver');
/**
* This did:ethr resolver instructs the did-jwt machinery to use the
 * EcdsaSecp256k1RecoveryMethod2020 verification method, which uses the recovery bit
 * in the signature to recover the DID's public key from that signature.
*
* This effectively hard codes the did:ethr DID resolver to use the address as the public key.
* @param did : string
* @returns {Promise<DIDResolutionResult>}
*
* Similar code resides in endorser-ch
*/
const didEthLocalResolver = async(did) => {
const didRegex = /^did:ethr:(0x[0-9a-fA-F]{40})$/;
const match = did.match(didRegex);
if (match) {
const address = match[1]; // Extract eth address: 0x...
const publicKeyHex = address; // Use the address directly as a public key placeholder
return {
didDocumentMetadata: {},
didResolutionMetadata: {
contentType: "application/did+ld+json"
},
didDocument: {
'@context': [
'https://www.w3.org/ns/did/v1',
"https://w3id.org/security/suites/secp256k1recovery-2020/v2"
],
id: did,
verificationMethod: [{
id: `${did}#controller`,
type: 'EcdsaSecp256k1RecoveryMethod2020',
controller: did,
blockchainAccountId: "eip155:1:" + publicKeyHex,
}],
authentication: [`${did}#controller`],
assertionMethod: [`${did}#controller`],
},
};
}
throw new Error(`Unsupported DID format: ${did}`);
};
module.exports = { didEthLocalResolver };
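A minimal usage sketch, assuming the same wiring as src/server.js: with this resolver, verifying a did:ethr JWT needs no Infura project or network call, because the Ethereum address inside the DID serves as the verification key material.
```js
// Sketch only: verify a Bearer JWT against the local did:ethr resolver.
const didJwt = require('did-jwt');
const { Resolver } = require('did-resolver');
const { didEthLocalResolver } = require('./did-eth-local-resolver');

const resolver = new Resolver({ ethr: didEthLocalResolver });

async function issuerFromJwt(jwt) {
  // did-jwt recovers the signer's address from the secp256k1 signature and
  // checks it against the blockchainAccountId in the DID document above.
  const verified = await didJwt.verifyJWT(jwt, { resolver });
  return verified.issuer; // e.g. did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F
}
```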

7
src/vc/index.js

@@ -0,0 +1,7 @@
/**
* Verifiable Credential & DID functions, specifically for EndorserSearch.org tools
*
* The goal is to make this folder similar across projects, then move it to a library.
* Other projects: endorser-ch, crowd-funder-for-time-pwa
*
*/

6
test/Makefile

@@ -0,0 +1,6 @@
# see ../Makefile.test
TESTS ?= \
test.sh
include ../Makefile.test

123
test/test.sh

@@ -1,18 +1,127 @@
# requires node & curl & jq
#!/usr/bin/env bash
# Execute from the "test" directory so that the test files are available.
#
# We recommend you have the pkgx.dev tools installed.
# If you want to use your installed curl & jq & node, you can comment out the two "pkgx" commands.
HOST=http://localhost:3002
CODE='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
if ! [[ "$PWD" == */test ]]; then
echo "Error: Run this script in the 'test' directory."
exit 1
fi
# load the tools: curl, jq, node
eval "$(pkgx --shellcode)"
env +curl +jq +node
JWT_CODE_USER_0='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
JWT_CODE_USER_1='OWNER_DID="did:ethr:0x111d15564f824D56C7a07b913aA7aDd03382aA39"; OWNER_PRIVATE_KEY_HEX="be64d297e1c6f3545971cd0bc24c3bf32656f8639a2ae32cb84a1e3c75ad69cd"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
# exit as soon as anything fails
set -e
JWT=$(node -e "$CODE")
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" $HOST/image)
echo $RESULT
URL=$(echo $RESULT | jq -r '.url')
echo "Upload test0.png by user #0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test0.png" "$HOST/image")
echo curl result: $RESULT
STATUS_CODE=$(curl -o out-test1.png -w "%{http_code}" $URL);
echo "Download from the URL supplied"
URL0=$(echo $RESULT | jq -r '.url')
# -L to follow redirect because the returned URL is a timesafari.app URL
STATUS_CODE=$(curl -o test0-back.png -w "%{http_code}" -L $URL0);
if [ $STATUS_CODE -ne 200 ]; then
echo "File is not accessible, received status code: $STATUS_CODE";
fi
echo "Check that downloaded file is the same as the original"
if diff "test0.png" "test0-back.png" >/dev/null; then
echo "Got the same file."
else
echo "Did not get the same file."
exit 1
fi
echo "Upload test1.png by user #1"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" "$HOST/image")
echo curl result: $RESULT
URL2=$(echo $RESULT | jq -r '.url')
if [ "$URL0" != "$URL2" ]; then
echo "URLs 0 & 1 are different."
else
echo "URLs 0 & 1 are not different."
exit 1
fi
echo "Now unsuccessfully upload a change to the image by user 1"
FILENAME0=$(basename $URL0)
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "false" ]; then
echo "User #1 could not replace existing file."
else
echo "File may have been replaced wrongly.";
exit 1
fi
echo "Now successfully upload a change to the image by user 0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
echo "User #0 did replace file.";
else
echo "User #0 couldn't replace file.";
exit 1
fi
echo "Fail to remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ "$SUCCESS" = "false" ]; then
echo "Test file 0 was not cleaned off server."
else
echo "Test file 0 was cleaned off server.";
exit 1
fi
echo "Remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
echo "Test file 0 was cleaned off server."
else
echo "Test file 0 was not cleaned off server.";
exit 1
fi
echo "Remove test file 1 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F83801e59789f962ddd19dbf99abd65b416e4c6560c28bdb3e663cea045561b07.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
echo "Test file 1 was cleaned off server."
else
echo "Test file 1 was not cleaned off server.";
exit 1
fi

BIN
test/test0.png

Binary file not shown (added, 3.4 KiB).

BIN
test/test1.png

Binary file not shown (added, 9.7 KiB).
