Compare commits
19 Commits
Author | SHA1 | Date |
---|---|---|
Trent Larson | bb83982bc7 | 4 months ago |
Trent Larson | 1e61c55e5b | 4 months ago |
Trent Larson | 7a2551cbae | 4 months ago |
Trent Larson | 947f307305 | 4 months ago |
Trent Larson | a23f6d10ad | 4 months ago |
Trent Larson | 8228970116 | 4 months ago |
Trent Larson | 2d5a09de00 | 4 months ago |
Trent Larson | 400a4a1e06 | 4 months ago |
Trent Larson | 8750c78897 | 4 months ago |
Trent Larson | 653a351653 | 4 months ago |
Trent Larson | f4345fe2b9 | 4 months ago |
Trent Larson | deedbefcad | 4 months ago |
Trent Larson | 2d5fe40971 | 4 months ago |
Trent Larson | 537add2488 | 4 months ago |
Trent Larson | dcf539eaa0 | 4 months ago |
trentlarson | 86109cf44b | 4 months ago |
Trent Larson | 2f7d46569e | 4 months ago |
Trent Larson | 127244731d | 6 months ago |
Trent Larson | 6cd4a5a988 | 7 months ago |
19 changed files with 2476 additions and 1086 deletions
@@ -0,0 +1,29 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Changed in DB or environment
- Nothing


## [1.2.3] - 2024.07.18 - 947f307305d1ad583c5ec7c61fe178fa45490adf
### Added
- Replacement of an existing file
- Local resolver for did:ethr
- Validation of did:peer JWANT
- Testing for file deletion
### Fixed
- Incorrect check for others who recorded same image
### Changed
- Dockerfile uses a builder image
### Changed in DB or environment
- New SQL migration (for the new file deletion feature)


## [1.0.0]
### Added
- All endpoints: image POST & DELETE, image-limits, ping
@@ -0,0 +1,19 @@
# syntax=docker/dockerfile:1

FROM node:22-alpine AS builder
ARG IMAGE_API_VERSION
RUN npm install -g pnpm
RUN apk add git
RUN git clone https://gitea.anomalistdesign.com/log-trade/image-api.git
WORKDIR image-api
RUN git checkout $IMAGE_API_VERSION
# dev dependencies like TypeScript are needed to build
RUN pnpm install
RUN pnpm build
RUN pnpm install --prod

FROM node:22-alpine
COPY --from=builder /image-api/dist /image-api/dist
COPY --from=builder /image-api/node_modules /image-api/node_modules
WORKDIR image-api
CMD node dist/server.js
@@ -0,0 +1,103 @@
# from https://github.com/box/Makefile.test
# `make -C test -j`

# Makefile that has a convenient check target.
# It can be included from another Makefile that only has a TESTS variable
# defined like this
#
# TESTS ?=
#
# Runs the specified test executables. Prepends the test's name to each test's output
# and gives a nice summary at the end of test execution about passed failed
# tests.

# Only bash is supported
SHELL := /bin/bash

THIS_FILE := $(realpath $(lastword $(MAKEFILE_LIST)))
# The directory where Makefile.test (this file) resides
THIS_FILE_DIR := $(shell dirname $(THIS_FILE))

# FIRST_MAKEFILE may be passed from parent make to child make. If it is not
# absent, do not overwrite it.
FIRST_MAKEFILE ?= $(realpath $(firstword $(MAKEFILE_LIST)))
export FIRST_MAKEFILE

# The directory where the Makefile, that is invoked from the command line,
# resides. That makefile would define the TESTS variable. We assume that the
# binaries defined in the TESTS variable also reside in the directory as
# the Makefile. The generated intermediate files will also go to this directory.
FIRST_MAKEFILE_DIR ?= $(shell dirname $(FIRST_MAKEFILE))
export FIRST_MAKEFILE_DIR

# So that the child makefiles can see the same TESTS variable.
export TESTS

failedTestsName := .makefile_test_failed_tests
executedTestsName := .makefile_test_executed_tests
TEST_TARGETS := $(TESTS:%=TARGET_FOR_%)
export TEST_TARGETS

# If the tests need a different environment one can append to this variable.
TEST_ENVIRONMENT = PYTHONPATH=$(THIS_FILE_DIR):$$PYTHONPATH PATH=$(THIS_FILE_DIR):$$PATH

# TODO: Only write to intermediate files, if they exist already.
# https://unix.stackexchange.com/q/405497/212862
# There is still a race condition here. Maybe we should use sed for appending.
define RUN_ONE_TEST
TARGET_FOR_$(1): $$(FIRST_MAKEFILE_DIR)/$(1)
	+@export PATH=$$$$(pwd):$$$$PATH; \
	if [ -e $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName) ]; then \
		echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(executedTestsName); \
	fi; \
	$$(TEST_ENVIRONMENT) $$< 2>&1 | sed "s/^/ [$$$$(basename $$<)] /"; test $$$${PIPESTATUS[0]} -eq 0; \
	if [ $$$$? -eq 0 ]; then \
		echo " PASSED: $$$$(basename $$<)"; \
	else \
		echo " FAILED: $$$$(basename $$<)"; \
		if [ -e $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName) ]; then \
			echo $$< >> $$(FIRST_MAKEFILE_DIR)/$$(failedTestsName); \
		fi; \
	fi;
endef

# Build the above rule to run one test, for all tests.
$(foreach currtest,$(TESTS),$(eval $(call RUN_ONE_TEST,$(currtest))))

# execute the tests and look at the generated temp files afterwards.
actualCheck: $(TEST_TARGETS)
	+@failed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(failedTestsName) 2> /dev/null | wc -l;); \
	executed_tests=$$(cat $(FIRST_MAKEFILE_DIR)/$(executedTestsName) 2> /dev/null | wc -l;); \
	if [ $$failed_tests -ne 0 -a $$executed_tests -ne 0 ]; then \
		echo ---------------------------------; \
		echo "Failed $$failed_tests out of $$executed_tests tests"; \
		echo ---------------------------------; \
	elif [ $$failed_tests -eq 0 ]; then \
		echo ---------------------------------; \
		echo "All $$executed_tests tests passed"; \
		echo ---------------------------------; \
	fi; \
	exit $$failed_tests;

# A commonly used bash command to clean intermediate files. Instead of writing
# it every time re-use this variable.
RM_INTERMEDIATE_FILES := rm -f $(FIRST_MAKEFILE_DIR)/$(failedTestsName) $(FIRST_MAKEFILE_DIR)/$(executedTestsName)

# At the start of the make, we want to start with empty intermediate files.
TRUNCATE_INTERMEDIATE_FILES := cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(failedTestsName) && cat /dev/null > $(FIRST_MAKEFILE_DIR)/$(executedTestsName)

# With trap make sure the clean step is always executed before and after the
# tests run time. Do not leave residual files in the repo.
check:
	+@trap "code=\$$?; \
		$(RM_INTERMEDIATE_FILES); \
		exit \$${code};" EXIT; \
	$(TRUNCATE_INTERMEDIATE_FILES); \
	$(MAKE) -f $(THIS_FILE) actualCheck;

all: check

.PHONY: all check preCheck actualCheck $(TEST_TARGETS)
.DEFAULT_GOAL := all
@@ -1,25 +1,34 @@
{
  "name": "Images for Trade",
  "version": "0.0.1",
  "version": "1.2.4-beta",
  "description": "",
  "license": "UNLICENSED",
  "dependencies": {
    "@aws-sdk/client-s3": "^3.521.0",
    "@aws-sdk/lib-storage": "^3.521.0",
    "@aws-sdk/client-s3": "^3.614.0",
    "@aws-sdk/lib-storage": "^3.614.0",
    "@peculiar/asn1-ecc": "^2.3.8",
    "@peculiar/asn1-schema": "^2.3.8",
    "base64url": "^3.0.1",
    "cbor-x": "^1.5.9",
    "cors": "^2.8.5",
    "did-jwt": "^8.0.1",
    "did-jwt": "^7.4.7",
    "did-resolver": "^4.1.0",
    "dotenv": "^16.4.5",
    "ethr-did-resolver": "^10.1.5",
    "express": "^4.18.2",
    "express": "^4.19.2",
    "luxon": "^3.4.4",
    "multer": "1.4.5-lts.1",
    "sqlite3": "^5.1.7"
  },
  "devDependencies": {
    "flywaydb-cli": "^0.9.0"
    "flywaydb-cli": "^0.9.0",
    "nodemon": "^3.1.4",
    "ts-node": "^10.9.2",
    "typescript": "^5.5.3"
  },
  "scripts": {
    "migrate": "flyway -configFiles=sql/flyway.conf migrate"
    "build": "tsc",
    "migrate": "flyway -configFiles=sql/flyway.conf migrate",
    "start": "node dist/server.js",
    "start:dev": "nodemon --exec ts-node src/server.ts"
  }
}
File diff suppressed because it is too large
@@ -0,0 +1,2 @@

ALTER TABLE image ADD COLUMN is_replacement BOOLEAN NOT NULL DEFAULT FALSE;
@@ -0,0 +1,46 @@
import {DIDResolutionResult} from "did-resolver";

/**
 * This did:ethr resolver instructs the did-jwt machinery to use the
 * EcdsaSecp256k1RecoveryMethod2020 verification method, which relies on the recovery bit in the
 * signature to recover the DID's public key from that signature.
 *
 * This effectively hard codes the did:ethr DID resolver to use the address as the public key.
 * @param did : string
 * @returns {Promise<DIDResolutionResult>}
 *
 * Similar code resides in endorser-ch
 */
export const didEthLocalResolver = async(did: string): Promise<DIDResolutionResult> => {
  const didRegex = /^did:ethr:(0x[0-9a-fA-F]{40})$/;
  const match = did.match(didRegex);

  if (match) {
    const address = match[1]; // Extract eth address: 0x...
    const publicKeyHex = address; // Use the address directly as a public key placeholder

    return {
      didDocumentMetadata: {},
      didResolutionMetadata: {
        contentType: "application/did+ld+json"
      },
      didDocument: {
        '@context': [
          'https://www.w3.org/ns/did/v1',
          "https://w3id.org/security/suites/secp256k1recovery-2020/v2"
        ],
        id: did,
        verificationMethod: [{
          id: `${did}#controller`,
          type: 'EcdsaSecp256k1RecoveryMethod2020',
          controller: did,
          blockchainAccountId: "eip155:1:" + publicKeyHex,
        }],
        authentication: [`${did}#controller`],
        assertionMethod: [`${did}#controller`],
      },
    };
  }

  throw new Error(`Unsupported DID format: ${did}`);
};
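For context, here is a minimal sketch of how this local resolver can be plugged into did-jwt verification, mirroring the wiring in the VC index module further down in this diff; the helper function name is illustrative and not part of the project:

```typescript
// Sketch only: register didEthLocalResolver for the "ethr" method and use it
// to verify an incoming did:ethr JWT, as the VC index module below does.
import didJwt from "did-jwt";
import { Resolver } from "did-resolver";
import { didEthLocalResolver } from "./did-eth-local-resolver";

const resolver = new Resolver({ ethr: didEthLocalResolver });

// Hypothetical helper: returns did-jwt's verification result ({ issuer, payload, ... }).
export async function verifyEthrJwt(jwt: string) {
  return didJwt.verifyJWT(jwt, { resolver });
}
```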
@@ -0,0 +1,139 @@
import { AsnParser } from "@peculiar/asn1-schema";
import { ECDSASigValue } from "@peculiar/asn1-ecc";
import crypto from "crypto";
import { decode as cborDecode } from "cbor-x";

/**
 *
 *
 * similar code is in crowd-funder-for-time-pwa libs/crypto/vc/passkeyDidPeer.ts verifyJwtWebCrypto
 *
 * @returns {Promise<boolean>}
 */
export async function verifyPeerSignature(
  payloadBytes: Uint8Array,
  publicKeyBytes: Uint8Array,
  signatureBytes: Uint8Array
) {
  // this simple approach doesn't work
  //const verify = crypto.createVerify('sha256')
  //verify.update(preimage)
  //const result = verify.verify(publicKey, signature)

  const finalSignatureBuffer = unwrapEC2Signature(signatureBytes);
  const verifyAlgorithm = {
    name: "ECDSA",
    hash: { name: "SHA-256" },
  };
  const publicKeyJwk = cborToKeys(publicKeyBytes).publicKeyJwk;
  const keyAlgorithm = {
    name: "ECDSA",
    namedCurve: publicKeyJwk.crv,
  };
  const publicKeyCryptoKey = await crypto.subtle.importKey(
    "jwk",
    publicKeyJwk,
    keyAlgorithm,
    false,
    ["verify"],
  );
  const verified = await crypto.subtle.verify(
    verifyAlgorithm,
    publicKeyCryptoKey,
    finalSignatureBuffer,
    payloadBytes,
  );
  return verified;
}

function cborToKeys(publicKeyBytes: Uint8Array) {
  const jwkObj = cborDecode(publicKeyBytes);
  if (
    jwkObj[1] != 2 || // kty "EC"
    jwkObj[3] != -7 || // alg "ES256"
    jwkObj[-1] != 1 || // crv "P-256"
    jwkObj[-2].length != 32 || // x
    jwkObj[-3].length != 32 // y
  ) {
    throw new Error("Unable to extract key.");
  }
  const publicKeyJwk = {
    alg: "ES256",
    crv: "P-256",
    kty: "EC",
    x: arrayToBase64Url(jwkObj[-2]),
    y: arrayToBase64Url(jwkObj[-3]),
  };
  const publicKeyBuffer = Buffer.concat([
    Buffer.from(jwkObj[-2]),
    Buffer.from(jwkObj[-3]),
  ]);
  return { publicKeyJwk, publicKeyBuffer };
}

function toBase64Url(anythingB64: string) {
  return anythingB64.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}

function arrayToBase64Url(anything: Uint8Array) {
  return toBase64Url(Buffer.from(anything).toString("base64"));
}

/**
 * In WebAuthn, EC2 signatures are wrapped in ASN.1 structure so we need to peel r and s apart.
 *
 * See https://www.w3.org/TR/webauthn-2/#sctn-signature-attestation-types
 *
 * @return Uint8Array of the signature inside
 */
function unwrapEC2Signature(signature: Uint8Array) {
  const parsedSignature = AsnParser.parse(signature, ECDSASigValue);
  let rBytes = new Uint8Array(parsedSignature.r);
  let sBytes = new Uint8Array(parsedSignature.s);

  if (shouldRemoveLeadingZero(rBytes)) {
    rBytes = rBytes.slice(1);
  }

  if (shouldRemoveLeadingZero(sBytes)) {
    sBytes = sBytes.slice(1);
  }

  const finalSignature = isoUint8ArrayConcat([rBytes, sBytes]);

  return finalSignature;
}

/**
 * Determine if the DER-specific `00` byte at the start of an ECDSA signature byte sequence
 * should be removed based on the following logic:
 *
 * "If the leading byte is 0x0, and the high order bit on the second byte is not set to 0,
 * then remove the leading 0x0 byte"
 *
 * @return true if leading zero should be removed
 */
function shouldRemoveLeadingZero(bytes: Uint8Array) {
  return bytes[0] === 0x0 && (bytes[1] & (1 << 7)) !== 0;
}

// from https://github.com/MasterKale/SimpleWebAuthn/blob/master/packages/server/src/helpers/iso/isoUint8Array.ts#L49
/**
 * Combine multiple Uint8Arrays into a single Uint8Array
 *
 * @param arrays - Uint8Array[]
 * @return Uint8Array
 */
function isoUint8ArrayConcat(arrays: Uint8Array[]) {
  let pointer = 0;
  const totalLength = arrays.reduce((prev, curr) => prev + curr.length, 0);

  const toReturn = new Uint8Array(totalLength);

  arrays.forEach((arr) => {
    toReturn.set(arr, pointer);
    pointer += arr.length;
  });

  return toReturn;
}
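A condensed sketch of how verifyPeerSignature is meant to be fed, assuming raw WebAuthn authenticator data and clientDataJSON as inputs; it mirrors the passkey verifier later in this diff (passkeyDidPeer) rather than adding anything new, and the argument names are illustrative:

```typescript
// Sketch only: build the WebAuthn preimage (authenticatorData || SHA-256(clientDataJSON))
// and check it against the passkey public key embedded in a did:peer:0 DID.
import crypto from "crypto";
import didJwt from "did-jwt";
import { verifyPeerSignature } from "./didPeer";

async function checkWebAuthnSignature(
  authenticatorData: Buffer, // raw authenticator data from the client
  clientDataJSON: Buffer,    // raw clientDataJSON from the client
  signatureDer: Buffer,      // ASN.1/DER-wrapped ECDSA signature from the client
  issuerDid: string,         // did:peer:0... identifying the passkey
) {
  const clientDataHash = crypto.createHash("sha256").update(clientDataJSON).digest();
  const preimage = new Uint8Array(Buffer.concat([authenticatorData, clientDataHash]));
  // did:peer:0 carries the CBOR-encoded public key in multibase after the prefix
  const publicKey = didJwt.multibaseToBytes(issuerDid.substring("did:peer:0".length));
  return verifyPeerSignature(preimage, publicKey, new Uint8Array(signatureDer));
}
```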
@@ -0,0 +1,88 @@
/**
 * Verifiable Credential & DID functions, specifically for EndorserSearch.org tools
 *
 * The goal is to make this folder similar across projects, then move it to a library.
 * Other projects: endorser-ch, crowd-funder-for-time-pwa
 *
 */

import base64url from "base64url";
import didJwt from "did-jwt";
import {Resolver} from "did-resolver";

import {didEthLocalResolver} from "./did-eth-local-resolver";
import {verifyJwt as peerVerifyJwt} from "./passkeyDidPeer";


export const TEST_BYPASS_ENV_VALUE = "test-local";
export const ETHR_DID_PREFIX = 'did:ethr:'
export const PEER_DID_PREFIX = 'did:peer:'
export const JWT_VERIFY_FAILED_CODE = "JWT_VERIFY_FAILED_CODE"
export const UNSUPPORTED_DID_METHOD_CODE = "UNSUPPORTED_DID_METHOD"

const resolver = new Resolver({
  'ethr': didEthLocalResolver
});

// return Promise of at least { issuer, payload, verified boolean }
// ... and also if successfully verified by did-jwt (not JWANT): data, doc, signature, signer
export async function decodeAndVerifyJwt(jwt: string) {
  const pieces = jwt.split('.')
  const header = JSON.parse(base64url.decode(pieces[0]))
  const payload = JSON.parse(base64url.decode(pieces[1]))
  const issuerDid = payload.iss
  if (!issuerDid) {
    return Promise.reject({
      clientError: {
        message: `Missing "iss" field in JWT.`,
      }
    })
  }
  if (issuerDid && issuerDid.startsWith(ETHR_DID_PREFIX) && process.env.NODE_ENV === TEST_BYPASS_ENV_VALUE) {
    // Error of "Cannot read property 'toString' of undefined" usually means the JWT is malformed
    // eg. no "." separators.
    let nowEpoch = Math.floor(new Date().getTime() / 1000)
    if (payload.exp < nowEpoch) {
      console.log("JWT with exp " + payload.exp
        + " has expired but we're in test mode so we'll use a new time."
      )
      payload.exp = nowEpoch + 100
    }
    return { issuer: issuerDid, payload, verified: true } // other elements will = undefined
  }

  if (issuerDid.startsWith(ETHR_DID_PREFIX)) {
    try {
      let verified = await didJwt.verifyJWT(jwt, {resolver})
      return verified

    } catch (e) {
      return Promise.reject({
        clientError: {
          message: `JWT failed verification: ` + e,
          code: JWT_VERIFY_FAILED_CODE
        }
      })
    }
  }

  if (issuerDid.startsWith(PEER_DID_PREFIX) && header.typ === "JWANT") {
    const { claimPayload, verified } = await peerVerifyJwt(payload, issuerDid, pieces[2])
    return { issuer: issuerDid, payload: claimPayload, verified: verified }
  }

  if (issuerDid.startsWith(PEER_DID_PREFIX)) {
    return Promise.reject({
      clientError: {
        message: `JWT with a PEER DID currently only supported with typ == JWANT. Contact us for JWT support since it should be straightforward.`
      }
    })
  }

  return Promise.reject({
    clientError: {
      message: `Unsupported DID method ${issuerDid}`,
      code: UNSUPPORTED_DID_METHOD_CODE
    }
  })
}
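As a usage note, here is a minimal sketch of an Express guard that runs decodeAndVerifyJwt on the Bearer token; the import path, route shape, and response fields are assumptions for illustration, since the project's real route handlers are not shown in this diff:

```typescript
// Sketch only: reject requests whose Authorization JWT does not verify.
import express from "express";
import { decodeAndVerifyJwt } from "./vc"; // assumed path to this module

const app = express();

app.use(async (req, res, next) => {
  try {
    const jwt = (req.headers.authorization || "").replace(/^Bearer /, "");
    const result: any = await decodeAndVerifyJwt(jwt);
    if (!result.verified) {
      return res.status(401).json({ success: false, message: "JWT did not verify." });
    }
    (req as any).issuerDid = result.issuer; // make the caller's DID available to later handlers
    next();
  } catch (e: any) {
    // decodeAndVerifyJwt rejects with { clientError: { message, code? } }
    res.status(401).json({ success: false, message: e?.clientError?.message || String(e) });
  }
});
```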
@@ -0,0 +1,104 @@
import crypto from "crypto";
import didJwt from "did-jwt";

import {PEER_DID_PREFIX, TEST_BYPASS_ENV_VALUE} from "./index";
import {verifyPeerSignature} from "./didPeer";

/**
 *
 * @param payload
 * @param issuerDid
 * @param signatureString
 * @returns {Promise<{claimPayload: string, verified: boolean}>}
 */
export async function verifyJwt(payload: any, issuerDid: any, signatureString: any) {
  if (!payload.iss) {
    return Promise.reject({
      clientError: {
        message: `JWT is missing an "iss" field.`,
      }
    })
  }
  let nowEpoch = Math.floor(new Date().getTime() / 1000)
  if (!payload.exp) {
    return Promise.reject({
      clientError: {
        message: `JWT is missing an "exp" field.`,
      }
    })
  }
  if (payload.exp < nowEpoch && process.env.NODE_ENV !== TEST_BYPASS_ENV_VALUE) {
    return Promise.reject({
      clientError: {
        message: `JWT with exp ${payload.exp} has expired.`,
      }
    });
  }

  const authData: string = payload.AuthenticationDataB64URL
  const clientData: string = payload.ClientDataJSONB64URL
  if (!authData || !clientData) {
    return Promise.reject({
      clientError: {
        message: `JWT with typ == JWANT requires AuthenticationData and ClientDataJSON.`
      }
    })
  }

  const decodedAuthDataBuff = Buffer.from(authData, 'base64url')
  const decodedClientData = Buffer.from(clientData, 'base64url')

  let claimPayload = JSON.parse(decodedClientData.toString())
  if (claimPayload.challenge) {
    claimPayload = JSON.parse(Buffer.from(claimPayload.challenge, "base64url").toString())
    if (!claimPayload.exp) {
      claimPayload.exp = payload.exp
    }
    if (!claimPayload.iat) {
      claimPayload.iat = payload.iat
    }
    if (!claimPayload.iss) {
      claimPayload.iss = payload.iss
    }
  }
  if (!claimPayload.exp) {
    return Promise.reject({
      clientError: {
        message: `JWT client data challenge is missing an "exp" field.`,
      }
    })
  }
  if (claimPayload.exp < nowEpoch && process.env.NODE_ENV !== TEST_BYPASS_ENV_VALUE) {
    return Promise.reject({
      clientError: {
        message: `JWT client data challenge exp time is past.`,
      }
    })
  }
  if (claimPayload.exp !== payload.exp) {
    return Promise.reject({
      clientError: {
        message: `JWT client data challenge "exp" field doesn't match the outside payload "exp".`,
      }
    })
  }
  if (claimPayload.iss !== payload.iss) {
    return Promise.reject({
      clientError: {
        message: `JWT client data challenge "iss" field doesn't match the outside payload "iss".`,
      }
    })
  }

  const hashedClientDataBuff = crypto.createHash("sha256")
    .update(decodedClientData)
    .digest()
  const preimage = new Uint8Array(Buffer.concat([decodedAuthDataBuff, hashedClientDataBuff]))
  const PEER_DID_MULTIBASE_PREFIX = PEER_DID_PREFIX + "0"
  // Uint8Array
  const publicKey = didJwt.multibaseToBytes(issuerDid.substring(PEER_DID_MULTIBASE_PREFIX.length));
  const signature = new Uint8Array(Buffer.from(signatureString, 'base64url'))
  const verified = await verifyPeerSignature(preimage, publicKey, signature)
  return { claimPayload, verified }

}
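For readers of the function above, here is a hedged restatement of the JWANT payload shape it expects, derived only from the fields it actually reads; this is not an official JWANT definition:

```typescript
// Shape implied by verifyJwt() above; field names come straight from the checks it performs.
interface JwantPayload {
  iss: string; // did:peer:0... of the passkey holder
  iat: number; // issued-at, epoch seconds
  exp: number; // expiration, epoch seconds; must match the challenge's "exp"
  AuthenticationDataB64URL: string; // base64url-encoded WebAuthn authenticator data
  ClientDataJSONB64URL: string; // base64url-encoded WebAuthn clientDataJSON whose
                                // "challenge" holds a base64url-encoded JSON claim payload
}
```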
@@ -0,0 +1,6 @@
# see ../Makefile.test

TESTS ?= \
	test.sh

include ../Makefile.test
@@ -0,0 +1,147 @@
#!/usr/bin/env bash

# Execute from the "test" directory so that the test files are available.
#
# We recommend you have the pkgx.dev tools installed.
# If you want to use your installed curl & jq & node, you can comment out the two "pkgx" commands.

HOST=http://localhost:3002

if ! [[ "$PWD" == */test ]]; then
  echo "Error: Run this script in the 'test' directory."
  exit 1
fi

# load the tools: curl, jq, node
eval "$(pkgx --shellcode)"
env +curl +jq +node

JWT_CODE_USER_0='OWNER_DID="did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F"; OWNER_PRIVATE_KEY_HEX="2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'
JWT_CODE_USER_1='OWNER_DID="did:ethr:0x111d15564f824D56C7a07b913aA7aDd03382aA39"; OWNER_PRIVATE_KEY_HEX="be64d297e1c6f3545971cd0bc24c3bf32656f8639a2ae32cb84a1e3c75ad69cd"; didJwt = require("did-jwt"); didJwt.createJWT({ exp: Math.floor(Date.now() / 1000) + 60, iat: Math.floor(Date.now() / 1000), iss: OWNER_DID }, { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) }).then(console.log)'

# exit as soon as anything fails
set -e

echo "Upload test0.png by user #0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test0.png" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
  echo "User #0 uploaded file."
else
  echo "User #0 failed to upload a file.";
  exit 1
fi

echo "Download from the URL supplied"
URL0=$(echo $RESULT | jq -r '.url')
# -L to follow redirect because the returned URL is a timesafari.app URL
STATUS_CODE=$(curl -o test0-back.png -w "%{http_code}" -L $URL0);
if [ $STATUS_CODE -ne 200 ]; then
  echo "File is not accessible, received status code: $STATUS_CODE";
fi

echo "Check that downloaded file is the same as the original"
if diff "test0.png" "test0-back.png" >/dev/null; then
  echo "Got the same file."
else
  echo "Did not get the same file."
  exit 1
fi

echo "Upload test1.png by user #1"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" "$HOST/image")
echo curl result: $RESULT
URL2=$(echo $RESULT | jq -r '.url')
if [ "$URL0" != "$URL2" ]; then
  echo "URLs 0 & 1 are different."
else
  echo "URLs 0 & 1 are not different."
  exit 1
fi

echo "Now fail to upload a change to the image by user 1"
FILENAME0=$(basename $URL0)
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "false" ]; then
  echo "User #1 could not replace existing file."
else
  echo "File may have been replaced wrongly.";
  exit 1
fi

echo "Now successfully upload a change to the image by user 0"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test1.png" -F "fileName=$FILENAME0" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
  echo "User #0 did replace file.";
else
  echo "User #0 couldn't replace file.";
  exit 1
fi

echo "Fail to remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ "$SUCCESS" = "false" ]; then
  echo "Test file 0 was not cleaned off server."
else
  echo "Test file 0 was cleaned off server.";
  exit 1
fi

echo "Remove test file 0 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F4599145c3a8792a678f458747f2d8512c680e8680bf5563c35b06cd770051ed2.png"
JWT=$(node -e "$JWT_CODE_USER_0")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
  echo "Test file 0 was cleaned off server."
else
  echo "Test file 0 was not cleaned off server.";
  exit 1
fi

echo "Remove test file 1 from the service"
TEST_URL="https%3A%2F%2Ftest-image.timesafari.app%2F83801e59789f962ddd19dbf99abd65b416e4c6560c28bdb3e663cea045561b07.png"
JWT=$(node -e "$JWT_CODE_USER_1")
echo JWT: $JWT
RESULT=$(curl -X DELETE -H "Authorization: Bearer $JWT" "$HOST/image/$TEST_URL")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [[ -z "$RESULT" ]] || [[ "$SUCCESS" = "true" ]]; then
  echo "Test file 1 was cleaned off server."
else
  echo "Test file 1 was not cleaned off server.";
  exit 1
fi

echo "Upload test2.svg by did:peer user"
JWT="eyJ0eXAiOiJKV0FOVCIsImFsZyI6IkVTMjU2In0.eyJBdXRoZW50aWNhdGlvbkRhdGFCNjRVUkwiOiJTWllONVlnT2pHaDBOQmNQWkhaZ1c0X2tycm1paGpMSG1Wenp1b01kbDJNRkFBQUFBQSIsIkNsaWVudERhdGFKU09OQjY0VVJMIjoiZXlKMGVYQmxJam9pZDJWaVlYVjBhRzR1WjJWMElpd2lZMmhoYkd4bGJtZGxJam9pWlhsS01sbDVTVFpsZVVwcVkyMVdhMXBYTlRCaFYwWnpWVE5XYVdGdFZtcGtRMGsyWlhsS1FWa3lPWFZrUjFZMFpFTkpOa2x0YURCa1NFSjZUMms0ZG1NeVRtOWFWekZvVEcwNWVWcDVTWE5KYTBJd1pWaENiRWxxYjJsU01td3lXbFZHYW1SSGJIWmlhVWx6U1cxU2JHTXlUbmxoV0VJd1lWYzVkVWxxYjJsalIydzJaVzFGYVdaWU1ITkpiVlkwWTBOSk5rMVVZM2xOUkUwMFQwUlZNazE1ZDJsaFYwWXdTV3B2ZUU1NlNYZE5lbWMwVGxSQmVreERTbkJqTTAxcFQybEthMkZYVVRaalIxWnNZMnB2ZDJWcmRFNVNiWEF5Vmxka1dtTnJNSGhoUm1nelVrZE9kR0pVVWtOWmJtaE1XV3hLVDFWWFJsbFZhM0I1VVRGQ2FGRnNjREJSTWpsd1lVaE9UVlpHUWt0UmJrSnVWbGhTUkU5VmRHdFBXRUo1WldwR2RsWklSalJXTWxaMFVtMWFUMVl3VGs5YU1IaEdVMjVzVVU1RlduWlVSWFF3WWxjMVdHRkdSbmhaVlU1MVVXMVdiVll5T1hSU00wcFVVVlJPTWs1RFNqa2lMQ0p2Y21sbmFXNGlPaUpvZEhSd09pOHZiRzlqWVd4b2IzTjBPamd3T0RBaUxDSmpjbTl6YzA5eWFXZHBiaUk2Wm1Gc2MyVjkiLCJleHAiOjE3MjAzODg1NjMsImlhdCI6MTcyMDM4ODUwMywiaXNzIjoiZGlkOnBlZXI6MHpLTUZqdlVnWXJNMWhYd0RjbW00QmJ4S2JSTlFhWFJKckNQYUJadENvaWhzTFRQSkJwZ1V0QzlLZDlwcnoxb1RxeFdlbUZmTldDTmdMRUp5UDRGb0xLdG1uV2hRcWFDbkJlZldvbUdyU0EzdjQifQ.MEQCIAsMMNUcSjoxn0LZuE6FvZ6dsm-uQROeX3RPWt6QlRyPAiA670XdJXnLw8QFR9a6KCMt-qUyGZg88mMfT-1DtipcwA"
echo JWT: $JWT
RESULT=$(curl -X POST -H "Authorization: Bearer $JWT" -F "image=@test2.svg" "$HOST/image")
echo curl result: $RESULT
SUCCESS=$(echo $RESULT | jq -r '.success')
if [ $SUCCESS = "true" ]; then
  echo "User #2 uploaded SVG file."
else
  echo "User #2 failed to upload SVG file. Note that this may be because the server wasn't started with NODE_ENV=test-local which bypasses check of the exp date.";
  exit 1
fi
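The inline `node -e` snippets in JWT_CODE_USER_0 and JWT_CODE_USER_1 above are hard to read, so here is the same JWT construction restated as a standalone TypeScript sketch, using the test DID and key from JWT_CODE_USER_0:

```typescript
// Sketch only: build a short-lived did:ethr JWT the way the test script does.
import didJwt from "did-jwt";

const OWNER_DID = "did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F";
const OWNER_PRIVATE_KEY_HEX =
  "2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b";

async function makeTestJwt(): Promise<string> {
  const nowEpoch = Math.floor(Date.now() / 1000);
  return didJwt.createJWT(
    { exp: nowEpoch + 60, iat: nowEpoch, iss: OWNER_DID },
    { issuer: OWNER_DID, signer: didJwt.SimpleSigner(OWNER_PRIVATE_KEY_HEX) },
  );
}

makeTestJwt().then(console.log);
```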
Three binary test images were added in this compare (3.4 KiB, 9.7 KiB, and 5.6 KiB).
@@ -0,0 +1,114 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig to read more about this file */

    /* Projects */
    // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
    // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
    // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
    // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
    // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
    // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */

    /* Language and Environment */
    "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
    // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
    // "jsx": "preserve", /* Specify what JSX code is generated. */
    // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
    // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
    // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
    // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
    // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
    // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
    // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
    // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
    // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */

    /* Modules */
    "module": "commonjs", /* Specify what module code is generated. */
    // "rootDir": "./", /* Specify the root folder within your source files. */
    // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
    // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
    // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
    // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
    // "types": [], /* Specify type package names to be included without being referenced in a source file. */
    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
    // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
    // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
    // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
    // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
    // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
    // "resolveJsonModule": true, /* Enable importing .json files. */
    // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */

    /* JavaScript Support */
    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */

    /* Emit */
    // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
    // "declarationMap": true, /* Create sourcemaps for d.ts files. */
    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
    // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
    // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
    // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
    "outDir": "./dist", /* Specify an output folder for all emitted files. */
    // "removeComments": true, /* Disable emitting comments. */
    // "noEmit": true, /* Disable emitting files from a compilation. */
    // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
    // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
    // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
    // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
    // "newLine": "crlf", /* Set the newline character for emitting files. */
    // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
    // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
    // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
    // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */

    /* Interop Constraints */
    // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
    // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
    // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
    // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
    "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
    // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
    "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */

    /* Type Checking */
    "strict": true, /* Enable all strict type-checking options. */
    // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
    // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
    // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
    // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
    // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
    // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
    // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
    // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
    // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
    // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
    // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
    // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
    // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
    // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
    // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
    // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
    // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */

    /* Completeness */
    // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
    "skipLibCheck": true /* Skip type checking all .d.ts files. */
  },
  "include": [
    "src"
  ],
  "exclude": [
    "node_modules"
  ]
}