From dd6439a8eecc9694f1977337261fab2c512b74bb Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Sun, 26 Jan 2025 22:00:18 +0100 Subject: [PATCH 1/3] feat(validator): implement validator classes Refactors the SDK validators to use classes instead of a single file with repeating methods. - Create base abstract SchemaValidator with AJV integration - Implement specialized validators: - MetadataValidator with nested schema support - ClaimDataValidator for hypercert claims - AllowlistValidator for merkle tree entries - MerkleProofValidator for proof verification - Add comprehensive test suite for all validators - Maintain backwards compatibility through legacy API wrapper - Improve error handling with structured ValidationError type - Introduce ValidatorFactory for consistent instantiation --- eslint.config.mjs | 25 +-- package.json | 3 +- pnpm-lock.yaml | 13 ++ src/validator/ValidatorFactory.ts | 24 +++ src/validator/base/SchemaValidator.ts | 40 ++++ src/validator/index.ts | 173 +++++++----------- src/validator/interfaces.ts | 16 ++ .../validators/AllowListValidator.ts | 71 +++++++ .../validators/MerkleProofValidator.ts | 52 ++++++ src/validator/validators/MetadataValidator.ts | 16 ++ test/validator.test.ts | 3 +- test/validator/base/SchemaValidator.test.ts | 62 +++++++ .../validators/AllowListValidator.test.ts | 62 +++++++ .../validators/MerkleProofValidator.test.ts | 65 +++++++ .../validators/MetadataValidator.test.ts | 150 +++++++++++++++ 15 files changed, 654 insertions(+), 121 deletions(-) create mode 100644 src/validator/ValidatorFactory.ts create mode 100644 src/validator/base/SchemaValidator.ts create mode 100644 src/validator/interfaces.ts create mode 100644 src/validator/validators/AllowListValidator.ts create mode 100644 src/validator/validators/MerkleProofValidator.ts create mode 100644 src/validator/validators/MetadataValidator.ts create mode 100644 test/validator/base/SchemaValidator.test.ts create mode 100644 test/validator/validators/AllowListValidator.test.ts create mode 100644 test/validator/validators/MerkleProofValidator.test.ts create mode 100644 test/validator/validators/MetadataValidator.test.ts diff --git a/eslint.config.mjs b/eslint.config.mjs index 158f6de..a52ded6 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -1,11 +1,14 @@ -import globals from "globals"; -import pluginJs from "@eslint/js"; -import tseslint from "typescript-eslint"; - - -export default [ - { files: ["**/*.{js,mjs,cjs,ts}"] }, - { languageOptions: { globals: { ...globals.browser, ...globals.node } } }, - pluginJs.configs.recommended, - ...tseslint.configs.recommended, -]; \ No newline at end of file +export default { + root: true, + env: { + browser: true, + node: true, + }, + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:chai-friendly/recommended"], + parser: "@typescript-eslint/parser", + plugins: ["@typescript-eslint", "chai-friendly"], + rules: { + "no-unused-expressions": "off", + "chai-friendly/no-unused-expressions": "error", + }, +}; diff --git a/package.json b/package.json index f674684..da304a7 100644 --- a/package.json +++ b/package.json @@ -52,6 +52,7 @@ "esbuild": "^0.19.8", "eslint": "^9.18.0", "eslint-config-prettier": "^9.1.0", + "eslint-plugin-chai-friendly": "^1.0.1", "globals": "^15.14.0", "husky": "^9.1.7", "json-schema-to-typescript": "^13.1.1", @@ -83,7 +84,7 @@ "commitlint": "commitlint --config commitlintrc.ts --edit" }, "lint-staged": { - "*.{js, jsx,ts,tsx}": [ + "*.{js,jsx,ts,tsx}": [ "eslint --quiet --fix" ], 
"*.{json,js,ts,jsx,tsx,html}": [ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fcd8133..777fba8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -96,6 +96,9 @@ importers: eslint-config-prettier: specifier: ^9.1.0 version: 9.1.0(eslint@9.18.0(jiti@2.4.2)) + eslint-plugin-chai-friendly: + specifier: ^1.0.1 + version: 1.0.1(eslint@9.18.0(jiti@2.4.2)) globals: specifier: ^15.14.0 version: 15.14.0 @@ -2030,6 +2033,12 @@ packages: peerDependencies: eslint: '>=7.0.0' + eslint-plugin-chai-friendly@1.0.1: + resolution: {integrity: sha512-dxD/uz1YKJ8U4yah1i+V/p/u+kHRy3YxTPe2nJGqb5lCR+ucan/KIexfZ5+q4X+tkllyMe86EBbAkdlwxNy3oQ==} + engines: {node: '>=0.10.0'} + peerDependencies: + eslint: '>=3.0.0' + eslint-scope@8.2.0: resolution: {integrity: sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -6215,6 +6224,10 @@ snapshots: dependencies: eslint: 9.18.0(jiti@2.4.2) + eslint-plugin-chai-friendly@1.0.1(eslint@9.18.0(jiti@2.4.2)): + dependencies: + eslint: 9.18.0(jiti@2.4.2) + eslint-scope@8.2.0: dependencies: esrecurse: 4.3.0 diff --git a/src/validator/ValidatorFactory.ts b/src/validator/ValidatorFactory.ts new file mode 100644 index 0000000..2437615 --- /dev/null +++ b/src/validator/ValidatorFactory.ts @@ -0,0 +1,24 @@ +import { HypercertMetadata, HypercertClaimdata, AllowlistEntry } from "src/types"; +import { IValidator } from "./interfaces"; +import { MerkleProofData, MerkleProofValidator } from "./validators/MerkleProofValidator"; +import { MetadataValidator, ClaimDataValidator } from "./validators/MetadataValidator"; +import { AllowlistValidator } from "./validators/AllowListValidator"; +import { AllowlistValidationParams } from "./validators/AllowListValidator"; + +export class ValidatorFactory { + static createMetadataValidator(): IValidator { + return new MetadataValidator(); + } + + static createClaimDataValidator(): IValidator { + return new ClaimDataValidator(); + } + + static createAllowlistValidator(): IValidator { + return new AllowlistValidator(); + } + + static createMerkleProofValidator(): IValidator { + return new MerkleProofValidator(); + } +} diff --git a/src/validator/base/SchemaValidator.ts b/src/validator/base/SchemaValidator.ts new file mode 100644 index 0000000..dfafd42 --- /dev/null +++ b/src/validator/base/SchemaValidator.ts @@ -0,0 +1,40 @@ +import Ajv, { Schema, ErrorObject } from "ajv"; +import { IValidator, ValidationError, ValidationResult } from "../interfaces"; + +export abstract class SchemaValidator implements IValidator { + protected ajv: Ajv; + protected schema: Schema; + + constructor(schema: Schema, additionalSchemas: Schema[] = []) { + this.ajv = new Ajv({ allErrors: true }); + // Add any additional schemas first + additionalSchemas.forEach((schema) => this.ajv.addSchema(schema)); + this.schema = schema; + } + + validate(data: unknown): ValidationResult { + const validate = this.ajv.compile(this.schema); + + if (!validate(data)) { + return { + isValid: false, + errors: this.formatErrors(validate.errors || []), + }; + } + + return { + isValid: true, + data: data as T, + errors: [], + }; + } + + protected formatErrors(errors: ErrorObject[]): ValidationError[] { + return errors.map((error) => ({ + code: "SCHEMA_VALIDATION_ERROR", + message: error.message || "Unknown validation error", + field: error.instancePath || (error.params.missingProperty as string) || "", + details: error.params, + })); + } +} diff --git a/src/validator/index.ts b/src/validator/index.ts 
index 141ac01..d61ff63 100644
--- a/src/validator/index.ts
+++ b/src/validator/index.ts
@@ -1,17 +1,6 @@
-import { StandardMerkleTree } from "@openzeppelin/merkle-tree";
-import Ajv from "ajv";
-
-import claimDataSchema from "../resources/schema/claimdata.json";
-import evaluationSchema from "../resources/schema/evaluation.json";
-import metaDataSchema from "../resources/schema/metadata.json";
 import { AllowlistEntry, HypercertClaimdata, HypercertMetadata, MintingError } from "../types";
-import { isAddress } from "viem";
-
-//TODO replace with ZOD
-const ajv = new Ajv({ allErrors: true }); // options can be passed, e.g. {allErrors: true}
-ajv.addSchema(metaDataSchema, "metaData");
-ajv.addSchema(claimDataSchema, "claimData");
-ajv.addSchema(evaluationSchema, "evaluation.json");
+import { ValidationError } from "./interfaces";
+import { ValidatorFactory } from "./ValidatorFactory";

 /**
  * Represents the result of a validation operation.
@@ -26,139 +15,109 @@ type ValidationResult = {
   errors: Record<string, string | string[]>;
 };

+/**
+ * Maps new validator errors to the legacy format
+ */
+const mapErrors = (errors: ValidationError[]): Record<string, string | string[]> => {
+  return errors.reduce(
+    (acc, err) => ({
+      ...acc,
+      [err.field?.replace("/", "") || err.code]: err.message,
+    }),
+    {},
+  );
+};
+
 /**
  * Validates Hypercert metadata.
  *
- * This function uses the AJV library to validate the metadata. It first retrieves the schema for the metadata,
- * then validates the data against the schema. If the schema is not found, it returns an error. If the data does not
- * conform to the schema, it returns the validation errors. If the data is valid, it returns a success message.
+ * Uses the AJV library to validate the metadata against its schema. If the data does not
+ * conform to the schema, it returns the validation errors.
  *
- * @param {unknown} data - The metadata to validate. This should be an object that conforms to the HypercertMetadata type.
- * @returns {ValidationResult} An object that includes a validity flag and any errors that occurred during validation.
+ * @param {unknown} data - The metadata to validate. Should conform to the HypercertMetadata type.
+ * @returns {ValidationResult} Object containing validity flag, validated data, and any validation errors.
+ * @deprecated use ValidatorFactory.createMetadataValidator() instead
  */
-const validateMetaData = (data: unknown): ValidationResult => {
-  const schemaName = "metaData";
-  const validate = ajv.getSchema(schemaName);
-  if (!validate) {
-    return { data, valid: false, errors: { schema: "Schema not found" } };
-  }
-
-  if (!validate(data)) {
-    const errors: Record<string, string> = {};
-    for (const e of validate.errors || []) {
-      const key = e.params.missingProperty || "other";
-      if (key && e.message) {
-        errors[key] = e.message;
-      }
-    }
-    return { data: data as unknown, valid: false, errors };
-  }
-
-  return { data: data as HypercertMetadata, valid: true, errors: {} };
+export const validateMetaData = (data: unknown): ValidationResult => {
+  const result = ValidatorFactory.createMetadataValidator().validate(data);
+
+  console.log(result.errors);
+  return {
+    data: result.data || data,
+    valid: result.isValid,
+    errors: mapErrors(result.errors),
+  };
 };

 /**
  * Validates Hypercert claim data.
  *
- * This function uses the AJV library to validate the claim data. It first retrieves the schema for the claim data,
- * then validates the data against the schema. If the schema is not found, it returns an error. If the data does not
- * conform to the schema, it returns the validation errors. If the data is valid, it returns a success message.
+ * Uses the AJV library to validate the claim data against its schema. If the data does not
+ * conform to the schema, it returns the validation errors.
  *
- * @param {unknown} data - The claim data to validate. This should be an object that conforms to the HypercertClaimdata type.
- * @returns {ValidationResult} An object that includes a validity flag and any errors that occurred during validation.
+ * @param {unknown} data - The claim data to validate. Should conform to the HypercertClaimdata type.
+ * @returns {ValidationResult} Object containing validity flag, validated data, and any validation errors.
+ * @deprecated use ValidatorFactory.createClaimDataValidator() instead
  */
-const validateClaimData = (data: unknown): ValidationResult => {
-  const schemaName = "claimData";
-  const validate = ajv.getSchema(schemaName);
-  if (!validate) {
-    return { data, valid: false, errors: { schema: "Schema not found" } };
-  }
-
-  if (!validate(data)) {
-    const errors: Record<string, string> = {};
-    for (const e of validate.errors || []) {
-      const key = e.params.missingProperty || "other";
-      if (key && e.message) {
-        errors[key] = e.message;
-      }
-    }
-    return { data: data as unknown, valid: false, errors };
-  }
-
-  return { data: data as HypercertClaimdata, valid: true, errors: {} };
+export const validateClaimData = (data: unknown): ValidationResult => {
+  const result = ValidatorFactory.createClaimDataValidator().validate(data);
+  return {
+    data: result.data || data,
+    valid: result.isValid,
+    errors: mapErrors(result.errors),
+  };
 };

 /**
  * Validates an array of allowlist entries.
  *
- * This function checks that the total units in the allowlist match the expected total units, that the total units are greater than 0,
- * and that all addresses in the allowlist are valid Ethereum addresses. It returns an object that includes a validity flag and any errors that occurred during validation.
+ * Checks that the total units match the expected total, units are greater than 0,
+ * and all addresses are valid Ethereum addresses.
  *
- * @param {AllowlistEntry[]} data - The allowlist entries to validate. Each entry should be an object that includes an address and a number of units.
+ * @param {AllowlistEntry[]} data - The allowlist entries to validate.
  * @param {bigint} units - The expected total units in the allowlist.
- * @returns {ValidationResult} An object that includes a validity flag and any errors that occurred during validation. The keys in the errors object are the names of the invalid properties, and the values are the error messages.
+ * @returns {ValidationResult} Object containing validity flag, validated data, and any validation errors.
+ * @deprecated use ValidatorFactory.createAllowlistValidator() instead
  */
-const validateAllowlist = (data: AllowlistEntry[], units: bigint): ValidationResult => {
-  const errors: Record<string, string | string[]> = {};
-  const totalUnits = data.reduce((acc, curr) => acc + BigInt(curr.units.toString()), 0n);
-  if (totalUnits != units) {
-    errors[
-      "units"
-    ] = `Total units in allowlist must match total units [expected: ${units}, got: ${totalUnits.toString()}]`;
-  }
-
-  if (totalUnits == 0n) {
-    errors["units"] = "Total units in allowlist must be greater than 0";
-  }
-
-  const filteredAddresses = data.filter((entry) => !isAddress(entry.address.toLowerCase()));
-  if (filteredAddresses.length > 0) {
-    errors["address"] = filteredAddresses.map((entry) => entry.address);
-  }
-
-  if (Object.keys(errors).length > 0) {
-    return { data: data as unknown, valid: Object.keys(errors).length === 0, errors };
-  }
-
-  return { data: data as AllowlistEntry[], valid: Object.keys(errors).length === 0, errors };
+export const validateAllowlist = (data: AllowlistEntry[], units: bigint): ValidationResult => {
+  const result = ValidatorFactory.createAllowlistValidator().validate(data, { expectedUnits: units });
+  return {
+    data: result.data || data,
+    valid: result.isValid,
+    errors: mapErrors(result.errors),
+  };
 };

 /**
  * Verifies a Merkle proof for a given root, signer address, units, and proof.
  *
- * This function first checks if the signer address is a valid Ethereum address. If it's not, it throws a `MintingError`.
- * It then verifies the Merkle proof using the `StandardMerkleTree.verify` method. If the verification fails, it throws a `MintingError`.
- *
  * @param {string} root - The root of the Merkle tree.
  * @param {string} signerAddress - The signer's Ethereum address.
  * @param {bigint} units - The number of units.
  * @param {string[]} proof - The Merkle proof to verify.
- * @throws {MintingError} Will throw a `MintingError` if the signer address is invalid or if the Merkle proof verification fails.
+ * @throws {MintingError} Will throw if the signer address is invalid or if the Merkle proof verification fails.
+ * @deprecated use ValidatorFactory.createMerkleProofValidator() instead
  */
-function verifyMerkleProof(root: string, signerAddress: string, units: bigint, proof: string[]): void {
-  if (!isAddress(signerAddress.toLowerCase())) {
-    throw new MintingError("Invalid address", { signerAddress });
-  }
+export const verifyMerkleProof = (root: string, signerAddress: string, units: bigint, proof: string[]): void => {
+  const validator = ValidatorFactory.createMerkleProofValidator();
+  const result = validator.validate({ root, signerAddress, units, proof });

-  const verified = StandardMerkleTree.verify(root, ["address", "uint256"], [signerAddress, units], proof);
-  if (!verified) {
-    throw new MintingError("Merkle proof verification failed", { root, proof });
+  if (!result.isValid) {
+    throw new MintingError(result.errors[0].message, { root, proof });
   }
-}
+};

 /**
  * Verifies multiple Merkle proofs for given roots, a signer address, units, and proofs.
  *
- * This function first checks if the lengths of the roots, units, and proofs arrays are equal. If they're not, it throws a `MintingError`.
- * It then iterates over the arrays and verifies each Merkle proof using the `verifyMerkleProof` function. If any verification fails, it throws a `MintingError`.
- *
 * @param {string[]} roots - The roots of the Merkle trees.
 * @param {string} signerAddress - The signer's Ethereum address.
 * @param {bigint[]} units - The numbers of units.
 * @param {string[][]} proofs - The Merkle proofs to verify.
- * @throws {MintingError} Will throw a `MintingError` if the lengths of the input arrays are not equal or if any Merkle proof verification fails.
+ * @throws {MintingError} Will throw if input arrays have mismatched lengths or if any proof verification fails.
+ * @deprecated use ValidatorFactory.createMerkleProofValidator() instead
  */
-function verifyMerkleProofs(roots: string[], signerAddress: string, units: bigint[], proofs: string[][]) {
+export const verifyMerkleProofs = (roots: string[], signerAddress: string, units: bigint[], proofs: string[][]) => {
   if (roots.length !== units.length || units.length !== proofs.length) {
     throw new MintingError("Invalid input", { roots, units, proofs });
   }
@@ -166,6 +125,4 @@ function verifyMerkleProofs(roots: string[], signerAddress: string, units: bigin
   for (let i = 0; i < roots.length; i++) {
     verifyMerkleProof(roots[i], signerAddress, units[i], proofs[i]);
   }
-}
-
-export { validateMetaData, validateClaimData, validateAllowlist, verifyMerkleProof, verifyMerkleProofs };
+};
diff --git a/src/validator/interfaces.ts b/src/validator/interfaces.ts
new file mode 100644
index 0000000..c6250d4
--- /dev/null
+++ b/src/validator/interfaces.ts
@@ -0,0 +1,16 @@
+export interface IValidator<T, P = unknown> {
+  validate(data: unknown, params?: P): ValidationResult<T>;
+}
+
+export interface ValidationResult<T> {
+  isValid: boolean;
+  data?: T;
+  errors: ValidationError[];
+}
+
+export interface ValidationError {
+  code: string;
+  message: string;
+  field?: string;
+  details?: unknown;
+}
diff --git a/src/validator/validators/AllowListValidator.ts b/src/validator/validators/AllowListValidator.ts
new file mode 100644
index 0000000..9920219
--- /dev/null
+++ b/src/validator/validators/AllowListValidator.ts
@@ -0,0 +1,71 @@
+import { ValidationError, IValidator, ValidationResult } from "../interfaces";
+import { AllowlistEntry } from "src/types";
+import { isAddress } from "viem";
+
+export interface AllowlistValidationParams {
+  expectedUnits: bigint;
+}
+
+export class AllowlistValidator implements IValidator<AllowlistEntry[], AllowlistValidationParams> {
+  validate(data: unknown, params?: AllowlistValidationParams): ValidationResult<AllowlistEntry[]> {
+    if (!params?.expectedUnits) {
+      return {
+        isValid: false,
+        errors: [
+          {
+            code: "MISSING_PARAMS",
+            message: "Expected units parameter is required",
+          },
+        ],
+      };
+    }
+
+    if (!Array.isArray(data)) {
+      return {
+        isValid: false,
+        errors: [
+          {
+            code: "INVALID_INPUT",
+            message: "Input must be an array",
+          },
+        ],
+      };
+    }
+
+    const entries = data as AllowlistEntry[];
+    const errors: ValidationError[] = [];
+
+    // Validate total units
+    const totalUnits = entries.reduce((acc, curr) => acc + BigInt(curr.units.toString()), 0n);
+
+    if (totalUnits !== params.expectedUnits) {
+      errors.push({
+        code: "INVALID_TOTAL_UNITS",
+        message: `Total units in allowlist must match expected units`,
+        details: {
+          expected: params.expectedUnits.toString(),
+          actual: totalUnits.toString(),
+        },
+      });
+    }
+
+    // Validate addresses
+    const invalidAddresses = entries
+      .filter((entry) => !isAddress(entry.address.toLowerCase()))
+      .map((entry) => entry.address);
+
+    if (invalidAddresses.length > 0) {
+      errors.push({
+        code: "INVALID_ADDRESSES",
+        message: "Invalid Ethereum addresses found in allowlist",
+        details: invalidAddresses,
+      });
+    }
+
+    return {
+      isValid: errors.length === 0,
+      data: errors.length === 0 ? data : undefined,
+      errors,
+    };
+  }
+}
diff --git a/src/validator/validators/MerkleProofValidator.ts b/src/validator/validators/MerkleProofValidator.ts
new file mode 100644
index 0000000..eadf8e4
--- /dev/null
+++ b/src/validator/validators/MerkleProofValidator.ts
@@ -0,0 +1,52 @@
+import { StandardMerkleTree } from "@openzeppelin/merkle-tree";
+import { IValidator, ValidationError, ValidationResult } from "../interfaces";
+import { isAddress } from "viem";
+
+export interface MerkleProofData {
+  root: string;
+  signerAddress: string;
+  units: bigint;
+  proof: string[];
+}
+
+export class MerkleProofValidator implements IValidator<MerkleProofData> {
+  validate(data: unknown): ValidationResult<MerkleProofData> {
+    const proofData = data as MerkleProofData;
+    const errors: ValidationError[] = [];
+
+    if (!isAddress(proofData.signerAddress)) {
+      errors.push({
+        code: "INVALID_ADDRESS",
+        message: "Invalid signer address",
+      });
+    }
+
+    try {
+      const verified = StandardMerkleTree.verify(
+        proofData.root,
+        ["address", "uint256"],
+        [proofData.signerAddress, proofData.units],
+        proofData.proof,
+      );
+
+      if (!verified) {
+        errors.push({
+          code: "INVALID_PROOF",
+          message: "Merkle proof verification failed",
+        });
+      }
+    } catch (error) {
+      errors.push({
+        code: "VERIFICATION_ERROR",
+        message: "Error during verification",
+        details: error,
+      });
+    }
+
+    return {
+      isValid: errors.length === 0,
+      data: errors.length === 0 ? proofData : undefined,
+      errors,
+    };
+  }
+}
diff --git a/src/validator/validators/MetadataValidator.ts b/src/validator/validators/MetadataValidator.ts
new file mode 100644
index 0000000..d6025ad
--- /dev/null
+++ b/src/validator/validators/MetadataValidator.ts
@@ -0,0 +1,16 @@
+import { HypercertClaimdata, HypercertMetadata } from "src/types/metadata";
+import { SchemaValidator } from "../base/SchemaValidator";
+import claimDataSchema from "../../resources/schema/claimdata.json";
+import metaDataSchema from "../../resources/schema/metadata.json";
+
+export class MetadataValidator extends SchemaValidator<HypercertMetadata> {
+  constructor() {
+    super(metaDataSchema, [claimDataSchema]);
+  }
+}
+
+export class ClaimDataValidator extends SchemaValidator<HypercertClaimdata> {
+  constructor() {
+    super(claimDataSchema);
+  }
+}
diff --git a/test/validator.test.ts b/test/validator.test.ts
index 296e49d..5f86873 100644
--- a/test/validator.test.ts
+++ b/test/validator.test.ts
@@ -8,8 +8,9 @@ import { mockDataSets } from "./helpers";
 describe("Validate claim test", () => {
   const { hypercertData, hypercertMetadata } = mockDataSets;

-  it("checking default metadata", () => {
+  it("checking default metadata", (): void => {
     const result = validateMetaData(hypercertMetadata.data);
+    console.log(result);
     expect(result.valid).to.be.true;

     const invalidResult = validateMetaData({} as HypercertMetadata);
diff --git a/test/validator/base/SchemaValidator.test.ts b/test/validator/base/SchemaValidator.test.ts
new file mode 100644
index 0000000..ff6728b
--- /dev/null
+++ b/test/validator/base/SchemaValidator.test.ts
@@ -0,0 +1,62 @@
+import { expect } from "chai";
+import { Schema } from "ajv";
+import { SchemaValidator } from "../../../src/validator/base/SchemaValidator";
+import { describe, it } from "vitest";
+
+// Create a concrete test implementation
+class TestValidator extends SchemaValidator<unknown> {
+  constructor(schema: Schema, additionalSchemas: Schema[] = []) {
+    super(schema, additionalSchemas);
+  }
+}
+
+describe("SchemaValidator", () => {
+  const simpleSchema: Schema = {
+    type: "object",
+    properties: {
+      name: { type: "string" },
+      age: { type: "number" },
+    },
+ required: ["name"], + }; + + it("should validate valid data", () => { + const validator = new TestValidator(simpleSchema); + const result = validator.validate({ name: "Test", age: 25 }); + + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal({ name: "Test", age: 25 }); + expect(result.errors).to.be.empty; + }); + + it("should return errors for invalid data", () => { + const validator = new TestValidator(simpleSchema); + const result = validator.validate({ age: 25 }); + + expect(result.isValid).to.be.false; + expect(result.data).to.be.undefined; + expect(result.errors).to.have.lengthOf(1); + expect(result.errors[0].field).to.equal("name"); + }); + + it("should handle additional schemas", () => { + const refSchema: Schema = { + type: "object", + properties: { + type: { type: "string" }, + }, + }; + + const mainSchema: Schema = { + type: "object", + properties: { + data: { $ref: "ref#" }, + }, + }; + + const validator = new TestValidator(mainSchema, [{ ...refSchema, $id: "ref" }]); + const result = validator.validate({ data: { type: "test" } }); + + expect(result.isValid).to.be.true; + }); +}); diff --git a/test/validator/validators/AllowListValidator.test.ts b/test/validator/validators/AllowListValidator.test.ts new file mode 100644 index 0000000..48499bf --- /dev/null +++ b/test/validator/validators/AllowListValidator.test.ts @@ -0,0 +1,62 @@ +import { expect } from "chai"; +import { AllowlistValidator } from "../../../src/validator/validators/AllowListValidator"; +import { AllowlistEntry } from "../../../src/types"; +import { describe, it } from "vitest"; + +describe("AllowlistValidator", () => { + const validator = new AllowlistValidator(); + + const validAddress = "0x1234567890123456789012345678901234567890"; + const invalidAddress = "0xinvalid"; + + it("should validate a valid allowlist", () => { + const allowlist: AllowlistEntry[] = [ + { address: validAddress, units: 500n }, + { address: "0x2234567890123456789012345678901234567890", units: 500n }, + ]; + + const result = validator.validate(allowlist, { expectedUnits: 1000n }); + + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(allowlist); + expect(result.errors).to.be.empty; + }); + + it("should require expectedUnits parameter", () => { + const allowlist: AllowlistEntry[] = [{ address: validAddress, units: 1000n }]; + + const result = validator.validate(allowlist); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("MISSING_PARAMS"); + }); + + it("should validate total units match expected units", () => { + const allowlist: AllowlistEntry[] = [{ address: validAddress, units: 500n }]; + + const result = validator.validate(allowlist, { expectedUnits: 1000n }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("INVALID_TOTAL_UNITS"); + }); + + it("should validate ethereum addresses", () => { + const allowlist: AllowlistEntry[] = [ + { address: invalidAddress, units: 500n }, + { address: validAddress, units: 500n }, + ]; + + const result = validator.validate(allowlist, { expectedUnits: 1000n }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("INVALID_ADDRESSES"); + expect(result.errors[0].details).to.include(invalidAddress); + }); + + it("should validate input is an array", () => { + const result = validator.validate({}, { expectedUnits: 1000n }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("INVALID_INPUT"); + }); +}); diff --git 
a/test/validator/validators/MerkleProofValidator.test.ts b/test/validator/validators/MerkleProofValidator.test.ts new file mode 100644 index 0000000..5f1b359 --- /dev/null +++ b/test/validator/validators/MerkleProofValidator.test.ts @@ -0,0 +1,65 @@ +import { expect } from "chai"; +import { MerkleProofValidator } from "../../../src/validator/validators/MerkleProofValidator"; +import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; +import { describe, it } from "vitest"; + +describe("MerkleProofValidator", () => { + const validator = new MerkleProofValidator(); + const validAddress = "0x1234567890123456789012345678901234567890"; + + // Create a real merkle tree for testing + const values = [ + [validAddress, 1000n], + ["0x2234567890123456789012345678901234567890", 2000n], + ]; + const tree = StandardMerkleTree.of(values, ["address", "uint256"]); + const proof = tree.getProof(0); // Get proof for first entry + + it("should validate a valid merkle proof", () => { + const result = validator.validate({ + root: tree.root, + signerAddress: validAddress, + units: 1000n, + proof, + }); + + expect(result.isValid).to.be.true; + expect(result.errors).to.be.empty; + }); + + it("should validate ethereum address", () => { + const result = validator.validate({ + root: tree.root, + signerAddress: "0xinvalid", + units: 1000n, + proof, + }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("INVALID_ADDRESS"); + }); + + it("should validate merkle proof verification", () => { + const result = validator.validate({ + root: tree.root, + signerAddress: validAddress, + units: 2000n, // Wrong units + proof, + }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("INVALID_PROOF"); + }); + + it("should handle verification errors", () => { + const result = validator.validate({ + root: "invalid_root", + signerAddress: validAddress, + units: 1000n, + proof, + }); + + expect(result.isValid).to.be.false; + expect(result.errors[0].code).to.equal("VERIFICATION_ERROR"); + }); +}); diff --git a/test/validator/validators/MetadataValidator.test.ts b/test/validator/validators/MetadataValidator.test.ts new file mode 100644 index 0000000..f188265 --- /dev/null +++ b/test/validator/validators/MetadataValidator.test.ts @@ -0,0 +1,150 @@ +import { expect } from "chai"; +import { MetadataValidator, ClaimDataValidator } from "../../../src/validator/validators/MetadataValidator"; +import { HypercertMetadata, HypercertClaimdata } from "../../../src/types"; +import { describe, it } from "vitest"; + +describe("MetadataValidator", () => { + const validator = new MetadataValidator(); + + const validClaimData: HypercertClaimdata = { + impact_scope: { + name: "Impact Scope", + value: ["global"], + display_value: "Global", + }, + work_scope: { + name: "Work Scope", + value: ["research"], + display_value: "Research", + }, + work_timeframe: { + name: "Work Timeframe", + value: [1672531200, 1704067200], // 2023 + display_value: "2023", + }, + impact_timeframe: { + name: "Impact Timeframe", + value: [1672531200, 1704067200], // 2023 + display_value: "2023", + }, + contributors: { + name: "Contributors", + value: ["0x1234567890123456789012345678901234567890"], + display_value: "Contributor 1", + }, + }; + + const validMetadata: HypercertMetadata = { + name: "Test Hypercert", + description: "Test Description", + image: "ipfs://test", + version: "0.0.1", + ref: "ref", + hypercert: validClaimData, + }; + + it("should validate valid metadata", () => { + const result = 
validator.validate(validMetadata); + + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(validMetadata); + expect(result.errors).to.be.empty; + }); + + it("should validate required fields", () => { + const invalidMetadata = { + description: "Test Description", + image: "ipfs://test", + }; + + const result = validator.validate(invalidMetadata); + + expect(result.isValid).to.be.false; + expect(result.errors[0].field).to.equal("name"); + }); + + it("should validate nested claim data", () => { + const invalidMetadata = { + ...validMetadata, + hypercert: { + ...validClaimData, + impact_scope: undefined, + }, + }; + + const result = validator.validate(invalidMetadata); + + expect(result.isValid).to.be.false; + expect(result.errors[0].field).to.equal("/hypercert"); + // or if we want to check the specific error message: + expect(result.errors[0].message).to.include("impact_scope"); + }); +}); + +describe("ClaimDataValidator", () => { + const validator = new ClaimDataValidator(); + + const validClaimData: HypercertClaimdata = { + impact_scope: { + name: "Impact Scope", + value: ["global"], + display_value: "Global", + }, + work_scope: { + name: "Work Scope", + value: ["research"], + display_value: "Research", + }, + work_timeframe: { + name: "Work Timeframe", + value: [1672531200, 1704067200], // 2023 + display_value: "2023", + }, + impact_timeframe: { + name: "Impact Timeframe", + value: [1672531200, 1704067200], // 2023 + display_value: "2023", + }, + contributors: { + name: "Contributors", + value: ["0x1234567890123456789012345678901234567890"], + display_value: "Contributor 1", + }, + }; + + it("should validate valid claim data", () => { + const result = validator.validate(validClaimData); + + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(validClaimData); + expect(result.errors).to.be.empty; + }); + + it("should validate required fields", () => { + const invalidClaimData = { + impact_scope: validClaimData.impact_scope, + work_scope: validClaimData.work_scope, + // missing required fields + }; + + const result = validator.validate(invalidClaimData); + + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length.greaterThan(0); + }); + + it("should validate array values", () => { + const invalidClaimData = { + ...validClaimData, + impact_scope: { + ...validClaimData.impact_scope, + value: "not an array", // should be an array + }, + }; + + const result = validator.validate(invalidClaimData); + + expect(result.isValid).to.be.false; + expect(result.errors[0].field).to.include("impact_scope"); + }); +}); From 4e2bfd71480f98a91a3f9bfea296dbebe4ccf587 Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Sun, 26 Jan 2025 22:42:23 +0100 Subject: [PATCH 2/3] feat(validator): add property validator Add property validator that also specifically checks for geoJSONs. The pattern should be scalable to multiple specific types of properties. Updated the metadata validator to validate properties in metadata when applicable. 
Add and update test suite
---
 src/resources/schema/metadata.json            |  24 ++-
 src/types/metadata.d.ts                       |  17 +-
 src/validator/ValidatorFactory.ts             |   5 +
 src/validator/validators/MetadataValidator.ts |  27 +++
 src/validator/validators/PropertyValidator.ts | 100 ++++++++++
 .../validators/MetadataValidator.test.ts      | 171 +++++++++++++++---
 .../validators/PropertyValidator.test.ts      | 138 ++++++++++++++
 7 files changed, 445 insertions(+), 37 deletions(-)
 create mode 100644 src/validator/validators/PropertyValidator.ts
 create mode 100644 test/validator/validators/PropertyValidator.test.ts

diff --git a/src/resources/schema/metadata.json b/src/resources/schema/metadata.json
index f1a6d96..7884d69 100644
--- a/src/resources/schema/metadata.json
+++ b/src/resources/schema/metadata.json
@@ -36,14 +36,26 @@
       "type": "array",
       "items": {
         "type": "object",
-        "properties": {
-          "trait_type": {
-            "type": "string"
+        "oneOf": [
+          {
+            "properties": {
+              "trait_type": { "type": "string" },
+              "value": { "type": "string" }
+            },
+            "required": ["trait_type", "value"],
+            "additionalProperties": false
           },
-          "value": {
-            "type": "string"
+          {
+            "properties": {
+              "trait_type": { "type": "string" },
+              "type": { "type": "string" },
+              "src": { "type": "string" },
+              "name": { "type": "string" }
+            },
+            "required": ["trait_type", "type", "src", "name"],
+            "additionalProperties": false
           }
-        }
+        ]
       }
     },
     "hypercert": {
diff --git a/src/types/metadata.d.ts b/src/types/metadata.d.ts
index 295ca8b..9a67a57 100644
--- a/src/types/metadata.d.ts
+++ b/src/types/metadata.d.ts
@@ -37,11 +37,18 @@ export interface HypercertMetadata {
    * A CID pointer to the merke tree proof json on ipfs
    */
   allowList?: string;
-  properties?: {
-    trait_type?: string;
-    value?: string;
-    [k: string]: unknown;
-  }[];
+  properties?: (
+    | {
+        trait_type: string;
+        value: string;
+      }
+    | {
+        trait_type: string;
+        type: string;
+        src: string;
+        name: string;
+      }
+  )[];
   hypercert?: HypercertClaimdata;
 }
 /**
diff --git a/src/validator/ValidatorFactory.ts b/src/validator/ValidatorFactory.ts
index 2437615..309ebe2 100644
--- a/src/validator/ValidatorFactory.ts
+++ b/src/validator/ValidatorFactory.ts
@@ -4,6 +4,7 @@ import { MerkleProofData, MerkleProofValidator } from "./validators/MerkleProofV
 import { MetadataValidator, ClaimDataValidator } from "./validators/MetadataValidator";
 import { AllowlistValidator } from "./validators/AllowListValidator";
 import { AllowlistValidationParams } from "./validators/AllowListValidator";
+import { PropertyValidator, PropertyValue } from "./validators/PropertyValidator";

 export class ValidatorFactory {
   static createMetadataValidator(): IValidator<HypercertMetadata> {
@@ -21,4 +22,8 @@ export class ValidatorFactory {
   static createMerkleProofValidator(): IValidator<MerkleProofData> {
     return new MerkleProofValidator();
   }
+
+  static createPropertyValidator(): IValidator<PropertyValue> {
+    return new PropertyValidator();
+  }
 }
diff --git a/src/validator/validators/MetadataValidator.ts b/src/validator/validators/MetadataValidator.ts
index d6025ad..d534ccb 100644
--- a/src/validator/validators/MetadataValidator.ts
+++ b/src/validator/validators/MetadataValidator.ts
@@ -2,10 +2,37 @@ import { HypercertClaimdata, HypercertMetadata } from "src/types/metadata";
 import { SchemaValidator } from "../base/SchemaValidator";
 import claimDataSchema from "../../resources/schema/claimdata.json";
 import metaDataSchema from "../../resources/schema/metadata.json";
+import { PropertyValidator } from "./PropertyValidator";

 export class MetadataValidator extends SchemaValidator<HypercertMetadata> {
+  private propertyValidator: PropertyValidator;
+
   constructor() {
     super(metaDataSchema, [claimDataSchema]);
+    this.propertyValidator = new PropertyValidator();
   }
+
+  validate(data: unknown) {
+    const result = super.validate(data);
+    const errors = [...(result.errors || [])];
+
+    if (data) {
+      const metadata = data as HypercertMetadata;
+      if (metadata.properties?.length) {
+        const propertyErrors = metadata.properties
+          .map((property) => this.propertyValidator.validate(property))
+          .filter((result) => !result.isValid)
+          .flatMap((result) => result.errors);
+
+        errors.push(...propertyErrors);
+      }
+    }
+
+    return {
+      isValid: errors.length === 0,
+      data: errors.length === 0 ? result.data : undefined,
+      errors,
+    };
+  }
 }
diff --git a/src/validator/validators/PropertyValidator.ts b/src/validator/validators/PropertyValidator.ts
new file mode 100644
index 0000000..2c1c3ce
--- /dev/null
+++ b/src/validator/validators/PropertyValidator.ts
@@ -0,0 +1,100 @@
+import { ValidationError } from "../interfaces";
+import { SchemaValidator } from "../base/SchemaValidator";
+import { HypercertMetadata } from "src/types";
+import metaDataSchema from "../../resources/schema/metadata.json";
+
+export type PropertyValues = HypercertMetadata["properties"];
+type PropertyValue = NonNullable<PropertyValues>[number];
+
+interface PropertyValidationStrategy {
+  validate(property: NonNullable<PropertyValue>): ValidationError[];
+}
+
+interface GeoJSONProperty {
+  trait_type: string;
+  type: string;
+  src: string;
+  name: string;
+}
+
+class GeoJSONValidationStrategy implements PropertyValidationStrategy {
+  private readonly MIME_TYPE = "applications/geo+json";
+
+  validate(property: NonNullable<PropertyValue>): ValidationError[] {
+    if (!this.isGeoJSONProperty(property)) {
+      return [
+        {
+          field: "type",
+          code: "missing_type",
+          message: "GeoJSON property must have type field",
+        },
+      ];
+    }
+
+    const errors: ValidationError[] = [];
+
+    if (property.type !== this.MIME_TYPE) {
+      errors.push({
+        field: "type",
+        code: "invalid_mime_type",
+        message: `GeoJSON type must be ${this.MIME_TYPE}`,
+      });
+    }
+
+    if (!property.src?.startsWith("ipfs://") && !property.src?.startsWith("https://")) {
+      errors.push({
+        field: "src",
+        code: "invalid_url",
+        message: "GeoJSON src must start with ipfs:// or https://",
+      });
+    }
+
+    if (!property.name?.endsWith(".geojson")) {
+      errors.push({
+        field: "name",
+        code: "invalid_file_extension",
+        message: "GeoJSON name must end with .geojson",
+      });
+    }
+
+    return errors;
+  }
+
+  private isGeoJSONProperty(property: any): property is GeoJSONProperty {
+    return "type" in property && "src" in property && "name" in property;
+  }
+}
+
+export class PropertyValidator extends SchemaValidator<PropertyValue> {
+  private readonly validationStrategies: Record<string, PropertyValidationStrategy> = {
+    geoJSON: new GeoJSONValidationStrategy(),
+  };
+
+  constructor() {
+    super(metaDataSchema.properties.properties.items);
+  }
+
+  validate(data: unknown) {
+    const result = super.validate(data);
+
+    if (!result.isValid || !result.data) {
+      return result;
+    }
+
+    const property = result.data as NonNullable<PropertyValue>;
+    const strategy = this.validationStrategies[property.trait_type];
+
+    if (strategy) {
+      const errors = strategy.validate(property);
+      if (errors.length > 0) {
+        return {
+          isValid: false,
+          data: undefined,
+          errors,
+        };
+      }
+    }
+
+    return result;
+  }
+}
diff --git a/test/validator/validators/MetadataValidator.test.ts b/test/validator/validators/MetadataValidator.test.ts
index f188265..c23887f 100644
--- a/test/validator/validators/MetadataValidator.test.ts
+++ b/test/validator/validators/MetadataValidator.test.ts
@@ -43,41
+43,160 @@ describe("MetadataValidator", () => { hypercert: validClaimData, }; - it("should validate valid metadata", () => { - const result = validator.validate(validMetadata); + describe("Basic Metadata Validation", () => { + it("should validate valid metadata", () => { + const result = validator.validate(validMetadata); + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(validMetadata); + expect(result.errors).to.be.empty; + }); - expect(result.isValid).to.be.true; - expect(result.data).to.deep.equal(validMetadata); - expect(result.errors).to.be.empty; + it("should validate required fields", () => { + const invalidMetadata = { + description: "Test Description", + image: "ipfs://test", + }; + + const result = validator.validate(invalidMetadata); + expect(result.isValid).to.be.false; + expect(result.errors[0].field).to.equal("name"); + }); }); - it("should validate required fields", () => { - const invalidMetadata = { - description: "Test Description", - image: "ipfs://test", - }; + describe("Property Validation", () => { + it("should validate metadata with valid properties", () => { + const metadataWithProperties = { + ...validMetadata, + properties: [ + { + trait_type: "category", + value: "education", + }, + { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "ipfs://QmExample", + name: "location.geojson", + }, + ], + }; - const result = validator.validate(invalidMetadata); + const result = validator.validate(metadataWithProperties); + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(metadataWithProperties); + }); - expect(result.isValid).to.be.false; - expect(result.errors[0].field).to.equal("name"); + it("should reject metadata with invalid simple property", () => { + const metadataWithInvalidProperty = { + ...validMetadata, + properties: [ + { + trait_type: "category", + // missing required 'value' field + }, + ], + }; + + const result = validator.validate(metadataWithInvalidProperty); + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length.greaterThan(0); + }); + + it("should reject metadata with invalid geoJSON property", () => { + const metadataWithInvalidGeoJSON = { + ...validMetadata, + properties: [ + { + trait_type: "geoJSON", + type: "wrong/type", + src: "invalid://QmExample", + name: "location.wrong", + }, + ], + }; + + const result = validator.validate(metadataWithInvalidGeoJSON); + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length(3); // MIME type, URL, and file extension errors + }); + + it("should collect all property validation errors", () => { + const metadataWithMultipleInvalidProperties = { + ...validMetadata, + properties: [ + { + trait_type: "category", + // missing value + }, + { + trait_type: "geoJSON", + type: "wrong/type", + src: "invalid://QmExample", + name: "location.wrong", + }, + ], + }; + + const result = validator.validate(metadataWithMultipleInvalidProperties); + expect(result.isValid).to.be.false; + expect(result.errors.length).to.be.greaterThan(3); // Schema error plus GeoJSON errors + }); + + it("should handle empty properties array", () => { + const metadataWithEmptyProperties = { + ...validMetadata, + properties: [], + }; + + const result = validator.validate(metadataWithEmptyProperties); + expect(result.isValid).to.be.true; + }); }); - it("should validate nested claim data", () => { - const invalidMetadata = { - ...validMetadata, - hypercert: { - ...validClaimData, - impact_scope: undefined, - }, - }; + describe("Combined Validation", () => { + 
it("should validate metadata with both valid properties and claim data", () => { + const completeMetadata = { + ...validMetadata, + properties: [ + { + trait_type: "category", + value: "education", + }, + { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "ipfs://QmExample", + name: "location.geojson", + }, + ], + }; - const result = validator.validate(invalidMetadata); + const result = validator.validate(completeMetadata); + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(completeMetadata); + }); - expect(result.isValid).to.be.false; - expect(result.errors[0].field).to.equal("/hypercert"); - // or if we want to check the specific error message: - expect(result.errors[0].message).to.include("impact_scope"); + it("should collect errors from both metadata and property validation", () => { + const invalidMetadata = { + description: "Test Description", // missing required name + image: "ipfs://test", + properties: [ + { + trait_type: "geoJSON", + type: "wrong/type", + src: "invalid://QmExample", + name: "location.wrong", + }, + ], + }; + + const result = validator.validate(invalidMetadata); + + console.log(result.errors); + + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length.greaterThan(3); // Schema errors plus property errors + }); }); }); diff --git a/test/validator/validators/PropertyValidator.test.ts b/test/validator/validators/PropertyValidator.test.ts new file mode 100644 index 0000000..8d7688b --- /dev/null +++ b/test/validator/validators/PropertyValidator.test.ts @@ -0,0 +1,138 @@ +import { expect } from "chai"; +import { describe, it } from "vitest"; +import { PropertyValidator } from "../../../src/validator/validators/PropertyValidator"; + +describe("PropertyValidator", () => { + const validator = new PropertyValidator(); + + describe("Basic Property Validation", () => { + it("should validate a simple property with trait_type and value", () => { + const property = { + trait_type: "category", + value: "education", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(property); + }); + + it("should reject property with missing required fields", () => { + const property = { + trait_type: "category", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length.greaterThan(0); + }); + }); + + describe("GeoJSON Property Validation", () => { + it("should validate a valid geoJSON property", () => { + const property = { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "ipfs://QmExample", + name: "location.geojson", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.true; + expect(result.data).to.deep.equal(property); + }); + + it("should accept HTTPS source", () => { + const property = { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "https://example.com/location.geojson", + name: "location.geojson", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.true; + }); + + it("should reject invalid MIME type", () => { + const property = { + trait_type: "geoJSON", + type: "wrong/type", + src: "ipfs://QmExample", + name: "location.geojson", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.false; + expect(result.errors).to.deep.include({ + field: "type", + code: "invalid_mime_type", + message: "GeoJSON type must be applications/geo+json", + }); + }); + + 
it("should reject invalid source URL", () => { + const property = { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "invalid://QmExample", + name: "location.geojson", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.false; + expect(result.errors).to.deep.include({ + field: "src", + code: "invalid_url", + message: "GeoJSON src must start with ipfs:// or https://", + }); + }); + + it("should reject invalid file extension", () => { + const property = { + trait_type: "geoJSON", + type: "applications/geo+json", + src: "ipfs://QmExample", + name: "location.wrong", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.false; + expect(result.errors).to.deep.include({ + field: "name", + code: "invalid_file_extension", + message: "GeoJSON name must end with .geojson", + }); + }); + + it("should collect multiple validation errors", () => { + const property = { + trait_type: "geoJSON", + type: "wrong/type", + src: "invalid://QmExample", + name: "location.wrong", + }; + + const result = validator.validate(property); + expect(result.isValid).to.be.false; + expect(result.errors).to.have.length(3); + }); + }); + + describe("Edge Cases", () => { + it("should handle undefined input", () => { + const result = validator.validate(undefined); + expect(result.isValid).to.be.false; + }); + + it("should handle null input", () => { + const result = validator.validate(null); + expect(result.isValid).to.be.false; + }); + + it("should handle empty object", () => { + const result = validator.validate({}); + expect(result.isValid).to.be.false; + }); + }); +}); From f205913a9bf751b2a4f3d12900416c2d638f1eee Mon Sep 17 00:00:00 2001 From: bitbeckers Date: Sun, 26 Jan 2025 22:44:44 +0100 Subject: [PATCH 3/3] chore(coverage): bump coverage thresholds Updates thresholds to represent current coverage --- vitest.config.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vitest.config.ts b/vitest.config.ts index 03bb5ce..9cd55fe 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -10,10 +10,10 @@ export default defineConfig({ // If you want a coverage reports even if your tests are failing, include the reportOnFailure option reportOnFailure: true, thresholds: { - lines: 67, - branches: 80, - functions: 62, - statements: 67, + lines: 72, + branches: 84, + functions: 66, + statements: 72, }, include: ["src/**/*.ts"], exclude: [