diff --git a/.devrev/repo.yml b/.devrev/repo.yml new file mode 100644 index 0000000..af3e7a6 --- /dev/null +++ b/.devrev/repo.yml @@ -0,0 +1 @@ +deployable: true \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..5fe39fa --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +# Code owners (if set required to merge PR) +* @gasperzgonec @radovanjorgic @devrev/airdrop \ No newline at end of file diff --git a/build/jest.config.js b/build/jest.config.js index eea6dac..91a2d2c 100644 --- a/build/jest.config.js +++ b/build/jest.config.js @@ -1,7 +1,4 @@ module.exports = { preset: 'ts-jest', testEnvironment: 'node', - testTimeout: 60000, // Increase default timeout to 60 seconds - resetMocks: false, // Don't reset mocks between tests to maintain mock behavior - testPathIgnorePatterns: ['/node_modules/', '/dist/'] }; \ No newline at end of file diff --git a/build/package-lock.json b/build/package-lock.json deleted file mode 100644 index fbf6aab..0000000 --- a/build/package-lock.json +++ /dev/null @@ -1,10168 +0,0 @@ -{ - "name": "airdrop-snap-in", - "version": "1.1.6", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "airdrop-snap-in", - "version": "1.1.6", - "license": "ISC", - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "@devrev/typescript-sdk": "1.1.63", - "axios": "^1.9.0", - "dotenv": "^16.0.3", - "js-jsonl": "^1.1.1", - "yargs": "^17.6.2" - }, - "devDependencies": { - "@babel/core": "^7.26.10", - "@babel/preset-env": "^7.20.2", - "@babel/preset-typescript": "^7.18.6", - "@types/body-parser": "^1.19.5", - "@types/express": "^4.17.21", - "@types/jest": "^29.4.0", - "@types/node": "^18.13.0", - "@types/yargs": "^17.0.24", - "@typescript-eslint/eslint-plugin": "^8.32.0", - "@typescript-eslint/parser": "^8.32.0", - "babel-jest": "^29.4.2", - "body-parser": "^1.20.3", - "dotenv": "^16.0.3", - "eslint": "^9.26.0", - "eslint-config-prettier": "^9.0.0", - 
"eslint-plugin-import": "^2.28.1", - "eslint-plugin-prettier": "4.0.0", - "eslint-plugin-simple-import-sort": "^10.0.0", - "eslint-plugin-sort-keys-fix": "^1.1.2", - "eslint-plugin-unused-imports": "^4.1.4", - "express": "^4.21.0", - "jest": "^29.4.2", - "nodemon": "^3.0.3", - "prettier": "^2.8.3", - "prettier-plugin-organize-imports": "^3.2.2", - "rimraf": "^4.1.2", - "ts-jest": "^29.0.5", - "ts-node": "^10.9.1", - "typescript": "^4.9.5", - "yargs": "^17.6.2" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz", - "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.0.tgz", - "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==", - "dev": true, - 
"license": "MIT", - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.0", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.27.3", - "@babel/helpers": "^7.27.6", - "@babel/parser": "^7.28.0", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.0", - "@babel/types": "^7.28.0", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/generator": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz", - "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.28.0", - "@babel/types": "^7.28.0", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz", - "integrity": "sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.27.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.27.1.tgz", - "integrity": "sha512-uVDC72XVf8UbrH5qQTc18Agb8emwjTiZrQE11Nv3CuBEZmVvTwwE9CBUEvHku06gQCAyYf8Nv6ja1IN+6LMbxQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "regexpu-core": "^6.2.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", - "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "debug": "^4.4.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.22.10" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz", - "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", - "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - 
"@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", - "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.27.1.tgz", - "integrity": "sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-wrap-function": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", - "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", - "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.27.1.tgz", - "integrity": "sha512-NFJK2sHUvrjo8wAU/nQTWU890/zB2jj0qBcCbZbbf+005cAsv6tMjXz31fBign6M5ov1o0Bllu+9nbqkfsjjJQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.1", - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.27.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz", - "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.27.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", - "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.0" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.27.1.tgz", - "integrity": "sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.27.1.tgz", - "integrity": "sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.27.1.tgz", - "integrity": "sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.27.1.tgz", - "integrity": "sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.13.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.27.1.tgz", - "integrity": "sha512-6BpaYGDavZqkI6yT+KSPdpZFfpnd68UKXbcjI9pJ13pvHhPrCKWOOLp+ysvMeA+DxnhuPpgIaRpxRxo5A9t5jw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", - "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-async-generators": { - "version": "7.8.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", - "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-bigint": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", - "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-syntax-class-properties": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", - "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.12.13" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-static-block": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", - "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz", - "integrity": "sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", - "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": 
">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-meta": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", - "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-json-strings": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", - "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", - "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-logical-assignment-operators": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", - "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": 
"^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", - "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-numeric-separator": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", - "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-object-rest-spread": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", - "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-catch-binding": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", - "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - 
} - }, - "node_modules/@babel/plugin-syntax-optional-chaining": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", - "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-private-property-in-object": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", - "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-top-level-await": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", - "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", - "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": 
">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-unicode-sets-regex": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", - "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", - "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.28.0.tgz", - "integrity": "sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-remap-async-to-generator": "^7.27.1", - "@babel/traverse": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.27.1.tgz", - "integrity": "sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-remap-async-to-generator": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz", - "integrity": "sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.0.tgz", - "integrity": "sha512-gKKnwjpdx5sER/wl0WN0efUBFzF/56YZO0RJrSYP4CljXnP31ByY7fol89AzomdlLNzI36AvOTmYHsnZTCkq8Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.27.1.tgz", - "integrity": "sha512-D0VcalChDMtuRvJIu3U/fwWjf8ZMykz5iZsg77Nuj821vCKI3zCyRLwRdWbsuJ/uRwZhZ002QtCqIkwC/ZkvbA==", - "dev": true, - "license": "MIT", - "dependencies": { 
- "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.27.1.tgz", - "integrity": "sha512-s734HmYU78MVzZ++joYM+NkJusItbdRcbm+AGRgJCt3iA+yux0QpD9cBVdz3tKyrjVYWRl7j0mHSmv4lhV0aoA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0" - } - }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.0.tgz", - "integrity": "sha512-IjM1IoJNw72AZFlj33Cu8X0q2XK/6AaVC3jQu+cgQ5lThWD5ajnuUAml80dqRmOhmPkTH8uAwnpMu9Rvj0LTRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-globals": "^7.28.0", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/traverse": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz", - "integrity": "sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/template": "^7.27.1" - }, - 
"engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.0.tgz", - "integrity": "sha512-v1nrSMBiKcodhsyJ4Gf+Z0U/yawmJDBOTpEB3mcQY52r9RIyPneGyAS/yM6seP/8I+mWI3elOMtT5dB8GJVs+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.27.1.tgz", - "integrity": "sha512-gEbkDVGRvjj7+T1ivxrfgygpT7GUd4vmODtYpbs0gZATdkX8/iSnOtZSxiZnsgm1YjTgjI6VKBGSJJevkrclzw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.27.1.tgz", - "integrity": "sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.27.1.tgz", - "integrity": "sha512-hkGcueTEzuhB30B3eJCbCYeCaaEQOmQR0AdvzpD4LoN0GXMWzzGSuRrxR2xTnCrvNbVwK9N6/jQ92GSLfiZWoQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.27.1.tgz", - "integrity": "sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-explicit-resource-management": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.0.tgz", - "integrity": "sha512-K8nhUcn3f6iB+P3gwCv/no7OdzOZQcKchW6N389V6PD8NUWKZHzndOd9sPDVbMoBsbmjMqlB4L9fm+fEFNVlwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.27.1.tgz", - "integrity": 
"sha512-uspvXnhHvGKf2r4VVtBpeFnuDWsJLQ6MF6lGJLC89jBR1uoVeqM416AZtTuhTezOfgHicpJQmoD5YUakO/YmXQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.27.1.tgz", - "integrity": "sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz", - "integrity": "sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz", - "integrity": "sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - 
"@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.27.1.tgz", - "integrity": "sha512-6WVLVJiTjqcQauBhn1LkICsR2H+zm62I3h9faTDKt1qP4jn2o72tSvqMwtGFKGTpojce0gJs+76eZ2uCHRZh0Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz", - "integrity": "sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.27.1.tgz", - "integrity": "sha512-SJvDs5dXxiae4FbSL1aBJlG4wvl594N6YEVVn9e3JGulwioy6z3oPjx/sQBO3Y4NwUu5HNix6KJ3wBZoewcdbw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz", - "integrity": "sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==", - 
"dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.27.1.tgz", - "integrity": "sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz", - "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.27.1.tgz", - "integrity": "sha512-w5N1XzsRbc0PQStASMksmUeqECuzKuTJer7kFagK8AXgpCMkeDMO5S+aaFb7A51ZYDF7XI34qsTX+fkHiIm5yA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": 
"^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.27.1.tgz", - "integrity": "sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.27.1.tgz", - "integrity": "sha512-SstR5JYy8ddZvD6MhV0tM/j16Qds4mIpJTOd1Yu9J9pJjH93bxHECF7pgtc28XvkzTD6Pxcm/0Z73Hvk7kb3Ng==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.27.1.tgz", - "integrity": "sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.27.1.tgz", - "integrity": 
"sha512-aGZh6xMo6q9vq1JGcw58lZ1Z0+i0xB2x0XaauNIUXd6O1xXc3RwoWEBlsTQrY4KQ9Jf0s5rgD6SiNkaUdJegTA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.27.1.tgz", - "integrity": "sha512-fdPKAcujuvEChxDBJ5c+0BTaS6revLV7CJL08e4m3de8qJfNIuCc2nc7XJYOjBoTMJeqSmwXJ0ypE14RCjLwaw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.0.tgz", - "integrity": "sha512-9VNGikXxzu5eCiQjdE4IZn8sb9q7Xsk5EXLDBKUYg1e/Tve8/05+KJEtcxGxAgCY5t/BpKQM+JEL/yT4tvgiUA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.28.0", - "@babel/plugin-transform-parameters": "^7.27.7", - "@babel/traverse": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz", - "integrity": "sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - 
"@babel/helper-replace-supers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.27.1.tgz", - "integrity": "sha512-txEAEKzYrHEX4xSZN4kJ+OfKXFVSWKB2ZxM9dpcE3wT7smwkNmXo5ORRlVzMVdJbD+Q8ILTgSD7959uj+3Dm3Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.27.1.tgz", - "integrity": "sha512-BQmKPPIuc8EkZgNKsv0X4bPmOoayeu4F1YCwx2/CfmDSXDbp7GnzlUH+/ul5VGfRg1AoFPsrIThlEBj2xb4CAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.27.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.7.tgz", - "integrity": "sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.27.1.tgz", - "integrity": "sha512-10FVt+X55AjRAYI9BrdISN9/AQWHqldOeZDUoLyif1Kn05a56xVBXb8ZouL8pZ9jem8QpXaOt8TS7RHUIS+GPA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.27.1.tgz", - "integrity": "sha512-5J+IhqTi1XPa0DXF83jYOaARrX+41gOewWbkPyjMNRDqgOCqdffGh8L3f/Ek5utaEBZExjSAzcyjmV9SSAWObQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz", - "integrity": "sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.28.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.1.tgz", - "integrity": 
"sha512-P0QiV/taaa3kXpLY+sXla5zec4E+4t4Aqc9ggHlfZ7a2cp8/x/Gv08jfwEtn9gnnYIMvHx6aoOZ8XJL8eU71Dg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regexp-modifiers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.27.1.tgz", - "integrity": "sha512-TtEciroaiODtXvLZv4rmfMhkCv8jx3wgKpL68PuiPh2M4fvz5jhsA7697N1gMvkvr/JTF13DrFYyEbY9U7cVPA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.27.1.tgz", - "integrity": "sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz", - "integrity": "sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-spread": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz", - "integrity": "sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.27.1.tgz", - "integrity": "sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz", - "integrity": "sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.27.1.tgz", - "integrity": "sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==", - "dev": true, - "license": "MIT", - "dependencies": 
{ - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typescript": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.0.tgz", - "integrity": "sha512-4AEiDEBPIZvLQaWlc9liCavE0xRM0dNca41WtBeM3jgFptfUOSG9z0uteLhq6+3rq+WB6jIvUwKDTpXEHPJ2Vg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-syntax-typescript": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.27.1.tgz", - "integrity": "sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.27.1.tgz", - "integrity": "sha512-uW20S39PnaTImxp39O5qFlHLS9LJEmANjMG7SxIhap8rCHqu0Ik+tLEPX5DKmHn6CsWQ7j3lix2tFOa5YtL12Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - 
"@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.27.1.tgz", - "integrity": "sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.27.1.tgz", - "integrity": "sha512-EtkOujbc4cgvb0mlpQefi4NTPBzhSIevblFevACNLUspmrALgmEBdL/XfnyyITfd8fKBZrZys92zOWcik7j9Tw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/preset-env": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.28.0.tgz", - "integrity": "sha512-VmaxeGOwuDqzLl5JUkIRM1X2Qu2uKGxHEQWh+cvvbl7JuJRgKGJSfsEF/bUaxFhJl/XAyxBe7q7qSuTbKFuCyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.28.0", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.27.1", - "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1", - 
"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.27.1", - "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-import-assertions": "^7.27.1", - "@babel/plugin-syntax-import-attributes": "^7.27.1", - "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.27.1", - "@babel/plugin-transform-async-generator-functions": "^7.28.0", - "@babel/plugin-transform-async-to-generator": "^7.27.1", - "@babel/plugin-transform-block-scoped-functions": "^7.27.1", - "@babel/plugin-transform-block-scoping": "^7.28.0", - "@babel/plugin-transform-class-properties": "^7.27.1", - "@babel/plugin-transform-class-static-block": "^7.27.1", - "@babel/plugin-transform-classes": "^7.28.0", - "@babel/plugin-transform-computed-properties": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.28.0", - "@babel/plugin-transform-dotall-regex": "^7.27.1", - "@babel/plugin-transform-duplicate-keys": "^7.27.1", - "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.27.1", - "@babel/plugin-transform-dynamic-import": "^7.27.1", - "@babel/plugin-transform-explicit-resource-management": "^7.28.0", - "@babel/plugin-transform-exponentiation-operator": "^7.27.1", - "@babel/plugin-transform-export-namespace-from": "^7.27.1", - "@babel/plugin-transform-for-of": "^7.27.1", - "@babel/plugin-transform-function-name": "^7.27.1", - "@babel/plugin-transform-json-strings": "^7.27.1", - "@babel/plugin-transform-literals": "^7.27.1", - "@babel/plugin-transform-logical-assignment-operators": "^7.27.1", - "@babel/plugin-transform-member-expression-literals": "^7.27.1", - "@babel/plugin-transform-modules-amd": "^7.27.1", - "@babel/plugin-transform-modules-commonjs": "^7.27.1", - "@babel/plugin-transform-modules-systemjs": "^7.27.1", - "@babel/plugin-transform-modules-umd": "^7.27.1", - 
"@babel/plugin-transform-named-capturing-groups-regex": "^7.27.1", - "@babel/plugin-transform-new-target": "^7.27.1", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.27.1", - "@babel/plugin-transform-numeric-separator": "^7.27.1", - "@babel/plugin-transform-object-rest-spread": "^7.28.0", - "@babel/plugin-transform-object-super": "^7.27.1", - "@babel/plugin-transform-optional-catch-binding": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1", - "@babel/plugin-transform-parameters": "^7.27.7", - "@babel/plugin-transform-private-methods": "^7.27.1", - "@babel/plugin-transform-private-property-in-object": "^7.27.1", - "@babel/plugin-transform-property-literals": "^7.27.1", - "@babel/plugin-transform-regenerator": "^7.28.0", - "@babel/plugin-transform-regexp-modifiers": "^7.27.1", - "@babel/plugin-transform-reserved-words": "^7.27.1", - "@babel/plugin-transform-shorthand-properties": "^7.27.1", - "@babel/plugin-transform-spread": "^7.27.1", - "@babel/plugin-transform-sticky-regex": "^7.27.1", - "@babel/plugin-transform-template-literals": "^7.27.1", - "@babel/plugin-transform-typeof-symbol": "^7.27.1", - "@babel/plugin-transform-unicode-escapes": "^7.27.1", - "@babel/plugin-transform-unicode-property-regex": "^7.27.1", - "@babel/plugin-transform-unicode-regex": "^7.27.1", - "@babel/plugin-transform-unicode-sets-regex": "^7.27.1", - "@babel/preset-modules": "0.1.6-no-external-plugins", - "babel-plugin-polyfill-corejs2": "^0.4.14", - "babel-plugin-polyfill-corejs3": "^0.13.0", - "babel-plugin-polyfill-regenerator": "^0.6.5", - "core-js-compat": "^3.43.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-modules": { - "version": "0.1.6-no-external-plugins", - "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", - "integrity": 
"sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/types": "^7.4.4", - "esutils": "^2.0.2" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/preset-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz", - "integrity": "sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-syntax-jsx": "^7.27.1", - "@babel/plugin-transform-modules-commonjs": "^7.27.1", - "@babel/plugin-transform-typescript": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.0.tgz", - "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.0", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.0", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.0", - "debug": "^4.3.1" - }, - 
"engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.28.1", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.1.tgz", - "integrity": "sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@bcoe/v8-coverage": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" - } - }, - "node_modules/@devrev/ts-adaas": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@devrev/ts-adaas/-/ts-adaas-1.5.1.tgz", - "integrity": "sha512-0s5Xt1BKzgB/LYjqWatfsKuFM3fhvrt2MAbu7Kcq4mK89o8925abMVAuyQ1B6ebCz9cmh/PVNx3Alk/Pd/PXWQ==", - "license": "ISC", - "dependencies": { - "@devrev/typescript-sdk": "^1.1.59", - "axios": 
"^1.9.0", - "axios-retry": "^4.5.0", - "form-data": "^4.0.1", - "js-jsonl": "^1.1.1", - "lambda-log": "^3.1.0", - "ts-node": "^10.9.2", - "yargs": "^17.7.2" - } - }, - "node_modules/@devrev/typescript-sdk": { - "version": "1.1.63", - "resolved": "https://registry.npmjs.org/@devrev/typescript-sdk/-/typescript-sdk-1.1.63.tgz", - "integrity": "sha512-n53xSg9hRZMiM3VqvnBumxKU2hB60ibmY/LzfjnAqEM1OvOWt9lbAFQJ/URSvoLOPZPpc7EWQxsXDFErxUJqUA==", - "license": "MIT", - "dependencies": { - "@types/yargs": "^17.0.22", - "axios": "^1.9.0", - "dotenv": "^16.0.3", - "protobufjs": "^7.3.0", - "yargs": "^17.6.2" - } - }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", - "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", - "dev": true, - "license": "MIT", - "dependencies": { - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" - } - }, - "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/config-array": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", - "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@eslint/object-schema": "^2.1.6", - "debug": "^4.3.1", - "minimatch": "^3.1.2" - }, - 
"engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/config-array/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/@eslint/config-helpers": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", - "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/core": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", - "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@types/json-schema": "^7.0.15" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", - "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": 
"^6.12.4", - "debug": "^4.3.2", - "espree": "^10.0.1", - "globals": "^14.0.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@eslint/eslintrc/node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true, - "license": "Python-2.0" - }, - "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/@eslint/eslintrc/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/@eslint/eslintrc/node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/@eslint/eslintrc/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/@eslint/js": { - "version": "9.31.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.31.0.tgz", - "integrity": "sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://eslint.org/donate" - } - }, - "node_modules/@eslint/object-schema": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", - "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@eslint/plugin-kit": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz", - "integrity": "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@eslint/core": "^0.15.1", - "levn": "^0.4.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - } - }, - "node_modules/@humanfs/core": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", - "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18.18.0" - } - }, - "node_modules/@humanfs/node": { - "version": "0.16.6", - "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", - "integrity": 
"sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@humanfs/core": "^0.19.1", - "@humanwhocodes/retry": "^0.3.0" - }, - "engines": { - "node": ">=18.18.0" - } - }, - "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", - "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/retry": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", - "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=18.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@istanbuljs/load-nyc-config": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", - "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "camelcase": "^5.3.1", - 
"find-up": "^4.1.0", - "get-package-type": "^0.1.0", - "js-yaml": "^3.13.1", - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", - "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jest/console": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", - "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/core": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", - "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/reporters": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-changed-files": "^29.7.0", - "jest-config": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-resolve-dependencies": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "jest-watcher": 
"^29.7.0", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/@jest/environment": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", - "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "expect": "^29.7.0", - "jest-snapshot": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/expect-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", - "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-get-type": "^29.6.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/fake-timers": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", - "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", - "dev": true, - "license": "MIT", - 
"dependencies": { - "@jest/types": "^29.6.3", - "@sinonjs/fake-timers": "^10.0.2", - "@types/node": "*", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/globals": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", - "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/types": "^29.6.3", - "jest-mock": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/reporters": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", - "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "@types/node": "*", - "chalk": "^4.0.0", - "collect-v8-coverage": "^1.0.0", - "exit": "^0.1.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "istanbul-lib-coverage": "^3.0.0", - "istanbul-lib-instrument": "^6.0.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.0", - "istanbul-reports": "^3.1.3", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "jest-worker": "^29.7.0", - "slash": "^3.0.0", - "string-length": "^4.0.1", - "strip-ansi": "^6.0.0", - "v8-to-istanbul": "^9.0.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - 
"optional": true - } - } - }, - "node_modules/@jest/reporters/node_modules/istanbul-lib-instrument": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", - "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.23.9", - "@babel/parser": "^7.23.9", - "@istanbuljs/schema": "^0.1.3", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@jest/reporters/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sinclair/typebox": "^0.27.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/source-map": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", - "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "callsites": "^3.0.0", - "graceful-fs": "^4.2.9" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-result": { - "version": "29.7.0", - "resolved": 
"https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", - "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "collect-v8-coverage": "^1.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/test-sequencer": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", - "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/test-result": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/transform": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", - "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/types": "^29.6.3", - "@jridgewell/trace-mapping": "^0.3.18", - "babel-plugin-istanbul": "^6.1.1", - "chalk": "^4.0.0", - "convert-source-map": "^2.0.0", - "fast-json-stable-stringify": "^2.1.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - "micromatch": "^4.0.4", - "pirates": "^4.0.4", - "slash": "^3.0.0", - "write-file-atomic": "^4.0.2" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/types": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", - "integrity": 
"sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.12", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", - "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", - "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.29", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", - "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": 
"https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@protobufjs/aspromise": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/base64": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/codegen": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", - "license": "BSD-3-Clause" - }, - 
"node_modules/@protobufjs/eventemitter": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/fetch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.1", - "@protobufjs/inquire": "^1.1.0" - } - }, - "node_modules/@protobufjs/float": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/inquire": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/path": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/pool": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/utf8": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": 
"sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", - "license": "BSD-3-Clause" - }, - "node_modules/@rtsao/scc": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", - "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", - "dev": true, - "license": "MIT" - }, - "node_modules/@sinclair/typebox": { - "version": "0.27.8", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", - "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - }, - "node_modules/@sinonjs/commons": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", - "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "type-detect": "4.0.8" - } - }, - "node_modules/@sinonjs/fake-timers": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", - "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@sinonjs/commons": "^3.0.0" - } - }, - "node_modules/@tsconfig/node10": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", - "integrity": 
"sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", - "license": "MIT" - }, - "node_modules/@tsconfig/node12": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", - "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "license": "MIT" - }, - "node_modules/@tsconfig/node14": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", - "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "license": "MIT" - }, - "node_modules/@tsconfig/node16": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", - "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", - "license": "MIT" - }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" - } - }, - "node_modules/@types/babel__generator": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", - "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": 
"sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__traverse": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", - "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.20.7" - } - }, - "node_modules/@types/body-parser": { - "version": "1.19.6", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", - "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/express": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.23.tgz", - "integrity": "sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - 
"@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.19.6", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", - "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/graceful-fs": { - "version": "4.1.9", - "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", - "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/http-errors": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", - "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/istanbul-lib-report": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", - "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-coverage": "*" - } - }, - "node_modules/@types/istanbul-reports": { - "version": "3.0.4", - "resolved": 
"https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", - "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-report": "*" - } - }, - "node_modules/@types/jest": { - "version": "29.5.14", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", - "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" - } - }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/mime": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "18.19.119", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.119.tgz", - "integrity": "sha512-d0F6m9itIPaKnrvEMlzE48UjwZaAnFW7Jwibacw9MNdqadjKNpUm9tfJYDwmShJmgqcoqYUX3EMKO1+RWiuuNg==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@types/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", - "integrity": 
"sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/range-parser": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", - "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/send": { - "version": "0.17.5", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz", - "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.8", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz", - "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "*" - } - }, - "node_modules/@types/stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/yargs": { - "version": "17.0.33", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", - "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", - "license": "MIT", - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/@types/yargs-parser": { - "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": 
"sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "license": "MIT" - }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.37.0.tgz", - "integrity": "sha512-jsuVWeIkb6ggzB+wPCsR4e6loj+rM72ohW6IBn2C+5NCvfUVY8s33iFPySSVXqtm5Hu29Ne/9bnA0JmyLmgenA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.37.0", - "@typescript-eslint/type-utils": "8.37.0", - "@typescript-eslint/utils": "8.37.0", - "@typescript-eslint/visitor-keys": "8.37.0", - "graphemer": "^1.4.0", - "ignore": "^7.0.0", - "natural-compare": "^1.4.0", - "ts-api-utils": "^2.1.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^8.37.0", - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.37.0.tgz", - "integrity": "sha512-kVIaQE9vrN9RLCQMQ3iyRlVJpTiDUY6woHGb30JDkfJErqrQEmtdWH3gV0PBAfGZgQXoqzXOO0T3K6ioApbbAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/scope-manager": "8.37.0", - "@typescript-eslint/types": "8.37.0", - "@typescript-eslint/typescript-estree": "8.37.0", - "@typescript-eslint/visitor-keys": "8.37.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/project-service": { - "version": "8.37.0", - 
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.37.0.tgz", - "integrity": "sha512-BIUXYsbkl5A1aJDdYJCBAo8rCEbAvdquQ8AnLb6z5Lp1u3x5PNgSSx9A/zqYc++Xnr/0DVpls8iQ2cJs/izTXA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.37.0", - "@typescript-eslint/types": "^8.37.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.37.0.tgz", - "integrity": "sha512-0vGq0yiU1gbjKob2q691ybTg9JX6ShiVXAAfm2jGf3q0hdP6/BruaFjL/ManAR/lj05AvYCH+5bbVo0VtzmjOA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.37.0", - "@typescript-eslint/visitor-keys": "8.37.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.37.0.tgz", - "integrity": "sha512-1/YHvAVTimMM9mmlPvTec9NP4bobA1RkDbMydxG8omqwJJLEW/Iy2C4adsAESIXU3WGLXFHSZUU+C9EoFWl4Zg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.37.0.tgz", - "integrity": 
"sha512-SPkXWIkVZxhgwSwVq9rqj/4VFo7MnWwVaRNznfQDc/xPYHjXnPfLWn+4L6FF1cAz6e7dsqBeMawgl7QjUMj4Ow==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.37.0", - "@typescript-eslint/typescript-estree": "8.37.0", - "@typescript-eslint/utils": "8.37.0", - "debug": "^4.3.4", - "ts-api-utils": "^2.1.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.37.0.tgz", - "integrity": "sha512-ax0nv7PUF9NOVPs+lmQ7yIE7IQmAf8LGcXbMvHX5Gm+YJUYNAl340XkGnrimxZ0elXyoQJuN5sbg6C4evKA4SQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.37.0.tgz", - "integrity": "sha512-zuWDMDuzMRbQOM+bHyU4/slw27bAUEcKSKKs3hcv2aNnc/tvE/h7w60dwVw8vnal2Pub6RT1T7BI8tFZ1fE+yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/project-service": "8.37.0", - "@typescript-eslint/tsconfig-utils": "8.37.0", - "@typescript-eslint/types": "8.37.0", - "@typescript-eslint/visitor-keys": "8.37.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.1.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 
<5.9.0" - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.37.0.tgz", - "integrity": "sha512-TSFvkIW6gGjN2p6zbXo20FzCABbyUAuq6tBvNRGsKdsSQ6a7rnV6ADfZ7f4iI3lIiXc4F4WWvtUfDw9CJ9pO5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.7.0", - "@typescript-eslint/scope-manager": "8.37.0", - "@typescript-eslint/types": "8.37.0", - "@typescript-eslint/typescript-estree": "8.37.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.9.0" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.37.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.37.0.tgz", - "integrity": "sha512-YzfhzcTnZVPiLfP/oeKtDp2evwvHLMe0LOy7oe+hb9KKIumLNohYS9Hgp1ifwpu42YWxhZE8yieggz6JpqO/1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.37.0", - "eslint-visitor-keys": "^4.2.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "4.2.1", - "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/acorn": { - "version": "8.15.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/acorn-walk": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", - "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", - "license": "MIT", - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - 
"version": "4.1.3", - "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", - "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "license": "MIT" - }, - "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/array-buffer-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", - "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "is-array-buffer": "^3.0.5" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", - "dev": true, - "license": "MIT" - }, - "node_modules/array-includes": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", - "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "define-properties": "^1.2.1", - "es-abstract": "^1.24.0", - "es-object-atoms": "^1.1.1", - "get-intrinsic": "^1.3.0", - "is-string": "^1.1.1", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.findlastindex": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", - "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "es-shim-unscopables": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flat": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", - "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-shim-unscopables": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", - "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-shim-unscopables": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", - "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "array-buffer-byte-length": "^1.0.1", - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "is-array-buffer": "^3.0.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/async": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", - "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", - "dev": true, - "license": "MIT" - }, - "node_modules/async-function": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", - "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "license": "MIT" - }, - "node_modules/available-typed-arrays": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", - "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "possible-typed-array-names": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axios": { - "version": "1.10.0", - 
"resolved": "https://registry.npmjs.org/axios/-/axios-1.10.0.tgz", - "integrity": "sha512-/1xYAC4MP/HEG+3duIhFr4ZQXR4sQXOIe+o6sdqzeykGLx6Upp/1p8MHqhINOvGeP7xyNHe7tsiJByc4SSVUxw==", - "license": "MIT", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" - } - }, - "node_modules/axios-retry": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.5.0.tgz", - "integrity": "sha512-aR99oXhpEDGo0UuAlYcn2iGRds30k366Zfa05XWScR9QaQD4JYiP3/1Qt1u7YlefUOK+cn0CcwoL1oefavQUlQ==", - "license": "Apache-2.0", - "dependencies": { - "is-retry-allowed": "^2.2.0" - }, - "peerDependencies": { - "axios": "0.x || 1.x" - } - }, - "node_modules/babel-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", - "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/transform": "^29.7.0", - "@types/babel__core": "^7.1.14", - "babel-plugin-istanbul": "^6.1.1", - "babel-preset-jest": "^29.6.3", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.8.0" - } - }, - "node_modules/babel-plugin-istanbul": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", - "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^5.0.4", - "test-exclude": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-jest-hoist": { - "version": "29.6.3", - "resolved": 
"https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", - "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.3.3", - "@babel/types": "^7.3.3", - "@types/babel__core": "^7.1.14", - "@types/babel__traverse": "^7.0.6" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.4.14", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.14.tgz", - "integrity": "sha512-Co2Y9wX854ts6U8gAAPXfn0GmAyctHuK8n0Yhfjd6t30g7yvKjspvvOo9yG+z52PZRgFErt7Ka2pYnXCjLKEpg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.7", - "@babel/helper-define-polyfill-provider": "^0.6.5", - "semver": "^6.3.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.13.0", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.13.0.tgz", - "integrity": "sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.5", - "core-js-compat": "^3.43.0" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.5.tgz", - "integrity": "sha512-ISqQ2frbiNU9vIJkzg7dlPpznPZ4jOiUQ1uSmB0fEHeowtN3COYRsXr/xexn64NpU13P06jc/L5TgiJXOgrbEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.5" - }, - "peerDependencies": { - 
"@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-preset-current-node-syntax": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", - "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/plugin-syntax-async-generators": "^7.8.4", - "@babel/plugin-syntax-bigint": "^7.8.3", - "@babel/plugin-syntax-class-properties": "^7.12.13", - "@babel/plugin-syntax-class-static-block": "^7.14.5", - "@babel/plugin-syntax-import-attributes": "^7.24.7", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "@babel/plugin-syntax-json-strings": "^7.8.3", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", - "@babel/plugin-syntax-numeric-separator": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", - "@babel/plugin-syntax-optional-chaining": "^7.8.3", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5", - "@babel/plugin-syntax-top-level-await": "^7.14.5" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/babel-preset-jest": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", - "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", - "dev": true, - "license": "MIT", - "dependencies": { - "babel-plugin-jest-hoist": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - 
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - 
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.25.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", - "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "caniuse-lite": "^1.0.30001726", - "electron-to-chromium": "^1.5.173", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.3" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/bs-logger": { - "version": "0.2.6", - "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", - "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-json-stable-stringify": "2.x" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/bser": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", - "integrity": 
"sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "node-int64": "^0.4.0" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", - "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.0", - "es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": 
"sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001727", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz", - "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/char-regex": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", - "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/sibiraj-s" - } - ], - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/cjs-module-lexer": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", - "integrity": 
"sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">= 1.0.0", - "node": ">= 0.12.0" - } - }, - "node_modules/collect-v8-coverage": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", - "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "license": "MIT" - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": 
"sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "license": "MIT", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, - "license": "MIT" - }, - "node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true, - "license": "MIT" - }, - "node_modules/cookie": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", - "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - 
"integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/core-js-compat": { - "version": "3.44.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.44.0.tgz", - "integrity": "sha512-JepmAj2zfl6ogy34qfWtcE7nHKAJnKsQFRn++scjVS2bZFllwptzw61BZcZFYBPpUznLfAvh0LGhxKppk04ClA==", - "dev": true, - "license": "MIT", - "dependencies": { - "browserslist": "^4.25.1" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/create-jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", - "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "exit": "^0.1.2", - "graceful-fs": "^4.2.9", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "prompts": "^2.0.1" - }, - "bin": { - "create-jest": "bin/create-jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/create-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", - "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "license": "MIT" - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/data-view-buffer": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", - "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/data-view-byte-length": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", - "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/inspect-js" - } - }, - "node_modules/data-view-byte-offset": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", - "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "is-data-view": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/dedent": { - "version": "1.6.0", - "resolved": 
"https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", - "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "babel-plugin-macros": "^3.1.0" - }, - "peerDependenciesMeta": { - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", - "dev": true, - "license": "MIT", - "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "license": "MIT", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-newline": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", - "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.3.1" - } - }, - "node_modules/diff-sequences": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", - "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || 
>=18.0.0" - } - }, - "node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/dotenv": { - "version": "16.6.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", - "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://dotenvx.com" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "dev": true, - "license": "MIT" - }, - "node_modules/ejs": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", - "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "jake": "^10.8.5" - }, - "bin": { - "ejs": "bin/cli.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.5.185", - "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.185.tgz", - "integrity": "sha512-dYOZfUk57hSMPePoIQ1fZWl1Fkj+OshhEVuPacNKWzC1efe56OsHY3l/jCfiAgIICOU3VgOIdoq7ahg7r7n6MQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/emittery": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", - "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sindresorhus/emittery?sponsor=1" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/es-abstract": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", - "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "array-buffer-byte-length": "^1.0.2", - "arraybuffer.prototype.slice": "^1.0.4", - "available-typed-arrays": "^1.0.7", - "call-bind": 
"^1.0.8", - "call-bound": "^1.0.4", - "data-view-buffer": "^1.0.2", - "data-view-byte-length": "^1.0.2", - "data-view-byte-offset": "^1.0.1", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "es-set-tostringtag": "^2.1.0", - "es-to-primitive": "^1.3.0", - "function.prototype.name": "^1.1.8", - "get-intrinsic": "^1.3.0", - "get-proto": "^1.0.1", - "get-symbol-description": "^1.1.0", - "globalthis": "^1.0.4", - "gopd": "^1.2.0", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "internal-slot": "^1.1.0", - "is-array-buffer": "^3.0.5", - "is-callable": "^1.2.7", - "is-data-view": "^1.0.2", - "is-negative-zero": "^2.0.3", - "is-regex": "^1.2.1", - "is-set": "^2.0.3", - "is-shared-array-buffer": "^1.0.4", - "is-string": "^1.1.1", - "is-typed-array": "^1.1.15", - "is-weakref": "^1.1.1", - "math-intrinsics": "^1.1.0", - "object-inspect": "^1.13.4", - "object-keys": "^1.1.1", - "object.assign": "^4.1.7", - "own-keys": "^1.0.1", - "regexp.prototype.flags": "^1.5.4", - "safe-array-concat": "^1.1.3", - "safe-push-apply": "^1.0.0", - "safe-regex-test": "^1.1.0", - "set-proto": "^1.0.0", - "stop-iteration-iterator": "^1.1.0", - "string.prototype.trim": "^1.2.10", - "string.prototype.trimend": "^1.0.9", - "string.prototype.trimstart": "^1.0.8", - "typed-array-buffer": "^1.0.3", - "typed-array-byte-length": "^1.0.3", - "typed-array-byte-offset": "^1.0.4", - "typed-array-length": "^1.0.7", - "unbox-primitive": "^1.1.0", - "which-typed-array": "^1.1.19" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", - "engines": { - "node": 
">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-shim-unscopables": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", - "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-to-primitive": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", - "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-callable": "^1.2.7", - "is-date-object": "^1.0.5", - "is-symbol": "^1.0.4" - }, - "engines": { - "node": ">= 0.4" - }, - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "dev": true, - "license": "MIT" - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "9.31.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.31.0.tgz", - "integrity": "sha512-QldCVh/ztyKJJZLr4jXNUByx3gR+TDYZCRXEktiZoUR3PGy4qCmSbkxcIle8GEwGpb5JBZazlaJ/CxLidXdEbQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.21.0", - "@eslint/config-helpers": "^0.3.0", - "@eslint/core": "^0.15.0", - "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.31.0", - "@eslint/plugin-kit": "^0.3.1", - "@humanfs/node": "^0.16.6", - "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.2", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.6", - "debug": "^4.3.2", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.4.0", - 
"eslint-visitor-keys": "^4.2.1", - "espree": "^10.4.0", - "esquery": "^1.5.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^8.0.0", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "ignore": "^5.2.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://eslint.org/donate" - }, - "peerDependencies": { - "jiti": "*" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - } - } - }, - "node_modules/eslint-config-prettier": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", - "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", - "dev": true, - "license": "MIT", - "bin": { - "eslint-config-prettier": "bin/cli.js" - }, - "peerDependencies": { - "eslint": ">=7.0.0" - } - }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", - "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^3.2.7", - "is-core-module": "^2.13.0", - "resolve": "^1.22.4" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-module-utils": { - 
"version": "2.12.1", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", - "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^3.2.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.32.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", - "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rtsao/scc": "^1.1.0", - "array-includes": "^3.1.9", - "array.prototype.findlastindex": "^1.2.6", - "array.prototype.flat": "^1.3.3", - "array.prototype.flatmap": "^1.3.3", - "debug": "^3.2.7", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.12.1", - "hasown": "^2.0.2", - "is-core-module": "^2.16.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.fromentries": "^2.0.8", - "object.groupby": "^1.0.3", - "object.values": "^1.2.1", - "semver": "^6.3.1", - "string.prototype.trimend": "^1.0.9", - "tsconfig-paths": "^3.15.0" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" - } - }, - "node_modules/eslint-plugin-import/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/eslint-plugin-prettier": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.0.0.tgz", - "integrity": "sha512-98MqmCJ7vJodoQK359bqQWaxOE0CS8paAz/GgjaZLyex4TTk3g9HugoO89EqWCrFiOqn9EVvcoo7gZzONCWVwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "prettier-linter-helpers": "^1.0.0" - }, - "engines": { - "node": ">=6.0.0" - }, - "peerDependencies": { - "eslint": ">=7.28.0", - "prettier": ">=2.0.0" - }, - "peerDependenciesMeta": { - "eslint-config-prettier": { - "optional": true - } - } - }, - "node_modules/eslint-plugin-simple-import-sort": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-simple-import-sort/-/eslint-plugin-simple-import-sort-10.0.0.tgz", - "integrity": "sha512-AeTvO9UCMSNzIHRkg8S6c3RPy5YEwKWSQPx3DYghLedo2ZQxowPFLGDN1AZ2evfg6r6mjBSZSLxLFsWSu3acsw==", - "dev": true, - "license": "MIT", - 
"peerDependencies": { - "eslint": ">=5.0.0" - } - }, - "node_modules/eslint-plugin-sort-keys-fix": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-sort-keys-fix/-/eslint-plugin-sort-keys-fix-1.1.2.tgz", - "integrity": "sha512-DNPHFGCA0/hZIsfODbeLZqaGY/+q3vgtshF85r+YWDNCQ2apd9PNs/zL6ttKm0nD1IFwvxyg3YOTI7FHl4unrw==", - "dev": true, - "license": "ISC", - "dependencies": { - "espree": "^6.1.2", - "esutils": "^2.0.2", - "natural-compare": "^1.4.0", - "requireindex": "~1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-sort-keys-fix/node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", - "dev": true, - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/eslint-plugin-sort-keys-fix/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-plugin-sort-keys-fix/node_modules/espree": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", - "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "acorn": "^7.1.1", - "acorn-jsx": "^5.2.0", - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/eslint-plugin-unused-imports": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.1.4.tgz", - 
"integrity": "sha512-YptD6IzQjDardkl0POxnnRBhU1OEePMV0nd6siHaRBbd+lyh6NAhFEobiznKU7kTsSsDeSD62Pe7kAM1b7dAZQ==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@typescript-eslint/eslint-plugin": "^8.0.0-0 || ^7.0.0 || ^6.0.0 || ^5.0.0", - "eslint": "^9.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "@typescript-eslint/eslint-plugin": { - "optional": true - } - } - }, - "node_modules/eslint-scope": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", - "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/eslint/node_modules/eslint-visitor-keys": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": 
"sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/eslint/node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/eslint/node_modules/p-locate": { - 
"version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/espree": { - "version": "10.4.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", - "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "acorn": "^8.15.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esquery": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", - "integrity": 
"sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - 
"npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/exit": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/expect": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", - "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/expect-utils": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/express": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", - "dev": true, - "license": "MIT", - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.7.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", - 
"setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-diff": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", - "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": 
{ - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", - "license": "MIT" - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fb-watchman": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", - "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "bser": "2.1.1" - } - }, - "node_modules/file-entry-cache": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "flat-cache": "^4.0.0" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/filelist": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", - "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "minimatch": "^5.0.1" - } - }, - "node_modules/filelist/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/finalhandler/node_modules/debug": { - 
"version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/flat-cache": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", - "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", - "dev": true, - "license": "MIT", - "dependencies": { - "flatted": "^3.2.9", - "keyv": "^4.5.4" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", - "dev": true, - "license": "ISC" - }, - "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - 
"license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/for-each": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", - "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-callable": "^1.2.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/form-data": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.3.tgz", - "integrity": "sha512-qsITQPfmvMOSAdeyZ+12I1c+CKSstAFAwu+97zrnWAbIr5u8wfsExUzCesVLC8NgHuRUqNN4Zy6UPWUTRGslcA==", - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - 
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/function.prototype.name": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", - "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "functions-have-names": "^1.2.3", - "hasown": "^2.0.2", - "is-callable": "^1.2.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "license": "MIT", - 
"engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-package-type": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", - "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": 
"sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/get-symbol-description": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", - "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.6" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": 
"sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/globals": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", - "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globalthis": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", - "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "define-properties": "^1.2.1", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": 
"sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true, - "license": "MIT" - }, - "node_modules/has-bigints": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", - "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-proto": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", - "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true, - "license": "MIT" - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - 
"node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ignore": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", - "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/ignore-by-default": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", - "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", - "dev": true, - "license": "ISC" - }, - "node_modules/import-fresh": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", - "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/import-fresh/node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": 
"sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/import-local": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", - "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", - "dev": true, - "license": "MIT", - "dependencies": { - "pkg-dir": "^4.2.0", - "resolve-cwd": "^3.0.0" - }, - "bin": { - "import-local-fixture": "fixtures/cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/internal-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", - "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "hasown": "^2.0.2", - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/is-array-buffer": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", - "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": 
"sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "dev": true, - "license": "MIT" - }, - "node_modules/is-async-function": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", - "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "async-function": "^1.0.0", - "call-bound": "^1.0.3", - "get-proto": "^1.0.1", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-bigint": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", - "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-bigints": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-boolean-object": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", - "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-data-view": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", - "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "get-intrinsic": "^1.2.6", - "is-typed-array": "^1.1.13" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", - "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-finalizationregistry": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", - "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-generator-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", - "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/is-generator-function": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", - "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "get-proto": "^1.0.0", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-glob": 
{ - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", - "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-negative-zero": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", - "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-number-object": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", - "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-regex": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", - "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-retry-allowed": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz", - "integrity": "sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-set": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", - "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", - "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - 
}, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-string": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", - "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", - "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "has-symbols": "^1.1.0", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typed-array": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", - "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "which-typed-array": "^1.1.16" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakmap": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", - "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakref": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", - "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakset": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", - "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "get-intrinsic": "^1.2.6" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "dev": true, - "license": "MIT" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", - "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", - "integrity": 
"sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-report": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", - "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^4.0.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", - "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-reports": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", - "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/jake": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", - "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", - "dev": true, - 
"license": "Apache-2.0", - "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", - "filelist": "^1.0.4", - "minimatch": "^3.1.2" - }, - "bin": { - "jake": "bin/cli.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/jake/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/jake/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/jest": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", - "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/core": "^29.7.0", - "@jest/types": "^29.6.3", - "import-local": "^3.0.2", - "jest-cli": "^29.7.0" - }, - "bin": { - "jest": "bin/jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/jest-changed-files": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", - "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"execa": "^5.0.0", - "jest-util": "^29.7.0", - "p-limit": "^3.1.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-circus": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", - "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/expect": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "co": "^4.6.0", - "dedent": "^1.0.0", - "is-generator-fn": "^2.0.0", - "jest-each": "^29.7.0", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "p-limit": "^3.1.0", - "pretty-format": "^29.7.0", - "pure-rand": "^6.0.0", - "slash": "^3.0.0", - "stack-utils": "^2.0.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-cli": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", - "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/core": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "create-jest": "^29.7.0", - "exit": "^0.1.2", - "import-local": "^3.0.2", - "jest-config": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "yargs": "^17.3.1" - }, - "bin": { - "jest": "bin/jest.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" - }, - "peerDependenciesMeta": { - "node-notifier": { - "optional": true - } - } - }, - "node_modules/jest-config": { - "version": "29.7.0", - "resolved": 
"https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", - "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@jest/test-sequencer": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-jest": "^29.7.0", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "deepmerge": "^4.2.2", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-circus": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-runner": "^29.7.0", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "micromatch": "^4.0.4", - "parse-json": "^5.2.0", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "peerDependencies": { - "@types/node": "*", - "ts-node": ">=9.0.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "ts-node": { - "optional": true - } - } - }, - "node_modules/jest-diff": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", - "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "diff-sequences": "^29.6.3", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-docblock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", - "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", - "dev": true, - "license": "MIT", - "dependencies": { - "detect-newline": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || 
>=18.0.0" - } - }, - "node_modules/jest-each": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", - "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "jest-util": "^29.7.0", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-environment-node": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", - "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-mock": "^29.7.0", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-get-type": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", - "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-haste-map": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", - "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/graceful-fs": "^4.1.3", - "@types/node": "*", - "anymatch": "^3.0.3", - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.2.9", - "jest-regex-util": "^29.6.3", - "jest-util": "^29.7.0", - 
"jest-worker": "^29.7.0", - "micromatch": "^4.0.4", - "walker": "^1.0.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - }, - "optionalDependencies": { - "fsevents": "^2.3.2" - } - }, - "node_modules/jest-leak-detector": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", - "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-matcher-utils": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", - "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-message-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", - "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.12.13", - "@jest/types": "^29.6.3", - "@types/stack-utils": "^2.0.0", - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "micromatch": "^4.0.4", - "pretty-format": "^29.7.0", - "slash": "^3.0.0", - "stack-utils": "^2.0.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-mock": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", - "integrity": 
"sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "jest-util": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-pnp-resolver": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", - "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "peerDependencies": { - "jest-resolve": "*" - }, - "peerDependenciesMeta": { - "jest-resolve": { - "optional": true - } - } - }, - "node_modules/jest-regex-util": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", - "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-resolve": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", - "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.0.0", - "graceful-fs": "^4.2.9", - "jest-haste-map": "^29.7.0", - "jest-pnp-resolver": "^1.2.2", - "jest-util": "^29.7.0", - "jest-validate": "^29.7.0", - "resolve": "^1.20.0", - "resolve.exports": "^2.0.0", - "slash": "^3.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-resolve-dependencies": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", - "integrity": 
"sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", - "dev": true, - "license": "MIT", - "dependencies": { - "jest-regex-util": "^29.6.3", - "jest-snapshot": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-runner": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", - "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/console": "^29.7.0", - "@jest/environment": "^29.7.0", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "emittery": "^0.13.1", - "graceful-fs": "^4.2.9", - "jest-docblock": "^29.7.0", - "jest-environment-node": "^29.7.0", - "jest-haste-map": "^29.7.0", - "jest-leak-detector": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-resolve": "^29.7.0", - "jest-runtime": "^29.7.0", - "jest-util": "^29.7.0", - "jest-watcher": "^29.7.0", - "jest-worker": "^29.7.0", - "p-limit": "^3.1.0", - "source-map-support": "0.5.13" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-runtime": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", - "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/environment": "^29.7.0", - "@jest/fake-timers": "^29.7.0", - "@jest/globals": "^29.7.0", - "@jest/source-map": "^29.6.3", - "@jest/test-result": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "cjs-module-lexer": "^1.0.0", - "collect-v8-coverage": "^1.0.0", - "glob": "^7.1.3", - "graceful-fs": "^4.2.9", - "jest-haste-map": 
"^29.7.0", - "jest-message-util": "^29.7.0", - "jest-mock": "^29.7.0", - "jest-regex-util": "^29.6.3", - "jest-resolve": "^29.7.0", - "jest-snapshot": "^29.7.0", - "jest-util": "^29.7.0", - "slash": "^3.0.0", - "strip-bom": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-snapshot": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", - "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/core": "^7.11.6", - "@babel/generator": "^7.7.2", - "@babel/plugin-syntax-jsx": "^7.7.2", - "@babel/plugin-syntax-typescript": "^7.7.2", - "@babel/types": "^7.3.3", - "@jest/expect-utils": "^29.7.0", - "@jest/transform": "^29.7.0", - "@jest/types": "^29.6.3", - "babel-preset-current-node-syntax": "^1.0.0", - "chalk": "^4.0.0", - "expect": "^29.7.0", - "graceful-fs": "^4.2.9", - "jest-diff": "^29.7.0", - "jest-get-type": "^29.6.3", - "jest-matcher-utils": "^29.7.0", - "jest-message-util": "^29.7.0", - "jest-util": "^29.7.0", - "natural-compare": "^1.4.0", - "pretty-format": "^29.7.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-snapshot/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/jest-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", - "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "graceful-fs": "^4.2.9", - "picomatch": "^2.2.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-validate": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", - "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "camelcase": "^6.2.0", - "chalk": "^4.0.0", - "jest-get-type": "^29.6.3", - "leven": "^3.1.0", - "pretty-format": "^29.7.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-validate/node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/jest-watcher": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", - "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/test-result": "^29.7.0", - "@jest/types": "^29.6.3", - "@types/node": "*", - "ansi-escapes": "^4.2.1", - "chalk": "^4.0.0", - "emittery": "^0.13.1", - "jest-util": "^29.7.0", - "string-length": "^4.0.1" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", - "integrity": 
"sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "jest-util": "^29.7.0", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/js-jsonl": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/js-jsonl/-/js-jsonl-1.1.1.tgz", - "integrity": "sha512-VkkV3ac6N6tRaK32NIaXStzs9l3py/XK5pCbTEyiUt5Ch5We3H8ZcrSQndQ4TyIisfKMIjvoiTNWsb7mhQcZZw==", - "license": "MIT", - "dependencies": { - "@sindresorhus/is": "^4.6.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": 
"sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true, - "license": "MIT" - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true, - "license": "MIT" - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": 
"sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "json-buffer": "3.0.1" - } - }, - "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/lambda-log": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/lambda-log/-/lambda-log-3.1.0.tgz", - "integrity": "sha512-dfkFw+e0CoFftuOjxkufhi1srIpa4dM3sbbe+0KtfjX11auNl7Kok+5/+07IffjXUuspKAFeiHlHUPAHJzcL3A==", - "license": "MIT", - "dependencies": { - "fast-safe-stringify": "^2.1.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "dev": true, - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - 
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/long": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", - "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", - "license": "Apache-2.0" - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/make-dir": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", - "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", - "dev": true, - "license": "MIT", - "dependencies": { 
- "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-dir/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/make-error": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", - "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "license": "ISC" - }, - "node_modules/makeerror": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", - "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "tmpl": "1.0.5" - } - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": 
"sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.8.tgz", - "integrity": "sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/ms": { - "version": "2.1.3", - 
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true, - "license": "MIT" - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true, - "license": "MIT" - }, - "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-int64": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", - "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", - "dev": true, - "license": "MIT" - }, - "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "dev": true, - "license": "MIT" - }, - "node_modules/nodemon": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz", - "integrity": "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^3.5.2", - "debug": "^4", - "ignore-by-default": "^1.0.1", - "minimatch": "^3.1.2", - "pstree.remy": "^1.1.8", - "semver": "^7.5.3", - "simple-update-notifier": "^2.0.0", - "supports-color": "^5.5.0", - "touch": "^3.1.0", - "undefsafe": "^2.0.5" - }, - "bin": { - "nodemon": 
"bin/nodemon.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/nodemon" - } - }, - "node_modules/nodemon/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/nodemon/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/nodemon/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/nodemon/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/nodemon/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", - "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0", - "has-symbols": "^1.1.0", - "object-keys": "^1.1.1" - }, - 
"engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.fromentries": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", - "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.groupby": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", - "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.values": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", - "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": 
">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/optionator": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", - "dev": true, - "license": "MIT", - "dependencies": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.5" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/own-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", - "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", - "dev": true, - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.2.6", - "object-keys": "^1.1.1", - "safe-push-apply": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": 
true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-locate/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "license": "MIT", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": 
"sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/path-scurry/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/path-to-regexp": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", - "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - 
"node_modules/pirates": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", - "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/pkg-dir": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", - "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "find-up": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/possible-typed-array-names": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", - "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", - "dev": true, - "license": "MIT", - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/prettier-linter-helpers": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", - "integrity": 
"sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-diff": "^1.1.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/prettier-plugin-organize-imports": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz", - "integrity": "sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog==", - "dev": true, - "license": "MIT", - "peerDependencies": { - "@volar/vue-language-plugin-pug": "^1.0.4", - "@volar/vue-typescript": "^1.0.4", - "prettier": ">=2.0", - "typescript": ">=2.9" - }, - "peerDependenciesMeta": { - "@volar/vue-language-plugin-pug": { - "optional": true - }, - "@volar/vue-typescript": { - "optional": true - } - } - }, - "node_modules/pretty-format": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", - "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "ansi-styles": "^5.0.0", - "react-is": "^18.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/pretty-format/node_modules/ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", - "integrity": 
"sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/protobufjs": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.3.tgz", - "integrity": "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "dev": true, - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "license": "MIT" - }, - "node_modules/pstree.remy": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", - "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", - "dev": true, - "license": "MIT" - }, - "node_modules/punycode": 
{ - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/pure-rand": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", - "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/dubzzz" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fast-check" - } - ], - "license": "MIT" - }, - "node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "dev": true, - "license": 
"MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/react-is": { - "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", - "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", - "dev": true, - "license": "MIT" - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", - "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.9", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0", - "get-intrinsic": "^1.2.7", - "get-proto": "^1.0.1", - "which-builtin-type": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regenerate": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", - "integrity": 
"sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", - "dev": true, - "license": "MIT" - }, - "node_modules/regenerate-unicode-properties": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz", - "integrity": "sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regexp.prototype.flags": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", - "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "define-properties": "^1.2.1", - "es-errors": "^1.3.0", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "set-function-name": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regexpu-core": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.2.0.tgz", - "integrity": "sha512-H66BPQMrv+V16t8xtmq+UC0CBpiTBA60V8ibS1QVReIp8T1z8hwFxqcGzm9K6lgsN7sB5edVH8a+ze6Fqm4weA==", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.2.0", - "regjsgen": "^0.8.0", - "regjsparser": "^0.12.0", - "unicode-match-property-ecmascript": "^2.0.0", - "unicode-match-property-value-ecmascript": "^2.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regjsgen": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.8.0.tgz", - "integrity": "sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==", - 
"dev": true, - "license": "MIT" - }, - "node_modules/regjsparser": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.12.0.tgz", - "integrity": "sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "jsesc": "~3.0.2" - }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", - "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/requireindex": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/requireindex/-/requireindex-1.2.0.tgz", - "integrity": "sha512-L9jEkOi3ASd9PYit2cwRfyppc9NoABujTP8/5gFcbERmo5jUoAKovIC3fsF17pkTnGsrByysqX+Kxd2OTNI1ww==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.5" - } - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-cwd": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", - "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/resolve-from": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", - "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/resolve.exports": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", - "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-4.4.1.tgz", - "integrity": "sha512-Gk8NlF062+T9CqNGn6h4tls3k6T1+/nXdOcSZVikNVtlRdYpA7wRJJMoXmuvOnLW844rPjdQ7JgXCYM6PPC/og==", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^9.2.0" - }, - "bin": { - "rimraf": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/glob": { - "version": "9.3.5", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-9.3.5.tgz", - "integrity": "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q==", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "minimatch": "^8.0.2", - "minipass": "^4.2.4", - "path-scurry": "^1.6.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/minimatch": { - "version": "8.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-8.0.4.tgz", - "integrity": "sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-array-concat": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", - "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", - "get-intrinsic": "^1.2.6", - "has-symbols": "^1.1.0", - "isarray": "^2.0.5" - }, - "engines": { - "node": ">=0.4" - }, - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safe-push-apply": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", - "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "isarray": "^2.0.5" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safe-regex-test": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", - "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "is-regex": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true, - "license": "MIT" - }, - "node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "license": "MIT" - }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": 
"sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", - "dev": true, - "license": "MIT", - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", - "dev": true, - "license": "MIT", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/set-function-name": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", - "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/set-proto": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", - "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", - "dev": true, - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": 
"sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "dev": true, - "license": "ISC" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/simple-update-notifier": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", - "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/simple-update-notifier/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.13", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", - "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/stack-utils": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", - "integrity": 
"sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/stack-utils/node_modules/escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/stop-iteration-iterator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", - "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "internal-slot": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/string-length": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", - "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "char-regex": "^1.0.2", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string.prototype.trim": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", - "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", - "define-data-property": "^1.1.4", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.5", - "es-object-atoms": "^1.0.0", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", - "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.2", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", - "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", - "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/test-exclude": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", - "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^7.1.4", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/test-exclude/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/tmpl": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", - "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/touch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz", - "integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==", - "dev": true, - "license": "ISC", - "bin": { - "nodetouch": "bin/nodetouch.js" - } - }, - "node_modules/ts-api-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", - "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18.12" - }, - "peerDependencies": { - "typescript": ">=4.8.4" - } - }, - "node_modules/ts-jest": { - "version": "29.4.0", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.4.0.tgz", - "integrity": "sha512-d423TJMnJGu80/eSgfQ5w/R+0zFJvdtTxwtF9KzFFunOpSeD+79lHJQIiAhluJoyGRbvj9NZJsl9WjCUo0ND7Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "bs-logger": "^0.2.6", - "ejs": "^3.1.10", - "fast-json-stable-stringify": "^2.1.0", - "json5": "^2.2.3", - "lodash.memoize": "^4.1.2", - "make-error": "^1.3.6", - "semver": "^7.7.2", - "type-fest": "^4.41.0", - "yargs-parser": "^21.1.1" - }, - "bin": { - "ts-jest": "cli.js" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" - }, - 
"peerDependencies": { - "@babel/core": ">=7.0.0-beta.0 <8", - "@jest/transform": "^29.0.0 || ^30.0.0", - "@jest/types": "^29.0.0 || ^30.0.0", - "babel-jest": "^29.0.0 || ^30.0.0", - "jest": "^29.0.0 || ^30.0.0", - "jest-util": "^29.0.0 || ^30.0.0", - "typescript": ">=4.3 <6" - }, - "peerDependenciesMeta": { - "@babel/core": { - "optional": true - }, - "@jest/transform": { - "optional": true - }, - "@jest/types": { - "optional": true - }, - "babel-jest": { - "optional": true - }, - "esbuild": { - "optional": true - }, - "jest-util": { - "optional": true - } - } - }, - "node_modules/ts-jest/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/ts-jest/node_modules/type-fest": { - "version": "4.41.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", - "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ts-node": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", - "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", - "license": "MIT", - "dependencies": { - "@cspotcode/source-map-support": "^0.8.0", - "@tsconfig/node10": "^1.0.7", - "@tsconfig/node12": "^1.0.7", - "@tsconfig/node14": "^1.0.0", - "@tsconfig/node16": "^1.0.2", - "acorn": "^8.4.1", - "acorn-walk": "^8.1.1", - "arg": "^4.1.0", - "create-require": "^1.1.0", - "diff": "^4.0.1", - "make-error": "^1.1.1", - "v8-compile-cache-lib": 
"^3.0.1", - "yn": "3.1.1" - }, - "bin": { - "ts-node": "dist/bin.js", - "ts-node-cwd": "dist/bin-cwd.js", - "ts-node-esm": "dist/bin-esm.js", - "ts-node-script": "dist/bin-script.js", - "ts-node-transpile-only": "dist/bin-transpile.js", - "ts-script": "dist/bin-script-deprecated.js" - }, - "peerDependencies": { - "@swc/core": ">=1.2.50", - "@swc/wasm": ">=1.2.50", - "@types/node": "*", - "typescript": ">=2.7" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "@swc/wasm": { - "optional": true - } - } - }, - "node_modules/tsconfig-paths": { - "version": "3.15.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", - "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.2", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tsconfig-paths/node_modules/json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, - "node_modules/tsconfig-paths/node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-detect": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "dev": true, - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/typed-array-buffer": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", - "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "is-typed-array": "^1.1.14" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/typed-array-byte-length": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", - "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"call-bind": "^1.0.8", - "for-each": "^0.3.3", - "gopd": "^1.2.0", - "has-proto": "^1.2.0", - "is-typed-array": "^1.1.14" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-byte-offset": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", - "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", - "for-each": "^0.3.3", - "gopd": "^1.2.0", - "has-proto": "^1.2.0", - "is-typed-array": "^1.1.15", - "reflect.getprototypeof": "^1.0.9" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-length": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", - "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.7", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "is-typed-array": "^1.1.13", - "possible-typed-array-names": "^1.0.0", - "reflect.getprototypeof": "^1.0.6" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typescript": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/unbox-primitive": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", - "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "has-bigints": "^1.0.2", - "has-symbols": "^1.1.0", - "which-boxed-primitive": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/undefsafe": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", - "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", - "dev": true, - "license": "MIT" - }, - "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", - "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", - "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^2.0.0", - "unicode-property-aliases-ecmascript": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - 
"node_modules/unicode-match-property-value-ecmascript": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz", - "integrity": "sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", - "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": 
"https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/v8-compile-cache-lib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", - "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "license": "MIT" - }, - "node_modules/v8-to-istanbul": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", - "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", - "dev": true, - "license": "ISC", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.12", - "@types/istanbul-lib-coverage": "^2.0.1", - "convert-source-map": "^2.0.0" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/walker": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", - "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "makeerror": "1.0.12" 
- } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/which-boxed-primitive": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", - "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-bigint": "^1.1.0", - "is-boolean-object": "^1.2.1", - "is-number-object": "^1.1.1", - "is-string": "^1.1.1", - "is-symbol": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-builtin-type": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", - "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "function.prototype.name": "^1.1.6", - "has-tostringtag": "^1.0.2", - "is-async-function": "^2.0.0", - "is-date-object": "^1.1.0", - "is-finalizationregistry": "^1.1.0", - "is-generator-function": "^1.0.10", - "is-regex": "^1.2.1", - "is-weakref": "^1.0.2", - "isarray": "^2.0.5", - "which-boxed-primitive": "^1.1.0", - "which-collection": "^1.0.2", - "which-typed-array": "^1.1.16" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-collection": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", - 
"integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-map": "^2.0.3", - "is-set": "^2.0.3", - "is-weakmap": "^2.0.2", - "is-weakset": "^2.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-typed-array": { - "version": "1.1.19", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", - "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", - "dev": true, - "license": "MIT", - "dependencies": { - "available-typed-arrays": "^1.0.7", - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "for-each": "^0.3.5", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/word-wrap": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": 
"sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/write-file-atomic": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", - "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true, - "license": "ISC" - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/yn": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", - "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - } -} diff --git a/build/package.json b/build/package.json index 99260d5..77e3df4 100644 --- a/build/package.json +++ b/build/package.json @@ -52,7 +52,7 @@ "yargs": "^17.6.2" }, "dependencies": { - "@devrev/ts-adaas": "1.5.1", + "@devrev/ts-adaas": "1.9.0", "@devrev/typescript-sdk": "1.1.63", "axios": "^1.9.0", "dotenv": "^16.0.3", diff --git a/build/src/core/types.ts b/build/src/core/types.ts new file mode 100644 index 0000000..228c5d0 --- /dev/null +++ b/build/src/core/types.ts @@ -0,0 +1,52 @@ +/** + * Type definitions for DevRev function inputs and related types + */ + +export type Context = { + // ID of the dev org for which the function is being invoked. + dev_oid: string; + // ID of the automation/command/snap-kit Action/Event Source for which the function is being invoked. + source_id: string; + // ID of the snap-in as part of which the function is being invoked. + snap_in_id: string; + // ID of the snap-in Version as part of which the function is being invoked. + snap_in_version_id: string; + // ID of the service account. + service_account_id: string; + // This secrets map would contain some secrets which platform would provide to the snap-in. + // `service_account_token`: This is the token of the service account which belongs to this snap-in. This can be used to make API calls to DevRev. 
+ // `actor_session_token`: For commands, and snap-kits, where the user is performing some action, this is the token of the user who is performing the action. + secrets: Record; + }; + + export type ExecutionMetadata = { + // A unique id for the function invocation. Can be used to filter logs for a particular invocation. + request_id: string; + // Function name as defined in the manifest being invoked. + function_name: string; + // Type of event that triggered the function invocation as defined in manifest. + event_type: string; + // DevRev endpoint to which the function can make API calls. + // Example : "https://api.devrev.ai/" + devrev_endpoint: string; + }; + + export type InputData = { + // Map of organization inputs and their corresponding values stored in snap-in. + // The values are passed as string and typing need to be handled by the function + global_values: Record; + // Map of event sources and their corresponding ids stored in snap-in. + // These could be used to schedule events on a schedule based event source. + event_sources: Record; + }; + + // Event sent to our app. + export type FunctionInput = { + // Actual payload of the event. + payload: Record; + // Context of the function invocation. + context: Context; + // Metadata of the function invocation. 
+ execution_metadata: ExecutionMetadata; + input_data: InputData; + }; \ No newline at end of file diff --git a/build/src/core/utils.ts b/build/src/core/utils.ts new file mode 100644 index 0000000..9f49545 --- /dev/null +++ b/build/src/core/utils.ts @@ -0,0 +1,52 @@ +import { AirdropEvent, AirdropMessage } from '@devrev/ts-adaas'; +import * as fs from 'fs'; +import * as path from 'path'; +import { FunctionInput } from './types'; + +export function convertToAirdropEvent(fi: FunctionInput): AirdropEvent { + // Create a properly structured AirdropMessage + const airdropMessage: AirdropMessage = { + connection_data: fi.payload.connection_data, + event_context: { + ...fi.payload.event_context, + ...fi.context, + request_id: fi.execution_metadata?.request_id, + }, + event_type: fi.payload.event_type, + event_data: fi.payload.event_data || {}, + }; + + return { + context: { + ...fi.context, + secrets: { + service_account_token: fi.context.secrets?.service_account_token || '', + ...fi.context.secrets, + }, + }, + payload: airdropMessage, + execution_metadata: fi.execution_metadata, + input_data: fi.input_data, + }; + } + +/** + * Resolves the correct worker file path based on the runtime environment. + * In development (ts-node), uses .ts extension. + * In production (compiled), uses .js extension. 
+ * + * @param baseDir - The base directory (typically __dirname) + * @param relativePath - The relative path to the worker file (with .ts extension) + * @returns The resolved worker file path with the correct extension + */ +export function resolveWorkerPath(baseDir: string, relativePath: string): string { + // Try .ts first (development mode with ts-node) + const tsPath = path.join(baseDir, relativePath); + if (fs.existsSync(tsPath)) { + return tsPath; + } + + // Fall back to .js (production mode) + const jsPath = tsPath.replace(/\.ts$/, '.js'); + return jsPath; +} \ No newline at end of file diff --git a/build/src/core/wrike-api-methods.ts b/build/src/core/wrike-api-methods.ts new file mode 100644 index 0000000..f7e54fb --- /dev/null +++ b/build/src/core/wrike-api-methods.ts @@ -0,0 +1,206 @@ +import { AxiosInstance, AxiosResponse } from 'axios'; +import { + WrikeApiResponse, + WrikeContact, + WrikeContactsResponse, + WrikeFolder, + WrikeFoldersResponse, + WrikeTask, + WrikeTasksResponse, + TaskQueryOptions, + WrikeTasksApiResponse, + WrikeAttachment, + WrikeAttachmentsResponse, + WrikeComment, + WrikeCommentsResponse, +} from './wrike-types'; +import { handleWrikeError, WrikeApiError } from './wrike-error-handler'; + +/** + * Get current user information (me endpoint). + * Used for authentication verification. + */ +export async function getMe(client: AxiosInstance): Promise> { + try { + const response: AxiosResponse = await client.get('/contacts', { + params: { + me: true, + }, + }); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully authenticated with Wrike API', + data: response.data.data[0], + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return errorResponse; + } +} + +/** + * Get contacts (users) of type Person from Wrike. 
+ */ +export async function getContacts(client: AxiosInstance): Promise> { + try { + const response: AxiosResponse = await client.get('/contacts', { + params: { + deleted: false, + types: '[Person]', + }, + }); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully fetched contacts from Wrike', + data: response.data.data, + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return errorResponse; + } +} + +/** + * Get folders in a specific space. + */ +export async function getFolders(client: AxiosInstance, spaceId: string): Promise> { + try { + const response: AxiosResponse = await client.get( + `/spaces/${spaceId}/folders` + ); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully fetched folders from Wrike space', + data: response.data.data, + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return errorResponse; + } +} + +/** + * Get tasks in a specific folder. 
+ */ +export async function getTasks( + client: AxiosInstance, + folderId: string, + options?: TaskQueryOptions +): Promise { + try { + const params: any = { + descendants: true, + fields: '[responsibleIds,hasAttachments]', + }; + + if (options?.pageSize) { + params.pageSize = options.pageSize; + } + + if (options?.nextPageToken) { + params.nextPageToken = options.nextPageToken; + } + + if (options?.updatedDate) { + params.updatedDate = JSON.stringify({ start: options.updatedDate }); + } + + const response: AxiosResponse = await client.get( + `/folders/${folderId}/tasks`, + { params } + ); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully fetched tasks from Wrike folder', + data: response.data.data, + nextPageToken: response.data.nextPageToken, + hasMore: !!response.data.nextPageToken, + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return { + ...errorResponse, + nextPageToken: undefined, + hasMore: false, + }; + } +} + +/** + * Get attachments for a specific task. + */ +export async function getTaskAttachments( + client: AxiosInstance, + taskId: string +): Promise> { + try { + const response: AxiosResponse = await client.get( + `/tasks/${taskId}/attachments`, + { + params: { + withUrls: true, + }, + } + ); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully fetched attachments from Wrike task', + data: response.data.data, + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return errorResponse; + } +} + +/** + * Get comments for a specific task. 
+ */ +export async function getTaskComments( + client: AxiosInstance, + taskId: string +): Promise> { + try { + const response: AxiosResponse = await client.get( + `/tasks/${taskId}/comments` + ); + + return { + status_code: response.status, + api_delay: 0, + message: 'Successfully fetched comments from Wrike task', + data: response.data.data, + }; + } catch (error) { + const errorResponse = handleWrikeError(error); + if (errorResponse.status_code === 429) { + throw new WrikeApiError(errorResponse.message, errorResponse.status_code, errorResponse.api_delay); + } + return errorResponse; + } +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.cases.ts b/build/src/core/wrike-client.test.cases.ts new file mode 100644 index 0000000..88d0dc9 --- /dev/null +++ b/build/src/core/wrike-client.test.cases.ts @@ -0,0 +1,166 @@ +import { AxiosError } from 'axios'; + +/** + * Shared test case generators for WrikeClient tests + */ + +export interface ErrorTestCase { + description: string; + statusCode: number; + expectedMessage: string; + expectedApiDelay?: number; + errorData?: any; + headers?: Record; +} + +/** + * Generates common error test cases that apply to both getMe and getFolders + */ +export function generateCommonErrorTestCases(): ErrorTestCase[] { + return [ + { + description: 'should handle 401 authentication error', + statusCode: 401, + expectedMessage: 'Authentication failed: Invalid or expired API key', + errorData: { + error: 'unauthorized', + errorDescription: 'Invalid token', + }, + }, + { + description: 'should handle 403 forbidden error', + statusCode: 403, + expectedMessage: 'Authentication failed: Access forbidden', + }, + { + description: 'should handle 429 rate limit error with retry-after header', + statusCode: 429, + expectedMessage: 'Rate limit exceeded. 
Retry after 49 seconds.', + expectedApiDelay: 49, + errorData: { + error: 'rate_limit_exceeded', + errorDescription: 'Rate limit exceeded, try again later', + }, + headers: { 'retry-after': '49' }, + }, + { + description: 'should handle 429 rate limit error without retry-after header', + statusCode: 429, + expectedMessage: 'Rate limit exceeded. Retry after 60 seconds.', + expectedApiDelay: 60, + }, + { + description: 'should handle other HTTP errors', + statusCode: 500, + expectedMessage: 'Wrike API error: Internal server error', + errorData: { + error: 'internal_error', + errorDescription: 'Internal server error', + }, + }, + { + description: 'should handle network errors', + statusCode: 0, + expectedMessage: 'Network error: Unable to reach Wrike API', + }, + { + description: 'should handle unknown errors', + statusCode: 0, + expectedMessage: 'Unexpected error: Unknown error', + }, + ]; +} + +/** + * Creates a mock AxiosError for testing + */ +export function createMockAxiosError( + status: number, + errorData?: any, + headers?: Record +): Partial { + return { + isAxiosError: true, + response: { + status, + data: errorData || {}, + statusText: getStatusText(status), + headers: headers || {}, + config: {} as any, + }, + config: {} as any, + toJSON: () => ({}), + name: 'AxiosError', + message: `Request failed with status code ${status}`, + }; +} + +/** + * Creates a mock network error (no response) + */ +export function createMockNetworkError(): Partial { + return { + isAxiosError: true, + request: {}, + config: {} as any, + toJSON: () => ({}), + name: 'AxiosError', + message: 'Network Error', + }; +} + +/** + * Helper to get status text for HTTP status codes + */ +function getStatusText(status: number): string { + const statusTexts: Record = { + 200: 'OK', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 429: 'Too Many Requests', + 500: 'Internal Server Error', + }; + return statusTexts[status] || 'Unknown'; +} + +/** + * Runs a test case for error 
scenarios + */ +export function runErrorTestCase( + testCase: ErrorTestCase, + getMockMethod: () => jest.Mock, + isAxiosErrorMock: any, + testFn: () => Promise +) { + return async () => { + let mockError: Partial; + + if (testCase.statusCode === 0 && testCase.expectedMessage.includes('Network error')) { + mockError = createMockNetworkError(); + } else if (testCase.statusCode === 0 && testCase.expectedMessage.includes('Unexpected error')) { + mockError = new Error('Unknown error') as any; + isAxiosErrorMock.mockReturnValue(false); + } else { + mockError = createMockAxiosError(testCase.statusCode, testCase.errorData, testCase.headers); + } + + getMockMethod().mockRejectedValue(mockError); + if (testCase.statusCode !== 0 || testCase.expectedMessage.includes('Network error')) { + isAxiosErrorMock.mockReturnValue(true); + } + + // For 429 errors, expect the error to be thrown + if (testCase.statusCode === 429) { + await expect(testFn()).rejects.toThrow('Rate limit exceeded'); + return; + } + + const result = await testFn(); + + expect(result.status_code).toBe(testCase.statusCode); + expect(result.api_delay).toBe(testCase.expectedApiDelay || 0); + expect(result.message).toBe(testCase.expectedMessage); + expect(result.data).toBeUndefined(); + }; +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.config-helpers.ts b/build/src/core/wrike-client.test.config-helpers.ts new file mode 100644 index 0000000..c6ae2eb --- /dev/null +++ b/build/src/core/wrike-client.test.config-helpers.ts @@ -0,0 +1,127 @@ +import { TestSuiteConfig } from './wrike-client.test.suite-factory'; + +/** + * Helper functions for creating test configurations. 
+ * Reduces duplication in wrike-client.test.configs.ts + */ + +/** + * Creates a 404 not found error test case + */ +export function create404ErrorTest(errorDescription: string) { + return { + description: 'should handle 404 not found error', + setup: (mockedAxios: any) => { + const mockError = { + isAxiosError: true, + response: { + status: 404, + data: { + error: 'not_found', + errorDescription, + }, + statusText: 'Not Found', + headers: {}, + config: {} as any, + }, + config: {} as any, + toJSON: () => ({}), + name: 'AxiosError', + message: 'Request failed with status code 404', + }; + + const mockAxiosInstanceLocal = mockedAxios.create(); + (mockAxiosInstanceLocal.get as jest.Mock).mockRejectedValue(mockError); + mockedAxios.isAxiosError.mockReturnValue(true); + }, + assertions: (result: any) => { + expect(result.status_code).toBe(404); + expect(result.api_delay).toBe(0); + expect(result.message).toBe(`Wrike API error: ${errorDescription}`); + }, + }; +} + +/** + * Creates an empty list test case + */ +export function createEmptyListTest(mockResponseData: any, dataKey: string = 'data') { + return { + description: `should handle empty ${dataKey} list`, + setup: (mockedAxios: any) => { + const mockResponse = { + status: 200, + data: mockResponseData, + }; + + const mockAxiosInstanceLocal = mockedAxios.create(); + (mockAxiosInstanceLocal.get as jest.Mock).mockResolvedValue(mockResponse); + }, + assertions: (result: any) => { + expect(result.status_code).toBe(200); + expect(result.data).toEqual([]); + }, + }; +} + +/** + * Creates a basic test configuration with common structure + */ +export function createBasicTestConfig( + methodName: string, + description: string, + mockResponse: any, + endpoint: string, + expectedMessage: string, + options?: any, + args: any[] = [] +): TestSuiteConfig { + return { + methodName, + successTest: { + description, + mockResponse, + expectedParams: { + endpoint, + options, + args, + }, + expectedResult: { + status_code: 200, + 
api_delay: 0, + message: expectedMessage, + }, + }, + }; +} + +/** + * Creates pagination test for getTasks-like methods + */ +export function createPaginationTest(endpoint: string, nextPageToken: string = 'NEXT_PAGE_TOKEN') { + return { + description: 'should support pagination parameters', + setup: (mockedAxios: any) => { + const mockResponse = { + status: 200, + data: { + kind: 'tasks', + data: [], + nextPageToken, + }, + }; + const mockAxiosInstanceLocal = mockedAxios.create(); + (mockAxiosInstanceLocal.get as jest.Mock).mockResolvedValue(mockResponse); + }, + assertions: (result: any, mockMethod: jest.Mock) => { + expect(mockMethod).toHaveBeenCalledWith(endpoint, { + params: { + descendants: true, + fields: '[responsibleIds,hasAttachments]', + }, + }); + expect(result.nextPageToken).toBe(nextPageToken); + expect(result.hasMore).toBe(true); + }, + }; +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.configs.ts b/build/src/core/wrike-client.test.configs.ts new file mode 100644 index 0000000..261b98e --- /dev/null +++ b/build/src/core/wrike-client.test.configs.ts @@ -0,0 +1,180 @@ +import { TestSuiteConfig } from './wrike-client.test.suite-factory'; +import { + mockContact, + mockFolder, + createMockSuccessResponse, + createMockFoldersSuccessResponse, + createMockAttachmentsSuccessResponse, + createMockTasksSuccessResponse, + createMockCommentsSuccessResponse, +} from './wrike-client.test.helpers'; +import { + create404ErrorTest, + createEmptyListTest, + createPaginationTest, +} from './wrike-client.test.config-helpers'; + +/** + * Test configurations for WrikeClient test suites. + * Each configuration defines the test parameters for a specific method. 
+ */ + +export const getMeTestConfig: TestSuiteConfig = { + methodName: 'getMe', + successTest: { + description: 'should successfully get user information', + mockResponse: createMockSuccessResponse(), + expectedParams: { + endpoint: '/contacts', + options: { params: { me: true } }, + args: [], + }, + expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully authenticated with Wrike API', + data: mockContact, + }, + }, +}; + +export const getContactsTestConfig: TestSuiteConfig = { + methodName: 'getContacts', + successTest: { + description: 'should successfully get contacts', + mockResponse: { + status: 200, + data: { + kind: 'contacts', + data: [mockContact], + }, + }, + expectedParams: { + endpoint: '/contacts', + options: { + params: { + deleted: false, + types: '[Person]', + }, + }, + args: [], + }, + expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully fetched contacts from Wrike', + data: [mockContact], + }, + }, +}; + +export const getFoldersTestConfig: TestSuiteConfig = { + methodName: 'getFolders', + successTest: { + description: 'should successfully get folders from space', + mockResponse: createMockFoldersSuccessResponse(), + expectedParams: { + endpoint: '/spaces/IEAGS6BYI5RFMPPY/folders', + args: ['IEAGS6BYI5RFMPPY'], + }, + expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully fetched folders from Wrike space', + data: [mockFolder], + }, + }, + additionalTests: [ + create404ErrorTest('Space not found'), + createEmptyListTest({ kind: 'folders', data: [] }, 'folder'), + ], +}; + +export const getTasksTestConfig: TestSuiteConfig = { + methodName: 'getTasks', + successTest: { + description: 'should successfully get tasks from folder', + mockResponse: createMockTasksSuccessResponse(), + expectedParams: { + endpoint: '/folders/IEAGS6BYI5RFMPP7/tasks', + options: { + params: { + descendants: true, + fields: '[responsibleIds,hasAttachments]', + }, + }, + args: ['IEAGS6BYI5RFMPP7'], + }, + 
expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully fetched tasks from Wrike folder', + hasMore: false, + }, + }, + additionalTests: [ + createPaginationTest('/folders/IEAGS6BYI5RFMPP7/tasks'), + { + description: 'should support updatedDate filter', + setup: (mockedAxios: any) => { + const mockResponse = createMockTasksSuccessResponse(); + const mockAxiosInstanceLocal = mockedAxios.create(); + (mockAxiosInstanceLocal.get as jest.Mock).mockResolvedValue(mockResponse); + }, + assertions: (result: any, mockGetTasks: jest.Mock) => { + expect(mockGetTasks).toHaveBeenCalledWith('/folders/IEAGS6BYI5RFMPP7/tasks', { + params: { + descendants: true, + fields: '[responsibleIds,hasAttachments]', + }, + }); + }, + }, + createEmptyListTest({ kind: 'tasks', data: [] }, 'task'), + ], +}; + +export const getTaskCommentsTestConfig: TestSuiteConfig = { + methodName: 'getTaskComments', + successTest: { + description: 'should successfully get comments from task', + mockResponse: createMockCommentsSuccessResponse(), + expectedParams: { + endpoint: '/tasks/IEACW7SVKQOKD5EG/comments', + args: ['IEACW7SVKQOKD5EG'], + }, + expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully fetched comments from Wrike task', + }, + }, + additionalTests: [ + create404ErrorTest('Task not found'), + ], +}; + +export const getTaskAttachmentsTestConfig: TestSuiteConfig = { + methodName: 'getTaskAttachments', + successTest: { + description: 'should successfully get attachments from task', + mockResponse: createMockAttachmentsSuccessResponse(), + expectedParams: { + endpoint: '/tasks/IEACW7SVKQOKD5EG/attachments', + options: { + params: { + withUrls: true, + }, + }, + args: ['IEACW7SVKQOKD5EG'], + }, + expectedResult: { + status_code: 200, + api_delay: 0, + message: 'Successfully fetched attachments from Wrike task', + }, + }, + additionalTests: [ + create404ErrorTest('Task not found'), + ], +}; \ No newline at end of file diff --git 
a/build/src/core/wrike-client.test.helpers.ts b/build/src/core/wrike-client.test.helpers.ts new file mode 100644 index 0000000..1667c1f --- /dev/null +++ b/build/src/core/wrike-client.test.helpers.ts @@ -0,0 +1,129 @@ +import { AxiosError } from 'axios'; +import { + mockContact, + mockFolder, + mockTask, + mockAttachment, + mockComment, +} from './wrike-client.test.mock-data'; + +// Re-export mock data for backward compatibility +export { mockContact, mockFolder, mockTask, mockAttachment, mockComment }; + +/** + * Creates a mock successful response from Wrike API + */ +export function createMockSuccessResponse() { + return { + status: 200, + data: { + kind: 'contacts', + data: [mockContact], + }, + }; +} + +/** + * Creates a mock successful response for folders from Wrike API + */ +export function createMockFoldersSuccessResponse() { + return { + status: 200, + data: { + kind: 'folders', + data: [mockFolder], + }, + }; +} + +/** + * Creates a mock successful response for tasks from Wrike API + */ +export function createMockTasksSuccessResponse(nextPageToken?: string) { + return { + status: 200, + data: { + kind: 'tasks', + data: [mockTask], + nextPageToken, + }, + }; +} + +/** + * Creates a mock successful response for attachments from Wrike API + */ +export function createMockAttachmentsSuccessResponse() { + return { + status: 200, + data: { + kind: 'attachments', + data: [mockAttachment], + }, + }; +} + +/** + * Creates a mock successful response for comments from Wrike API + */ +export function createMockCommentsSuccessResponse() { + return { + status: 200, + data: { + kind: 'comments', + data: [mockComment], + }, + }; +} + +/** + * Creates a mock AxiosError for testing error scenarios + */ +export function createMockAxiosError( + status: number, + errorData?: any, + headers?: Record +): Partial { + return { + isAxiosError: true, + response: { + status, + data: errorData || {}, + statusText: getStatusText(status), + headers: headers || {}, + config: {} as any, + 
}, + config: {} as any, + toJSON: () => ({}), + name: 'AxiosError', + message: `Request failed with status code ${status}`, + }; +} + +/** + * Creates a mock network error (no response) + */ +export function createMockNetworkError(): Partial { + return { + isAxiosError: true, + request: {}, + config: {} as any, + toJSON: () => ({}), + name: 'AxiosError', + message: 'Network Error', + }; +} + +/** + * Helper to get status text for HTTP status codes + */ +function getStatusText(status: number): string { + const statusTexts: Record = { + 200: 'OK', + 401: 'Unauthorized', + 403: 'Forbidden', + 429: 'Too Many Requests', + 500: 'Internal Server Error', + }; + return statusTexts[status] || 'Unknown'; +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.mock-data.ts b/build/src/core/wrike-client.test.mock-data.ts new file mode 100644 index 0000000..97382b0 --- /dev/null +++ b/build/src/core/wrike-client.test.mock-data.ts @@ -0,0 +1,118 @@ +import { WrikeContact, WrikeFolder, WrikeTask, WrikeAttachment, WrikeComment } from './wrike-types'; + +/** + * Mock contact data for testing + */ +export const mockContact: WrikeContact = { + id: 'KUANFJBJ', + firstName: 'Jane', + lastName: 'Smith', + type: 'Person', + profiles: [ + { + accountId: 'IEAGS6BY', + email: 'janesmith@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }, + ], + avatarUrl: 'https://www.wrike.com/avatars/test.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accountant', + primaryEmail: 'janesmith@company.com', +}; + +/** + * Mock folder data for testing + */ +export const mockFolder: WrikeFolder = { + id: 'IEAGS6BYI5RFMPP7', + accountId: 'IEAGS6BY', + title: 'First project', + createdDate: '2025-04-29T07:18:32Z', + updatedDate: '2025-05-26T07:44:20Z', + description: '', + sharedIds: ['KUAVRIOP', 'KX7XOYQF', 'KUAUZTPW', 'KUAVRIOO', 'KUAVRIOS'], + parentIds: ['IEAGS6BYI5RFMPPY'], + childIds: [], + scope: 
'WsFolder', + permalink: 'https://www.wrike.com/open.htm?id=1649819135', + workflowId: 'IEAGS6BYK4F3BCSQ', + project: { + authorId: 'KUAUZTPW', + ownerIds: ['KUAUZTPW'], + customStatusId: 'IEAGS6BYJMF3BCR4', + createdDate: '2025-04-29T07:18:32Z', + }, +}; + +/** + * Mock task data for testing + */ +export const mockTask: WrikeTask = { + id: 'IEACW7SVKQOKD5EG', + accountId: 'IEAGS6BY', + title: 'Test Task', + description: 'Test task description', + briefDescription: 'Test brief', + parentIds: ['IEAGS6BYI5RFMPP7'], + superParentIds: [], + sharedIds: ['KUAVRIOP'], + responsibleIds: ['KUANFJBJ'], + status: 'Active', + importance: 'Normal', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-02T15:30:00Z', + dates: { + type: 'Planned', + duration: 86400000, + start: '2025-01-01', + due: '2025-01-02', + }, + scope: 'WsTask', + authorIds: ['KUANFJBJ'], + customStatusId: 'IEAGS6BYJMF3BCR4', + hasAttachments: false, + permalink: 'https://www.wrike.com/open.htm?id=123456', + priority: '02', + followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], +}; + +/** + * Mock attachment data for testing + */ +export const mockAttachment: WrikeAttachment = { + id: 'IEACW7SVIYEV4HBN', + authorId: 'IEAGS6BY', + name: 'Result from test.com', + createdDate: '2025-07-25T07:53:33Z', + version: '1', + size: 1024, + type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + url: 'https://www.wrike.com/attachments/IEACW7SVIYEV4HBN/download/Lorem Ipsum.docx', + taskId: 'IEACW7SVKQOKD5EG', + width: 100, + height: 100, +}; + +/** + * Mock comment data for testing + */ +export const mockComment: WrikeComment = { + id: 'IEACW7SVICOMMENT1', + authorId: 'KUANFJBJ', + text: 'This is a test comment', + createdDate: '2025-07-25T08:00:00Z', + taskId: 'IEACW7SVKQOKD5EG', +}; \ No newline at end of file diff --git a/build/src/core/wrike-client.test.suite-factory.ts 
b/build/src/core/wrike-client.test.suite-factory.ts new file mode 100644 index 0000000..2efca1b --- /dev/null +++ b/build/src/core/wrike-client.test.suite-factory.ts @@ -0,0 +1,79 @@ +import { generateCommonErrorTestCases, runErrorTestCase } from './wrike-client.test.cases'; + +/** + * Configuration for a test suite + */ +export interface TestSuiteConfig { + methodName: string; + successTest: { + description: string; + mockResponse: any; + expectedParams: any; + expectedResult: any; + }; + additionalTests?: Array<{ + description: string; + setup: (mockedAxios: any) => void; + assertions: (result: any, mockMethod: jest.Mock) => void; + }>; +} + +/** + * Creates a test suite for a WrikeClient method + */ +export function createTestSuite( + config: TestSuiteConfig, + getMockAxiosInstance: () => any, + mockedAxios: any, + getClient: () => any +) { + return () => { + // Success test + it(config.successTest.description, async () => { + const mockAxiosInstanceLocal = mockedAxios.create(); + (mockAxiosInstanceLocal.get as jest.Mock).mockResolvedValue(config.successTest.mockResponse); + + const result = await getClient()[config.methodName](...(config.successTest.expectedParams.args || [])); + + if (config.successTest.expectedParams.hasOwnProperty('options')) { + expect(mockAxiosInstanceLocal.get).toHaveBeenCalledWith( + config.successTest.expectedParams.endpoint, + config.successTest.expectedParams.options + ); + } else { + expect(mockAxiosInstanceLocal.get).toHaveBeenCalledWith( + config.successTest.expectedParams.endpoint + ); + } + + Object.entries(config.successTest.expectedResult).forEach(([key, value]) => { + expect(result[key]).toEqual(value); + }); + }); + + // Common error test cases + const errorTestCases = generateCommonErrorTestCases(); + errorTestCases.forEach((testCase) => { + it( + testCase.description, + runErrorTestCase( + testCase, + () => getMockAxiosInstance().get as jest.Mock, + mockedAxios.isAxiosError, + () => 
getClient()[config.methodName](...(config.successTest.expectedParams.args || [])) + ) + ); + }); + + // Additional method-specific tests + if (config.additionalTests) { + config.additionalTests.forEach((test) => { + it(test.description, async () => { + test.setup(mockedAxios); + const result = await getClient()[config.methodName](...(config.successTest.expectedParams.args || [])); + test.assertions(result, getMockAxiosInstance().get as jest.Mock); + }); + }); + } + }; +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.suites.ts b/build/src/core/wrike-client.test.suites.ts new file mode 100644 index 0000000..0401298 --- /dev/null +++ b/build/src/core/wrike-client.test.suites.ts @@ -0,0 +1,38 @@ +import { createTestSuite } from './wrike-client.test.suite-factory'; +import { + getMeTestConfig, + getContactsTestConfig, + getFoldersTestConfig, + getTasksTestConfig, + getTaskAttachmentsTestConfig, + getTaskCommentsTestConfig, +} from './wrike-client.test.configs'; + +/** + * Test suite generators for WrikeClient methods. + * Each function returns a test suite that can be executed with describe(). 
+ */ + +export function createGetMeTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getMeTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} + +export function createGetContactsTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getContactsTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} + +export function createGetFoldersTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getFoldersTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} + +export function createGetTasksTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getTasksTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} + +export function createGetTaskAttachmentsTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getTaskAttachmentsTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} + +export function createGetTaskCommentsTests(getMockAxiosInstance: () => any, mockedAxios: any, getClient: () => any) { + return createTestSuite(getTaskCommentsTestConfig, getMockAxiosInstance, mockedAxios, getClient); +} \ No newline at end of file diff --git a/build/src/core/wrike-client.test.ts b/build/src/core/wrike-client.test.ts new file mode 100644 index 0000000..c47b463 --- /dev/null +++ b/build/src/core/wrike-client.test.ts @@ -0,0 +1,70 @@ +import axios from 'axios'; +import { WrikeClient } from './wrike-client'; +import { + createGetMeTests, + createGetContactsTests, + createGetFoldersTests, + createGetTasksTests, + createGetTaskAttachmentsTests, + createGetTaskCommentsTests, +} from './wrike-client.test.suites'; + +// Mock axios +jest.mock('axios'); +const mockedAxios = axios as jest.Mocked; + +describe('WrikeClient', () => { + const mockApiKey = 'test-api-key'; + let client: WrikeClient; + let 
mockAxiosInstance: any; + + beforeEach(() => { + jest.clearAllMocks(); + + // Mock axios.create to return a mock instance + mockAxiosInstance = { + get: jest.fn(), + }; + mockedAxios.create.mockReturnValue(mockAxiosInstance as any); + + client = new WrikeClient({ apiKey: mockApiKey }); + }); + + describe('constructor', () => { + it('should create client with default base URL', () => { + expect(mockedAxios.create).toHaveBeenCalledWith( + expect.objectContaining({ + baseURL: 'https://www.wrike.com/api/v4', + headers: { + 'Authorization': `Bearer ${mockApiKey}`, + 'Content-Type': 'application/json', + }, + timeout: 30000, + }) + ); + }); + + it('should create client with custom base URL', () => { + const customUrl = 'https://custom.wrike.com/api/v4'; + new WrikeClient({ apiKey: mockApiKey, baseUrl: customUrl }); + + expect(mockedAxios.create).toHaveBeenCalledWith( + expect.objectContaining({ + baseURL: customUrl, + }) + ); + }); + }); + + describe('getMe', createGetMeTests(() => mockAxiosInstance, mockedAxios, () => client)); + + describe('getContacts', createGetContactsTests(() => mockAxiosInstance, mockedAxios, () => client)); + + describe('getFolders', createGetFoldersTests(() => mockAxiosInstance, mockedAxios, () => client)); + + describe('getTasks', createGetTasksTests(() => mockAxiosInstance, mockedAxios, () => client)); + + describe('getTaskAttachments', createGetTaskAttachmentsTests(() => mockAxiosInstance, mockedAxios, () => client)); + + describe('getTaskComments', createGetTaskCommentsTests(() => mockAxiosInstance, mockedAxios, () => client)); +}); \ No newline at end of file diff --git a/build/src/core/wrike-client.ts b/build/src/core/wrike-client.ts new file mode 100644 index 0000000..30e8137 --- /dev/null +++ b/build/src/core/wrike-client.ts @@ -0,0 +1,95 @@ +import axios, { AxiosInstance } from 'axios'; +import { + WrikeApiResponse, + WrikeContact, + WrikeFolder, + TaskQueryOptions, + WrikeTasksApiResponse, + WrikeClientConfig, + WrikeAttachment, + 
WrikeComment, +} from './wrike-types'; +import * as apiMethods from './wrike-api-methods'; + +/** + * Wrike Internal Client for communicating with Wrike API. + * Handles authentication, rate limiting, and error handling. + */ +export class WrikeClient { + private client: AxiosInstance; + private apiKey: string; + + constructor(config: WrikeClientConfig) { + this.apiKey = config.apiKey; + const baseUrl = config.baseUrl || 'https://www.wrike.com/api/v4'; + + this.client = axios.create({ + baseURL: baseUrl, + headers: { + 'Authorization': `Bearer ${this.apiKey}`, + 'Content-Type': 'application/json', + }, + timeout: 30000, // 30 seconds timeout + }); + } + + /** + * Get current user information (me endpoint). + * Used for authentication verification. + * + * @returns WrikeApiResponse with user contact information + */ + async getMe(): Promise> { + return apiMethods.getMe(this.client); + } + + /** + * Get contacts (users) of type Person from Wrike. + * + * @returns WrikeApiResponse with array of contacts + */ + async getContacts(): Promise> { + return apiMethods.getContacts(this.client); + } + + /** + * Get folders in a specific space. + * + * @param spaceId - The ID of the space + * @returns WrikeApiResponse with array of folders + */ + async getFolders(spaceId: string): Promise> { + return apiMethods.getFolders(this.client, spaceId); + } + + /** + * Get tasks in a specific folder. + * + * @param folderId - The ID of the folder + * @param options - Query options (pageSize, nextPageToken, updatedDate) + * @returns WrikeTasksApiResponse with array of tasks and pagination info + */ + async getTasks(folderId: string, options?: TaskQueryOptions): Promise { + return apiMethods.getTasks(this.client, folderId, options); + } + + /** + * Get attachments for a specific task. 
+ * + * @param taskId - The ID of the task + * @returns WrikeApiResponse with array of attachments + */ + async getTaskAttachments(taskId: string): Promise> { + return apiMethods.getTaskAttachments(this.client, taskId); + } + + /** + * Get comments for a specific task. + * + * @param taskId - The ID of the task + * @returns WrikeApiResponse with array of comments + */ + async getTaskComments(taskId: string): Promise> { + return apiMethods.getTaskComments(this.client, taskId); + } +} \ No newline at end of file diff --git a/build/src/core/wrike-error-handler.ts b/build/src/core/wrike-error-handler.ts new file mode 100644 index 0000000..27a5072 --- /dev/null +++ b/build/src/core/wrike-error-handler.ts @@ -0,0 +1,89 @@ +import axios, { AxiosError } from 'axios'; +import { WrikeApiResponse } from './wrike-types'; + +/** + * Custom error class for Wrike API errors. + * Includes status code and API delay information. + */ +export class WrikeApiError extends Error { + public statusCode: number; + public apiDelay: number; + + constructor(message: string, statusCode: number, apiDelay: number) { + super(message); + this.statusCode = statusCode; + this.apiDelay = apiDelay; + this.name = 'WrikeApiError'; + } +} + +/** + * Handle errors from Wrike API calls. + * Extracts status code, calculates API delay for rate limiting, + * and provides meaningful error messages. + * + * @param error - Error from axios request + * @returns WrikeApiResponse with error information + */ +export function handleWrikeError(error: unknown): WrikeApiResponse { + if (axios.isAxiosError(error)) { + const axiosError = error as AxiosError; + + // Handle rate limiting (429) + if (axiosError.response?.status === 429) { + const retryAfter = axiosError.response.headers['retry-after']; + const apiDelay = retryAfter ? parseInt(retryAfter, 10) : 60; + + return { + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. 
Retry after ${apiDelay} seconds.`, + }; + } + + // Handle authentication errors (401, 403) + if (axiosError.response?.status === 401) { + return { + status_code: 401, + api_delay: 0, + message: 'Authentication failed: Invalid or expired API key', + }; + } + + if (axiosError.response?.status === 403) { + return { + status_code: 403, + api_delay: 0, + message: 'Authentication failed: Access forbidden', + }; + } + + // Handle other HTTP errors + if (axiosError.response) { + const errorData = axiosError.response.data as any; + const errorMessage = errorData?.errorDescription || errorData?.error || 'Unknown error'; + + return { + status_code: axiosError.response.status, + api_delay: 0, + message: `Wrike API error: ${errorMessage}`, + }; + } + + // Handle network errors + if (axiosError.request) { + return { + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', + }; + } + } + + // Handle unknown errors + return { + status_code: 0, + api_delay: 0, + message: `Unexpected error: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + }; +} \ No newline at end of file diff --git a/build/src/core/wrike-types.ts b/build/src/core/wrike-types.ts new file mode 100644 index 0000000..eb2dd15 --- /dev/null +++ b/build/src/core/wrike-types.ts @@ -0,0 +1,198 @@ +/** + * Type definitions for Wrike API client + */ + +/** + * Response structure for Wrike API calls + */ +export interface WrikeApiResponse { + status_code: number; + api_delay: number; + message: string; + data?: T; +} + +/** + * Wrike contact information + */ +export interface WrikeContact { + id: string; + firstName?: string; + lastName?: string; + type: string; + profiles?: Array<{ + accountId: string; + email: string; + role: string; + external: boolean; + admin: boolean; + owner: boolean; + active: boolean; + }>; + avatarUrl?: string; + timezone?: string; + locale?: string; + deleted: boolean; + title?: string; + primaryEmail?: string; +} + +/** + * Wrike API response for contacts + */ +export interface WrikeContactsResponse { + kind: string; + data: WrikeContact[]; +} + +/** + * Wrike folder information + */ +export interface WrikeFolder { + id: string; + accountId: string; + title: string; + createdDate: string; + updatedDate: string; + description?: string; + sharedIds?: string[]; + parentIds?: string[]; + childIds?: string[]; + scope?: string; + permalink?: string; + workflowId?: string; + project?: { + authorId?: string; + ownerIds?: string[]; + customStatusId?: string; + createdDate?: string; + }; +} + +/** + * Wrike API response for folders + */ +export interface WrikeFoldersResponse { + kind: string; + data: WrikeFolder[]; +} + +/** + * Wrike task information + */ +export interface WrikeTask { + id: string; + accountId: string; + title: string; + description?: string; + briefDescription?: string; + parentIds: string[]; + superParentIds: string[]; + sharedIds: string[]; + responsibleIds: string[]; + status: string; + importance: string; + createdDate: string; + updatedDate: string; + dates?: { + 
type: string; + duration?: number; + start?: string; + due?: string; + }; + scope: string; + authorIds: string[]; + customStatusId?: string; + hasAttachments: boolean; + permalink: string; + priority?: string; + followedByMe: boolean; + followerIds: string[]; + superTaskIds: string[]; + subTaskIds: string[]; + dependencyIds: string[]; + metadata: any[]; + customFields: any[]; +} + +/** + * Wrike API response for tasks + */ +export interface WrikeTasksResponse { + kind: string; + data: WrikeTask[]; + nextPageToken?: string; + responseSize?: number; +} + +/** + * Options for querying tasks + */ +export interface TaskQueryOptions { + pageSize?: number; + nextPageToken?: string; + updatedDate?: string; +} + +/** + * Extended WrikeApiResponse for tasks with pagination + */ +export interface WrikeTasksApiResponse extends WrikeApiResponse { + nextPageToken?: string; + hasMore?: boolean; +} + +/** + * Wrike attachment information + */ +export interface WrikeAttachment { + id: string; + authorId: string; + name: string; + createdDate: string; + version: string; + size: number; + type: string; + url?: string; + taskId?: string; + folderId?: string; + commentId?: string; + width?: number; + height?: number; +} + +/** + * Wrike API response for attachments + */ +export interface WrikeAttachmentsResponse { + kind: string; + data: WrikeAttachment[]; +} + +/** + * Wrike comment information + */ +export interface WrikeComment { + id: string; + authorId: string; + text: string; + createdDate: string; + updatedDate?: string; + taskId?: string; +} + +/** + * Wrike API response for comments + */ +export interface WrikeCommentsResponse { + kind: string; + data: WrikeComment[]; +} + +/** + * Configuration for Wrike client + */ +export interface WrikeClientConfig { + apiKey: string; + baseUrl?: string; +} \ No newline at end of file diff --git a/build/src/function-factory.ts b/build/src/function-factory.ts index 94b5cf3..d0510a0 100644 --- a/build/src/function-factory.ts +++ 
b/build/src/function-factory.ts @@ -1,30 +1,30 @@ -import { run as healthcheck } from './functions/healthcheck'; -import { run as extraction_workflow_check } from './functions/extraction_workflow_check'; -import { run as data_push_check } from './functions/data_push_check'; -import { run as extraction_external_sync_unit_check } from './functions/extraction_external_sync_unit_check'; -import { run as data_extraction_check } from './functions/data_extraction_check'; -import { run as auth_check } from './functions/auth_check'; -import { run as fetch_projects } from './functions/fetch_projects'; -import { run as fetch_contacts } from './functions/fetch_contacts'; -import { run as fetch_tasks } from './functions/fetch_tasks'; -import { run as generate_metadata } from './functions/generate_metadata'; -import { run as generate_initial_mapping } from './functions/generate_initial_mapping'; -import { run as extraction } from './functions/extraction'; +import check_authentication from './functions/check_authentication'; +import data_extraction_check from './functions/data_extraction_check'; +import extraction from './functions/extraction'; +import fetch_task_comments from './functions/fetch_task_comments'; +import fetch_space_folders from './functions/fetch_space_folders'; +import fetch_task_attachments from './functions/fetch_task_attachments'; +import fetch_folder_tasks from './functions/fetch_folder_tasks'; +import fetch_users from './functions/fetch_users'; +import health_check from './functions/health_check'; +import get_external_domain_metadata from './functions/get_external_domain_metadata'; +import get_initial_domain_mapping from './functions/get_initial_domain_mapping'; +import test_external_sync_units from './functions/test_external_sync_units'; export const functionFactory = { // Add your functions here - healthcheck, - extraction_workflow_check, - data_push_check, - extraction_external_sync_unit_check, + check_authentication, data_extraction_check, - auth_check, 
- fetch_projects, - fetch_contacts, - fetch_tasks, - generate_metadata, - generate_initial_mapping, - extraction + extraction, + fetch_task_comments, + fetch_folder_tasks, + fetch_space_folders, + fetch_task_attachments, + fetch_users, + get_external_domain_metadata, + health_check, + get_initial_domain_mapping, + test_external_sync_units, } as const; export type FunctionFactoryType = keyof typeof functionFactory; diff --git a/build/src/functions/auth_check/index.test.ts b/build/src/functions/auth_check/index.test.ts deleted file mode 100644 index 8d031c2..0000000 --- a/build/src/functions/auth_check/index.test.ts +++ /dev/null @@ -1,210 +0,0 @@ -// Mock axios before any imports -jest.mock('axios'); - -// Import the test utilities first -import { createMockEvent, EventType } from './test-utils'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; -import { run } from './index'; - -// Set up axios mock functions -const mockGet = jest.fn(); -const mockPost = jest.fn(); - -// Properly mock axios methods -jest.spyOn(axios, 'get').mockImplementation(mockGet); -jest.spyOn(axios, 'post').mockImplementation(mockPost); - -// Properly mock axios.isAxiosError with correct type handling -jest.spyOn(axios, 'isAxiosError').mockImplementation((error: any) => { - return error && error.isAxiosError === true; -}); - -describe('Authentication Check Function', () => { - beforeEach(() => { - // Clear all mocks before each test - jest.clearAllMocks(); - - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - // Restore console mocks - jest.restoreAllMocks(); - }); - - it('should return success when authentication is successful', async () => { - // Mock successful axios response - mockGet.mockResolvedValue({ - status: 200, - data: { - data: [ - { id: 'user1', firstName: 'John', lastName: 'Doe' } - 
] - } - }); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify axios was called with correct parameters - expect(axios.get).toHaveBeenCalledWith( - 'https://www.wrike.com/api/v4/contacts', - expect.objectContaining({ - headers: { - 'Authorization': 'Bearer mock-api-key' - }, - timeout: 10000 - }) - ); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Authentication check completed with status 200', - auth_successful: true - }); - }); - - it('should return error when authentication fails with HTTP error', async () => { - // Mock failed axios response - mockGet.mockResolvedValue({ - status: 401, - data: { error: 'Unauthorized' } - }); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check completed with status 401', - auth_successful: false, - error: 'Received status code 401' - }); - }); - - it('should return error when axios throws an exception', async () => { - // Mock axios throwing an error with response - const axiosError = new Error('Request failed') as any; - axiosError.isAxiosError = true; - axiosError.response = { status: 401 }; - mockGet.mockRejectedValue(axiosError); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: expect.stringContaining('API request failed with status 401') - }); - }); - - it('should return error when axios throws a network exception', async () => { - // Mock axios throwing a network error - const networkError = new Error('Network error'); - 
mockGet.mockRejectedValue(networkError); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: 'Network error' - }); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Call the function - const result = await run(invalidInput); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: 'Invalid input: events must be an array' - }); - }); - - it('should throw an error if events array is empty', async () => { - // Call the function with empty array - const result = await run([]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: 'Invalid input: events array is empty' - }); - }); - - it('should throw an error if an event is missing required fields', async () => { - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent; - - // Call the function - const result = await run([invalidEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: 'Invalid event: missing required field \'context\'' - }); - }); - - it('should throw an error if API key is missing', async () => { - // Create a mock event with missing API key - const mockEvent: AirdropEvent = { - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - connection_data: { - ...createMockEvent().payload.connection_data, - key: undefined as any // Use type 
assertion to allow undefined - } - } - }; - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: 'Invalid event: missing required field \'payload.connection_data.key\'' - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/auth_check/index.ts b/build/src/functions/auth_check/index.ts deleted file mode 100644 index c091bd7..0000000 --- a/build/src/functions/auth_check/index.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * A function that checks if authentication with the Wrike API works. - * - * @param events - Array of AirdropEvent objects - * @returns A response indicating whether authentication with the Wrike API works - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - auth_successful: boolean, - error?: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - if (events.length === 0) { - throw new Error('Invalid input: events array is empty'); - } - - // Use the first event for the check - const event = events[0]; - - // Validate that the event is a valid AirdropEvent with all required fields - if (!event || typeof event !== 'object') { - throw new Error('Invalid event: event must be a valid AirdropEvent object'); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error('Invalid event: missing required field \'context\''); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error('Invalid event: missing required field \'context.secrets.service_account_token\''); - } - - if (!event.payload) { - throw new Error('Invalid event: 
missing required field \'payload\''); - } - - if (!event.payload.connection_data) { - throw new Error('Invalid event: missing required field \'payload.connection_data\''); - } - - if (!event.payload.connection_data.key) { - throw new Error('Invalid event: missing required field \'payload.connection_data.key\''); - } - - // Extract the Wrike API key - const apiKey = event.payload.connection_data.key; - - // Define the Wrike API endpoint - const wrikeApiEndpoint = 'https://www.wrike.com/api/v4'; - - // Log the attempt for debugging purposes - console.log('Attempting to authenticate with Wrike API'); - - // Make a GET request to the Wrike API to get contacts - // This is a simple API call that should work if the API key is valid - const response = await axios.get(`${wrikeApiEndpoint}/contacts`, { - headers: { - 'Authorization': `Bearer ${apiKey}` - }, - timeout: 10000 // 10 seconds timeout - }); - - // Check if the request was successful - const isSuccessful = response.status >= 200 && response.status < 300; - - // Log the response for debugging purposes - console.log(`Wrike API authentication response status: ${response.status}`); - - // Return a success response - return { - status: isSuccessful ? 'success' : 'error', - message: `Authentication check completed with status ${response.status}`, - auth_successful: isSuccessful, - error: isSuccessful ? 
undefined : `Received status code ${response.status}` - }; - } catch (error) { - // Log the error for debugging - console.error('Error in authentication check function:', error); - - // Check if the error is an Axios error with a response - if (axios.isAxiosError(error) && error.response) { - return { - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: `API request failed with status ${error.response.status}: ${error.message}` - }; - } - - // Return a generic error response - return { - status: 'error', - message: 'Authentication check failed', - auth_successful: false, - error: error instanceof Error ? error.message : 'Unknown error occurred' - }; - } -} \ No newline at end of file diff --git a/build/src/functions/auth_check/test-utils.ts b/build/src/functions/auth_check/test-utils.ts deleted file mode 100644 index 1fda88b..0000000 --- a/build/src/functions/auth_check/test-utils.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { - // Extraction - ExtractionExternalSyncUnitsStart = 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 
'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionDataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); \ No newline at end of file diff --git a/build/src/functions/check_authentication/index.test.cases.ts b/build/src/functions/check_authentication/index.test.cases.ts new file mode 100644 index 0000000..ae750b6 --- /dev/null +++ b/build/src/functions/check_authentication/index.test.cases.ts @@ -0,0 +1,206 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockUserInfo, + createSuccessResponse, + createAuthFailureResponse, + createRateLimitResponse, + createNetworkErrorResponse, +} from './index.test.helpers'; + +export function createCheckAuthenticationTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + it('should return success response for valid authentication', async () => { + const mockGetMe = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const 
events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully authenticated with Wrike API'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.authenticated).toBe(true); + expect(result.metadata.user_info).toEqual(mockUserInfo); + expect(result.metadata.function_name).toBe('check_authentication'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.timestamp).toBeDefined(); + expect(mockGetMe).toHaveBeenCalledTimes(1); + }); + + it('should return error response for authentication failure (401)', async () => { + const mockGetMe = jest.fn().mockResolvedValue( + createAuthFailureResponse(401, 'Authentication failed: Invalid or expired API key') + ); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('error'); + expect(result.message).toBe('Authentication failed: Invalid or expired API key'); + expect(result.status_code).toBe(401); + expect(result.api_delay).toBe(0); + expect(result.metadata.authenticated).toBe(false); + expect(result.metadata.user_info).toBeUndefined(); + }); + + it('should return error response for forbidden access (403)', async () => { + const mockGetMe = jest.fn().mockResolvedValue( + createAuthFailureResponse(403, 'Authentication failed: Access forbidden') + ); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('error'); + expect(result.message).toBe('Authentication failed: Access forbidden'); + expect(result.status_code).toBe(403); + expect(result.metadata.authenticated).toBe(false); + }); + + it('should handle rate limiting (429) correctly', async () => { + const 
mockGetMe = jest.fn().mockResolvedValue(createRateLimitResponse(49)); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('error'); + expect(result.message).toBe('Rate limit exceeded. Retry after 49 seconds.'); + expect(result.status_code).toBe(429); + expect(result.api_delay).toBe(49); + expect(result.metadata.authenticated).toBe(false); + }); + + it('should handle network errors', async () => { + const mockGetMe = jest.fn().mockResolvedValue(createNetworkErrorResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('error'); + expect(result.message).toBe('Network error: Unable to reach Wrike API'); + expect(result.status_code).toBe(0); + expect(result.metadata.authenticated).toBe(false); + }); + + it('should process only the first event when multiple events provided', async () => { + const mockGetMe = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const event1 = createMockEvent({ + execution_metadata: { + request_id: 'request-1', + function_name: 'check_authentication', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'check_authentication', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const result = await runFunction([event1, event2]); + + expect(result.metadata.request_id).toBe('request-1'); + expect(mockGetMe).toHaveBeenCalledTimes(1); + }); + + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to check_authentication function'); + }); 
+ + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow('No events provided to check_authentication function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow('No events provided to check_authentication function'); + }); + + it('should throw error when event is missing payload', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).payload; + + await expect(runFunction([invalidEvent])).rejects.toThrow('Invalid event: missing payload'); + }); + + it('should throw error when event is missing connection_data', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).payload.connection_data; + + await expect(runFunction([invalidEvent])).rejects.toThrow('Invalid event: missing connection_data in payload'); + }); + + it('should throw error when event is missing API key', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).payload.connection_data.key; + + await expect(runFunction([invalidEvent])).rejects.toThrow('Invalid event: missing API key in connection_data'); + }); + + it('should throw error when event is missing execution_metadata', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).execution_metadata; + + await expect(runFunction([invalidEvent])).rejects.toThrow('Invalid event: missing execution_metadata'); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetMe = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetMe = 
jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getMe: mockGetMe, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/check_authentication/index.test.helpers.ts b/build/src/functions/check_authentication/index.test.helpers.ts new file mode 100644 index 0000000..ca06358 --- /dev/null +++ b/build/src/functions/check_authentication/index.test.helpers.ts @@ -0,0 +1,101 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeContact } from '../../core/wrike-types'; + +/** + * Mock user information for testing + */ +export const mockUserInfo: WrikeContact = { + id: 'KUANFJBJ', + firstName: 'Jane', + lastName: 'Smith', + type: 'Person', + profiles: [ + { + accountId: 'IEAGS6BY', + email: 'janesmith@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }, + ], + avatarUrl: 'https://www.wrike.com/avatars/test.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accountant', + primaryEmail: 'janesmith@company.com', +}; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'test-org', + org_name: 'Test Org', + key: 'test-api-key', + key_type: 'oauth2', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'check_authentication', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + 
global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getMe response for successful authentication + */ +export const createSuccessResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully authenticated with Wrike API', + data: mockUserInfo, +}); + +/** + * Creates a mock WrikeClient.getMe response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock WrikeClient.getMe response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getMe response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); \ No newline at end of file diff --git a/build/src/functions/check_authentication/index.test.ts b/build/src/functions/check_authentication/index.test.ts new file mode 100644 index 0000000..f594e07 --- /dev/null +++ b/build/src/functions/check_authentication/index.test.ts @@ -0,0 +1,14 @@ +import run, { CheckAuthenticationResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createCheckAuthenticationTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('check_authentication function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createCheckAuthenticationTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/check_authentication/index.ts b/build/src/functions/check_authentication/index.ts new file mode 100644 index 0000000..13fc7f5 --- /dev/null +++ 
b/build/src/functions/check_authentication/index.ts @@ -0,0 +1,109 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeContact } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for check_authentication function + */ +export interface CheckAuthenticationResponse { + status: 'success' | 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + authenticated: boolean; + user_info?: WrikeContact; + function_name: string; + request_id: string; + }; + timestamp: string; +} + +/** + * Check authentication function that verifies authentication with Wrike API. + * Makes a request to /contacts endpoint with me=true parameter. + * + * @param events - Array of function input events + * @returns Object indicating authentication status with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to check_authentication function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Extract API key from event + const apiKey = event.payload.connection_data.key; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getMe endpoint to verify authentication + const response = await wrikeClient.getMe(); + + // Determine if authentication was successful + 
const authenticated = response.status_code === 200 && response.data !== undefined; + + // Build response + const checkAuthResponse: CheckAuthenticationResponse = { + status: authenticated ? 'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + authenticated, + user_info: response.data, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + + return checkAuthResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + console.error(`Wrike API error (${error.statusCode}):`, error.message); + + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + authenticated: false, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/data_extraction_check/external-domain-metadata.json b/build/src/functions/data_extraction_check/external-domain-metadata.json new file mode 100644 index 0000000..0ebda8f --- /dev/null +++ b/build/src/functions/data_extraction_check/external-domain-metadata.json @@ -0,0 +1,19 @@ +{ + "users": { + "record_type": "users", + "fields": { + "name": { + "field_type": "text", + "display_name": "Name" + }, + "email": { + "field_type": "text", + "display_name": "Email" + }, + "role": { + "field_type": "text", + "display_name": "Role" + } + } + } +} \ No newline at end of file diff --git a/build/src/functions/data_extraction_check/index.test.ts b/build/src/functions/data_extraction_check/index.test.ts index abbaef4..e5bfdf0 100644 --- a/build/src/functions/data_extraction_check/index.test.ts +++ 
b/build/src/functions/data_extraction_check/index.test.ts @@ -1,208 +1,190 @@ -// Mock the spawn function to prevent actual worker spawning during tests -const mockSpawn = jest.fn().mockResolvedValue(undefined); -jest.mock('../generate_initial_mapping/initial_domain_mapping.json', () => ({})); - -// Mock the @devrev/ts-adaas module -jest.mock('@devrev/ts-adaas', () => { - const actual = jest.requireActual('@devrev/ts-adaas'); - return { - ...actual, - spawn: mockSpawn - }; -}); - -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -describe('Data Extraction Check Function', () => { - // Helper function to create a mock AirdropEvent - const createMockEvent = (eventType: EventType): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, +import run from './index'; +import { FunctionInput } from '../../core/types'; +import { EventType } from '@devrev/ts-adaas'; +import * as adaas from '@devrev/ts-adaas'; + +// Mock the spawn function +jest.mock('@devrev/ts-adaas', () => ({ + ...jest.requireActual('@devrev/ts-adaas'), + spawn: jest.fn(), +})); + +describe('data_extraction_check function', () => { + const createMockEvent = (eventType: EventType, overrides?: Partial): FunctionInput => ({ payload: { + event_type: eventType, connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' + org_id: 'test-org', + org_name: 'Test Org', + key: 'test-key', + key_type: 'test-key-type', }, event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - 
external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-sync-unit', + external_sync_unit_id: 'test-sync-unit-id', + external_sync_unit_name: 'Test Sync Unit', + external_system: 'test-system', + external_system_type: 'test-system-type', + import_slug: 'test-import-slug', mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + event_data: {}, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', }, - event_type: eventType, - event_data: {} }, execution_metadata: { - devrev_endpoint: 'mock-endpoint' + request_id: 'test-request-id', + function_name: 'data_extraction_check', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', }, - input_data: { - global_values: {}, - event_sources: {} - } + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, }); - it('should return success with valid_data_extraction_events=true for data extraction event types', async () => 
{ - // Test with the data extraction event type - const mockEvent = createMockEvent(EventType.ExtractionDataStart); + beforeEach(() => { + jest.clearAllMocks(); + }); - // Call the function with the mock event - const result = await run([mockEvent]); + it('should spawn worker for EXTRACTION_DATA_START event', async () => { + const events = [createMockEvent(EventType.ExtractionDataStart)]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: true - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalled(); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('initialDomainMapping'); + await run(events); + + expect(spawnMock).toHaveBeenCalledTimes(1); + expect(spawnMock).toHaveBeenCalledWith( + expect.objectContaining({ + workerPath: expect.stringContaining('data-extraction-check.ts'), + initialState: {}, + }) + ); }); - it('should return success with valid_data_extraction_events=true for ExtractionDataContinue event type', async () => { - // Test with the data extraction continue event type - const mockEvent = createMockEvent(EventType.ExtractionDataContinue); + it('should spawn worker for EXTRACTION_DATA_CONTINUE event', async () => { + const events = [createMockEvent(EventType.ExtractionDataContinue)]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); - // Call the function with the mock event - const result = await run([mockEvent]); + await run(events); - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: true - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalled(); + expect(spawnMock).toHaveBeenCalledTimes(1); + 
expect(spawnMock).toHaveBeenCalledWith( + expect.objectContaining({ + workerPath: expect.stringContaining('data-extraction-check.ts'), + initialState: {}, + }) + ); }); - it('should return success with valid_data_extraction_events=false for other extraction event types', async () => { - // Test with a different extraction event type - // Create a mock event with a non-data extraction event type - const mockEvent = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - - // Reset the mock before this specific test - mockSpawn.mockClear(); - - // Ensure the event type is explicitly set to a non-data extraction type - mockEvent.payload = { - ...mockEvent.payload, - event_type: EventType.ExtractionExternalSyncUnitsStart - }; - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: false + it('should process only the first event when multiple events provided', async () => { + const event1 = createMockEvent(EventType.ExtractionDataStart); + const event2 = createMockEvent(EventType.ExtractionDataContinue, { + execution_metadata: { + request_id: 'request-2', + function_name: 'data_extraction_check', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, }); - - // Verify that spawn was not called - expect(mockSpawn).not.toHaveBeenCalled(); + + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + await run([event1, event2]); + + expect(spawnMock).toHaveBeenCalledTimes(1); }); - it('should throw an error if events parameter is not an array', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an 
error - await expect(run(invalidInput)).rejects.toThrow(); - - // Restore console.error - jest.restoreAllMocks(); + it('should throw error when no events provided', async () => { + await expect(run([])).rejects.toThrow('No events provided to data_extraction_check function'); }); - it('should throw an error if an event is missing required fields', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent; - - // Expect the function to throw an error - await expect(run([invalidEvent])).rejects.toThrow(); - - // Restore console.error - jest.restoreAllMocks(); + it('should throw error when events array is null', async () => { + await expect(run(null as any)).rejects.toThrow('No events provided to data_extraction_check function'); }); - it('should handle multiple events correctly', async () => { - // Create multiple events with different event types - const event1 = { - ...createMockEvent(EventType.ExtractionDataStart), - payload: { ...createMockEvent(EventType.ExtractionDataStart).payload, event_type: EventType.ExtractionDataStart } - }; - const event2 = { - ...createMockEvent(EventType.ExtractionMetadataStart), - payload: { ...createMockEvent(EventType.ExtractionMetadataStart).payload, event_type: EventType.ExtractionMetadataStart } - }; - - // Call the function with multiple events - const result = await run([event1, event2]); - - // Verify the result - should be true because at least one event is ExtractionDataStart - expect(result).toEqual({ - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: true - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalled(); + it('should throw error when events array is undefined', async () => { + await 
expect(run(undefined as any)).rejects.toThrow('No events provided to data_extraction_check function'); }); - it('should handle empty events array', async () => { - // Mock console.log to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - - // Call the function with an empty array - const result = await run([]); - - // Verify the result - should be false because there are no events - expect(result).toEqual({ - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: false - }); - - // Restore console.log - jest.restoreAllMocks(); + it('should throw error when event is missing payload', async () => { + const invalidEvent = createMockEvent(EventType.ExtractionDataStart); + delete (invalidEvent as any).payload; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing payload'); + }); + + it('should throw error when event is missing event_type', async () => { + const invalidEvent = createMockEvent(EventType.ExtractionDataStart); + delete (invalidEvent as any).payload.event_type; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing event_type in payload'); + }); + + it('should throw error for unsupported event type', async () => { + const invalidEvent = createMockEvent('UNSUPPORTED_EVENT_TYPE' as any); + + await expect(run([invalidEvent])).rejects.toThrow('Unsupported event type'); + }); + + it('should handle spawn errors gracefully', async () => { + const events = [createMockEvent(EventType.ExtractionDataStart)]; + const spawnMock = adaas.spawn as jest.Mock; + const testError = new Error('Spawn failed'); + spawnMock.mockRejectedValue(testError); + + await expect(run(events)).rejects.toThrow('Spawn failed'); + }); + + it('should pass correct event to spawn', async () => { + const events = [createMockEvent(EventType.ExtractionDataStart)]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + 
await run(events); + + const spawnCall = spawnMock.mock.calls[0][0]; + expect(spawnCall.event).toBeDefined(); + expect(spawnCall.event.payload).toBeDefined(); + expect(spawnCall.event.payload.event_type).toBe(EventType.ExtractionDataStart); + }); + + it('should use correct worker path', async () => { + const events = [createMockEvent(EventType.ExtractionDataStart)]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + await run(events); + + const spawnCall = spawnMock.mock.calls[0][0]; + expect(spawnCall.workerPath).toContain('data-extraction-check.ts'); }); }); \ No newline at end of file diff --git a/build/src/functions/data_extraction_check/index.ts b/build/src/functions/data_extraction_check/index.ts index 16eae49..546ae61 100644 --- a/build/src/functions/data_extraction_check/index.ts +++ b/build/src/functions/data_extraction_check/index.ts @@ -1,111 +1,97 @@ -import { AirdropEvent, EventType, spawn } from '@devrev/ts-adaas'; -import path from 'path'; -import initialDomainMapping from '../generate_initial_mapping/initial_domain_mapping.json'; +import { convertToAirdropEvent, resolveWorkerPath } from '../../core/utils'; +import { FunctionInput } from '../../core/types'; +import { spawn, EventType } from '@devrev/ts-adaas'; + +export interface DataExtractionCheckState {} /** - * A function that checks if the data extraction workflow can be invoked. - * - * @param events - Array of AirdropEvent objects - * @returns A response indicating whether the data extraction workflow can be invoked + * Complete domain configuration for testing data extraction. + * Includes both external domain metadata and domain mapping. 
*/ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - valid_data_extraction_events: boolean -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } +const initialDomainMapping = { + external_domain_metadata: { + users: { + record_type: 'users', + fields: { + name: { + field_type: 'text', + display_name: 'Name', + }, + email: { + field_type: 'text', + display_name: 'Email', + }, + role: { + field_type: 'text', + display_name: 'Role', + }, + }, + }, + }, + domain_mapping: { + users: { + devrev_leaf_type: 'dev_user', + fields: { + name: { + target_field_name: 'display_name', + }, + email: { + target_field_name: 'email', + }, + role: { + target_field_name: 'state', + }, + }, + }, + }, +}; - // Validate that each event is a valid AirdropEvent with all required fields - events.forEach((event, index) => { - if (!event || typeof event !== 'object') { - throw new Error(`Invalid event at index ${index}: event must be a valid AirdropEvent object`); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error(`Invalid event at index ${index}: missing required field 'context'`); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.secrets.service_account_token'`); - } - - if (!event.context.snap_in_version_id) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.snap_in_version_id'`); - } - - if (!event.payload) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload'`); - } - - if (!event.payload.event_context) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload.event_context'`); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - 
throw new Error(`Invalid event at index ${index}: missing required field 'execution_metadata.devrev_endpoint'`); - } - }); +export const initialState: DataExtractionCheckState = {}; - // Check if any of the events are data extraction events - const dataExtractionEventTypes = [ - EventType.ExtractionDataStart, - EventType.ExtractionDataContinue - ]; +/** + * Test function for data extraction workflow. + * Processes EXTRACTION_DATA_START and EXTRACTION_DATA_CONTINUE events, + * initializes a users repo, normalizes and pushes user data, then emits EXTRACTION_DATA_DONE. + * + * @param events - Array of function input events + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to data_extraction_check function'); + } - const hasDataExtractionEvents = events.some(event => - event.payload && - event.payload.event_type && - dataExtractionEventTypes.includes(event.payload.event_type) - ); + // Process only the first event as per requirements + const event = events[0]; - // Log the event for debugging purposes - console.log('Data extraction check function invoked with events:', JSON.stringify(events)); - - // Filter events to only include data extraction events (ExtractionDataStart or ExtractionDataContinue) - const dataExtractionEvents = events.filter(event => - event.payload && - event.payload.event_type && - dataExtractionEventTypes.includes(event.payload.event_type) - ); - - // For each data extraction event, spawn a worker to process it - for (const event of dataExtractionEvents) { - // Define the worker path - make sure to use .ts extension as required by the SDK - const workerPath = path.resolve(__dirname, 'worker.ts'); + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } - // Define initial state for the worker - const initialState = {}; + if (!event.payload.event_type) { + throw new Error('Invalid 
event: missing event_type in payload'); + } - console.log(`Spawning worker for event type: ${event.payload.event_type}`); - // Spawn the worker to process the event - note: no options key in the parameter object - // Spawn the worker to process the event - await spawn({ - event: { - ...event, - payload: { ...event.payload } - }, - initialDomainMapping, - initialState, - workerPath - }); - } + // Check if this is a data extraction event + if ( + event.payload.event_type === EventType.ExtractionDataStart || + event.payload.event_type === EventType.ExtractionDataContinue + ) { + const workerPath = resolveWorkerPath(__dirname, 'workers/data-extraction-check.ts'); - // Return a response - return { - status: 'success', - message: 'Data extraction check function successfully invoked', - valid_data_extraction_events: hasDataExtractionEvents - }; - } catch (error) { - // Log the error for debugging - console.error('Error in data extraction check function:', error); - - // Re-throw the error to be handled by the caller - throw error; + await spawn({ + event: convertToAirdropEvent(event), + workerPath: workerPath, + initialState: initialState, + initialDomainMapping: initialDomainMapping as any, + }); + } else { + throw new Error( + `Unsupported event type: ${event.payload.event_type}. 
` + + `Expected: ${EventType.ExtractionDataStart} or ${EventType.ExtractionDataContinue}` + ); } -} \ No newline at end of file +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/data_extraction_check/worker.ts b/build/src/functions/data_extraction_check/worker.ts deleted file mode 100644 index efd44c3..0000000 --- a/build/src/functions/data_extraction_check/worker.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { ExtractorEventType, processTask } from '@devrev/ts-adaas'; - -/** - * Worker for handling data extraction - * This worker is responsible for emitting the EXTRACTION_DATA_DONE event - */ -processTask({ - task: async ({ adapter }) => { - try { - console.log('Data extraction worker started'); - - // In a real implementation, this would extract data from an external system - // and push it to repositories - - // For this test function, we'll just emit the DONE event - await adapter.emit(ExtractorEventType.ExtractionDataDone); - - console.log('Data extraction completed successfully'); - } catch (error) { - console.error('Error in data extraction worker:', error); - - // Emit an error event if something goes wrong - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { - message: error instanceof Error ? 
error.message : 'Unknown error in data extraction', - }, - }); - } - }, - onTimeout: async ({ adapter }) => { - console.error('Data extraction worker timed out'); - - // Emit an error event if the worker times out - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Data extraction timed out', - }, - }); - }, -}); \ No newline at end of file diff --git a/build/src/functions/data_extraction_check/workers/data-extraction-check.ts b/build/src/functions/data_extraction_check/workers/data-extraction-check.ts new file mode 100644 index 0000000..4eea689 --- /dev/null +++ b/build/src/functions/data_extraction_check/workers/data-extraction-check.ts @@ -0,0 +1,72 @@ +import { ExtractorEventType, processTask, NormalizedItem } from '@devrev/ts-adaas'; +import { DataExtractionCheckState } from '../index'; + +/** + * Normalizes user data to match the NormalizedItem structure. + * + * @param user - Raw user data + * @returns Normalized user item + */ +function normalizeUser(user: any): NormalizedItem { + return { + id: user.id, + created_date: user.created_at || new Date().toISOString(), + modified_date: user.updated_at || new Date().toISOString(), + data: { + name: user.name, + email: user.email, + role: user.role, + }, + }; +} + +/** + * Worker for testing data extraction. + * Initializes users repo, normalizes and pushes user data, then emits EXTRACTION_DATA_DONE. 
+ */ +processTask({ + task: async ({ adapter }) => { + // Initialize repos with a single "users" repo + const repos = [ + { + itemType: 'users', + normalize: normalizeUser, + }, + ]; + + adapter.initializeRepos(repos); + + // Create mock user data for testing + const users = [ + { + id: 'user-1', + name: 'Test User 1', + email: 'user1@example.com', + role: 'admin', + created_at: '2024-01-01T00:00:00Z', + updated_at: '2024-01-01T00:00:00Z', + }, + { + id: 'user-2', + name: 'Test User 2', + email: 'user2@example.com', + role: 'user', + created_at: '2024-01-02T00:00:00Z', + updated_at: '2024-01-02T00:00:00Z', + }, + ]; + + // Push normalized users to the repo + await adapter.getRepo('users')?.push(users); + + // Manually upload remaining items that don't fill a complete batch + await adapter.getRepo('users')?.upload(); + + // Emit the done event + await adapter.emit(ExtractorEventType.ExtractionDataDone); + }, + onTimeout: async ({ adapter }) => { + // Handle timeout by emitting progress event + await adapter.emit(ExtractorEventType.ExtractionDataProgress); + }, +}); \ No newline at end of file diff --git a/build/src/functions/data_push_check/index.test.ts b/build/src/functions/data_push_check/index.test.ts deleted file mode 100644 index bd30999..0000000 --- a/build/src/functions/data_push_check/index.test.ts +++ /dev/null @@ -1,181 +0,0 @@ -// Mock the dependencies before importing them -jest.mock('axios'); - -// Import the test utilities -import { createMockEvent, EventType } from './test-utils'; -import { run } from './index'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -// Mock @devrev/ts-adaas to use our mock EventType -jest.mock('@devrev/ts-adaas', () => ({ - EventType: require('./test-utils').EventType -})); - -describe('Data Push Check Function', () => { - beforeEach(() => { - // Clear all mocks before each test - jest.clearAllMocks(); - - // Mock console.log and console.error to prevent test output pollution - 
jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - // Restore console mocks - jest.restoreAllMocks(); - }); - - it('should return success when data push is successful', async () => { - // Mock successful axios response - (axios.post as jest.Mock).mockResolvedValue({ - status: 200, - data: { success: true } - }); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify axios was called with correct parameters - expect(axios.post).toHaveBeenCalledWith( - 'https://mock-callback-url.com', - expect.objectContaining({ - test_data: 'This is a test payload', - timestamp: expect.any(String), - snap_in_version_id: 'mock-version-id' - }), - expect.objectContaining({ - headers: { - 'Content-Type': 'application/json', - 'Authorization': 'mock-token' - }, - timeout: 10000 - }) - ); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Data push check function completed with status 200', - push_successful: true - }); - }); - - it('should return error when data push fails with HTTP error', async () => { - // Mock failed axios response - (axios.post as jest.Mock).mockResolvedValue({ - status: 403, - data: { error: 'Forbidden' } - }); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function completed with status 403', - push_successful: false, - error: 'Received status code 403' - }); - }); - - it('should return error when axios throws an exception', async () => { - // Mock axios throwing an error - (axios.post as jest.Mock).mockRejectedValue(new Error('Network error')); - - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the 
function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: 'Network error' - }); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Call the function - const result = await run(invalidInput); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: 'Invalid input: events must be an array' - }); - }); - - it('should throw an error if events array is empty', async () => { - // Call the function with empty array - const result = await run([]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: 'Invalid input: events array is empty' - }); - }); - - it('should throw an error if an event is missing required fields', async () => { - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent; - - // Call the function - const result = await run([invalidEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: 'Invalid event: missing required field \'context\'' - }); - }); - - it('should throw an error if callback_url is missing', async () => { - // Create a mock event with missing callback_url - const mockEvent: AirdropEvent = { - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - event_context: { - ...createMockEvent().payload.event_context, - callback_url: undefined as any // Use type assertion to allow undefined - } - } - }; - - // Call the function with the mock event - const 
result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: 'Invalid event: missing required field \'payload.event_context.callback_url\'' - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/data_push_check/index.ts b/build/src/functions/data_push_check/index.ts deleted file mode 100644 index 76ddb93..0000000 --- a/build/src/functions/data_push_check/index.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * A function that checks if pushing data to the callback URL works. - * - * @param events - Array of AirdropEvent objects - * @returns A response indicating whether the data push was successful - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - push_successful: boolean, - error?: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - if (events.length === 0) { - throw new Error('Invalid input: events array is empty'); - } - - // Use the first event for the check - const event = events[0]; - - // Validate that the event is a valid AirdropEvent with all required fields - if (!event || typeof event !== 'object') { - throw new Error('Invalid event: event must be a valid AirdropEvent object'); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error('Invalid event: missing required field \'context\''); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error('Invalid event: missing required field \'context.secrets.service_account_token\''); - } - - if (!event.context.snap_in_version_id) { - throw new Error('Invalid event: missing required field \'context.snap_in_version_id\''); - } - - 
if (!event.payload) { - throw new Error('Invalid event: missing required field \'payload\''); - } - - if (!event.payload.event_context) { - throw new Error('Invalid event: missing required field \'payload.event_context\''); - } - - if (!event.payload.event_context.callback_url) { - throw new Error('Invalid event: missing required field \'payload.event_context.callback_url\''); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - throw new Error('Invalid event: missing required field \'execution_metadata.devrev_endpoint\''); - } - - // Extract the callback URL - const callbackUrl = event.payload.event_context.callback_url; - - // Create a test payload - const testPayload = { - test_data: 'This is a test payload', - timestamp: new Date().toISOString(), - snap_in_version_id: event.context.snap_in_version_id - }; - - // Log the attempt for debugging purposes - console.log(`Attempting to push data to callback URL: ${callbackUrl}`); - - // Make a POST request to the callback URL - const response = await axios.post(callbackUrl, testPayload, { - headers: { - 'Content-Type': 'application/json', - 'Authorization': event.context.secrets.service_account_token - }, - timeout: 10000 // 10 seconds timeout - }); - - // Check if the request was successful - const isSuccessful = response.status >= 200 && response.status < 300; - - // Log the response for debugging purposes - console.log(`Data push response status: ${response.status}`); - - // Return a success response - return { - status: isSuccessful ? 'success' : 'error', - message: `Data push check function completed with status ${response.status}`, - push_successful: isSuccessful, - error: isSuccessful ? 
undefined : `Received status code ${response.status}` - }; - } catch (error) { - // Log the error for debugging - console.error('Error in data push check function:', error); - - // Return an error response - return { - status: 'error', - message: 'Data push check function failed', - push_successful: false, - error: error instanceof Error ? error.message : 'Unknown error occurred' - }; - } -} \ No newline at end of file diff --git a/build/src/functions/data_push_check/test-utils.ts b/build/src/functions/data_push_check/test-utils.ts deleted file mode 100644 index 656b493..0000000 --- a/build/src/functions/data_push_check/test-utils.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { - // Extraction - ExtractionExternalSyncUnitsStart = 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'https://mock-callback-url.com', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 
'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionDataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); \ No newline at end of file diff --git a/build/src/functions/extraction/attachments-worker.ts b/build/src/functions/extraction/attachments-worker.ts deleted file mode 100644 index 3d60fec..0000000 --- a/build/src/functions/extraction/attachments-worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { ExtractorEventType, NormalizedItem, processTask, RepoInterface } from '@devrev/ts-adaas'; - -processTask({ - task: async ({ adapter }) => { - console.log('Starting extraction of attachments...'); - await adapter.emit(ExtractorEventType.ExtractionAttachmentsDone); - }, - onTimeout: async ({ adapter }) => { - await adapter.emit(ExtractorEventType.ExtractionAttachmentsError); - } -}); \ No newline at end of file diff --git a/build/src/functions/extraction/data-worker-test-mocks.ts b/build/src/functions/extraction/data-worker-test-mocks.ts deleted file mode 100644 index c16d24b..0000000 --- a/build/src/functions/extraction/data-worker-test-mocks.ts +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Mock adapter for testing the data worker - */ -export const createMockAdapter = () => { - // Mock adapter functions - const mockPush = jest.fn().mockResolvedValue(true); - const mockUpload = 
jest.fn().mockResolvedValue(undefined); - const mockGetItems = jest.fn().mockReturnValue([]); - const mockGetRepo = jest.fn().mockReturnValue({ - push: mockPush, - upload: mockUpload, - getItems: mockGetItems - }); - const mockInitializeRepos = jest.fn(); - const mockEmit = jest.fn().mockResolvedValue(undefined); - - const mockAdapter = { - event: { - context: {}, - payload: { - connection_data: { - key: 'mock-api-key', - org_id: 'mock-space-id' - }, - event_context: { - external_sync_unit_id: 'mock-project-id' - } - } - }, - initializeRepos: mockInitializeRepos, - getRepo: mockGetRepo, - emit: mockEmit - }; - - return { - mockAdapter, - mockPush, - mockUpload, - mockGetItems, - mockGetRepo, - mockInitializeRepos, - mockEmit - }; -}; - -/** - * Mock API responses for testing - */ -export const mockApiResponses = { - // Mock space response with members - spaceResponse: { - status: 200, - data: { - data: [ - { - id: 'mock-space-id', - members: [ - { id: 'KUAFY3BJ' }, - { id: 'KUAFZBCJ' } - ] - } - ] - } - }, - - // Mock contacts response - contactsResponse: { - status: 200, - data: { - data: [ - { - id: 'KUAFY3BJ', - firstName: 'John', - lastName: 'Doe', - type: 'Person' - }, - { - id: 'KUAFZBCJ', - firstName: 'Jane', - lastName: 'Smith', - type: 'Person' - } - ] - } - }, - - // Mock tasks response - tasksResponse: { - status: 200, - data: { - data: [ - { - id: 'task1', - title: 'Task 1', - description: 'Description 1', - status: 'Active', - importance: 'Normal', - createdDate: '2023-01-01T00:00:00Z', - updatedDate: '2023-01-02T00:00:00Z', - parentIds: ['mock-project-id'] - }, - { - id: 'task2', - title: 'Task 2', - description: 'Description 2', - status: 'Completed', - importance: 'High', - createdDate: '2023-02-01T00:00:00Z', - updatedDate: '2023-02-02T00:00:00Z', - parentIds: ['mock-project-id'] - } - ] - } - } -}; \ No newline at end of file diff --git a/build/src/functions/extraction/data-worker.test.ts b/build/src/functions/extraction/data-worker.test.ts 
deleted file mode 100644 index d25b81d..0000000 --- a/build/src/functions/extraction/data-worker.test.ts +++ /dev/null @@ -1,186 +0,0 @@ -// Mock the dependencies -jest.mock('./wrike-api-client', () => { - return { - WrikeApiClient: jest.fn().mockImplementation(() => ({ - fetchSpaceMembers: jest.fn(), - fetchContacts: jest.fn(), - fetchTasks: jest.fn() - })) - }; -}); - -// Define ExtractorEventType at module level so it's accessible throughout the test file -const ExtractorEventType = { - ExtractionDataDone: 'EXTRACTION_DATA_DONE', - ExtractionDataError: 'EXTRACTION_DATA_ERROR' -}; - -jest.mock('@devrev/ts-adaas', () => { - return { - ExtractorEventType, - processTask: jest.fn((params) => { - // Store the task function for testing - (global as any).taskFunction = params.task; - (global as any).onTimeoutFunction = params.onTimeout; - }) - }; -}); - -// Mock axios -jest.mock('axios', () => ({ get: jest.fn() })); -import axios from 'axios'; - -// Import the mock data and adapter -import { createMockAdapter, mockApiResponses } from './data-worker-test-mocks'; - -// Import the worker file to trigger the processTask mock -import './data-worker'; - -// Import the WrikeApiClient -import { WrikeApiClient } from './wrike-api-client'; - -describe('Data Extraction Worker', () => { - // Get mock adapter and functions - const { - mockAdapter, - mockPush, - mockUpload, - mockGetItems, - mockGetRepo, - mockInitializeRepos, - mockEmit - } = createMockAdapter(); - - // Mock WrikeApiClient instance - const mockApiClient = new WrikeApiClient('mock-api-key'); - - beforeEach(() => { - jest.clearAllMocks(); - // Mock console methods to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Reset mocks - // Mock axios get method - (axios.get as jest.Mock) = jest.fn(); - }); - - afterEach(() => { - jest.restoreAllMocks(); - }); - - it('should fetch contacts and tasks and push them to 
repositories', async () => { - // Mock API client responses - (mockApiClient.fetchContacts as jest.Mock).mockResolvedValueOnce(mockApiResponses.contactsResponse.data.data); - (mockApiClient.fetchTasks as jest.Mock).mockResolvedValueOnce(mockApiResponses.tasksResponse.data.data); - (WrikeApiClient as jest.Mock).mockImplementation(() => mockApiClient); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify repository initialization - expect(mockInitializeRepos).toHaveBeenCalledWith([ - { - itemType: 'users', - normalize: expect.any(Function) - }, - { - itemType: 'tasks', - normalize: expect.any(Function) - } - ]); - - // Verify getRepo was called for both repositories - expect(mockGetRepo).toHaveBeenCalledWith('users'); - expect(mockGetRepo).toHaveBeenCalledWith('tasks'); - - // Verify push was called for both repositories - expect(mockPush).toHaveBeenCalledTimes(2); - - // Verify upload was called for both repositories - expect(mockUpload).toHaveBeenCalledTimes(2); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); - }); - - it('should emit an error event when space API call fails', async () => { - // Mock API client to throw an error - (mockApiClient.fetchContacts as jest.Mock).mockRejectedValueOnce(new Error('API error')); - (WrikeApiClient as jest.Mock).mockImplementation(() => mockApiClient); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Error fetching contacts: API error' - } - }); - }); - - it('should emit an error event when contacts API call fails', async () => { - // Mock API client to throw an error on contacts - (mockApiClient.fetchContacts as jest.Mock).mockRejectedValueOnce(new Error('Contacts API error')); - (WrikeApiClient as jest.Mock).mockImplementation(() => 
mockApiClient); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Error fetching contacts: Contacts API error' - } - }); - }); - - it('should emit an error event when tasks API call fails', async () => { - // Mock API client to succeed on contacts but fail on tasks - (mockApiClient.fetchContacts as jest.Mock).mockResolvedValueOnce(mockApiResponses.contactsResponse.data.data); - (mockApiClient.fetchTasks as jest.Mock).mockRejectedValueOnce(new Error('Tasks API error')); - (WrikeApiClient as jest.Mock).mockImplementation(() => mockApiClient); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Error fetching tasks: Tasks API error' - } - }); - }); - - it('should emit an error event when repository push fails', async () => { - // Mock API client to succeed - (mockApiClient.fetchContacts as jest.Mock).mockResolvedValueOnce(mockApiResponses.contactsResponse.data.data); - (WrikeApiClient as jest.Mock).mockImplementation(() => mockApiClient); - - // Mock push to fail - mockPush.mockRejectedValueOnce(new Error('Push failed')); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Error pushing contacts: Push failed' - } - }); - }); - - it('should emit an error event on timeout', async () => { - // Execute the onTimeout function - await (global as any).onTimeoutFunction({ adapter: mockAdapter }); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_DATA_ERROR', { - error: { - message: 'Data 
extraction timed out' - } - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/extraction/data-worker.ts b/build/src/functions/extraction/data-worker.ts deleted file mode 100644 index d9dc283..0000000 --- a/build/src/functions/extraction/data-worker.ts +++ /dev/null @@ -1,306 +0,0 @@ -import { ExtractorEventType, NormalizedItem, processTask, RepoInterface } from '@devrev/ts-adaas'; -import { WrikeApiClient, WrikeContact, WrikeTask } from './wrike-api-client'; - -/** - * Worker for handling data extraction - * This worker is responsible for: - * 1. Fetching contacts from Wrike API - * 2. Fetching tasks for the specific project from Wrike API - * 3. Pushing the data to repositories - * 4. Emitting a single EXTRACTION_DATA_DONE event - */ -processTask({ - task: async ({ adapter }) => { - // Flags to track state - let eventEmitted = false; - - try { - console.log('Data extraction worker started with event type:', adapter.event.payload?.event_type); - - const event = adapter.event; - - // Extract the Wrike API key, Space ID, and Project ID - const apiKey = event.payload.connection_data.key; - // Use the space ID from connection_data.org_id for contacts - const spaceId = event.payload.connection_data.org_id; - const projectId = event.payload.event_context.external_sync_unit_id; - - if (!apiKey) { - throw new Error('Missing API key in connection_data'); - } - - if (!spaceId || spaceId === '') { - throw new Error('Missing Space ID in connection_data'); - } - - if (!projectId) { - throw new Error('Missing Project ID in event_context.external_sync_unit_id'); - } - - // Create a new Wrike API client - const apiClient = new WrikeApiClient(apiKey); - - // Initialize repositories for users and tasks - adapter.initializeRepos([ - { - itemType: 'users', - normalize: (record: object) => normalizeContact(record as WrikeContact) - }, - { - itemType: 'tasks', - normalize: (record: object) => normalizeTask(record as WrikeTask) - } - ]); - - // Get repositories 
- const usersRepo = adapter.getRepo('users'); - if (!usersRepo) { - console.error('Failed to get users repository'); - } - - const tasksRepo = adapter.getRepo('tasks'); - if (!tasksRepo) { - throw new Error('Failed to get tasks repository'); - } - - // Step 1: Try to fetch contacts (users) - use spaceId from connection_data.org_id - console.log('Fetching contacts from Wrike API'); - try { - const contacts = await apiClient.fetchContacts(spaceId); - console.log(`Fetched ${contacts.length} contacts`); - - // Push contacts to the users repository if we have any - if (contacts.length > 0 && usersRepo) { - console.log('Pushing contacts to users repository'); - try { - const pushResult = await usersRepo.push(contacts); - if (!pushResult) { - const errorMsg = 'Failed to push contacts to repository'; - console.error(errorMsg); - // Emit an error event for contact push failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: `Error pushing contacts: ${errorMsg}` } - }); - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } else { - console.log('Successfully pushed contacts to users repository'); - } - } catch (error) { - console.error(`Error pushing contacts: ${error instanceof Error ? error.message : String(error)}`); - const errorMsg = `Error pushing contacts: ${error instanceof Error ? error.message : String(error)}`; - // Emit an error event for contact push failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: errorMsg } - }); - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } - } else { - // If we don't have any contacts or the repository is not available, - // log a warning but continue with task processing - console.log( - `No contacts to push or repository not available: ${contacts.length} contacts, ` + - `repository ${usersRepo ? 
'available' : 'not available'}` - ); - } - } catch (error) { - // Log the error and emit an error event - console.error('Error fetching contacts:', error instanceof Error ? error.message : String(error)); - const errorMsg = error instanceof Error - ? error.message.startsWith('Error fetching contacts:') - ? error.message - : `Error fetching contacts: ${error.message}` - : `Error fetching contacts: ${String(error)}`; - - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: errorMsg } - }); - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } - - // Only proceed to fetch tasks if no error event has been emitted - if (!eventEmitted) { - // Step 2: Fetch tasks for the project - console.log(`Fetching tasks for project ${projectId} from Wrike API`); - try { - const tasks = await apiClient.fetchTasks(projectId); - console.log(`Fetched ${tasks ? tasks.length : 0} tasks`); - - // Push tasks to the tasks repository - console.log(`Pushing ${tasks.length} tasks to tasks repository`); - try { - await tasksRepo.push(tasks); - console.log('Successfully pushed tasks to tasks repository'); - } catch (error) { - console.error('Error pushing tasks to repository:', error instanceof Error ? error.message : String(error)); - const errorMsg = `Error pushing tasks: ${error instanceof Error ? error.message : String(error)}`; - - // Emit an error event for task push failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: errorMsg } - }); - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } - } catch (error) { - console.error('Error fetching tasks:', error instanceof Error ? error.message : String(error)); - const errorMsg = error instanceof Error - ? error.message.startsWith('Error fetching tasks:') - ? 
error.message - : `Error fetching tasks: ${error.message}` - : `Error fetching tasks: ${String(error)}`; - - // Emit an error event for task fetching failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: errorMsg } - }); - - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } - } - - // Step 3: Make sure all data is uploaded (only if no error event has been emitted) - if (!eventEmitted) { - try { - // Upload any remaining items in the users repository - if (usersRepo) { - console.log('Uploading any remaining users data if available'); - try { - const usersResult = await usersRepo.upload(); - if (usersResult) { - console.error(`Error uploading contacts: ${JSON.stringify(usersResult)}`); - // Log the error but continue since contact upload failure is not critical - } - } catch (uploadError) { - console.error(`Error uploading contacts: ${uploadError instanceof Error ? uploadError.message : String(uploadError)}`); - // Log the error but continue since contact upload failure is not critical - } - } - - // Upload any remaining items in the tasks repository - console.log('Uploading any remaining tasks data if available'); - try { - const tasksUploadError = await tasksRepo.upload(); - if (tasksUploadError) { - console.error(`Error uploading tasks: ${JSON.stringify(tasksUploadError)}`); - // Emit an error event for task upload failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: `Error uploading tasks: ${JSON.stringify(tasksUploadError)}` } - }); - - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } - } catch (uploadError) { - // Check if this is the "source.on is not a function" error - const isNonCriticalError = - (uploadError instanceof Error && - (uploadError.message.includes('source.on is not a function') || - uploadError.message.includes('is not a function'))); - - if 
(!isNonCriticalError) { - console.error(`Error uploading tasks: ${uploadError instanceof Error ? uploadError.message : String(uploadError)}`); - // Emit an error event for task upload failure - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { message: `Error uploading tasks: ${uploadError instanceof Error ? uploadError.message : String(uploadError)}` } - }); - - eventEmitted = true; - return; // Exit early since we've already emitted an error - } - } else { - console.log('Non-critical upload error encountered, continuing with DONE event'); - } - } - - // If we haven't emitted an event yet, emit a success event - if (!eventEmitted) { - console.log('Data extraction completed successfully. Emitting DONE event.'); - await adapter.emit(ExtractorEventType.ExtractionDataDone); - eventEmitted = true; - console.log('Successfully emitted EXTRACTION_DATA_DONE event'); - } - } catch (finalError) { - // Handle any unexpected errors in the final processing - console.error(`Unexpected error in final processing: ${finalError instanceof Error ? finalError.message : String(finalError)}`); - - // If we haven't emitted an event yet, emit an error - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { - message: `Unexpected error in final processing: ${finalError instanceof Error ? finalError.message : String(finalError)}` - } - }); - eventEmitted = true; - } - } - } - } catch (error) { - console.error('Error in data extraction worker:', error); - - // Emit an error event if something goes wrong and we haven't emitted an event yet - if (!eventEmitted) { - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { - message: error instanceof Error ? 
error.message : String(error) - } - }); - eventEmitted = true; - } - } - }, - onTimeout: async ({ adapter }) => { - console.error('Data extraction worker timed out'); - - // Emit an error event if the worker times out - await adapter.emit(ExtractorEventType.ExtractionDataError, { - error: { - message: 'Data extraction timed out' - }, - }); - }, -}); - -/** - * Normalizes a Wrike contact into a NormalizedItem - * @param contact The Wrike contact to normalize - * @returns A normalized item - */ -function normalizeContact(contact: WrikeContact | any): NormalizedItem { - return { - id: contact.id, - created_date: new Date().toISOString(), // Wrike API doesn't provide creation date for contacts - modified_date: new Date().toISOString(), // Wrike API doesn't provide modification date for contacts - data: contact - }; -} - -/** - * Normalizes a Wrike task into a NormalizedItem - * @param task The Wrike task to normalize - * @returns A normalized item - */ -function normalizeTask(task: WrikeTask | any): NormalizedItem { - return { - id: task.id, - created_date: task.created_date, - modified_date: task.updated_date, - data: task - }; -} \ No newline at end of file diff --git a/build/src/functions/extraction/index.test.ts b/build/src/functions/extraction/index.test.ts deleted file mode 100644 index 49204cb..0000000 --- a/build/src/functions/extraction/index.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -// Mock the spawn function before importing any modules -const mockSpawn = jest.fn().mockResolvedValue(undefined); -jest.mock('@devrev/ts-adaas', () => { - const actual = jest.requireActual('@devrev/ts-adaas'); - return { ...actual, spawn: mockSpawn }; -}); - -// Import the test setup utilities -import { - setupMockSpawn, - setupTestEnvironment, - cleanupTestEnvironment, - createMockEvent, - verifySpawnCalls -} from './test-setup'; -import { run } from './index'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import { EventType } from './test-utils'; - -describe('Extraction 
Function', () => { - beforeEach(() => { - // Set up test environment - setupTestEnvironment(mockSpawn); - }); - - afterEach(() => { - // Clean up test environment - cleanupTestEnvironment(); - - // Clear the mock between tests - mockSpawn.mockClear(); - }); - - it('should spawn a worker for ExtractionExternalSyncUnitsStart event type', async () => { - // Test with the external sync units extraction event type - const mockEvent = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction function successfully processed 1 external sync units events' - }); - - // Verify that spawn was called with the correct parameters - verifySpawnCalls(mockSpawn, 1); - }); - - it('should not spawn a worker for other event types', async () => { - // Test with a different event type - const mockEvent = createMockEvent(EventType.ExtractionDataDelete); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction function successfully processed 0 external sync units events' - }); - - // Verify that spawn was not called - verifySpawnCalls(mockSpawn, 0); - }); - - it('should spawn a worker for ExtractionMetadataStart event type', async () => { - // Test with the metadata extraction event type - const mockEvent = createMockEvent(EventType.ExtractionMetadataStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction function successfully processed 0 external sync units events' - }); - - // Verify that spawn was called with the correct parameters - verifySpawnCalls(mockSpawn, 1, 'metadata-worker.ts'); - }); - - it('should spawn a worker for ExtractionDataStart event 
type', async () => { - // Test with the data extraction event type - const mockEvent = createMockEvent(EventType.ExtractionDataStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction function successfully processed 0 external sync units events' - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalledTimes(1); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('initialDomainMapping'); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('workerPath'); - expect(mockSpawn.mock.calls[0][0].workerPath).toContain('data-worker.ts'); - }); - - it('should handle multiple events correctly', async () => { - // Create multiple events with different event types - const event1 = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - const event2 = createMockEvent(EventType.ExtractionDataStart); - const event3 = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - - // Call the function with multiple events - const result = await run([event1, event2, event3]); - - // Verify the result - expect(result).toStrictEqual({ - status: 'success', - message: 'Extraction function successfully processed 2 external sync units events' - }); - - // Verify that spawn was called twice (once for each ExtractionExternalSyncUnitsStart event) - verifySpawnCalls(mockSpawn, 3); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an error - await expect(run(invalidInput)).rejects.toThrow('Invalid input: events must be an array'); - }); - - it('should throw an error if an event is missing required fields', async () => { - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as 
unknown as AirdropEvent; - - // Expect the function to throw an error - await expect(run([invalidEvent])).rejects.toThrow('missing required field \'context\''); - }); - - it('should handle empty events array', async () => { - // Call the function with an empty array - const result = await run([]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction function successfully processed 0 external sync units events' - }); - - // Verify that spawn was not called - verifySpawnCalls(mockSpawn, 0); - }); -}); \ No newline at end of file diff --git a/build/src/functions/extraction/index.ts b/build/src/functions/extraction/index.ts index dbb969a..e5ed1e6 100644 --- a/build/src/functions/extraction/index.ts +++ b/build/src/functions/extraction/index.ts @@ -1,172 +1,65 @@ -import { AirdropEvent, EventType, spawn } from '@devrev/ts-adaas'; -import path from 'path'; -import initialDomainMapping from '../generate_initial_mapping/initial_domain_mapping.json'; +import { convertToAirdropEvent, resolveWorkerPath } from '../../core/utils'; +import { FunctionInput } from '../../core/types'; +import { spawn, EventType } from '@devrev/ts-adaas'; +import initialDomainMapping from '../get_initial_domain_mapping/initial-domain-mapping.json'; -/** - * A function that extracts data from Wrike and pushes it to DevRev. - * If the event type is EXTRACTION_EXTERNAL_SYNC_UNITS_START, it will - * fetch projects from Wrike and push them as external sync units. 
- * - * @param events - Array of AirdropEvent objects - * @returns A response indicating the status of the extraction - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } +export interface ExtractorState { + users: { + completed: boolean; + }; + tasks: { + completed: boolean; + nextPageToken?: string; + modifiedSince?: string; + }; + comments: { + completed: boolean; + }; + attachments: { + completed: boolean; + metadata?: any[]; + }; +} - // Validate that each event is a valid AirdropEvent with all required fields - events.forEach((event, index) => { - if (!event || typeof event !== 'object') { - throw new Error(`Invalid event at index ${index}: event must be a valid AirdropEvent object`); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error(`Invalid event at index ${index}: missing required field 'context'`); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.secrets.service_account_token'`); - } - - if (!event.context.snap_in_version_id) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.snap_in_version_id'`); - } - - if (!event.payload) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload'`); - } - - if (!event.payload.event_context) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload.event_context'`); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - throw new Error(`Invalid event at index ${index}: missing required field 'execution_metadata.devrev_endpoint'`); - } - }); - - // Filter events to only include EXTRACTION_EXTERNAL_SYNC_UNITS_START 
events - const externalSyncUnitsEvents = events.filter(event => - event.payload && - event.payload.event_type === EventType.ExtractionExternalSyncUnitsStart - ); - - // Log the event for debugging purposes - console.log(`Extraction function invoked with ${events.length} events, ${externalSyncUnitsEvents.length} are external sync units events`); - - // For each external sync units event, spawn a worker to process it - for (const event of externalSyncUnitsEvents) { - // Define the worker path - make sure to use .ts extension as required by the SDK - const workerPath = path.resolve(__dirname, 'worker.ts'); - - // Define initial state for the worker - const initialState = {}; - - // Spawn the worker to process the event - await spawn({ - event: { - ...event, - payload: { ...event.payload } - }, - initialDomainMapping, - initialState, - workerPath - }); - } - - // Filter events to only include EXTRACTION_METADATA_START events - const metadataEvents = events.filter(event => - event.payload && - event.payload.event_type === EventType.ExtractionMetadataStart - ); - - // Log the metadata events for debugging purposes - console.log(`Found ${metadataEvents.length} metadata events`); - - // For each metadata event, spawn a worker to process it - for (const event of metadataEvents) { - // Define the worker path - make sure to use .ts extension as required by the SDK - const workerPath = path.resolve(__dirname, 'metadata-worker.ts'); - - // Define initial state for the worker - const initialState = {}; - - // Spawn the worker to process the event - await spawn({ - event: { - ...event, - payload: { ...event.payload } - }, - initialDomainMapping, - initialState, - workerPath - }); - } +export const initialState: ExtractorState = { + users: { completed: false }, + tasks: { completed: false, nextPageToken: undefined, modifiedSince: undefined }, + comments: { completed: false }, + attachments: { completed: false, metadata: [] }, +}; - // Filter events to only include 
EXTRACTION_DATA_START events - const dataEvents = events.filter(event => - event.payload && - event.payload.event_type === EventType.ExtractionDataStart - ); - - // Log the data events for debugging purposes - console.log(`Found ${dataEvents.length} data extraction events`); - - // For each data event, spawn a worker to process it - for (const event of dataEvents) { - // Define the worker path - make sure to use .ts extension as required by the SDK - const workerPath = path.resolve(__dirname, 'data-worker.ts'); - - // Define initial state for the worker - const initialState = {}; - - // Spawn the worker to process the event - await spawn({ - event, - initialDomainMapping, - initialState, - workerPath - }); - } - - const attachmentsEvents = events.filter(event => - event.payload && - event.payload.event_type === EventType.ExtractionAttachmentsStart - ); +function getWorkerPerExtractionPhase(event: FunctionInput) { + let path; + switch (event.payload.event_type) { + case EventType.ExtractionExternalSyncUnitsStart: + path = resolveWorkerPath(__dirname, 'workers/external-sync-units-extraction.ts'); + break; + case EventType.ExtractionMetadataStart: + path = resolveWorkerPath(__dirname, 'workers/metadata-extraction.ts'); + break; + case EventType.ExtractionDataStart: + case EventType.ExtractionDataContinue: + path = resolveWorkerPath(__dirname, 'workers/data-extraction.ts'); + break; + case EventType.ExtractionAttachmentsStart: + case EventType.ExtractionAttachmentsContinue: + path = resolveWorkerPath(__dirname, 'workers/attachments-extraction.ts'); + break; + } + return path; +} - for (const event of attachmentsEvents) { - // Define the worker path for attachments - const workerPath = path.resolve(__dirname, 'attachments-worker.ts'); - - // Define initial state for the worker - const initialState = {}; - - // Spawn the worker to process the event - await spawn({ - event, - initialDomainMapping, - initialState, - workerPath - }); - } - - // Return a success response 
that includes both types of events - return { - status: 'success', - message: `Extraction function successfully processed ${externalSyncUnitsEvents.length} external sync units events` - }; - } catch (error) { - // Log the error for debugging - console.error('Error in extraction function:', error); - - // Re-throw the error to be handled by the caller - throw error; +const run = async (events: FunctionInput[]) => { + for (const event of events) { + const file = getWorkerPerExtractionPhase(event); + await spawn({ + event: convertToAirdropEvent(event), + workerPath: file, + initialState: initialState, + initialDomainMapping: initialDomainMapping as any, + options: { timeout: 10 * 60 * 1000 }, + }); } -} \ No newline at end of file +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/extraction/metadata-worker.test.ts b/build/src/functions/extraction/metadata-worker.test.ts deleted file mode 100644 index f094721..0000000 --- a/build/src/functions/extraction/metadata-worker.test.ts +++ /dev/null @@ -1,131 +0,0 @@ -// Mock the dependencies -jest.mock('@devrev/ts-adaas', () => { - const ExtractorEventType = { - ExtractionMetadataDone: 'EXTRACTION_METADATA_DONE', - ExtractionMetadataError: 'EXTRACTION_METADATA_ERROR' - }; - - return { - ExtractorEventType, - processTask: jest.fn((params) => { - // Store the task function for testing - (global as any).taskFunction = params.task; - (global as any).onTimeoutFunction = params.onTimeout; - }) - }; -}); - -// Mock the external domain metadata -jest.mock('../generate_metadata/external_domain_metadata.json', () => ({ - schema_version: 'v0.2.0', - record_types: { - tasks: { name: 'Task' }, - users: { name: 'User' } - } -})); - -// Import the worker file to trigger the processTask mock -import './metadata-worker'; - -describe('Metadata Extraction Worker', () => { - // Mock adapter - const mockPush = jest.fn().mockResolvedValue(true); - const mockUpload = jest.fn().mockResolvedValue(undefined); - 
const mockGetRepo = jest.fn().mockReturnValue({ - push: mockPush, - upload: mockUpload - }); - const mockInitializeRepos = jest.fn(); - const mockEmit = jest.fn().mockResolvedValue(undefined); - - const mockAdapter = { - initializeRepos: mockInitializeRepos, - getRepo: mockGetRepo, - emit: mockEmit - }; - - beforeEach(() => { - jest.clearAllMocks(); - // Mock console methods to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - jest.restoreAllMocks(); - }); - - it('should initialize repository and push metadata', async () => { - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify repository initialization - expect(mockInitializeRepos).toHaveBeenCalledWith([ - { - itemType: 'external_domain_metadata' - } - ]); - - // Verify getRepo was called - expect(mockGetRepo).toHaveBeenCalledWith('external_domain_metadata'); - - // Verify push was called with the metadata - expect(mockPush).toHaveBeenCalledWith([ - { - schema_version: 'v0.2.0', - record_types: { - tasks: { name: 'Task' }, - users: { name: 'User' } - } - } - ]); - - // Verify upload was called - expect(mockUpload).toHaveBeenCalled(); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_METADATA_DONE'); - }); - - it('should emit an error event when repository initialization fails', async () => { - // Mock getRepo to return null (initialization failed) - mockGetRepo.mockReturnValueOnce(null); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_METADATA_ERROR', { - error: { - message: 'Failed to initialize external_domain_metadata repository' - } - }); - }); - - it('should emit an error event when push fails', async () => { - // Mock push to throw an error - 
mockPush.mockRejectedValueOnce(new Error('Push failed')); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call with error - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_METADATA_ERROR', { - error: { - message: 'Error during metadata upload: Push failed' - } - }); - }); - - it('should emit an error event on timeout', async () => { - // Execute the onTimeout function - await (global as any).onTimeoutFunction({ adapter: mockAdapter }); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith('EXTRACTION_METADATA_ERROR', { - error: { - message: 'Metadata extraction timed out' - } - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/extraction/metadata-worker.ts b/build/src/functions/extraction/metadata-worker.ts deleted file mode 100644 index 256209e..0000000 --- a/build/src/functions/extraction/metadata-worker.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { ExtractorEventType, processTask } from '@devrev/ts-adaas'; -import externalDomainMetadata from '../generate_metadata/external_domain_metadata.json'; - -/** - * Worker for handling metadata extraction - * This worker is responsible for pushing the External Domain Metadata to the repository - */ -processTask({ - task: async ({ adapter }) => { - try { - console.log('Metadata extraction worker started'); - - // Initialize the repository for external domain metadata - adapter.initializeRepos([ - { - itemType: 'external_domain_metadata' - // No normalize function since we don't want to normalize the metadata - } - ]); - - // Get the repository - const metadataRepo = adapter.getRepo('external_domain_metadata'); - - if (!metadataRepo) { - throw new Error('Failed to initialize external_domain_metadata repository'); - } - - console.log('Pushing external domain metadata to repository'); - - // Push the metadata to the repository - try { - // Note: We're not normalizing the metadata as per the requirement - const 
pushResult = await metadataRepo.push([externalDomainMetadata]); - - if (!pushResult) { - throw new Error('Failed to push metadata to repository'); - } - - // Make sure any remaining items are uploaded - const uploadError = await metadataRepo.upload(); - - if (uploadError) { - throw new Error(`Failed to upload metadata: ${JSON.stringify(uploadError)}`); - } - - console.log('Successfully pushed external domain metadata'); - } catch (uploadError) { - throw new Error(`Error during metadata upload: ${uploadError instanceof Error ? uploadError.message : JSON.stringify(uploadError)}`); - } - // Emit the DONE event - await adapter.emit(ExtractorEventType.ExtractionMetadataDone); - - console.log('Metadata extraction completed successfully'); - } catch (error) { - console.error('Error in metadata extraction worker:', error); - - const errorMessage = error instanceof Error ? error.message : 'Unknown error in metadata extraction'; - // Emit an error event if something goes wrong - await adapter.emit(ExtractorEventType.ExtractionMetadataError, { - error: { - message: errorMessage, - }, - }); - } - }, - onTimeout: async ({ adapter }) => { - console.error('Metadata extraction worker timed out'); - - // Emit an error event if the worker times out - await adapter.emit(ExtractorEventType.ExtractionMetadataError, { - error: { - message: 'Metadata extraction timed out', - }, - }); - }, -}); \ No newline at end of file diff --git a/build/src/functions/extraction/test-setup.ts b/build/src/functions/extraction/test-setup.ts deleted file mode 100644 index 7f7e09c..0000000 --- a/build/src/functions/extraction/test-setup.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import { EventType } from './test-utils'; -import path from 'path'; - -/** - * Sets up the mock spawn function - * @returns The mock spawn function - */ -export function setupMockSpawn() { - // Create a mock spawn function - const mockSpawn = jest.fn().mockResolvedValue(undefined); - - // 
Return the mock function for use in tests - return mockSpawn; -} - -/** - * Sets up the test environment with all necessary mocks - * @param mockSpawn The mock spawn function to use - */ -export function setupTestEnvironment(mockSpawn: jest.Mock) { - // Mock the initial domain mapping - jest.mock('../generate_initial_mapping/initial_domain_mapping.json', () => ({ - additional_mappings: {} - })); - - // Mock axios - jest.mock('axios'); - - // Mock the @devrev/ts-adaas module with a proper implementation - jest.mock('@devrev/ts-adaas', () => { - const actual = jest.requireActual('@devrev/ts-adaas'); - return { - ...actual, - spawn: mockSpawn, - EventType: require('./test-utils').EventType - }; - }); - - // Mock console methods to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); -} - -/** - * Cleans up the test environment - */ -export function cleanupTestEnvironment() { - // Clear all mocks - jest.clearAllMocks(); - - // Restore console mocks - jest.restoreAllMocks(); -} - -/** - * Helper function to create a mock AirdropEvent for testing - * @param eventType The event type to use - * @returns A mock AirdropEvent - */ -export function createMockEvent(eventType: EventType): AirdropEvent { - return { - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'IEACW7SVI4O6BDQE', // Example Space ID - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 
'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }; -} - -/** - * Verifies that the spawn function was called with the correct parameters - * @param mockSpawn The mock spawn function - * @param expectedCallCount The expected number of calls - * @param workerPathContains Optional string that should be contained in the worker path - */ -export function verifySpawnCalls(mockSpawn: jest.Mock, expectedCallCount: number, workerPathContains?: string) { - expect(mockSpawn).toHaveBeenCalledTimes(expectedCallCount); - - if (expectedCallCount > 0) { - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('initialDomainMapping'); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('workerPath'); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('event'); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('initialState'); - expect(mockSpawn.mock.calls[0][0]).not.toHaveProperty('options'); - - if (workerPathContains) { - expect(mockSpawn.mock.calls[0][0].workerPath).toContain(workerPathContains); - } - } -} \ No newline at end of file diff --git a/build/src/functions/extraction/test-utils.ts b/build/src/functions/extraction/test-utils.ts deleted file mode 100644 index 11e7b17..0000000 --- a/build/src/functions/extraction/test-utils.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { 
- // Extraction - ExtractionExternalSyncUnitsStart = 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (eventType: EventType): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'IEACW7SVI4O6BDQE', // Example Space ID - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); \ No 
newline at end of file diff --git a/build/src/functions/extraction/worker.test.ts b/build/src/functions/extraction/worker.test.ts deleted file mode 100644 index 550e85a..0000000 --- a/build/src/functions/extraction/worker.test.ts +++ /dev/null @@ -1,246 +0,0 @@ -// Mock axios before importing anything else -jest.mock('axios', () => ({ - get: jest.fn(), - isAxiosError: jest.fn() -})); - -// Import axios after mocking -import axios from 'axios'; - -// Mock the ExtractorEventType enum -const ExtractorEventType = { - ExtractionExternalSyncUnitsDone: 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE', - ExtractionExternalSyncUnitsError: 'EXTRACTION_EXTERNAL_SYNC_UNITS_ERROR' -}; - -const mockAxios = axios as jest.Mocked; - -// Mock the adapter -const mockEmit = jest.fn().mockResolvedValue(undefined); -const mockAdapter = { - event: { - payload: { - connection_data: { - key: 'mock-api-key', - org_id: 'mock-space-id' - } - } - }, - emit: mockEmit -}; - -// Mock processTask to capture the task function -jest.mock('@devrev/ts-adaas', () => ({ - ExtractorEventType, - processTask: jest.fn((params) => { - // Store the task function for testing - (global as any).taskFunction = params.task; - (global as any).onTimeoutFunction = params.onTimeout; - }) -})); - -// Import the worker file to trigger the processTask mock -import './worker'; - -describe('Extraction Worker', () => { - beforeEach(() => { - jest.clearAllMocks(); - // Mock console methods to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - jest.restoreAllMocks(); - }); - - it('should fetch projects and their task counts and emit them as external sync units', async () => { - // Mock the first API call to get projects - mockAxios.get.mockImplementationOnce(() => Promise.resolve({ - status: 200, - data: { - data: [ - { - id: 'project1', - title: 'Project 1', - description: 'Description 1' - }, - { - id: 
'project2', - title: 'Project 2', - description: 'Description 2' - } - ] - } - })); - - // Mock the API calls to get task counts for each project - mockAxios.get.mockImplementationOnce(() => Promise.resolve({ - status: 200, - data: { - data: [ - { id: 'task1' }, - { id: 'task2' }, - { id: 'task3' } - ] - } - })); - - mockAxios.get.mockImplementationOnce(() => Promise.resolve({ - status: 200, - data: { - data: [ - { id: 'task4' }, - { id: 'task5' } - ] - } - })); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify API calls - expect(mockAxios.get).toHaveBeenCalledTimes(3); - expect(mockAxios.get).toHaveBeenNthCalledWith( - 1, - 'https://www.wrike.com/api/v4/spaces/mock-space-id/folders', - expect.objectContaining({ - headers: { 'Authorization': 'Bearer mock-api-key' }, - params: { descendants: true } - }) - ); - expect(mockAxios.get).toHaveBeenNthCalledWith( - 2, - 'https://www.wrike.com/api/v4/folders/project1/tasks', - expect.objectContaining({ - headers: { 'Authorization': 'Bearer mock-api-key' }, - params: { descendants: true, subTasks: true } - }) - ); - expect(mockAxios.get).toHaveBeenNthCalledWith( - 3, - 'https://www.wrike.com/api/v4/folders/project2/tasks', - expect.objectContaining({ - headers: { 'Authorization': 'Bearer mock-api-key' }, - params: { descendants: true, subTasks: true } - }) - ); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith( - ExtractorEventType.ExtractionExternalSyncUnitsDone, - { - external_sync_units: [ - { - id: 'project1', - name: 'Project 1', - description: 'Description 1', - item_count: 3, - item_type: 'tasks' - }, - { - id: 'project2', - name: 'Project 2', - description: 'Description 2', - item_count: 2, - item_type: 'tasks' - } - ] - } - ); - }); - - it('should handle errors when fetching task counts', async () => { - // Mock the first API call to get projects - mockAxios.get.mockImplementationOnce(() => Promise.resolve({ - status: 200, - data: { - 
data: [ - { - id: 'project1', - title: 'Project 1', - description: 'Description 1' - }, - { - id: 'project2', - title: 'Project 2', - description: 'Description 2' - } - ] - } - })); - - // Mock the first task count API call to succeed - mockAxios.get.mockImplementationOnce(() => Promise.resolve({ - status: 200, - data: { - data: [ - { id: 'task1' }, - { id: 'task2' } - ] - } - })); - - // Mock the second task count API call to fail - mockAxios.get.mockImplementationOnce(() => Promise.reject(new Error('API error'))); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call - should still include both projects - expect(mockEmit).toHaveBeenCalledWith( - ExtractorEventType.ExtractionExternalSyncUnitsDone, - { - external_sync_units: [ - { - id: 'project1', - name: 'Project 1', - description: 'Description 1', - item_count: 2, - item_type: 'tasks' - }, - { - id: 'project2', - name: 'Project 2', - description: 'Description 2', - item_count: 0, // Failed to get task count - item_type: 'tasks' - } - ] - } - ); - }); - - it('should emit an error event when the projects API call fails', async () => { - // Mock the API call to fail - mockAxios.get.mockImplementationOnce(() => Promise.reject(new Error('API error'))); - - // Execute the task function - await (global as any).taskFunction({ adapter: mockAdapter }); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith( - ExtractorEventType.ExtractionExternalSyncUnitsError, - { - error: { - message: 'API error' - } - } - ); - }); - - it('should emit an error event on timeout', async () => { - // Execute the onTimeout function - await (global as any).onTimeoutFunction({ adapter: mockAdapter }); - - // Verify the emit call - expect(mockEmit).toHaveBeenCalledWith( - ExtractorEventType.ExtractionExternalSyncUnitsError, - { - error: { - message: 'External sync units extraction timed out' - } - } - ); - }); -}); \ No newline at end of file diff --git 
a/build/src/functions/extraction/worker.ts b/build/src/functions/extraction/worker.ts deleted file mode 100644 index 377fcf5..0000000 --- a/build/src/functions/extraction/worker.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { ExtractorEventType, ExternalSyncUnit, processTask } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * Worker for handling external sync units extraction - * This worker is responsible for fetching projects from Wrike and emitting them as external sync units - */ -processTask({ - task: async ({ adapter }) => { - try { - console.log('External sync units extraction worker started'); - - const event = adapter.event; - - // Extract the Wrike API key and Space ID - const apiKey = event.payload.connection_data.key; - const spaceId = event.payload.connection_data.org_id; - - if (!apiKey) { - throw new Error('Missing API key in connection_data'); - } - - if (!spaceId) { - throw new Error('Missing Space ID in connection_data'); - } - - // Define the Wrike API endpoint - const wrikeApiEndpoint = 'https://www.wrike.com/api/v4'; - - console.log(`Fetching projects from Wrike API for space ${spaceId}`); - - // Make a GET request to the Wrike API to get folders/projects - const response = await axios.get(`${wrikeApiEndpoint}/spaces/${encodeURIComponent(spaceId)}/folders`, { - headers: { - 'Authorization': `Bearer ${apiKey}` - }, - params: { - descendants: true - }, - timeout: 10000 // 10 seconds timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - throw new Error(`Failed to fetch projects with status ${response.status}`); - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - throw new Error('Invalid response format from Wrike API'); - } - - // Get the projects from the response - const projects = response.data.data; - - console.log(`Fetched ${projects.length} projects, now getting task counts`); - - // Transform the projects into external 
sync units with task counts - const externalSyncUnits: ExternalSyncUnit[] = []; - - // For each project, get the task count - for (const project of projects) { - try { - // Make a GET request to get tasks for this project to count them - const tasksResponse = await axios.get(`${wrikeApiEndpoint}/folders/${encodeURIComponent(project.id)}/tasks`, { - headers: { - 'Authorization': `Bearer ${apiKey}` - }, - params: { - descendants: true, - subTasks: true - }, - timeout: 10000 // 10 seconds timeout - }); - - // Get the task count - const taskCount = tasksResponse.data && tasksResponse.data.data ? tasksResponse.data.data.length : 0; - - // Add the project with task count to external sync units - externalSyncUnits.push({ - id: project.id, - name: project.title, - description: project.description || `Wrike project: ${project.title}`, - item_count: taskCount, - item_type: 'tasks' - }); - } catch (error) { - console.error(`Error fetching tasks for project ${project.id}:`, error); - // Still add the project, but with task count 0 - externalSyncUnits.push({ - id: project.id, - name: project.title, - description: project.description || `Wrike project: ${project.title}`, - item_count: 0, // Error fetching task count - item_type: 'tasks' - }); - } - } - - console.log(`Successfully transformed ${externalSyncUnits.length} projects into external sync units`); - - // Emit the DONE event with the external sync units - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsDone, { - external_sync_units: externalSyncUnits, - }); - - console.log('External sync units extraction completed successfully'); - } catch (error) { - console.error('Error in external sync units extraction worker:', error); - - // Emit an error event if something goes wrong - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { - error: { - message: error instanceof Error ? 
error.message : 'Unknown error in external sync units extraction', - }, - }); - } - }, - onTimeout: async ({ adapter }) => { - console.error('External sync units extraction worker timed out'); - - // Emit an error event if the worker times out - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { - error: { - message: 'External sync units extraction timed out', - }, - }); - }, -}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/attachments-extraction.test.cases.ts b/build/src/functions/extraction/workers/attachments-extraction.test.cases.ts new file mode 100644 index 0000000..183b2fb --- /dev/null +++ b/build/src/functions/extraction/workers/attachments-extraction.test.cases.ts @@ -0,0 +1,174 @@ +/** + * Test case generators for attachments-extraction worker. + * Contains reusable test case configurations for getAttachmentStream function. + */ + +import { + createMockAttachmentItem, + createMockStreamResponse, + createMockRateLimitResponse, + createMockAxiosError, + assertAxiosGetCalled, +} from './attachments-extraction.test.helpers'; + +export interface GetAttachmentStreamTestCase { + description: string; + setup: (adaas: any, mockItem?: any) => { mockItem: any; expectedResult?: any }; + assertions: (result: any, adaas: any, mockItem: any) => void; +} + +/** + * Generates test cases for successful attachment streaming + */ +export function generateSuccessTestCases(): GetAttachmentStreamTestCase[] { + return [ + { + description: 'should successfully fetch attachment stream', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + const mockStreamResponse = createMockStreamResponse(); + adaas.axiosClient.get.mockResolvedValue(mockStreamResponse); + return { mockItem, expectedResult: { httpStream: mockStreamResponse } }; + }, + assertions: (result: any, adaas: any, mockItem: any) => { + assertAxiosGetCalled(adaas.axiosClient.get, mockItem.url, 'test-api-key'); + expect(result).toEqual({ 
httpStream: expect.any(Object) }); + }, + }, + ]; +} + +/** + * Generates test cases for rate limiting scenarios + */ +export function generateRateLimitTestCases(): GetAttachmentStreamTestCase[] { + return [ + { + description: 'should handle rate limiting (429)', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + const mockStreamResponse = createMockRateLimitResponse('49'); + adaas.axiosClient.get.mockResolvedValue(mockStreamResponse); + return { mockItem, expectedResult: { delay: 49 } }; + }, + assertions: (result: any) => { + expect(result).toEqual({ delay: 49 }); + }, + }, + { + description: 'should handle rate limiting without retry-after header', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + const mockStreamResponse = createMockRateLimitResponse(); + adaas.axiosClient.get.mockResolvedValue(mockStreamResponse); + return { mockItem, expectedResult: { delay: 60 } }; + }, + assertions: (result: any) => { + expect(result).toEqual({ delay: 60 }); + }, + }, + ]; +} + +/** + * Generates test cases for error handling + */ +export function generateErrorTestCases(): GetAttachmentStreamTestCase[] { + return [ + { + description: 'should handle axios errors', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + const axiosError = createMockAxiosError(); + adaas.axiosClient.get.mockRejectedValue(axiosError); + adaas.axios.isAxiosError.mockReturnValue(true); + return { mockItem }; + }, + assertions: (result: any, adaas: any) => { + expect(result).toEqual({ + error: { + message: 'Error while fetching attachment ATTACH123 from URL.', + }, + }); + expect(adaas.serializeAxiosError).toHaveBeenCalled(); + }, + }, + { + description: 'should handle non-axios errors', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + const genericError = new Error('Generic error'); + adaas.axiosClient.get.mockRejectedValue(genericError); + adaas.axios.isAxiosError.mockReturnValue(false); + return { 
mockItem }; + }, + assertions: (result: any) => { + expect(result).toEqual({ + error: { + message: 'Error while fetching attachment ATTACH123 from URL.', + }, + }); + }, + }, + ]; +} + +/** + * Generates test cases for HTTP headers validation + */ +export function generateHeaderTestCases(): GetAttachmentStreamTestCase[] { + return [ + { + description: 'should use correct authentication header', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + adaas.axiosClient.get.mockResolvedValue(createMockStreamResponse()); + return { mockItem }; + }, + assertions: (result: any, adaas: any) => { + expect(adaas.axiosClient.get).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + 'Authorization': 'Bearer test-api-key', + }), + }) + ); + }, + }, + { + description: 'should set Accept-Encoding to identity', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + adaas.axiosClient.get.mockResolvedValue(createMockStreamResponse()); + return { mockItem }; + }, + assertions: (result: any, adaas: any) => { + expect(adaas.axiosClient.get).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + 'Accept-Encoding': 'identity', + }), + }) + ); + }, + }, + { + description: 'should set responseType to stream', + setup: (adaas: any) => { + const mockItem = createMockAttachmentItem(); + adaas.axiosClient.get.mockResolvedValue(createMockStreamResponse()); + return { mockItem }; + }, + assertions: (result: any, adaas: any) => { + expect(adaas.axiosClient.get).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + responseType: 'stream', + }) + ); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/attachments-extraction.test.helpers.ts b/build/src/functions/extraction/workers/attachments-extraction.test.helpers.ts new file mode 100644 index 0000000..79dfbdd --- /dev/null +++ 
b/build/src/functions/extraction/workers/attachments-extraction.test.helpers.ts @@ -0,0 +1,121 @@ +/** + * Test helpers for attachments-extraction worker tests. + * Contains mock data, factory functions, and assertion utilities. + */ + +/** + * Creates a mock attachment item for testing + */ +export function createMockAttachmentItem(overrides?: { + id?: string; + url?: string; +}): any { + return { + id: overrides?.id || 'ATTACH123', + url: overrides?.url || 'https://www.wrike.com/attachments/ATTACH123/download/file.pdf', + file_name: 'file.pdf', + parent_id: 'TASK123', + author_id: 'USER1', + }; +} + +/** + * Creates a mock event for attachment streaming + */ +export function createMockEvent(apiKey: string = 'test-api-key'): any { + return { + payload: { + connection_data: { + key: apiKey, + }, + }, + }; +} + +/** + * Creates a mock successful HTTP stream response + */ +export function createMockStreamResponse(status: number = 200, headers: Record = {}): any { + return { + status, + headers, + data: 'stream-data', + }; +} + +/** + * Creates a mock rate limit response + */ +export function createMockRateLimitResponse(retryAfter?: string): any { + return { + status: 429, + headers: retryAfter ? 
{ 'retry-after': retryAfter } : {}, + }; +} + +/** + * Creates a mock axios error + */ +export function createMockAxiosError(status: number = 500): any { + return { + isAxiosError: true, + message: 'Network error', + response: { + status, + }, + }; +} + +/** + * Asserts that the adapter was called with correct parameters + */ +export function assertAdapterStreamAttachmentsCalled(mockAdapter: any): void { + expect(mockAdapter.streamAttachments).toHaveBeenCalledWith({ + stream: expect.any(Function), + }); +} + +/** + * Asserts that the done event was emitted + */ +export function assertDoneEventEmitted(mockAdapter: any, eventType: string): void { + expect(mockAdapter.emit).toHaveBeenCalledWith(eventType); +} + +/** + * Asserts that the delay event was emitted with correct delay + */ +export function assertDelayEventEmitted(mockAdapter: any, eventType: string, delay: number): void { + expect(mockAdapter.emit).toHaveBeenCalledWith(eventType, { delay }); +} + +/** + * Asserts that the error event was emitted + */ +export function assertErrorEventEmitted(mockAdapter: any, eventType: string): void { + expect(mockAdapter.emit).toHaveBeenCalledWith( + eventType, + { + error: { + message: expect.any(String), + }, + } + ); +} + +/** + * Asserts that axiosClient.get was called with correct parameters + */ +export function assertAxiosGetCalled( + axiosClientGet: jest.Mock, + url: string, + apiKey: string +): void { + expect(axiosClientGet).toHaveBeenCalledWith(url, { + responseType: 'stream', + headers: { + 'Accept-Encoding': 'identity', + 'Authorization': `Bearer ${apiKey}`, + }, + }); +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/attachments-extraction.test.ts b/build/src/functions/extraction/workers/attachments-extraction.test.ts new file mode 100644 index 0000000..9a97895 --- /dev/null +++ b/build/src/functions/extraction/workers/attachments-extraction.test.ts @@ -0,0 +1,214 @@ +import { ExtractorEventType } from '@devrev/ts-adaas'; 
+import { + createMockAttachmentItem, + createMockEvent, + assertAdapterStreamAttachmentsCalled, + assertDoneEventEmitted, + assertDelayEventEmitted, + assertErrorEventEmitted, +} from './attachments-extraction.test.helpers'; +import { + generateSuccessTestCases, + generateRateLimitTestCases, + generateErrorTestCases, + generateHeaderTestCases, +} from './attachments-extraction.test.cases'; + +// Mock the dependencies +let mockTask: any; +let mockOnTimeout: any; +let mockGetAttachmentStream: any; + +jest.mock('@devrev/ts-adaas', () => { + const actual = jest.requireActual('@devrev/ts-adaas'); + return { + ...actual, + processTask: (config: any) => { + mockTask = config.task; + mockOnTimeout = config.onTimeout; + }, + axiosClient: { + get: jest.fn(), + }, + axios: { + isAxiosError: jest.fn(), + }, + serializeAxiosError: jest.fn((error) => JSON.stringify(error)), + ExtractorEventType: { + ExtractionAttachmentsDone: 'EXTRACTION_ATTACHMENTS_DONE', + ExtractionAttachmentsError: 'EXTRACTION_ATTACHMENTS_ERROR', + ExtractionAttachmentsDelay: 'EXTRACTION_ATTACHMENTS_DELAY', + ExtractionAttachmentsProgress: 'EXTRACTION_ATTACHMENTS_PROGRESS', + }, + }; +}); + +// Import after mocking +const adaas = require('@devrev/ts-adaas'); +require('./attachments-extraction'); + +describe('attachments-extraction worker', () => { + let mockAdapter: any; + + beforeEach(() => { + jest.clearAllMocks(); + + mockAdapter = { + event: createMockEvent(), + streamAttachments: jest.fn(), + emit: jest.fn().mockResolvedValue(undefined), + }; + }); + + describe('task execution', () => { + it('should successfully stream attachments and emit ExtractionAttachmentsDone', async () => { + mockAdapter.streamAttachments.mockResolvedValue(undefined); + + await mockTask({ adapter: mockAdapter }); + + assertAdapterStreamAttachmentsCalled(mockAdapter); + assertDoneEventEmitted(mockAdapter, ExtractorEventType.ExtractionAttachmentsDone); + }); + + it('should emit delay event when rate limit is exceeded', async () => 
{ + mockAdapter.streamAttachments.mockResolvedValue({ + delay: 60, + }); + + await mockTask({ adapter: mockAdapter }); + + assertDelayEventEmitted(mockAdapter, ExtractorEventType.ExtractionAttachmentsDelay, 60); + }); + + it('should emit error event when streaming fails', async () => { + mockAdapter.streamAttachments.mockResolvedValue({ + error: { + message: 'Failed to stream attachment', + }, + }); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionAttachmentsError, + { + error: { + message: 'Failed to stream attachment', + }, + } + ); + }); + + it('should handle exceptions during task execution', async () => { + const error = new Error('Unexpected error'); + mockAdapter.streamAttachments.mockRejectedValue(error); + + // Should not throw + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.streamAttachments).toHaveBeenCalled(); + }); + }); + + describe('getAttachmentStream function', () => { + let capturedStreamFunction: any; + + beforeEach(async () => { + // Mock streamAttachments to capture the stream function + mockAdapter.streamAttachments.mockImplementation(({ stream }: any) => { + capturedStreamFunction = stream; + return undefined; + }); + + // Initialize the worker by calling the task + await mockTask({ adapter: mockAdapter }); + + // Verify the stream function was captured + expect(capturedStreamFunction).toBeDefined(); + }); + + // Success test cases + const successTestCases = generateSuccessTestCases(); + successTestCases.forEach(({ description, setup, assertions }) => { + it(description, async () => { + const { mockItem } = setup(adaas); + const result = await capturedStreamFunction({ + item: mockItem, + event: mockAdapter.event, + }); + assertions(result, adaas, mockItem); + }); + }); + + // Rate limit test cases + const rateLimitTestCases = generateRateLimitTestCases(); + rateLimitTestCases.forEach(({ description, setup, assertions }) => { + it(description, 
async () => { + const { mockItem } = setup(adaas); + const result = await capturedStreamFunction({ + item: mockItem, + event: mockAdapter.event, + }); + assertions(result, adaas, mockItem); + }); + }); + + // Error test cases + const errorTestCases = generateErrorTestCases(); + errorTestCases.forEach(({ description, setup, assertions }) => { + it(description, async () => { + const { mockItem } = setup(adaas); + const result = await capturedStreamFunction({ + item: mockItem, + event: mockAdapter.event, + }); + assertions(result, adaas, mockItem); + }); + }); + + // Header test cases + const headerTestCases = generateHeaderTestCases(); + headerTestCases.forEach(({ description, setup, assertions }) => { + it(description, async () => { + const { mockItem } = setup(adaas); + await capturedStreamFunction({ + item: mockItem, + event: mockAdapter.event, + }); + assertions(undefined, adaas, mockItem); + }); + }); + + it('should handle missing URL in attachment item', async () => { + const mockItem = createMockAttachmentItem(); + delete (mockItem as any).url; + + const result = await capturedStreamFunction({ + item: mockItem, + event: mockAdapter.event, + }); + + expect(result).toEqual({ + error: { + message: 'Error while fetching attachment ATTACH123: URL is missing from attachment metadata.', + }, + }); + }); + }); + + describe('timeout handling', () => { + it('should emit progress on timeout', async () => { + await mockOnTimeout({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionAttachmentsProgress + ); + }); + + it('should not call streamAttachments on timeout', async () => { + await mockOnTimeout({ adapter: mockAdapter }); + + expect(mockAdapter.streamAttachments).not.toHaveBeenCalled(); + }); + }); +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/attachments-extraction.ts b/build/src/functions/extraction/workers/attachments-extraction.ts new file mode 100644 index 
0000000..3c39dbb --- /dev/null +++ b/build/src/functions/extraction/workers/attachments-extraction.ts @@ -0,0 +1,106 @@ +import { + axios, + axiosClient, + ExternalSystemAttachmentStreamingParams, + ExternalSystemAttachmentStreamingResponse, + ExtractorEventType, + processTask, + serializeAxiosError, +} from '@devrev/ts-adaas'; +import { ExtractorState } from "../index"; +import { normalizeAttachment } from './normalization'; + +/** + * Fetches and streams an attachment from Wrike. + * Handles authentication, rate limiting, and errors. + * + * @param params - Attachment streaming parameters containing item and event + * @returns HTTP stream response, delay, or error + */ +const getAttachmentStream = async ({ + item, + event, +}: ExternalSystemAttachmentStreamingParams): Promise => { + const { id } = item; + + // Extract URL from the normalized attachment structure + // The item is a NormalizedAttachment with structure: { id, url, file_name, parent_id, ... } + const url = (item as any).url; + + if (!url) { + return { + error: { + message: `Error while fetching attachment ${id}: URL is missing from attachment metadata.`, + }, + }; + } + + // Extract API key for authentication + const apiKey = event.payload.connection_data.key; + + try { + // Fetch attachment with authentication + const fileStreamResponse = await axiosClient.get(url, { + responseType: 'stream', + headers: { + 'Accept-Encoding': 'identity', + 'Authorization': `Bearer ${apiKey}`, + }, + }); + + // Check if we were rate limited + if (fileStreamResponse.status === 429) { + const retryAfter = fileStreamResponse.headers['retry-after']; + const delay = retryAfter ? 
parseInt(retryAfter, 10) : 60; + return { + delay: delay, + }; + } + + // Return the HTTP stream + return { httpStream: fileStreamResponse }; + } catch (error) { + // Error handling logic + if (axios.isAxiosError(error)) { + console.warn(`Error while fetching attachment ${id} from URL.`, serializeAxiosError(error)); + console.warn('Failed attachment metadata', item); + } else { + console.warn(`Error while fetching attachment ${id} from URL.`, error); + console.warn('Failed attachment metadata', item); + } + + return { + error: { + message: 'Error while fetching attachment ' + id + ' from URL.', + }, + }; + } +}; + +processTask({ + task: async ({ adapter }) => { + try { + const response = await adapter.streamAttachments({ + stream: getAttachmentStream, + }); + + // Handle different response scenarios + if (response?.delay) { + await adapter.emit(ExtractorEventType.ExtractionAttachmentsDelay, { + delay: response.delay, + }); + } else if (response?.error) { + await adapter.emit(ExtractorEventType.ExtractionAttachmentsError, { + error: response.error, + }); + } else { + await adapter.emit(ExtractorEventType.ExtractionAttachmentsDone); + } + } catch (error) { + console.error('An error occured while processing a task.', error); + } + }, + onTimeout: async ({ adapter }) => { + await adapter.emit(ExtractorEventType.ExtractionAttachmentsProgress); + }, +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.assertions.ts b/build/src/functions/extraction/workers/data-extraction.test.assertions.ts new file mode 100644 index 0000000..92c4993 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.assertions.ts @@ -0,0 +1,172 @@ +/** + * Assertion helpers for data-extraction worker tests. + * Contains reusable assertion patterns for test validation. 
+ */ + +/** + * Asserts successful extraction of users, tasks, and comments + */ +export function assertSuccessfulExtraction( + mockAdapter: any, + mockGetContacts: jest.Mock, + mockGetTasks: jest.Mock +) { + expect(mockGetContacts).toHaveBeenCalledTimes(1); + expect(mockGetTasks).toHaveBeenCalledTimes(1); + expect(mockAdapter.initializeRepos).toHaveBeenCalledTimes(1); + expect(mockAdapter.state.users.completed).toBe(true); + expect(mockAdapter.state.tasks.completed).toBe(true); + expect(mockAdapter.state.comments.completed).toBe(true); + expect(mockAdapter.state.attachments.completed).toBe(true); + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); +} + +/** + * Asserts error emission with expected message + */ +export function assertErrorEmission( + mockAdapter: any, + expectedMessage: string +) { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_DATA_ERROR', + { + error: { + message: expectedMessage, + }, + } + ); +} + +/** + * Asserts tasks extraction was called with correct parameters + */ +export function assertTasksExtraction( + mockGetTasks: jest.Mock, + folderId: string, + expectedOptions: any +) { + expect(mockGetTasks).toHaveBeenCalledWith(folderId, expectedOptions); +} + +/** + * Asserts attachments extraction was called correctly + */ +export function assertAttachmentsExtraction( + mockGetTaskAttachments: jest.Mock, + taskId: string +) { + expect(mockGetTaskAttachments).toHaveBeenCalledWith(taskId); +} + +/** + * Asserts comments extraction was called correctly + */ +export function assertCommentsExtraction( + mockGetTaskComments: jest.Mock, + taskId: string +) { + expect(mockGetTaskComments).toHaveBeenCalledWith(taskId); +} + +/** + * Asserts that users extraction was skipped + */ +export function assertUsersExtractionSkipped( + mockGetContacts: jest.Mock, + mockRepo: any +) { + expect(mockGetContacts).not.toHaveBeenCalled(); + expect(mockRepo.push).not.toHaveBeenCalled(); +} + +/** + * Asserts that tasks extraction 
was skipped + */ +export function assertTasksExtractionSkipped( + mockGetTasks: jest.Mock, + mockGetTaskAttachments: jest.Mock, + mockGetTaskComments: jest.Mock, + mockTasksRepo: any, + mockAttachmentsRepo: any, + mockCommentsRepo: any +) { + expect(mockGetTasks).not.toHaveBeenCalled(); + expect(mockGetTaskAttachments).not.toHaveBeenCalled(); + expect(mockGetTaskComments).not.toHaveBeenCalled(); + expect(mockTasksRepo.push).not.toHaveBeenCalled(); + expect(mockAttachmentsRepo.push).not.toHaveBeenCalled(); + expect(mockCommentsRepo.push).not.toHaveBeenCalled(); +} + +/** + * Asserts state completion flags + */ +export function assertStateCompletion( + mockAdapter: any, + usersCompleted: boolean, + tasksCompleted: boolean, + attachmentsCompleted: boolean, + commentsCompleted: boolean +) { + expect(mockAdapter.state.users.completed).toBe(usersCompleted); + expect(mockAdapter.state.tasks.completed).toBe(tasksCompleted); + expect(mockAdapter.state.attachments.completed).toBe(attachmentsCompleted); + expect(mockAdapter.state.comments.completed).toBe(commentsCompleted); +} + +/** + * Asserts pagination state + */ +export function assertPaginationState( + mockAdapter: any, + nextPageToken: string | undefined, + completed: boolean +) { + expect(mockAdapter.state.tasks.nextPageToken).toBe(nextPageToken); + expect(mockAdapter.state.tasks.completed).toBe(completed); +} + +/** + * Asserts repository operations + */ +export function assertRepoOperations( + mockRepo: any, + pushCallCount: number, + uploadCallCount: number +) { + expect(mockRepo.push).toHaveBeenCalledTimes(pushCallCount); + expect(mockRepo.upload).toHaveBeenCalledTimes(uploadCallCount); +} + +/** + * Asserts attachment extraction for tasks with hasAttachments flag + */ +export function assertAttachmentExtractionForTasks( + mockGetTaskAttachments: jest.Mock, + mockTasks: any[], + mockAttachmentsRepo: any +) { + expect(mockGetTaskAttachments).toHaveBeenCalledTimes(mockTasks.length); + mockTasks.forEach(task => { + 
expect(mockGetTaskAttachments).toHaveBeenCalledWith(task.id); + }); + expect(mockAttachmentsRepo.push).toHaveBeenCalled(); + expect(mockAttachmentsRepo.upload).toHaveBeenCalledTimes(1); +} + +/** + * Asserts comment extraction for all tasks + */ +export function assertCommentExtractionForTasks( + mockGetTaskComments: jest.Mock, + mockTasks: any[], + mockCommentsRepo: any +) { + expect(mockGetTaskComments).toHaveBeenCalledTimes(mockTasks.length); + mockTasks.forEach(task => { + expect(mockGetTaskComments).toHaveBeenCalledWith(task.id); + }); + expect(mockCommentsRepo.push).toHaveBeenCalled(); + expect(mockCommentsRepo.upload).toHaveBeenCalledTimes(1); +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.case-generators-attachments-comments.ts b/build/src/functions/extraction/workers/data-extraction.test.case-generators-attachments-comments.ts new file mode 100644 index 0000000..d37379e --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.case-generators-attachments-comments.ts @@ -0,0 +1,171 @@ +/** + * Test case generators for attachments and comments extraction in data-extraction worker. + * Contains test configurations for handling attachments and comments extraction from tasks. 
+ */ + +import { + setupPagination, +} from './data-extraction.test.setup'; +import { + assertAttachmentExtractionForTasks, + assertCommentExtractionForTasks, + assertStateCompletion, +} from './data-extraction.test.assertions'; + +/** + * Generates test cases for attachments extraction + */ +export function generateAttachmentsExtractionTestCases() { + return [ + { + description: 'should extract attachments from tasks with hasAttachments flag', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + const tasksWithAttachments = mockTasks.map(t => ({ ...t, hasAttachments: true })); + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: tasksWithAttachments, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: mockAttachments, + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockTasks: any[], mockAttachments: any[]) => { + assertAttachmentExtractionForTasks(mockGetTaskAttachments, mockTasks, mockAttachmentsRepo); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should skip attachment extraction for tasks without hasAttachments flag', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + const tasksWithoutAttachments = 
mockTasks.map(t => ({ ...t, hasAttachments: false })); + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: tasksWithoutAttachments, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockGetTaskAttachments).not.toHaveBeenCalled(); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should continue extraction if attachment fetch fails for one task', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + const tasksWithAttachments = mockTasks.map(t => ({ ...t, hasAttachments: true })); + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: tasksWithAttachments, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments + .mockResolvedValueOnce({ status_code: 401, message: 'Auth failed' }) + .mockResolvedValue({ status_code: 200, data: mockAttachments }); + mockGetTaskComments.mockResolvedValue({ status_code: 200, data: mockComments }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockTasks: any[]) => { + expect(mockGetTaskAttachments).toHaveBeenCalledTimes(mockTasks.length); + expect(mockAttachmentsRepo.push).toHaveBeenCalled(); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + ]; +} + +/** + * Generates test 
cases for comments extraction + */ +export function generateCommentsExtractionTestCases() { + return [ + { + description: 'should extract comments from all tasks', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: mockTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: [], + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockTasks: any[], mockComments: any[]) => { + assertCommentExtractionForTasks(mockGetTaskComments, mockTasks, mockCommentsRepo); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should continue extraction if comment fetch fails for one task', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + // Create multiple tasks for this test - need at least 2 tasks + const multipleTasks = mockTasks.length >= 2 ? 
mockTasks : [ + mockTasks[0], + { ...mockTasks[0], id: 'TASK2', title: 'Task 2' } + ]; + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: multipleTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: [], + }); + mockGetTaskComments + .mockResolvedValueOnce({ status_code: 401, message: 'Auth failed' }) + .mockResolvedValue({ status_code: 200, data: mockComments }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockTasks: any[]) => { + expect(mockGetTaskComments).toHaveBeenCalledTimes(2); + expect(mockCommentsRepo.push).toHaveBeenCalled(); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should skip comments extraction if already completed', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + mockAdapter.state.comments.completed = true; + mockGetTasks.mockResolvedValue({ + status_code: 200, + data: mockTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: [], + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockGetTaskComments).not.toHaveBeenCalled(); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + ]; +} \ No newline at end of file diff --git 
a/build/src/functions/extraction/workers/data-extraction.test.case-generators-incremental.ts b/build/src/functions/extraction/workers/data-extraction.test.case-generators-incremental.ts new file mode 100644 index 0000000..9830375 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.case-generators-incremental.ts @@ -0,0 +1,48 @@ +/** + * Test case generators for incremental sync scenarios in data-extraction worker. + * Contains test configurations for handling incremental data extraction. + */ + +import { + setupIncrementalSync, +} from './data-extraction.test.setup'; +import { + assertTasksExtraction, + assertStateCompletion, +} from './data-extraction.test.assertions'; + +/** + * Generates test cases for incremental sync + */ +export function generateIncrementalSyncTestCases() { + return [ + { + description: 'should handle incremental sync with modifiedSince', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + setupIncrementalSync(mockAdapter, mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, mockTasks, mockAttachments, mockComments); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertTasksExtraction(mockGetTasks, 'FOLDER1', { + pageSize: 100, + nextPageToken: undefined, + updatedDate: '2025-01-01T00:00:00Z', + }); + expect(mockAdapter.state.tasks.modifiedSince).toBeDefined(); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should use state modifiedSince if available', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, 
mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + setupIncrementalSync(mockAdapter, mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, mockTasks, mockAttachments, mockComments, '2025-01-01T00:00:00Z', '2025-01-15T00:00:00Z'); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertTasksExtraction(mockGetTasks, 'FOLDER1', { + pageSize: 100, + nextPageToken: undefined, + updatedDate: '2025-01-15T00:00:00Z', + }); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.case-generators-rate-limit.ts b/build/src/functions/extraction/workers/data-extraction.test.case-generators-rate-limit.ts new file mode 100644 index 0000000..5b433b4 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.case-generators-rate-limit.ts @@ -0,0 +1,97 @@ +/** + * Test case generators for rate limiting and timeout scenarios in data-extraction worker. + * Contains test configurations for handling API rate limits and function timeouts. 
+ */ + +import { WrikeApiError } from '../../../core/wrike-error-handler'; +import { + assertErrorEmission, + assertStateCompletion, +} from './data-extraction.test.assertions'; + +/** + * Generates test cases for rate limiting scenarios + */ +export function generateRateLimitingTestCases() { + return [ + { + description: 'should emit delay event when rate limit is exceeded (429)', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + const rateLimitError = new WrikeApiError('Rate limit exceeded', 429, 60); + mockGetContacts.mockRejectedValue(rateLimitError); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_DATA_DELAY', + { + delay: 60, + } + ); + }, + }, + { + description: 'should emit delay event when rate limit is exceeded during tasks extraction', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + const rateLimitError = new WrikeApiError('Rate limit exceeded', 429, 45); + mockGetTasks.mockRejectedValue(rateLimitError); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_DATA_DELAY', + { + delay: 45, + } + ); + }, + }, + { + description: 'should handle rate limiting with custom delay value', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + const rateLimitError = new WrikeApiError('Rate limit exceeded', 429, 120); + mockGetContacts.mockRejectedValue(rateLimitError); + 
}, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_DATA_DELAY', + { + delay: 120, + } + ); + }, + }, + ]; +} + +/** + * Generates test cases for timeout handling + */ +export function generateTimeoutTestCases() { + return [ + { + description: 'should emit progress on timeout', + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_PROGRESS'); + }, + }, + { + description: 'should not modify state on timeout', + setup: (mockAdapter: any) => { + // Store original state for comparison + return { ...mockAdapter.state }; + }, + assertions: (mockAdapter: any, originalState: any) => { + expect(mockAdapter.state).toEqual(originalState); + }, + }, + { + description: 'should not call any API methods on timeout', + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockGetContacts).not.toHaveBeenCalled(); + expect(mockGetTasks).not.toHaveBeenCalled(); + expect(mockGetTaskAttachments).not.toHaveBeenCalled(); + expect(mockGetTaskComments).not.toHaveBeenCalled(); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.case-generators.ts b/build/src/functions/extraction/workers/data-extraction.test.case-generators.ts new file mode 100644 index 0000000..ef6c83a --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.case-generators.ts @@ -0,0 +1,170 @@ +/** + * Additional test case generators for data-extraction worker. + * Contains complex test case configurations for tasks and pagination. 
+ */ + +import { + setupTasksApiError, + setupPagination, +} from './data-extraction.test.setup'; +import { + assertErrorEmission, + assertTasksExtraction, + assertTasksExtractionSkipped, + assertStateCompletion, + assertPaginationState, + assertRepoOperations, +} from './data-extraction.test.assertions'; + +/** + * Generates test cases for tasks extraction + */ +export function generateTasksExtractionTestCases() { + return [ + { + description: 'should successfully extract tasks, attachments, and comments after users and emit ExtractionDataDone', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + // Ensure tasks have attachments flag set + const tasksWithAttachments = mockTasks.map(t => ({ ...t, hasAttachments: true })); + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: tasksWithAttachments, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ status_code: 200, data: mockAttachments }); + mockGetTaskComments.mockResolvedValue({ status_code: 200, data: mockComments }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockGetTasks).toHaveBeenCalledTimes(1); + assertTasksExtraction(mockGetTasks, 'FOLDER1', { + pageSize: 100, + nextPageToken: undefined, + updatedDate: undefined, + }); + assertRepoOperations(mockTasksRepo, 1, 1); + assertRepoOperations(mockAttachmentsRepo, 1, 1); + assertRepoOperations(mockCommentsRepo, 1, 1); + assertPaginationState(mockAdapter, undefined, true); + assertStateCompletion(mockAdapter, true, true, true, true); + 
expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); + }, + }, + { + description: 'should skip tasks extraction if already completed', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + mockAdapter.state.tasks.completed = true; + mockAdapter.state.attachments.completed = true; + mockAdapter.state.comments.completed = true; + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertTasksExtractionSkipped(mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, mockTasksRepo, mockAttachmentsRepo, mockCommentsRepo); + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); + }, + }, + { + description: 'should handle empty tasks list', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + setupTasksApiError(mockGetTasks, 200, 'Success'); + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: [], + nextPageToken: undefined, + hasMore: false, + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockTasksRepo.push).toHaveBeenCalledWith([]); + assertStateCompletion(mockAdapter, true, true, true, true); + 
expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); + }, + }, + { + description: 'should emit error when tasks API returns non-200 status', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + setupTasksApiError(mockGetTasks, 401, 'Authentication failed'); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertErrorEmission(mockAdapter, 'Failed to fetch tasks: Authentication failed'); + assertStateCompletion(mockAdapter, true, false, false, false); + }, + }, + { + description: 'should emit error when tasks API returns no data', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: undefined, + }); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertErrorEmission(mockAdapter, 'Failed to fetch tasks: Success'); + }, + }, + { + description: 'should emit error when folder ID is missing', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) 
=> { + mockAdapter.state.users.completed = true; + delete mockAdapter.event.payload.event_context.external_sync_unit_id; + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertErrorEmission(mockAdapter, 'Missing external_sync_unit_id (folder ID) for tasks extraction'); + }, + }, + ]; +} + +/** + * Generates test cases for pagination + */ +export function generatePaginationTestCases() { + return [ + { + description: 'should handle pagination for tasks extraction', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any, firstPageTasks?: any[], secondPageTasks?: any[]) => { + mockAdapter.state.users.completed = true; + setupPagination(mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, firstPageTasks!, secondPageTasks!, mockComments); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, firstPageTasks?: any[], secondPageTasks?: any[]) => { + expect(mockGetTasks).toHaveBeenCalledTimes(2); + assertTasksExtraction(mockGetTasks, 'FOLDER1', { + pageSize: 100, + nextPageToken: undefined, + updatedDate: undefined, + }); + expect(mockGetTasks).toHaveBeenNthCalledWith(2, 'FOLDER1', { + pageSize: 100, + nextPageToken: 'PAGE2_TOKEN', + updatedDate: undefined, + }); + expect(mockTasksRepo.push).toHaveBeenCalledTimes(2); + expect(mockTasksRepo.push).toHaveBeenNthCalledWith(1, firstPageTasks); + expect(mockTasksRepo.push).toHaveBeenNthCalledWith(2, secondPageTasks); + assertPaginationState(mockAdapter, undefined, true); + 
assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should handle pagination with exact page size boundary', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any, firstPageTasks?: any[], secondPageTasks?: any[]) => { + mockAdapter.state.users.completed = true; + setupPagination(mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, firstPageTasks!, secondPageTasks!, mockComments); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, firstPageTasks?: any[], secondPageTasks?: any[]) => { + assertPaginationState(mockAdapter, undefined, true); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.cases.ts b/build/src/functions/extraction/workers/data-extraction.test.cases.ts new file mode 100644 index 0000000..bda3e46 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.cases.ts @@ -0,0 +1,137 @@ +/** + * Test case generators for data-extraction worker. + * Contains reusable test case configurations and generators. 
+ */ + +import { + setupSuccessfulExtraction, + setupApiError, +} from './data-extraction.test.setup'; +import { + assertSuccessfulExtraction, + assertErrorEmission, + assertStateCompletion, +} from './data-extraction.test.assertions'; + +/** + * Generates test cases for successful data extraction + */ +export function generateSuccessTestCases() { + return [ + { + description: 'should successfully extract users, tasks, attachments, and comments and emit ExtractionDataDone', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + setupSuccessfulExtraction(mockGetContacts, mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, mockUsers, mockTasks, mockAttachments, mockComments); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + assertSuccessfulExtraction(mockAdapter, mockGetContacts, mockGetTasks); + assertStateCompletion(mockAdapter, true, true, true, true); + }, + }, + { + description: 'should skip users extraction if already completed', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockAdapter.state.users.completed = true; + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: mockTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ status_code: 200, data: [] }); + mockGetTaskComments.mockResolvedValue({ status_code: 200, data: mockComments }); + }, + assertions: (mockAdapter: any, mockRepo: any, 
mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockGetContacts).not.toHaveBeenCalled(); + expect(mockRepo.push).not.toHaveBeenCalled(); + expect(mockAdapter.initializeRepos).toHaveBeenCalledTimes(1); + expect(mockTasksRepo.push).toHaveBeenCalledTimes(1); + expect(mockTasksRepo.upload).toHaveBeenCalledTimes(1); + assertStateCompletion(mockAdapter, true, true, true, true); + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); + }, + }, + { + description: 'should handle empty users list', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + setupSuccessfulExtraction(mockGetContacts, mockGetTasks, mockGetTaskAttachments, mockGetTaskComments, [], [], [], []); + }, + assertions: (mockAdapter: any, mockRepo: any, mockTasksRepo: any, mockAttachmentsRepo: any, mockCommentsRepo: any, mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock) => { + expect(mockRepo.push).toHaveBeenCalledWith([]); + expect(mockTasksRepo.push).toHaveBeenCalledWith([]); + assertStateCompletion(mockAdapter, true, true, true, true); + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_DATA_DONE'); + }, + }, + ]; +} + +/** + * Generates test cases for error handling + */ +export function generateErrorTestCases() { + return [ + { + description: 'should emit error when API returns non-200 status', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + setupApiError(mockGetContacts, 401, 'Authentication 
failed'); + }, + assertions: (mockAdapter: any) => { + assertErrorEmission(mockAdapter, 'Failed to fetch users: Authentication failed'); + assertStateCompletion(mockAdapter, false, false, false, false); + }, + }, + { + description: 'should emit error when API returns no data', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockGetContacts.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: undefined, + }); + }, + assertions: (mockAdapter: any) => { + assertErrorEmission(mockAdapter, 'Failed to fetch users: Success'); + }, + }, + { + description: 'should handle network errors', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + const networkError = new Error('Network error: Unable to reach Wrike API'); + mockGetContacts.mockRejectedValue(networkError); + }, + assertions: (mockAdapter: any) => { + assertErrorEmission(mockAdapter, 'Network error: Unable to reach Wrike API'); + }, + }, + { + description: 'should handle non-Error exceptions', + setup: (mockGetContacts: jest.Mock, mockGetTasks: jest.Mock, mockGetTaskAttachments: jest.Mock, mockGetTaskComments: jest.Mock, mockUsers: any[], mockTasks: any[], mockAttachments: any[], mockComments: any[], mockAdapter: any) => { + mockGetContacts.mockRejectedValue('String error'); + }, + assertions: (mockAdapter: any) => { + assertErrorEmission(mockAdapter, 'Failed to extract data'); + }, + }, + ]; +} + +// Re-export generators from the case-generators files +export { + generateTasksExtractionTestCases, + generatePaginationTestCases, +} from './data-extraction.test.case-generators'; + +export { 
generateAttachmentsExtractionTestCases, generateCommentsExtractionTestCases } from './data-extraction.test.case-generators-attachments-comments'; + +export { + generateRateLimitingTestCases, + generateTimeoutTestCases, +} from './data-extraction.test.case-generators-rate-limit'; + +export { + generateIncrementalSyncTestCases, +} from './data-extraction.test.case-generators-incremental'; \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.helpers.ts b/build/src/functions/extraction/workers/data-extraction.test.helpers.ts new file mode 100644 index 0000000..d9edb01 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.helpers.ts @@ -0,0 +1,222 @@ +import { WrikeContact, WrikeTask, WrikeAttachment, WrikeComment } from '../../../core/wrike-types'; + +/** + * Test helpers for data-extraction worker tests. + * Contains mock data, factory functions, and utilities. + */ + +/** + * Creates mock user data for testing + */ +export function createMockUsers(): WrikeContact[] { + return [ + { + id: 'USER1', + firstName: 'John', + lastName: 'Doe', + type: 'Person', + deleted: false, + primaryEmail: 'john.doe@example.com', + title: 'Developer', + }, + { + id: 'USER2', + firstName: 'Jane', + lastName: 'Smith', + type: 'Person', + deleted: false, + primaryEmail: 'jane.smith@example.com', + title: 'Manager', + }, + ]; +} + +/** + * Creates mock task data for testing + */ +export function createMockTasks(): WrikeTask[] { + return [ + { + id: 'TASK1', + accountId: 'ACCOUNT1', + title: 'Test Task 1', + description: 'Description 1', + briefDescription: 'Brief 1', + parentIds: ['FOLDER1'], + superParentIds: [], + sharedIds: [], + responsibleIds: ['USER1'], + status: 'Active', + importance: 'Normal', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-02T15:30:00Z', + scope: 'WsTask', + authorIds: ['USER1'], + hasAttachments: false, + permalink: 'https://www.wrike.com/open.htm?id=1', + priority: '02', + 
followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], + dates: { + type: 'Planned', + duration: 86400000, + start: '2025-01-01', + due: '2025-01-02', + }, + }, + ]; +} + +/** + * Creates mock attachment data for testing + */ +export function createMockAttachments(): WrikeAttachment[] { + return [ + { + id: 'ATTACH1', + authorId: 'USER1', + name: 'document.pdf', + createdDate: '2025-01-01T10:00:00Z', + version: '1', + size: 1024, + type: 'application/pdf', + url: 'https://www.wrike.com/attachments/ATTACH1/download/document.pdf', + taskId: 'TASK1', + }, + ]; +} + +/** + * Creates mock comment data for testing + */ +export function createMockComments(): WrikeComment[] { + return [ + { + id: 'COMMENT1', + authorId: 'USER1', + text: 'This is a test comment', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-01T12:00:00Z', + taskId: 'TASK1', + }, + ]; +} + +/** + * Creates a batch of mock tasks for pagination testing + */ +export function createMockTaskBatch(startIndex: number, count: number): WrikeTask[] { + const baseTask = createMockTasks()[0]; + return Array(count).fill(null).map((_, i) => ({ + ...baseTask, + id: `TASK${startIndex + i}`, + title: `Task ${startIndex + i}`, + })); +} + +/** + * Creates a batch of mock tasks with attachments for testing + */ +export function createMockTasksWithAttachments(count: number): WrikeTask[] { + const baseTask = createMockTasks()[0]; + return Array(count).fill(null).map((_, i) => ({ + ...baseTask, + id: `TASK${i}`, + title: `Task ${i}`, + hasAttachments: true, + })); +} + +/** + * Creates a mock repository for testing + */ +export function createMockRepo() { + return { + push: jest.fn().mockResolvedValue(true), + upload: jest.fn().mockResolvedValue(undefined), + }; +} + +/** + * Creates a mock adapter for testing + */ +export function createMockAdapter( + apiKey: string = 'test-api-key', + orgId: string = 'SPACE123', + folderId: string 
= 'FOLDER1', + mode: string = 'INITIAL' +) { + return { + event: { + payload: { + connection_data: { + key: apiKey, + org_id: orgId, + }, + event_context: { + external_sync_unit_id: folderId, + mode: mode, + }, + }, + }, + state: { + users: { completed: false }, + tasks: { completed: false }, + comments: { completed: false }, + attachments: { completed: false }, + }, + initializeRepos: jest.fn(), + getRepo: jest.fn(), + emit: jest.fn().mockResolvedValue(undefined), + }; +} + +/** + * Creates a successful API response + */ +export function createSuccessResponse(data: any, nextPageToken?: string, hasMore?: boolean) { + return { + status_code: 200, + api_delay: 0, + message: 'Success', + data, + nextPageToken, + hasMore: hasMore ?? false, + }; +} + +/** + * Creates an error API response + */ +export function createErrorResponse(statusCode: number, message: string) { + return { + status_code: statusCode, + api_delay: statusCode === 429 ? 60 : 0, + message, + data: undefined, + }; +} + +/** + * Configures mock adapter to return specific repos + */ +export function configureMockAdapterRepos( + mockAdapter: any, + mockRepo: any, + mockTasksRepo: any, + mockAttachmentsRepo?: any, + mockCommentsRepo?: any +) { + mockAdapter.getRepo.mockImplementation((itemType: string) => { + if (itemType === 'users') return mockRepo; + if (itemType === 'tasks') return mockTasksRepo; + if (itemType === 'attachments') return mockAttachmentsRepo || mockTasksRepo; + if (itemType === 'comments') return mockCommentsRepo || mockTasksRepo; + return null; + }); +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.setup.ts b/build/src/functions/extraction/workers/data-extraction.test.setup.ts new file mode 100644 index 0000000..412cf26 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.setup.ts @@ -0,0 +1,150 @@ +/** + * Setup utilities for data-extraction worker tests. 
+ * Contains reusable mock setup patterns for various test scenarios. + */ + +/** + * Common setup pattern for successful extraction + */ +export function setupSuccessfulExtraction( + mockGetContacts: jest.Mock, + mockGetTasks: jest.Mock, + mockGetTaskAttachments: jest.Mock, + mockGetTaskComments: jest.Mock, + mockUsers: any[], + mockTasks: any[], + mockAttachments: any[], + mockComments: any[] +) { + mockGetContacts.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: mockUsers, + }); + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: mockTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: mockAttachments, + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); +} + +/** + * Common setup pattern for API errors + */ +export function setupApiError( + mockGetContacts: jest.Mock, + statusCode: number, + message: string +) { + mockGetContacts.mockResolvedValue({ + status_code: statusCode, + api_delay: 0, + message, + data: undefined, + }); +} + +/** + * Common setup pattern for tasks API errors + */ +export function setupTasksApiError( + mockGetTasks: jest.Mock, + statusCode: number, + message: string +) { + mockGetTasks.mockResolvedValue({ + status_code: statusCode, + api_delay: 0, + message, + data: undefined, + }); +} + +/** + * Common setup pattern for pagination + */ +export function setupPagination( + mockGetTasks: jest.Mock, + mockGetTaskAttachments: jest.Mock, + mockGetTaskComments: jest.Mock, + firstPageTasks: any[], + secondPageTasks: any[], + mockComments: any[], + nextPageToken: string = 'PAGE2_TOKEN' +) { + mockGetTasks + .mockResolvedValueOnce({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: firstPageTasks, + nextPageToken, + hasMore: true, + }) + .mockResolvedValueOnce({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: 
secondPageTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: [], + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); +} + +/** + * Common setup pattern for incremental sync + */ +export function setupIncrementalSync( + mockAdapter: any, + mockGetTasks: jest.Mock, + mockGetTaskAttachments: jest.Mock, + mockGetTaskComments: jest.Mock, + mockTasks: any[], + mockAttachments: any[], + mockComments: any[], + extractFrom: string = '2025-01-01T00:00:00Z', + modifiedSince?: string +) { + mockAdapter.state.users.completed = true; + mockAdapter.event.payload.event_context.mode = 'INCREMENTAL'; + mockAdapter.event.payload.event_context.extract_from = extractFrom; + if (modifiedSince) { + mockAdapter.state.tasks.modifiedSince = modifiedSince; + } + mockGetTasks.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: mockTasks, + nextPageToken: undefined, + hasMore: false, + }); + mockGetTaskAttachments.mockResolvedValue({ + status_code: 200, + data: mockAttachments, + }); + mockGetTaskComments.mockResolvedValue({ + status_code: 200, + data: mockComments, + }); +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.test.suites.ts b/build/src/functions/extraction/workers/data-extraction.test.suites.ts new file mode 100644 index 0000000..99413e3 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.suites.ts @@ -0,0 +1,316 @@ +import { ExtractorEventType } from '@devrev/ts-adaas'; +import { + createMockUsers, + createMockTasks, + createMockAttachments, + createMockComments, + createMockTaskBatch, + createMockTasksWithAttachments, +} from './data-extraction.test.helpers'; +import { + generateSuccessTestCases, + generateErrorTestCases, + generateTasksExtractionTestCases, + generatePaginationTestCases, + generateIncrementalSyncTestCases, + 
generateAttachmentsExtractionTestCases, + generateCommentsExtractionTestCases, +} from './data-extraction.test.cases'; + +// Re-export rate limiting and timeout test case generators +export { generateRateLimitingTestCases, generateTimeoutTestCases } from './data-extraction.test.cases'; + +/** + * Test suite generators for data-extraction worker. + * Contains reusable test suite creation logic. + */ + +export interface TestContext { + mockAdapter: any; + mockRepo: any; + mockTasksRepo: any; + mockAttachmentsRepo: any; + mockCommentsRepo: any; + mockGetContacts: jest.Mock; + mockGetTasks: jest.Mock; + mockGetTaskAttachments: jest.Mock; + mockGetTaskComments: jest.Mock; +} + +/** + * Creates success test cases suite + */ +export function createSuccessTestSuite(getContext: () => TestContext) { + return () => { + const successTestCases = generateSuccessTestCases(); + successTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const mockUsers = createMockUsers(); + const mockTasks = createMockTasks(); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + const context = getContext(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + mockUsers, + mockTasks, + mockAttachments, + mockComments, + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments + ); + }); + }); + }; +} + +/** + * Creates error handling test cases suite + */ +export function createErrorHandlingTestSuite(getContext: () => TestContext) { + return () => { + const errorTestCases = generateErrorTestCases(); + errorTestCases.forEach((testCase) => { + 
it(testCase.description, async () => { + const context = getContext(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + [], + [], + [], + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions(context.mockAdapter); + }); + }); + }; +} + +/** + * Creates tasks extraction test cases suite + */ +export function createTasksExtractionTestSuite(getContext: () => TestContext) { + return () => { + const tasksTestCases = generateTasksExtractionTestCases(); + tasksTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const context = getContext(); + const mockTasks = createMockTasks(); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + mockTasks, + mockAttachments, + mockComments, + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments + ); + }); + }); + }; +} + +/** + * Creates pagination test cases suite + */ +export function createPaginationTestSuite(getContext: () => TestContext) { + return () => { + const paginationTestCases = generatePaginationTestCases(); + paginationTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const firstPageTasks = createMockTaskBatch(0, 100); + const secondPageTasks = createMockTaskBatch( + 100, + testCase.description.includes('exact') ? 
100 : 50 + ); + const mockComments = createMockComments(); + const context = getContext(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + [], + [], + mockComments, + context.mockAdapter, + firstPageTasks, + secondPageTasks + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + firstPageTasks, + secondPageTasks + ); + }); + }); + }; +} + +/** + * Creates incremental sync test cases suite + */ +export function createIncrementalSyncTestSuite(getContext: () => TestContext) { + return () => { + const incrementalSyncTestCases = generateIncrementalSyncTestCases(); + incrementalSyncTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const context = getContext(); + const mockTasks = createMockTasks(); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + mockTasks, + mockAttachments, + mockComments, + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments + ); + }); + }); + }; +} + +/** + * Creates attachments extraction test cases suite + */ +export function createAttachmentsExtractionTestSuite(getContext: () => TestContext) { + return () => { + const attachmentsTestCases = 
generateAttachmentsExtractionTestCases(); + attachmentsTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const context = getContext(); + const mockTasks = createMockTasksWithAttachments(2); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + mockTasks, + mockAttachments, + mockComments, + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + mockTasks, + mockAttachments + ); + }); + }); + }; +} + +/** + * Creates comments extraction test cases suite + */ +export function createCommentsExtractionTestSuite(getContext: () => TestContext) { + return () => { + const commentsTestCases = generateCommentsExtractionTestCases(); + commentsTestCases.forEach((testCase) => { + it(testCase.description, async () => { + const context = getContext(); + const mockTasks = createMockTasks(); + const mockComments = createMockComments(); + testCase.setup( + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + [], + mockTasks, + [], + mockComments, + context.mockAdapter + ); + await (global as any).mockTask({ adapter: context.mockAdapter }); + testCase.assertions( + context.mockAdapter, + context.mockRepo, + context.mockTasksRepo, + context.mockAttachmentsRepo, + context.mockCommentsRepo, + context.mockGetContacts, + context.mockGetTasks, + context.mockGetTaskAttachments, + context.mockGetTaskComments, + mockTasks, + mockComments + ); + }); + }); + }; +} \ No newline at end of file diff --git 
a/build/src/functions/extraction/workers/data-extraction.test.ts b/build/src/functions/extraction/workers/data-extraction.test.ts new file mode 100644 index 0000000..99a5ea5 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.test.ts @@ -0,0 +1,232 @@ +import { ExtractorEventType } from '@devrev/ts-adaas'; +import { WrikeClient } from '../../../core/wrike-client'; +import { WrikeApiError } from '../../../core/wrike-error-handler'; +import { + createMockUsers, + createMockTasks, + createMockAttachments, + createMockComments, + createMockRepo, + createMockAdapter, + createSuccessResponse, + configureMockAdapterRepos, +} from './data-extraction.test.helpers'; +import { + createSuccessTestSuite, + createErrorHandlingTestSuite, + createTasksExtractionTestSuite, + createPaginationTestSuite, + createIncrementalSyncTestSuite, + createAttachmentsExtractionTestSuite, + createCommentsExtractionTestSuite, + generateRateLimitingTestCases, + generateTimeoutTestCases, + TestContext, +} from './data-extraction.test.suites'; + +// Mock the WrikeClient +jest.mock('../../../core/wrike-client'); + +// Mock the processTask function +let mockTask: any; +let mockOnTimeout: any; + +jest.mock('@devrev/ts-adaas', () => ({ + ...jest.requireActual('@devrev/ts-adaas'), + processTask: (config: any) => { + mockTask = config.task; + mockOnTimeout = config.onTimeout; + }, + ExtractorEventType: { + ExtractionDataDone: 'EXTRACTION_DATA_DONE', + ExtractionDataError: 'EXTRACTION_DATA_ERROR', + ExtractionDataProgress: 'EXTRACTION_DATA_PROGRESS', + ExtractionDataDelay: 'EXTRACTION_DATA_DELAY', + }, +})); + +// Make mockTask available globally for test suites +(global as any).mockTask = mockTask; + +// Import the worker module after mocking to ensure mocks are in place +require('./data-extraction'); + +describe('data-extraction worker', () => { + let testContext: TestContext; + + beforeEach(() => { + jest.clearAllMocks(); + + const mockRepo = createMockRepo(); + const mockTasksRepo 
= createMockRepo(); + const mockAttachmentsRepo = createMockRepo(); + const mockCommentsRepo = createMockRepo(); + const mockAdapter = createMockAdapter(); + configureMockAdapterRepos(mockAdapter, mockRepo, mockTasksRepo, mockAttachmentsRepo, mockCommentsRepo); + + const mockGetContacts = jest.fn(); + const mockGetTasks = jest.fn(); + const mockGetTaskAttachments = jest.fn(); + const mockGetTaskComments = jest.fn(); + + (WrikeClient as jest.Mock).mockImplementation(() => ({ + getContacts: mockGetContacts, + getTasks: mockGetTasks, + getTaskAttachments: mockGetTaskAttachments, + getTaskComments: mockGetTaskComments, + })); + + testContext = { + mockAdapter, + mockRepo, + mockTasksRepo, + mockAttachmentsRepo, + mockCommentsRepo, + mockGetContacts, + mockGetTasks, + mockGetTaskAttachments, + mockGetTaskComments, + }; + + // Update global mockTask reference + (global as any).mockTask = mockTask; + }); + + describe('task execution', () => { + describe('success cases', createSuccessTestSuite(() => testContext)); + + describe('error handling', createErrorHandlingTestSuite(() => testContext)); + + it('should call upload after push to ensure data is sent', async () => { + const mockUsers = createMockUsers(); + testContext.mockGetContacts.mockResolvedValue(createSuccessResponse(mockUsers)); + testContext.mockGetTasks.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTaskAttachments.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTaskComments.mockResolvedValue(createSuccessResponse([])); + + await mockTask({ adapter: testContext.mockAdapter }); + + expect(testContext.mockRepo.push).toHaveBeenCalled(); + expect(testContext.mockRepo.upload).toHaveBeenCalled(); + + // Verify upload is called after push + const pushCallOrder = testContext.mockRepo.push.mock.invocationCallOrder[0]; + const uploadCallOrder = testContext.mockRepo.upload.mock.invocationCallOrder[0]; + expect(uploadCallOrder).toBeGreaterThan(pushCallOrder); + }); + + it('should 
initialize WrikeClient with correct API key', async () => { + testContext.mockGetContacts.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTasks.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTaskAttachments.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTaskComments.mockResolvedValue(createSuccessResponse([])); + + await mockTask({ adapter: testContext.mockAdapter }); + + expect(WrikeClient).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + describe('tasks extraction', () => { + const tasksTestSuite = createTasksExtractionTestSuite(() => testContext); + tasksTestSuite(); + + it('should call upload after push for tasks', async () => { + testContext.mockAdapter.state.users.completed = true; + const mockTasks = createMockTasks(); + const mockComments = createMockComments(); + testContext.mockGetTasks.mockResolvedValue(createSuccessResponse(mockTasks)); + testContext.mockGetTaskAttachments.mockResolvedValue(createSuccessResponse([])); + testContext.mockGetTaskComments.mockResolvedValue(createSuccessResponse(mockComments)); + + await mockTask({ adapter: testContext.mockAdapter }); + + expect(testContext.mockTasksRepo.push).toHaveBeenCalled(); + expect(testContext.mockTasksRepo.upload).toHaveBeenCalled(); + + // Verify upload is called after push + const pushCallOrder = testContext.mockTasksRepo.push.mock.invocationCallOrder[0]; + const uploadCallOrder = testContext.mockTasksRepo.upload.mock.invocationCallOrder[0]; + expect(uploadCallOrder).toBeGreaterThan(pushCallOrder); + }); + + it('should extract users, tasks, and comments in sequence', async () => { + const mockUsers = createMockUsers(); + const mockTasks = createMockTasks(); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + + testContext.mockGetContacts.mockResolvedValue(createSuccessResponse(mockUsers)); + testContext.mockGetTasks.mockResolvedValue(createSuccessResponse(mockTasks)); + 
testContext.mockGetTaskAttachments.mockResolvedValue(createSuccessResponse(mockAttachments)); + testContext.mockGetTaskComments.mockResolvedValue(createSuccessResponse(mockComments)); + + await mockTask({ adapter: testContext.mockAdapter }); + + expect(testContext.mockGetContacts).toHaveBeenCalledTimes(1); + expect(testContext.mockGetTasks).toHaveBeenCalledTimes(1); + expect(testContext.mockGetTaskComments).toHaveBeenCalledTimes(1); + expect(testContext.mockAdapter.state.users.completed).toBe(true); + expect(testContext.mockAdapter.state.tasks.completed).toBe(true); + expect(testContext.mockAdapter.state.comments.completed).toBe(true); + expect(testContext.mockAdapter.state.attachments.completed).toBe(true); + expect(testContext.mockAdapter.emit).toHaveBeenCalledWith(ExtractorEventType.ExtractionDataDone); + }); + }); + + describe('pagination', createPaginationTestSuite(() => testContext)); + + describe('incremental sync', createIncrementalSyncTestSuite(() => testContext)); + + describe('attachments extraction', createAttachmentsExtractionTestSuite(() => testContext)); + + describe('comments extraction', createCommentsExtractionTestSuite(() => testContext)); + }); + + describe('timeout handling', () => { + const timeoutTestCases = generateTimeoutTestCases(); + + timeoutTestCases.forEach((testCase: any) => { + it(testCase.description, async () => { + let originalState; + if (testCase.setup) { + originalState = testCase.setup(testContext.mockAdapter); + } + + await mockOnTimeout({ adapter: testContext.mockAdapter }); + + if (originalState) { + testCase.assertions(testContext.mockAdapter, originalState); + } else { + testCase.assertions( + testContext.mockAdapter, + testContext.mockRepo, + testContext.mockTasksRepo, + testContext.mockAttachmentsRepo, + testContext.mockCommentsRepo, + testContext.mockGetContacts, + testContext.mockGetTasks, + testContext.mockGetTaskAttachments, + testContext.mockGetTaskComments + ); + } + }); + }); + }); + + describe('rate limiting', 
() => { + const rateLimitTestCases = generateRateLimitingTestCases(); + + rateLimitTestCases.forEach((testCase: any) => { + it(testCase.description, async () => { + const mockUsers = createMockUsers(); + const mockTasks = createMockTasks(); + const mockAttachments = createMockAttachments(); + const mockComments = createMockComments(); + + testCase.setup(testContext.mockGetContacts, testContext.mockGetTasks, testContext.mockGetTaskAttachments, testContext.mockGetTaskComments, mockUsers, mockTasks, mockAttachments, mockComments, testContext.mockAdapter); + await mockTask({ adapter: testContext.mockAdapter }); + testCase.assertions(testContext.mockAdapter); + }); + }); + }); +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/data-extraction.ts b/build/src/functions/extraction/workers/data-extraction.ts new file mode 100644 index 0000000..dd97185 --- /dev/null +++ b/build/src/functions/extraction/workers/data-extraction.ts @@ -0,0 +1,192 @@ +import { ExtractorEventType, processTask } from "@devrev/ts-adaas"; +import { ExtractorState } from "../index"; +import { WrikeClient } from "../../../core/wrike-client"; +import { WrikeApiError } from "../../../core/wrike-error-handler"; +import { WrikeAttachment } from "../../../core/wrike-types"; +import { normalizeUser, normalizeTask, normalizeAttachment, normalizeComment } from "./normalization"; + +processTask({ + task: async ({ adapter }) => { + try { + // Initialize all repositories at the start to ensure they're available + // regardless of the current state + const repos = [ + { + itemType: 'users', + normalize: normalizeUser, + }, + { + itemType: 'tasks', + normalize: normalizeTask, + }, + { + itemType: 'attachments', + normalize: normalizeAttachment, + }, + { + itemType: 'comments', + normalize: normalizeComment, + }, + ]; + adapter.initializeRepos(repos); + + // Extract API key from event + const apiKey = adapter.event.payload.connection_data.key; + + // Extract folder ID and event 
context for tasks extraction + const folderId = adapter.event.payload.event_context?.external_sync_unit_id; + const mode = adapter.event.payload.event_context?.mode; + const extractFrom = adapter.event.payload.event_context?.extract_from; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + // Check if users data needs to be extracted + if (!adapter.state.users.completed) { + // Fetch users from Wrike + const response = await wrikeClient.getContacts(); + + if (response.status_code !== 200 || !response.data) { + throw new Error(`Failed to fetch users: ${response.message}`); + } + + // Push users to the repository + await adapter.getRepo('users')?.push(response.data); + + // Manually upload to ensure data is sent + await adapter.getRepo('users')?.upload(); + + // Update state + adapter.state.users.completed = true; + } + + // Check if tasks data needs to be extracted + if (adapter.state.users.completed && !adapter.state.tasks.completed) { + // Validate folder ID is present + if (!folderId) { + throw new Error('Missing external_sync_unit_id (folder ID) for tasks extraction'); + } + + // Determine if this is incremental sync + const isIncremental = mode === 'INCREMENTAL'; + const modifiedSince = adapter.state.tasks.modifiedSince || extractFrom; + + // Initialize array to collect all attachments metadata + const allAttachmentsMetadata: WrikeAttachment[] = []; + + // Pagination limit as per requirements + const pageSize = 100; + let hasMore = true; + + // Fetch tasks with pagination + while (hasMore) { + const response = await wrikeClient.getTasks(folderId, { + pageSize, + nextPageToken: adapter.state.tasks.nextPageToken, + updatedDate: isIncremental ? 
modifiedSince : undefined, + }); + + if (response.status_code !== 200 || !response.data) { + throw new Error(`Failed to fetch tasks: ${response.message}`); + } + + // Push tasks to the repository + await adapter.getRepo('tasks')?.push(response.data); + + // Extract attachments from tasks with hasAttachments flag + for (const task of response.data) { + if (task.hasAttachments) { + const attachmentsResponse = await wrikeClient.getTaskAttachments(task.id); + + if (attachmentsResponse.status_code !== 200 || !attachmentsResponse.data) { + console.error(`Failed to fetch attachments for task ${task.id}: ${attachmentsResponse.message}`); + // Continue with other tasks even if one fails + continue; + } + + // Collect attachments metadata for later streaming + allAttachmentsMetadata.push(...attachmentsResponse.data); + + // Push attachments to the repository + await adapter.getRepo('attachments')?.push(attachmentsResponse.data); + } + + // Extract comments from tasks + if (!adapter.state.comments.completed) { + const commentsResponse = await wrikeClient.getTaskComments(task.id); + + if (commentsResponse.status_code !== 200 || !commentsResponse.data) { + console.error(`Failed to fetch comments for task ${task.id}: ${commentsResponse.message}`); + // Continue with other tasks even if one fails + continue; + } + + // Push comments to the repository + await adapter.getRepo('comments')?.push(commentsResponse.data); + } + } + + // Update pagination state + if (response.data.length < pageSize) { + // No more pages + hasMore = false; + adapter.state.tasks.completed = true; + adapter.state.tasks.nextPageToken = undefined; + } else { + // More pages available + adapter.state.tasks.nextPageToken = response.nextPageToken; + hasMore = !!response.nextPageToken; + + if (!hasMore) { + // Last page + adapter.state.tasks.completed = true; + adapter.state.tasks.nextPageToken = undefined; + } + } + } + + // Store attachments metadata in state for attachment streaming phase + 
adapter.state.attachments.metadata = allAttachmentsMetadata; + + // Manually upload to ensure data is sent + await adapter.getRepo('tasks')?.upload(); + await adapter.getRepo('attachments')?.upload(); + await adapter.getRepo('comments')?.upload(); + + // Mark attachments and comments as completed when tasks are completed + adapter.state.attachments.completed = adapter.state.tasks.completed; + adapter.state.comments.completed = adapter.state.tasks.completed; + + // Update modifiedSince for next incremental sync + adapter.state.tasks.modifiedSince = new Date().toISOString(); + } + + // Emit success event only when all data is completed + if (adapter.state.users.completed && adapter.state.tasks.completed && + adapter.state.comments.completed) { + await adapter.emit(ExtractorEventType.ExtractionDataDone); + } + } catch (error) { + // Check if this is a rate limiting error + if (error instanceof WrikeApiError && error.statusCode === 429) { + console.log(`Rate limit exceeded. Emitting delay event with ${error.apiDelay} seconds delay.`); + await adapter.emit(ExtractorEventType.ExtractionDataDelay, { + delay: error.apiDelay, + }); + return; // Exit immediately after emitting delay event + } else { + // For all other errors, emit error event + console.error('Error extracting data:', error); + await adapter.emit(ExtractorEventType.ExtractionDataError, { + error: { + message: error instanceof Error ? 
error.message : 'Failed to extract data', + }, + }); + return; // Exit immediately after emitting error event + } + } + }, + onTimeout: async ({ adapter }) => { + await adapter.emit(ExtractorEventType.ExtractionDataProgress); + }, +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/external-sync-units-extraction.test.helpers.ts b/build/src/functions/extraction/workers/external-sync-units-extraction.test.helpers.ts new file mode 100644 index 0000000..c081c82 --- /dev/null +++ b/build/src/functions/extraction/workers/external-sync-units-extraction.test.helpers.ts @@ -0,0 +1,86 @@ +import { WrikeFolder } from '../../../core/wrike-types'; + +/** + * Mock folder data for testing external sync units extraction + */ +export const mockFolders: WrikeFolder[] = [ + { + id: 'FOLDER1', + accountId: 'ACCOUNT1', + title: 'Project Alpha', + description: 'First project description', + createdDate: '2025-01-01T00:00:00Z', + updatedDate: '2025-01-02T00:00:00Z', + sharedIds: [], + parentIds: [], + childIds: [], + scope: 'WsFolder', + permalink: 'https://wrike.com/folder/1', + }, + { + id: 'FOLDER2', + accountId: 'ACCOUNT1', + title: 'Project Beta', + description: '', + createdDate: '2025-01-03T00:00:00Z', + updatedDate: '2025-01-04T00:00:00Z', + sharedIds: [], + parentIds: [], + childIds: [], + scope: 'WsFolder', + permalink: 'https://wrike.com/folder/2', + }, +]; + +/** + * Creates a single mock folder for testing + */ +export const createMockFolder = (overrides?: Partial): WrikeFolder => ({ + id: 'TEST_ID', + accountId: 'ACCOUNT1', + title: 'Test Title', + description: 'Test Description', + createdDate: '2025-01-01T00:00:00Z', + updatedDate: '2025-01-02T00:00:00Z', + sharedIds: [], + parentIds: [], + childIds: [], + scope: 'WsFolder', + permalink: 'https://wrike.com/folder/test', + ...overrides, +}); + +/** + * Creates a mock adapter for testing + */ +export const createMockAdapter = (apiKey: string = 'test-api-key', orgId: string = 'SPACE123') => 
({ + event: { + payload: { + connection_data: { + key: apiKey, + org_id: orgId, + }, + }, + }, + emit: jest.fn(), +}); + +/** + * Creates a successful API response + */ +export const createSuccessResponse = (data: WrikeFolder[]) => ({ + status_code: 200, + api_delay: 0, + message: 'Success', + data, +}); + +/** + * Creates an error API response + */ +export const createErrorResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: statusCode === 429 ? 60 : 0, + message, + data: undefined, +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/external-sync-units-extraction.test.ts b/build/src/functions/extraction/workers/external-sync-units-extraction.test.ts new file mode 100644 index 0000000..5093af2 --- /dev/null +++ b/build/src/functions/extraction/workers/external-sync-units-extraction.test.ts @@ -0,0 +1,257 @@ +import { ExtractorEventType } from '@devrev/ts-adaas'; +import { WrikeClient } from '../../../core/wrike-client'; +import { + mockFolders, + createMockFolder, + createMockAdapter, + createSuccessResponse, + createErrorResponse, +} from './external-sync-units-extraction.test.helpers'; + +// Mock the WrikeClient +jest.mock('../../../core/wrike-client'); + +// Mock the processTask function +let mockTask: any; +let mockOnTimeout: any; + +jest.mock('@devrev/ts-adaas', () => ({ + ...jest.requireActual('@devrev/ts-adaas'), + processTask: (config: any) => { + mockTask = config.task; + mockOnTimeout = config.onTimeout; + }, + ExtractorEventType: { + ExtractionExternalSyncUnitsDone: 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE', + ExtractionExternalSyncUnitsError: 'EXTRACTION_EXTERNAL_SYNC_UNITS_ERROR', + }, +})); + +// Import the worker module after mocking to ensure mocks are in place +// This must happen before the describe block to register processTask +require('./external-sync-units-extraction'); + +describe('external-sync-units-extraction worker', () => { + let mockAdapter: any; + let 
mockGetFolders: jest.Mock; + + beforeEach(() => { + jest.clearAllMocks(); + + mockAdapter = createMockAdapter(); + + mockGetFolders = jest.fn(); + + (WrikeClient as jest.Mock).mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + }); + + describe('task execution', () => { + it('should successfully fetch folders and emit ExtractionExternalSyncUnitsDone', async () => { + mockGetFolders.mockResolvedValue(createSuccessResponse(mockFolders)); + + await mockTask({ adapter: mockAdapter }); + + expect(WrikeClient).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + expect(mockGetFolders).toHaveBeenCalledWith('SPACE123'); + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsDone, + { + external_sync_units: [ + { + id: 'FOLDER1', + name: 'Project Alpha', + description: 'First project description', + item_type: 'tasks', + }, + { + id: 'FOLDER2', + name: 'Project Beta', + description: '', + item_type: 'tasks', + }, + ], + } + ); + }); + + it('should map folder fields correctly to external sync unit', async () => { + const singleFolder = [createMockFolder()]; + + mockGetFolders.mockResolvedValue(createSuccessResponse(singleFolder)); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsDone, + { + external_sync_units: [ + { + id: 'TEST_ID', + name: 'Test Title', + description: 'Test Description', + item_type: 'tasks', + }, + ], + } + ); + }); + + it('should handle empty folder list', async () => { + mockGetFolders.mockResolvedValue(createSuccessResponse([])); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsDone, + { + external_sync_units: [], + } + ); + }); + + it('should handle folder with undefined description', async () => { + const folderWithoutDescription = [ + createMockFolder({ + id: 'FOLDER_NO_DESC', + title: 'No Description 
Project', + description: undefined, + permalink: 'https://wrike.com/folder/nodesc', + }), + ]; + + mockGetFolders.mockResolvedValue(createSuccessResponse(folderWithoutDescription)); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsDone, + { + external_sync_units: [ + { + id: 'FOLDER_NO_DESC', + name: 'No Description Project', + description: '', + item_type: 'tasks', + }, + ], + } + ); + }); + + it('should emit error when API returns non-200 status', async () => { + mockGetFolders.mockResolvedValue(createErrorResponse(401, 'Authentication failed')); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Authentication failed', + }, + } + ); + }); + + it('should emit error when API returns no data', async () => { + mockGetFolders.mockResolvedValue({ + status_code: 200, + api_delay: 0, + message: 'Success', + data: undefined, + }); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Success', + }, + } + ); + }); + + it('should handle network errors', async () => { + const networkError = new Error('Network error: Unable to reach Wrike API'); + mockGetFolders.mockRejectedValue(networkError); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Network error: Unable to reach Wrike API', + }, + } + ); + }); + + it('should handle rate limiting errors', async () => { + mockGetFolders.mockResolvedValue( + createErrorResponse(429, 'Rate limit exceeded. 
Retry after 60 seconds.') + ); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Rate limit exceeded. Retry after 60 seconds.', + }, + } + ); + }); + + it('should handle non-Error exceptions', async () => { + mockGetFolders.mockRejectedValue('String error'); + + await mockTask({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Failed to extract external sync units', + }, + } + ); + }); + + it('should initialize WrikeClient with correct API key', async () => { + mockGetFolders.mockResolvedValue(createSuccessResponse([])); + + await mockTask({ adapter: mockAdapter }); + + expect(WrikeClient).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should call getFolders with correct space ID', async () => { + mockGetFolders.mockResolvedValue(createSuccessResponse([])); + + await mockTask({ adapter: mockAdapter }); + + expect(mockGetFolders).toHaveBeenCalledWith('SPACE123'); + }); + }); + + describe('timeout handling', () => { + it('should emit error on timeout', async () => { + await mockOnTimeout({ adapter: mockAdapter }); + + expect(mockAdapter.emit).toHaveBeenCalledWith( + ExtractorEventType.ExtractionExternalSyncUnitsError, + { + error: { + message: 'Failed to extract external sync units. 
Lambda timeout.', + }, + } + ); + }); + }); +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/external-sync-units-extraction.ts b/build/src/functions/extraction/workers/external-sync-units-extraction.ts new file mode 100644 index 0000000..df41c5a --- /dev/null +++ b/build/src/functions/extraction/workers/external-sync-units-extraction.ts @@ -0,0 +1,61 @@ +import { ExtractorEventType, processTask, ExternalSyncUnit } from "@devrev/ts-adaas"; +import { ExtractorState } from "../index"; +import { WrikeClient } from "../../../core/wrike-client"; +import { WrikeFolder } from "../../../core/wrike-types"; + +/** + * Maps a Wrike folder to an ExternalSyncUnit. + * + * @param folder - Wrike folder to map + * @returns ExternalSyncUnit with mapped fields + */ +function mapFolderToExternalSyncUnit(folder: WrikeFolder): ExternalSyncUnit { + return { + id: folder.id, + name: folder.title, + description: folder.description || '', + item_type: 'tasks', + }; +} + +processTask({ + task: async ({ adapter }) => { + try { + // Extract API key and space ID from event + const apiKey = adapter.event.payload.connection_data.key; + const spaceId = adapter.event.payload.connection_data.org_id; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + // Fetch folders from the space + const response = await wrikeClient.getFolders(spaceId); + + if (response.status_code !== 200 || !response.data) { + throw new Error(response.message); + } + + // Map folders to external sync units + const externalSyncUnits: ExternalSyncUnit[] = response.data.map(mapFolderToExternalSyncUnit); + + // Emit success event with external sync units + await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsDone, { + external_sync_units: externalSyncUnits, + }); + } catch (error) { + console.error('Error extracting external sync units:', error); + await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { + error: { + message: error 
instanceof Error ? error.message : 'Failed to extract external sync units', + }, + }); + } + }, + onTimeout: async ({ adapter }) => { + await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { + error: { + message: 'Failed to extract external sync units. Lambda timeout.', + }, + }); + }, +}); diff --git a/build/src/functions/extraction/workers/metadata-extraction.test.cases.ts b/build/src/functions/extraction/workers/metadata-extraction.test.cases.ts new file mode 100644 index 0000000..b16231f --- /dev/null +++ b/build/src/functions/extraction/workers/metadata-extraction.test.cases.ts @@ -0,0 +1,242 @@ +/** + * Test case generators for metadata-extraction worker. + * Contains reusable test case configurations and generators. + */ + +/** + * Generates test cases for successful metadata extraction + */ +export function generateSuccessTestCases() { + return [ + { + description: 'should successfully extract metadata and emit ExtractionMetadataDone', + assertions: (mockAdapter: any, mockRepo: any) => { + expect(mockAdapter.initializeRepos).toHaveBeenCalledWith([ + { itemType: 'external_domain_metadata' }, + ]); + expect(mockAdapter.getRepo).toHaveBeenCalledWith('external_domain_metadata'); + expect(mockRepo.push).toHaveBeenCalledTimes(1); + expect(mockRepo.upload).toHaveBeenCalledTimes(1); + expect(mockAdapter.emit).toHaveBeenCalledWith('EXTRACTION_METADATA_DONE'); + }, + }, + ]; +} + +/** + * Generates test cases for metadata structure validation + */ +export function generateMetadataStructureTestCases() { + return [ + { + description: 'should push metadata without normalization', + assertionFn: 'assertMetadataStructure', + additionalAssertions: ['assertAllRecordTypes'], + }, + { + description: 'should include all users fields in metadata', + assertionFn: 'assertUsersFields', + }, + { + description: 'should include all comments fields in metadata', + assertionFn: 'assertCommentsFields', + }, + { + description: 'should include all tasks fields in 
metadata', + assertionFn: 'assertTasksFields', + }, + ]; +} + +/** + * Generates test cases for repository initialization + */ +export function generateRepoInitializationTestCases() { + return [ + { + description: 'should initialize repos without normalize function', + assertions: (mockAdapter: any) => { + const reposConfig = mockAdapter.initializeRepos.mock.calls[0][0]; + expect(reposConfig).toHaveLength(1); + expect(reposConfig[0]).toEqual({ itemType: 'external_domain_metadata' }); + expect(reposConfig[0]).not.toHaveProperty('normalize'); + }, + }, + { + description: 'should call upload after push to ensure data is sent', + assertions: (mockAdapter: any, mockRepo: any) => { + expect(mockRepo.push).toHaveBeenCalled(); + expect(mockRepo.upload).toHaveBeenCalled(); + + // Verify upload is called after push + const pushCallOrder = mockRepo.push.mock.invocationCallOrder[0]; + const uploadCallOrder = mockRepo.upload.mock.invocationCallOrder[0]; + expect(uploadCallOrder).toBeGreaterThan(pushCallOrder); + }, + }, + ]; +} + +/** + * Generates test cases for error handling + */ +export function generateErrorHandlingTestCases() { + return [ + { + description: 'should emit error when push fails', + setup: (mockRepo: any) => { + const pushError = new Error('Push failed'); + mockRepo.push.mockRejectedValue(pushError); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: 'Push failed', + }, + } + ); + }, + }, + { + description: 'should emit error when upload fails', + setup: (mockRepo: any) => { + const uploadError = new Error('Upload failed'); + mockRepo.upload.mockRejectedValue(uploadError); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: 'Upload failed', + }, + } + ); + }, + }, + { + description: 'should emit error when initializeRepos fails', + setup: (mockRepo: any, mockAdapter: 
any) => { + const initError = new Error('Initialize repos failed'); + mockAdapter.initializeRepos.mockImplementation(() => { + throw initError; + }); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: 'Initialize repos failed', + }, + } + ); + }, + }, + { + description: 'should handle non-Error exceptions', + setup: (mockRepo: any) => { + mockRepo.push.mockRejectedValue('String error'); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: 'Failed to extract metadata', + }, + } + ); + }, + }, + { + description: 'should emit error when getRepo returns null', + setup: (mockRepo: any, mockAdapter: any) => { + mockAdapter.getRepo.mockReturnValue(null); + }, + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: expect.stringContaining(''), + }, + } + ); + }, + }, + ]; +} + +/** + * Generates test cases for detailed metadata structure validation + */ +export function generateDetailedMetadataValidationTestCases() { + return [ + { + description: 'should include correct schema version', + assertionFn: (pushedData: any) => { + expect(pushedData[0].schema_version).toBe('v0.2.0'); + }, + }, + { + description: 'should include all three record types', + assertionFn: 'assertAllRecordTypes', + }, + { + description: 'should have correct field types for users', + assertionFn: 'assertUsersFieldTypes', + }, + { + description: 'should have correct field types for comments', + assertionFn: 'assertCommentsFieldTypes', + }, + { + description: 'should have correct field types for tasks', + assertionFn: 'assertTasksFieldTypes', + }, + { + description: 'should have correct enum values for status field', + assertionFn: 'assertStatusEnumValues', + }, + { + description: 'should have correct reference structure for 
responsible_ids', + assertionFn: 'assertResponsibleIdsReference', + }, + ]; +} + +/** + * Generates test cases for timeout handling + */ +export function generateTimeoutTestCases() { + return [ + { + description: 'should emit error on timeout', + assertions: (mockAdapter: any) => { + expect(mockAdapter.emit).toHaveBeenCalledWith( + 'EXTRACTION_METADATA_ERROR', + { + error: { + message: 'Failed to extract metadata. Lambda timeout.', + }, + } + ); + }, + }, + { + description: 'should not call initializeRepos on timeout', + assertions: (mockAdapter: any) => { + expect(mockAdapter.initializeRepos).not.toHaveBeenCalled(); + }, + }, + { + description: 'should not call push on timeout', + assertions: (mockAdapter: any, mockRepo: any) => { + expect(mockRepo.push).not.toHaveBeenCalled(); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/metadata-extraction.test.helpers.ts b/build/src/functions/extraction/workers/metadata-extraction.test.helpers.ts new file mode 100644 index 0000000..5e78db5 --- /dev/null +++ b/build/src/functions/extraction/workers/metadata-extraction.test.helpers.ts @@ -0,0 +1,133 @@ +/** + * Test helpers for metadata-extraction worker tests. + * Contains mock creation utilities and assertion helpers. 
+ */ + +/** + * Creates a mock repository for testing + */ +export function createMockRepo() { + return { + push: jest.fn().mockResolvedValue(true), + upload: jest.fn().mockResolvedValue(undefined), + }; +} + +/** + * Creates a mock adapter for testing + */ +export function createMockAdapter(mockRepo: any) { + return { + initializeRepos: jest.fn(), + getRepo: jest.fn().mockReturnValue(mockRepo), + emit: jest.fn().mockResolvedValue(undefined), + }; +} + +/** + * Asserts that the pushed metadata has the correct structure + */ +export function assertMetadataStructure(pushedData: any[]) { + expect(pushedData).toHaveLength(1); + expect(pushedData[0]).toHaveProperty('schema_version', 'v0.2.0'); + expect(pushedData[0]).toHaveProperty('record_types'); +} + +/** + * Asserts that all three record types exist in metadata + */ +export function assertAllRecordTypes(pushedData: any[]) { + const recordTypes = pushedData[0].record_types; + expect(Object.keys(recordTypes)).toHaveLength(3); + expect(recordTypes).toHaveProperty('users'); + expect(recordTypes).toHaveProperty('comments'); + expect(recordTypes).toHaveProperty('tasks'); +} + +/** + * Asserts that users fields are correct + */ +export function assertUsersFields(pushedData: any[]) { + const usersFields = pushedData[0].record_types.users.fields; + expect(usersFields).toHaveProperty('full_name'); + expect(usersFields).toHaveProperty('email'); + expect(usersFields).toHaveProperty('title'); + expect(Object.keys(usersFields)).toHaveLength(3); +} + +/** + * Asserts that comments fields are correct + */ +export function assertCommentsFields(pushedData: any[]) { + const commentsFields = pushedData[0].record_types.comments.fields; + expect(commentsFields).toHaveProperty('text'); + expect(commentsFields).toHaveProperty('author_id'); + expect(commentsFields).toHaveProperty('task_id'); + expect(Object.keys(commentsFields)).toHaveLength(3); +} + +/** + * Asserts that tasks fields are correct + */ +export function 
assertTasksFields(pushedData: any[]) { + const tasksFields = pushedData[0].record_types.tasks.fields; + expect(tasksFields).toHaveProperty('title'); + expect(tasksFields).toHaveProperty('description'); + expect(tasksFields).toHaveProperty('status'); + expect(tasksFields).toHaveProperty('permalink'); + expect(tasksFields).toHaveProperty('responsible_ids'); + expect(Object.keys(tasksFields)).toHaveLength(5); +} + +/** + * Asserts that field types are correct for users + */ +export function assertUsersFieldTypes(pushedData: any[]) { + const usersFields = pushedData[0].record_types.users.fields; + expect(usersFields.full_name.type).toBe('text'); + expect(usersFields.email.type).toBe('text'); + expect(usersFields.title.type).toBe('text'); +} + +/** + * Asserts that field types are correct for comments + */ +export function assertCommentsFieldTypes(pushedData: any[]) { + const commentsFields = pushedData[0].record_types.comments.fields; + expect(commentsFields.text.type).toBe('rich_text'); + expect(commentsFields.author_id.type).toBe('reference'); + expect(commentsFields.task_id.type).toBe('reference'); +} + +/** + * Asserts that field types are correct for tasks + */ +export function assertTasksFieldTypes(pushedData: any[]) { + const tasksFields = pushedData[0].record_types.tasks.fields; + expect(tasksFields.title.type).toBe('text'); + expect(tasksFields.description.type).toBe('rich_text'); + expect(tasksFields.status.type).toBe('enum'); + expect(tasksFields.permalink.type).toBe('text'); + expect(tasksFields.responsible_ids.type).toBe('reference'); +} + +/** + * Asserts that status enum values are correct + */ +export function assertStatusEnumValues(pushedData: any[]) { + const statusField = pushedData[0].record_types.tasks.fields.status; + expect(statusField.enum.values).toHaveLength(4); + expect(statusField.enum.values).toContainEqual({ key: 'active', name: 'Active' }); + expect(statusField.enum.values).toContainEqual({ key: 'completed', name: 'Completed' }); + 
expect(statusField.enum.values).toContainEqual({ key: 'deferred', name: 'Deferred' }); + expect(statusField.enum.values).toContainEqual({ key: 'cancelled', name: 'Cancelled' }); +} + +/** + * Asserts that responsible_ids reference structure is correct + */ +export function assertResponsibleIdsReference(pushedData: any[]) { + const responsibleIdsField = pushedData[0].record_types.tasks.fields.responsible_ids; + expect(responsibleIdsField.collection).toEqual({ max_length: 1 }); + expect(responsibleIdsField.reference.refers_to).toHaveProperty('#record:users'); +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/metadata-extraction.test.ts b/build/src/functions/extraction/workers/metadata-extraction.test.ts new file mode 100644 index 0000000..01d8445 --- /dev/null +++ b/build/src/functions/extraction/workers/metadata-extraction.test.ts @@ -0,0 +1,145 @@ +import { ExtractorEventType } from '@devrev/ts-adaas'; +import { + createMockRepo, + createMockAdapter, + assertMetadataStructure, + assertAllRecordTypes, + assertUsersFields, + assertCommentsFields, + assertTasksFields, + assertUsersFieldTypes, + assertCommentsFieldTypes, + assertTasksFieldTypes, + assertStatusEnumValues, + assertResponsibleIdsReference, +} from './metadata-extraction.test.helpers'; +import { + generateSuccessTestCases, + generateMetadataStructureTestCases, + generateRepoInitializationTestCases, + generateErrorHandlingTestCases, + generateDetailedMetadataValidationTestCases, + generateTimeoutTestCases, +} from './metadata-extraction.test.cases'; + +// Mock the processTask function +let mockTask: any; +let mockOnTimeout: any; + +jest.mock('@devrev/ts-adaas', () => ({ + ...jest.requireActual('@devrev/ts-adaas'), + processTask: (config: any) => { + mockTask = config.task; + mockOnTimeout = config.onTimeout; + }, + ExtractorEventType: { + ExtractionMetadataDone: 'EXTRACTION_METADATA_DONE', + ExtractionMetadataError: 'EXTRACTION_METADATA_ERROR', + }, +})); + +// Import the 
worker module after mocking to ensure mocks are in place +require('./metadata-extraction'); + +describe('metadata-extraction worker', () => { + let mockAdapter: any; + let mockRepo: any; + + beforeEach(() => { + jest.clearAllMocks(); + mockRepo = createMockRepo(); + mockAdapter = createMockAdapter(mockRepo); + }); + + describe('task execution', () => { + // Success test cases + const successTestCases = generateSuccessTestCases(); + successTestCases.forEach(({ description, assertions }) => { + it(description, async () => { + await mockTask({ adapter: mockAdapter }); + assertions(mockAdapter, mockRepo); + }); + }); + + // Metadata structure test cases + const metadataStructureTestCases = generateMetadataStructureTestCases(); + metadataStructureTestCases.forEach(({ description, assertionFn, additionalAssertions }) => { + it(description, async () => { + await mockTask({ adapter: mockAdapter }); + + const pushedData = mockRepo.push.mock.calls[0][0]; + + // Map assertion function names to actual functions + const assertionMap: Record<string, (data: any[]) => void> = { + assertMetadataStructure, + assertAllRecordTypes, + assertUsersFields, + assertCommentsFields, + assertTasksFields, + }; + + assertionMap[assertionFn](pushedData); + + if (additionalAssertions) { + additionalAssertions.forEach(fnName => assertionMap[fnName](pushedData)); + } + }); + }); + + // Repository initialization test cases + const repoInitTestCases = generateRepoInitializationTestCases(); + repoInitTestCases.forEach(({ description, assertions }) => { + it(description, async () => { + await mockTask({ adapter: mockAdapter }); + assertions(mockAdapter, mockRepo); + }); + }); + + // Error handling test cases + const errorTestCases = generateErrorHandlingTestCases(); + errorTestCases.forEach(({ description, setup, assertions }) => { + it(description, async () => { + setup(mockRepo, mockAdapter); + await mockTask({ adapter: mockAdapter }); + assertions(mockAdapter); + }); + }); + }); + + describe('timeout handling', () => { + 
const timeoutTestCases = generateTimeoutTestCases(); + timeoutTestCases.forEach(({ description, assertions }) => { + it(description, async () => { + await mockOnTimeout({ adapter: mockAdapter }); + assertions(mockAdapter, mockRepo); + }); + }); + }); + + describe('metadata structure validation', () => { + const validationTestCases = generateDetailedMetadataValidationTestCases(); + validationTestCases.forEach(({ description, assertionFn }) => { + it(description, async () => { + await mockTask({ adapter: mockAdapter }); + + const pushedData = mockRepo.push.mock.calls[0][0]; + + if (typeof assertionFn === 'string') { + // Map assertion function names to actual functions + const assertionMap: Record<string, (data: any[]) => void> = { + assertAllRecordTypes, + assertUsersFieldTypes, + assertCommentsFieldTypes, + assertTasksFieldTypes, + assertStatusEnumValues, + assertResponsibleIdsReference, + }; + assertionMap[assertionFn](pushedData); + } else { + // Direct assertion function + assertionFn(pushedData); + } + }); + }); + }); +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/metadata-extraction.ts b/build/src/functions/extraction/workers/metadata-extraction.ts new file mode 100644 index 0000000..a62df40 --- /dev/null +++ b/build/src/functions/extraction/workers/metadata-extraction.ts @@ -0,0 +1,76 @@ +import { processTask, ExtractorEventType } from '@devrev/ts-adaas'; +import { ExtractorState } from '../index'; + +/** + * External domain metadata structure + * This matches the structure from get_external_domain_metadata function + */ +const externalDomainMetadata = { + schema_version: 'v0.2.0', + record_types: { + users: { + name: 'Users', + fields: { + full_name: { type: 'text', name: 'Full Name', is_required: true }, + email: { type: 'text', name: 'Email', is_required: true }, + title: { type: 'text', name: 'Title', is_required: false }, + }, + }, + comments: { + name: 'Comments', + fields: { + text: { type: 'rich_text', name: 'Text', is_required: true }, + 
author_id: { type: 'reference', name: 'Author ID', is_required: true, reference: { refers_to: { '#record:users': {} } } }, + task_id: { type: 'reference', name: 'Task ID', is_required: true, reference: { refers_to: { '#record:tasks': {} } } }, + }, + }, + tasks: { + name: 'Tasks', + fields: { + title: { type: 'text', name: 'Title', is_required: true }, + description: { type: 'rich_text', name: 'Description', is_required: true }, + status: { type: 'enum', name: 'Status', is_required: true, enum: { values: [{ key: 'active', name: 'Active' }, { key: 'completed', name: 'Completed' }, { key: 'deferred', name: 'Deferred' }, { key: 'cancelled', name: 'Cancelled' }] } }, + permalink: { type: 'text', name: 'URL', is_required: true }, + responsible_ids: { type: 'reference', name: 'Responsible IDs', is_required: true, collection: { max_length: 1 }, reference: { refers_to: { '#record:users': {} } } }, + }, + }, + }, +}; + +processTask({ + task: async ({ adapter }) => { + try { + // Initialize repository for external_domain_metadata + // IMPORTANT: No normalize function - metadata must NOT be normalized + const repos = [{ itemType: 'external_domain_metadata' }]; + adapter.initializeRepos(repos); + + // Get the repo and verify it exists + const metadataRepo = adapter.getRepo('external_domain_metadata'); + if (!metadataRepo) { + throw new Error('Failed to initialize external_domain_metadata repository'); + } + + // Push external domain metadata without normalization + await metadataRepo.push([externalDomainMetadata]); + + // Manually upload to ensure data is sent + await metadataRepo.upload(); + + // Emit success event + await adapter.emit(ExtractorEventType.ExtractionMetadataDone); + } catch (error) { + console.error('Error extracting metadata:', error); + await adapter.emit(ExtractorEventType.ExtractionMetadataError, { + error: { + message: error instanceof Error ? 
error.message : 'Failed to extract metadata', + }, + }); + } + }, + onTimeout: async ({ adapter }) => { + await adapter.emit(ExtractorEventType.ExtractionMetadataError, { + error: { message: 'Failed to extract metadata. Lambda timeout.' }, + }); + }, +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/normalization.test.cases.ts b/build/src/functions/extraction/workers/normalization.test.cases.ts new file mode 100644 index 0000000..58917a1 --- /dev/null +++ b/build/src/functions/extraction/workers/normalization.test.cases.ts @@ -0,0 +1,395 @@ +import { + NormalizedUserData, + NormalizedTaskData, + NormalizedAttachmentData, + NormalizedCommentData, + mockContacts, + mockTasks, + mockAttachments, + mockComments, + createMockContact, + createMockTask, + createMockAttachment, + createMockComment, +} from './normalization.test.helpers'; + +/** + * Test case generators for normalization functions. + * Contains reusable test case configurations and assertion patterns. 
+ */ + +export interface TestCase { + description: string; + input: any; + assertions: (result: any) => void; +} + +/** + * Generates test cases for normalizeUser function + */ +export function generateUserTestCases(): TestCase[] { + return [ + { + description: 'should normalize a complete user contact', + input: mockContacts.complete, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(result.id).toBe('USER123'); + expect(data.full_name).toBe('John Doe'); + expect(data.email).toBe('john.doe@example.com'); + expect(data.title).toBe('Senior Developer'); + expect(result.created_date).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + expect(result.modified_date).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }, + }, + { + description: 'should handle user with only firstName', + input: mockContacts.firstNameOnly, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.full_name).toBe('Jane'); + }, + }, + { + description: 'should handle user with only lastName', + input: mockContacts.lastNameOnly, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.full_name).toBe('Smith'); + }, + }, + { + description: 'should handle user with no name', + input: mockContacts.noName, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.full_name).toBe('Unknown User'); + }, + }, + { + description: 'should handle user with no email', + input: mockContacts.noEmail, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.email).toBe(''); + }, + }, + { + description: 'should handle user with no title', + input: mockContacts.noTitle, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.title).toBeNull(); + }, + }, + { + description: 'should map all fields according to External Domain Metadata', + input: 
createMockContact({ + id: 'USER333', + firstName: 'Alice', + lastName: 'Johnson', + primaryEmail: 'alice.johnson@example.com', + title: 'Product Manager', + }), + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(Object.keys(result.data)).toHaveLength(3); + expect(data).toHaveProperty('full_name'); + expect(data).toHaveProperty('email'); + expect(data).toHaveProperty('title'); + }, + }, + { + description: 'should handle empty string values correctly', + input: mockContacts.emptyStrings, + assertions: (result: any) => { + const data = result.data as NormalizedUserData; + expect(data.full_name).toBe('Unknown User'); + expect(data.email).toBe(''); + expect(data.title).toBeNull(); + }, + }, + ]; +} + +/** + * Generates test cases for normalizeTask function + */ +export function generateTaskTestCases(): TestCase[] { + return [ + { + description: 'should normalize a complete task', + input: mockTasks.complete, + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(result.id).toBe('TASK123'); + expect(result.created_date).toBe('2025-01-01T10:00:00Z'); + expect(result.modified_date).toBe('2025-01-02T15:30:00Z'); + expect(data.title).toBe('Test Task'); + expect(data.description).toEqual(['First line', 'Second line', 'Third line']); + expect(data.status).toBe('active'); + expect(data.permalink).toBe('https://www.wrike.com/open.htm?id=123456'); + expect(data.responsible_ids).toEqual(['USER1']); + }, + }, + { + description: 'should handle task with no description', + input: mockTasks.noDescription, + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(data.description).toEqual([]); + }, + }, + { + description: 'should handle task with empty description', + input: mockTasks.emptyDescription, + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(data.description).toEqual([]); + }, + }, + { + description: 'should handle task with 
no responsible users', + input: mockTasks.noResponsible, + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(data.responsible_ids).toEqual([]); + }, + }, + { + description: 'should take only first responsible user (max_length 1)', + input: createMockTask({ + id: 'TASK111', + title: 'Multiple Responsible Task', + responsibleIds: ['USER1', 'USER2', 'USER3'], + permalink: 'https://www.wrike.com/open.htm?id=111', + }), + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(data.responsible_ids).toEqual(['USER1']); + expect(data.responsible_ids.length).toBe(1); + }, + }, + { + description: 'should filter out empty lines from description', + input: mockTasks.emptyLines, + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(data.description).toEqual(['First line', 'Third line', 'Sixth line']); + }, + }, + { + description: 'should map all fields according to External Domain Metadata', + input: createMockTask({ + id: 'TASK333', + title: 'Complete Task', + description: 'Task description', + permalink: 'https://www.wrike.com/open.htm?id=333', + }), + assertions: (result: any) => { + const data = result.data as NormalizedTaskData; + expect(Object.keys(result.data)).toHaveLength(5); + expect(data).toHaveProperty('title'); + expect(data).toHaveProperty('description'); + expect(data).toHaveProperty('status'); + expect(data).toHaveProperty('permalink'); + expect(data).toHaveProperty('responsible_ids'); + }, + }, + ]; +} + +/** + * Generates test cases for task status mapping + */ +export function generateTaskStatusTestCases(): TestCase[] { + const statuses = ['Active', 'Completed', 'Deferred', 'Cancelled']; + const expectedStatuses = ['active', 'completed', 'deferred', 'cancelled']; + + return statuses.map((status, index) => ({ + description: `should map status ${status} correctly`, + input: createMockTask({ + id: `TASK${index}`, + status, + }), + assertions: (result: 
any) => { + const data = result.data as NormalizedTaskData; + expect(data.status).toBe(expectedStatuses[index]); + }, + })); +} + +/** + * Generates test cases for normalizeAttachment function + */ +export function generateAttachmentTestCases(): TestCase[] { + return [ + { + description: 'should normalize a complete attachment', + input: mockAttachments.complete, + assertions: (result: any) => { + expect(result.id).toBe('ATTACH123'); + expect(result.url).toBe('https://www.wrike.com/attachments/ATTACH123/download/document.pdf'); + expect(result.file_name).toBe('document.pdf'); + expect(result.parent_id).toBe('TASK123'); + expect(result.author_id).toBe('USER1'); + }, + }, + { + description: 'should handle attachment with no URL', + input: mockAttachments.noUrl, + assertions: (result: any) => { + expect(result.url).toBe(''); + }, + }, + { + description: 'should handle attachment with no author', + input: mockAttachments.noAuthor, + assertions: (result: any) => { + expect(result.author_id).toBeUndefined(); + }, + }, + { + description: 'should handle attachment with no task ID', + input: mockAttachments.noTaskId, + assertions: (result: any) => { + expect(result.parent_id).toBe(''); + }, + }, + { + description: 'should map all fields according to Airdrop SDK requirements', + input: createMockAttachment({ + id: 'ATTACH333', + name: 'test.pdf', + url: 'https://example.com/test.pdf', + taskId: 'TASK333', + authorId: 'USER333', + }), + assertions: (result: any) => { + expect(Object.keys(result)).toHaveLength(5); + expect(result).toHaveProperty('id'); + expect(result).toHaveProperty('url'); + expect(result).toHaveProperty('file_name'); + expect(result).toHaveProperty('parent_id'); + expect(result).toHaveProperty('author_id'); + }, + }, + { + description: 'should handle attachments with dimensions', + input: mockAttachments.withDimensions, + assertions: (result: any) => { + expect(result).not.toHaveProperty('width'); + expect(result).not.toHaveProperty('height'); + 
expect(Object.keys(result)).toHaveLength(5); + }, + }, + { + description: 'should handle empty string values correctly', + input: createMockAttachment({ + url: '', + taskId: '', + }), + assertions: (result: any) => { + expect(result.url).toBe(''); + expect(result.parent_id).toBe(''); + }, + }, + ]; +} + +/** + * Generates test cases for normalizeComment function + */ +export function generateCommentTestCases(): TestCase[] { + return [ + { + description: 'should normalize a complete comment', + input: mockComments.complete, + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(result.id).toBe('COMMENT123'); + expect(result.created_date).toBe('2025-01-01T10:00:00Z'); + expect(result.modified_date).toBe('2025-01-01T12:00:00Z'); + expect(data.text).toEqual(['This is a test comment']); + expect(data.author_id).toBe('USER1'); + expect(data.task_id).toBe('TASK123'); + }, + }, + { + description: 'should handle comment with no text', + input: mockComments.noText, + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(data.text).toEqual([]); + }, + }, + { + description: 'should handle comment with empty text', + input: mockComments.emptyText, + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(data.text).toEqual([]); + }, + }, + { + description: 'should handle comment with no task ID', + input: mockComments.noTaskId, + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(data.task_id).toBe(''); + }, + }, + { + description: 'should handle comment with no updated date', + input: mockComments.noUpdatedDate, + assertions: (result: any) => { + expect(result.modified_date).toBe('2025-01-01T10:00:00Z'); + expect(result.modified_date).toBe(result.created_date); + }, + }, + { + description: 'should filter out empty lines from text', + input: mockComments.emptyLines, + assertions: (result: any) => { + const data = 
result.data as NormalizedCommentData; + expect(data.text).toEqual(['First line', 'Third line', 'Fifth line']); + }, + }, + { + description: 'should handle multiline comment text', + input: mockComments.multiline, + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(data.text).toEqual(['Line 1', 'Line 2', 'Line 3']); + }, + }, + { + description: 'should map all fields according to External Domain Metadata', + input: createMockComment({ + id: 'COMMENT333', + text: 'Test comment', + authorId: 'USER333', + taskId: 'TASK333', + }), + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(Object.keys(result.data)).toHaveLength(3); + expect(data).toHaveProperty('text'); + expect(data).toHaveProperty('author_id'); + expect(data).toHaveProperty('task_id'); + }, + }, + { + description: 'should handle empty string values correctly', + input: createMockComment({ + text: '', + taskId: '', + }), + assertions: (result: any) => { + const data = result.data as NormalizedCommentData; + expect(data.text).toEqual([]); + expect(data.task_id).toBe(''); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/extraction/workers/normalization.test.helpers.ts b/build/src/functions/extraction/workers/normalization.test.helpers.ts new file mode 100644 index 0000000..cc0883c --- /dev/null +++ b/build/src/functions/extraction/workers/normalization.test.helpers.ts @@ -0,0 +1,289 @@ +import { WrikeContact, WrikeTask, WrikeAttachment, WrikeComment } from '../../../core/wrike-types'; + +/** + * Helper interfaces for type-safe access to normalized data + */ +export interface NormalizedUserData { + full_name: string; + email: string; + title: string | null; +} + +export interface NormalizedTaskData { + title: string; + description: string[]; + status: string; + permalink: string; + responsible_ids: string[]; +} + +export interface NormalizedAttachmentData { + url: string; + file_name: string; + 
parent_id: string; + author_id?: string; +} + +export interface NormalizedCommentData { + text: string[]; + author_id: string; + task_id: string; +} + +/** + * Mock data factories for creating test objects + */ + +export function createMockContact(overrides?: Partial<WrikeContact>): WrikeContact { + return { + id: 'USER123', + firstName: 'John', + lastName: 'Doe', + type: 'Person', + deleted: false, + primaryEmail: 'john.doe@example.com', + title: 'Senior Developer', + ...overrides, + }; +} + +export function createMockTask(overrides?: Partial<WrikeTask>): WrikeTask { + return { + id: 'TASK123', + accountId: 'ACCOUNT1', + title: 'Test Task', + description: 'First line\nSecond line\nThird line', + briefDescription: 'Brief', + parentIds: ['FOLDER1'], + superParentIds: [], + sharedIds: [], + responsibleIds: ['USER1', 'USER2'], + status: 'Active', + importance: 'Normal', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-02T15:30:00Z', + dates: { + type: 'Planned', + duration: 86400000, + start: '2025-01-01', + due: '2025-01-02', + }, + scope: 'WsTask', + authorIds: ['USER1'], + customStatusId: 'STATUS1', + hasAttachments: false, + permalink: 'https://www.wrike.com/open.htm?id=123456', + priority: '02', + followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], + ...overrides, + }; +} + +export function createMockAttachment(overrides?: Partial<WrikeAttachment>): WrikeAttachment { + return { + id: 'ATTACH123', + authorId: 'USER1', + name: 'document.pdf', + createdDate: '2025-01-01T10:00:00Z', + version: '1', + size: 1024, + type: 'application/pdf', + url: 'https://www.wrike.com/attachments/ATTACH123/download/document.pdf', + taskId: 'TASK123', + ...overrides, + }; +} + +export function createMockComment(overrides?: Partial<WrikeComment>): WrikeComment { + return { + id: 'COMMENT123', + authorId: 'USER1', + text: 'This is a test comment', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-01T12:00:00Z', + taskId: 'TASK123', + 
...overrides, + }; +} + +/** + * Predefined mock contacts for common test scenarios + */ +export const mockContacts = { + complete: createMockContact(), + + firstNameOnly: createMockContact({ + id: 'USER456', + firstName: 'Jane', + lastName: undefined, + primaryEmail: 'jane@example.com', + }), + + lastNameOnly: createMockContact({ + id: 'USER789', + firstName: undefined, + lastName: 'Smith', + primaryEmail: 'smith@example.com', + }), + + noName: createMockContact({ + id: 'USER000', + firstName: undefined, + lastName: undefined, + primaryEmail: 'unknown@example.com', + }), + + noEmail: createMockContact({ + id: 'USER111', + firstName: 'Test', + lastName: 'User', + primaryEmail: undefined, + }), + + noTitle: createMockContact({ + id: 'USER222', + firstName: 'Test', + lastName: 'User', + primaryEmail: 'test@example.com', + title: undefined, + }), + + emptyStrings: createMockContact({ + id: 'USER444', + firstName: '', + lastName: '', + primaryEmail: '', + title: '', + }), +}; + +/** + * Predefined mock tasks for common test scenarios + */ +export const mockTasks = { + complete: createMockTask(), + + noDescription: createMockTask({ + id: 'TASK456', + title: 'No Description Task', + description: undefined, + briefDescription: '', + status: 'Completed', + permalink: 'https://www.wrike.com/open.htm?id=456', + }), + + emptyDescription: createMockTask({ + id: 'TASK789', + title: 'Empty Description Task', + description: '', + briefDescription: '', + status: 'Deferred', + permalink: 'https://www.wrike.com/open.htm?id=789', + }), + + noResponsible: createMockTask({ + id: 'TASK000', + title: 'Unassigned Task', + description: 'Task description', + responsibleIds: [], + status: 'Cancelled', + permalink: 'https://www.wrike.com/open.htm?id=000', + }), + + emptyLines: createMockTask({ + id: 'TASK222', + title: 'Task with Empty Lines', + description: 'First line\n\nThird line\n\n\nSixth line', + permalink: 'https://www.wrike.com/open.htm?id=222', + }), +}; + +/** + * Predefined mock 
attachments for common test scenarios + */ +export const mockAttachments = { + complete: createMockAttachment(), + + noUrl: createMockAttachment({ + id: 'ATTACH456', + name: 'image.png', + url: undefined, + taskId: 'TASK456', + }), + + noAuthor: createMockAttachment({ + id: 'ATTACH789', + name: 'spreadsheet.xlsx', + authorId: undefined, + taskId: 'TASK789', + }), + + noTaskId: createMockAttachment({ + id: 'ATTACH000', + name: 'presentation.pptx', + taskId: undefined, + }), + + withDimensions: createMockAttachment({ + id: 'ATTACH111', + name: 'photo.jpg', + type: 'image/jpeg', + url: 'https://www.wrike.com/attachments/ATTACH111/download/photo.jpg', + taskId: 'TASK111', + width: 1920, + height: 1080, + }), +}; + +/** + * Predefined mock comments for common test scenarios + */ +export const mockComments = { + complete: createMockComment(), + + noText: createMockComment({ + id: 'COMMENT456', + text: undefined as any, + authorId: 'USER2', + taskId: 'TASK456', + }), + + emptyText: createMockComment({ + id: 'COMMENT789', + text: '', + authorId: 'USER3', + taskId: 'TASK789', + }), + + noTaskId: createMockComment({ + id: 'COMMENT000', + text: 'Comment without task', + taskId: undefined, + }), + + noUpdatedDate: createMockComment({ + id: 'COMMENT111', + text: 'Comment without update', + updatedDate: undefined, + }), + + emptyLines: createMockComment({ + id: 'COMMENT222', + text: 'First line\n\nThird line\n\n\nFifth line', + authorId: 'USER4', + taskId: 'TASK222', + }), + + multiline: createMockComment({ + id: 'COMMENT333', + text: 'Line 1\nLine 2\nLine 3', + authorId: 'USER5', + taskId: 'TASK333', + }), +}; \ No newline at end of file diff --git a/build/src/functions/extraction/workers/normalization.test.ts b/build/src/functions/extraction/workers/normalization.test.ts new file mode 100644 index 0000000..b0b07fb --- /dev/null +++ b/build/src/functions/extraction/workers/normalization.test.ts @@ -0,0 +1,60 @@ +import { normalizeUser, normalizeTask, normalizeAttachment, 
normalizeComment } from './normalization'; +import { + generateUserTestCases, + generateTaskTestCases, + generateTaskStatusTestCases, + generateAttachmentTestCases, + generateCommentTestCases, +} from './normalization.test.cases'; + +describe('normalization functions', () => { + describe('normalizeUser', () => { + const userTestCases = generateUserTestCases(); + userTestCases.forEach(({ description, input, assertions }) => { + it(description, () => { + const result = normalizeUser(input); + assertions(result); + }); + }); + }); + + describe('normalizeTask', () => { + const taskTestCases = generateTaskTestCases(); + taskTestCases.forEach(({ description, input, assertions }) => { + it(description, () => { + const result = normalizeTask(input); + assertions(result); + }); + }); + + describe('status mapping', () => { + const statusTestCases = generateTaskStatusTestCases(); + statusTestCases.forEach(({ description, input, assertions }) => { + it(description, () => { + const result = normalizeTask(input); + assertions(result); + }); + }); + }); + }); + + describe('normalizeAttachment', () => { + const attachmentTestCases = generateAttachmentTestCases(); + attachmentTestCases.forEach(({ description, input, assertions }) => { + it(description, () => { + const result = normalizeAttachment(input); + assertions(result); + }); + }); + }); + + describe('normalizeComment', () => { + const commentTestCases = generateCommentTestCases(); + commentTestCases.forEach(({ description, input, assertions }) => { + it(description, () => { + const result = normalizeComment(input); + assertions(result); + }); + }); + }); +}); \ No newline at end of file diff --git a/build/src/functions/extraction/workers/normalization.ts b/build/src/functions/extraction/workers/normalization.ts new file mode 100644 index 0000000..c9ad112 --- /dev/null +++ b/build/src/functions/extraction/workers/normalization.ts @@ -0,0 +1,137 @@ +import { NormalizedItem, NormalizedAttachment } from '@devrev/ts-adaas'; 
+import { WrikeContact, WrikeTask, WrikeAttachment, WrikeComment } from '../../../core/wrike-types'; + +/** + * Normalizes a Wrike contact (user) to a NormalizedItem. + * Maps fields according to External Domain Metadata: + * - full_name: firstName + lastName + * - email: primaryEmail + * - title: title + * + * @param contact - Wrike contact to normalize + * @returns Normalized user item + */ +export function normalizeUser(record: object): NormalizedItem { + const contact = record as WrikeContact; + + // Construct full name from firstName and lastName + const fullName = [contact.firstName, contact.lastName] + .filter(Boolean) + .join(' ') || 'Unknown User'; + + return { + id: contact.id, + created_date: new Date().toISOString(), // Wrike doesn't provide created date for contacts + modified_date: new Date().toISOString(), // Wrike doesn't provide modified date for contacts + data: { + full_name: fullName, + email: contact.primaryEmail || '', + title: contact.title || null, + }, + }; +} + +/** + * Converts a plain text string to rich text format by splitting on newlines. + * Filters out empty lines and returns an array of strings. + * + * @param text - Plain text string to convert + * @returns Array of non-empty text lines + */ +function convertToRichText(text: string | undefined): string[] { + if (!text) { + return []; + } + return text.split('\n').filter(line => line.trim() !== ''); +} + +/** + * Maps Wrike task status to External Domain Metadata enum values. + * + * @param status - Wrike task status + * @returns Normalized status value (lowercase) + */ +function normalizeStatus(status: string): string { + // Map to lowercase for enum matching + return status.toLowerCase(); +} + +/** + * Normalizes a Wrike task to a NormalizedItem. 
+ * Maps fields according to External Domain Metadata: + * - title: task title + * - description: task description (rich text) + * - status: task status (enum) + * - permalink: task URL + * - responsible_ids: array of responsible user IDs (max length 1) + * + * @param record - Wrike task to normalize + * @returns Normalized task item + */ +export function normalizeTask(record: object): NormalizedItem { + const task = record as WrikeTask; + + return { + id: task.id, + created_date: task.createdDate, + modified_date: task.updatedDate, + data: { + title: task.title, + description: convertToRichText(task.description), + status: normalizeStatus(task.status), + permalink: task.permalink, + // responsible_ids is an array with max_length 1, so take first element or empty array + responsible_ids: task.responsibleIds && task.responsibleIds.length > 0 + ? [task.responsibleIds[0]] + : [], + }, + }; +} + +/** + * Normalizes a Wrike attachment to a NormalizedAttachment. + * Maps fields according to the NormalizedAttachment interface: + * - url: attachment URL + * - id: attachment ID + * - file_name: attachment name + * - parent_id: task ID + * - author_id: author ID (optional) + * @param record - Wrike attachment to normalize + * @returns Normalized attachment + */ +export function normalizeAttachment(record: object): NormalizedAttachment { + const attachment = record as WrikeAttachment; + + return { + id: attachment.id, + url: attachment.url || '', + file_name: attachment.name, + parent_id: attachment.taskId || '', + author_id: attachment.authorId, + }; +} + +/** + * Normalizes a Wrike comment to a NormalizedItem. 
+ * Maps fields according to External Domain Metadata: + * - text: comment text (rich text) + * - author_id: author user ID (reference to users) + * - task_id: task ID (reference to tasks) + * + * @param record - Wrike comment to normalize + * @returns Normalized comment item + */ +export function normalizeComment(record: object): NormalizedItem { + const comment = record as WrikeComment; + + return { + id: comment.id, + created_date: comment.createdDate, + modified_date: comment.updatedDate || comment.createdDate, + data: { + text: convertToRichText(comment.text), + author_id: comment.authorId, + task_id: comment.taskId || '', + }, + }; +} \ No newline at end of file diff --git a/build/src/functions/extraction/wrike-api-client.ts b/build/src/functions/extraction/wrike-api-client.ts deleted file mode 100644 index a894833..0000000 --- a/build/src/functions/extraction/wrike-api-client.ts +++ /dev/null @@ -1,198 +0,0 @@ -import axios from 'axios'; - -/** - * Interface for a Wrike task - */ -export interface WrikeTask { - id: string; - title: string; - description?: string; - status: string; - importance: string; - created_date: string; - updated_date: string; - completed_date?: string; - due_date?: string; - parent_ids: string[]; - responsible_ids?: string[]; - author_ids?: string[]; - custom_status_id?: string; - permalink?: string; -} - -/** - * Interface for a Wrike contact - */ -export interface WrikeContact { - id: string; - first_name: string; - last_name: string; - type: string; - profiles?: { - email?: string; - avatar_url?: string; - timezone?: string; - locale?: string; - }[]; - title?: string; - company_name?: string; - phone?: string; - location?: string; - is_deleted?: boolean; - me?: boolean; -} - -/** - * Client for interacting with the Wrike API - */ -export class WrikeApiClient { - private readonly apiEndpoint: string = 'https://www.wrike.com/api/v4'; - private readonly apiKey: string; - private readonly timeout: number = 60000; // Increased timeout 
for API calls - - /** - * Creates a new instance of the WrikeApiClient - * @param apiKey The Wrike API key - */ - constructor(apiKey: string) { - this.apiKey = apiKey; - } - - /** - * Fetches contacts from Wrike API - * @param spaceId The Space ID - * @returns Array of WrikeContact objects - */ - async fetchContacts(spaceId: string): Promise { - try { - console.log(`Fetching space members for space ID: ${spaceId}`); - const spaceResponse = await axios.get(`${this.apiEndpoint}/spaces/${encodeURIComponent(spaceId)}`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - params: { - fields: '[members]' - }, - timeout: this.timeout - }); - - if (spaceResponse.status !== 200) { - throw new Error(`Failed to fetch space members with status ${spaceResponse.status}: ${JSON.stringify(spaceResponse.data || 'No response data')}`); - } - - if (!spaceResponse.data || !spaceResponse.data.data || !Array.isArray(spaceResponse.data.data) || spaceResponse.data.data.length === 0) { - throw new Error('Invalid response format from Wrike API for space members'); - } - - // Extract member IDs from the space response - const spaceData = spaceResponse.data.data[0]; - - if (!spaceData.members || !Array.isArray(spaceData.members)) { - return []; - } - - const memberIds = spaceData.members.map((member: any) => member.id); - - if (memberIds.length === 0) { - return []; - } - - console.log(`Fetching contact details for ${memberIds.length} members`); - console.log(`Contact IDs: ${memberIds.join(', ')}`); - const contactsResponse = await axios.get(`${this.apiEndpoint}/contacts/${memberIds.join(',')}`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - timeout: this.timeout - }); - - if (contactsResponse.status !== 200) { - throw new Error(`Failed to fetch contact details with status ${contactsResponse.status}: ${JSON.stringify(contactsResponse.data)}`); - } - - if (!contactsResponse.data || !contactsResponse.data.data || !Array.isArray(contactsResponse.data.data)) { - 
throw new Error('Invalid response format from Wrike API for contacts'); - } - - // Transform the response data into our contact format - const contacts = contactsResponse.data.data.map((contact: any) => ({ - id: contact.id, - first_name: contact.firstName || '', - last_name: contact.lastName || '', - type: contact.type || '', - profiles: contact.profiles ? contact.profiles.map((profile: any) => ({ - email: profile.email, - avatar_url: profile.avatarUrl, - timezone: profile.timezone, - locale: profile.locale - })) : undefined, - title: contact.title, - company_name: contact.companyName, - phone: contact.phone, - location: contact.location, - is_deleted: contact.deleted, - me: contact.me - })); - - console.log(`Successfully transformed ${contacts.length} contacts`); - return contacts; - } catch (error) { - console.error('Error in fetchContacts:', error instanceof Error ? error.message : JSON.stringify(error)); - throw new Error(`Error fetching contacts: ${error instanceof Error ? error.message : String(error)}`); - } - } - - /** - * Fetches tasks for a project from Wrike API - * @param projectId The Project ID - * @returns Array of WrikeTask objects - */ - async fetchTasks(projectId: string): Promise { - try { - console.log(`Fetching tasks for project ID: ${projectId}`); - const response = await axios.get(`${this.apiEndpoint}/folders/${encodeURIComponent(projectId)}/tasks`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - params: { - descendants: true, - subTasks: true - }, - timeout: this.timeout - }); - - if (response.status !== 200) { - throw new Error(`Failed to fetch tasks with status ${response.status}: ${JSON.stringify(response.data)}`); - } - - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - throw new Error('Invalid response format from Wrike API for tasks'); - } - - // Transform the response data into our task format - console.log(`Transforming ${response.data.data.length} tasks`); - const tasks = 
response.data.data.map((task: any) => ({ - id: task.id, - title: task.title, - description: task.description, - status: task.status, - importance: task.importance, - created_date: task.createdDate || '', - updated_date: task.updatedDate || '', - completed_date: task.completedDate, - due_date: task.dueDate, - parent_ids: task.parentIds || [], - responsible_ids: task.responsibleIds, - author_ids: task.authorIds, - custom_status_id: task.customStatusId, - permalink: task.permalink - })); - - console.log(`Successfully transformed ${tasks.length} tasks`); - return tasks; - } catch (error) { - console.error('Error in fetchTasks:', error); - throw new Error(`Error fetching tasks: ${error instanceof Error ? error.message : String(error)}`); - } - } -} \ No newline at end of file diff --git a/build/src/functions/extraction_external_sync_unit_check/index.test.ts b/build/src/functions/extraction_external_sync_unit_check/index.test.ts deleted file mode 100644 index a063484..0000000 --- a/build/src/functions/extraction_external_sync_unit_check/index.test.ts +++ /dev/null @@ -1,171 +0,0 @@ -// Mock the spawn function to prevent actual worker spawning during tests -const mockSpawn = jest.fn().mockResolvedValue(undefined); -jest.mock('../generate_initial_mapping/initial_domain_mapping.json', () => ({})); - -// Mock the @devrev/ts-adaas module -jest.mock('@devrev/ts-adaas', () => { - const actual = jest.requireActual('@devrev/ts-adaas'); - return { - ...actual, - spawn: mockSpawn - }; -}); - -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -describe('External Sync Units Extraction Check Function', () => { - // Helper function to create a mock AirdropEvent - const createMockEvent = (eventType: EventType): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 
'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }); - - it('should return success with valid_external_sync_unit_events=true for ExtractionExternalSyncUnitsStart event type', async () => { - // Test with the external sync units extraction event type - const mockEvent = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'External sync units extraction check function successfully invoked', - valid_external_sync_unit_events: true - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalled(); - expect(mockSpawn.mock.calls[0][0]).toHaveProperty('initialDomainMapping'); - }); - - it('should return success with valid_external_sync_unit_events=false for other extraction event types', async () => { - // Test with a different 
extraction event type - const mockEvent = createMockEvent(EventType.ExtractionDataStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'External sync units extraction check function successfully invoked', - valid_external_sync_unit_events: false - }); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an error - await expect(run(invalidInput)).rejects.toThrow('Invalid input: events must be an array'); - - // Restore console.error - jest.restoreAllMocks(); - }); - - it('should throw an error if an event is missing required fields', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent; - - // Expect the function to throw an error - await expect(run([invalidEvent])).rejects.toThrow('missing required field \'context\''); - - // Restore console.error - jest.restoreAllMocks(); - }); - - it('should handle multiple events correctly', async () => { - // Create multiple events with different event types - const event1 = createMockEvent(EventType.ExtractionExternalSyncUnitsStart); - const event2 = createMockEvent(EventType.ExtractionMetadataStart); - - // Call the function with multiple events - const result = await run([event1, event2]); - - // Verify the result - should be true because at least one event is ExtractionExternalSyncUnitsStart - expect(result).toEqual({ - status: 'success', - message: 'External sync units extraction check function 
successfully invoked', - valid_external_sync_unit_events: true - }); - - // Verify that spawn was called with the correct parameters - expect(mockSpawn).toHaveBeenCalled(); - }); - - it('should handle empty events array', async () => { - // Mock console.log to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - - // Call the function with an empty array - const result = await run([]); - - // Verify the result - should be false because there are no events - expect(result).toEqual({ - status: 'success', - message: 'External sync units extraction check function successfully invoked', - valid_external_sync_unit_events: false - }); - - // Restore console.log - jest.restoreAllMocks(); - }); -}); \ No newline at end of file diff --git a/build/src/functions/extraction_external_sync_unit_check/index.ts b/build/src/functions/extraction_external_sync_unit_check/index.ts deleted file mode 100644 index fe751d2..0000000 --- a/build/src/functions/extraction_external_sync_unit_check/index.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { AirdropEvent, EventType, spawn } from '@devrev/ts-adaas'; -import path from 'path'; -import initialDomainMapping from '../generate_initial_mapping/initial_domain_mapping.json'; - -/** - * A function that checks if the external sync units extraction workflow can be invoked. 
- * - * @param events - Array of AirdropEvent objects - * @returns A response indicating whether the external sync units extraction workflow can be invoked - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - valid_external_sync_unit_events: boolean -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - // Validate that each event is a valid AirdropEvent with all required fields - events.forEach((event, index) => { - if (!event || typeof event !== 'object') { - throw new Error(`Invalid event at index ${index}: event must be a valid AirdropEvent object`); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error(`Invalid event at index ${index}: missing required field 'context'`); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.secrets.service_account_token'`); - } - - if (!event.context.snap_in_version_id) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.snap_in_version_id'`); - } - - if (!event.payload) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload'`); - } - - if (!event.payload.event_context) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload.event_context'`); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - throw new Error(`Invalid event at index ${index}: missing required field 'execution_metadata.devrev_endpoint'`); - } - }); - - // Check if any of the events are specifically external sync units extraction events - const hasExternalSyncUnitEvents = events.some(event => - event.payload && - event.payload.event_type === EventType.ExtractionExternalSyncUnitsStart - ); - - // Log the 
event for debugging purposes - console.log('External sync units extraction check function invoked with events:', JSON.stringify(events)); - - // For each ExtractionExternalSyncUnitsStart event, spawn a worker to process it - for (const event of events) { - if (event.payload && event.payload.event_type === EventType.ExtractionExternalSyncUnitsStart) { - // Define the worker path - make sure to use .ts extension as required by the SDK - const workerPath = path.resolve(__dirname, 'worker.ts'); - - // Define initial state for the worker - const initialState = {}; - - // Spawn the worker to process the event - note: no options key in the parameter object - await spawn({ - event: { - ...event, - payload: { ...event.payload } - }, - initialDomainMapping, - initialState, - workerPath - }); - } - } - - // Return a response - return { - status: 'success', - message: 'External sync units extraction check function successfully invoked', - valid_external_sync_unit_events: hasExternalSyncUnitEvents - }; - } catch (error) { - // Log the error for debugging - console.error('Error in external sync units extraction check function:', error); - - // Re-throw the error to be handled by the caller - throw error; - } -} \ No newline at end of file diff --git a/build/src/functions/extraction_external_sync_unit_check/worker.ts b/build/src/functions/extraction_external_sync_unit_check/worker.ts deleted file mode 100644 index d07d543..0000000 --- a/build/src/functions/extraction_external_sync_unit_check/worker.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { ExtractorEventType, ExternalSyncUnit, processTask } from '@devrev/ts-adaas'; - -/** - * Worker for handling external sync units extraction - * This worker is responsible for emitting the EXTRACTION_EXTERNAL_SYNC_UNITS_DONE event - */ -processTask({ - task: async ({ adapter }) => { - try { - console.log('External sync units extraction worker started'); - - // Create a sample external sync unit - // In a real implementation, this would fetch 
data from an external system - const externalSyncUnits: ExternalSyncUnit[] = [ - { - id: 'sample-unit-1', - name: 'Sample Unit 1', - description: 'This is a sample external sync unit', - item_count: 10, - item_type: 'tasks' - } - ]; - - // Emit the DONE event with the external sync units - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsDone, { - external_sync_units: externalSyncUnits, - }); - - console.log('External sync units extraction completed successfully'); - } catch (error) { - console.error('Error in external sync units extraction worker:', error); - - // Emit an error event if something goes wrong - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { - error: { - message: error instanceof Error ? error.message : 'Unknown error in external sync units extraction', - }, - }); - } - }, - onTimeout: async ({ adapter }) => { - console.error('External sync units extraction worker timed out'); - - // Emit an error event if the worker times out - await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { - error: { - message: 'External sync units extraction timed out', - }, - }); - }, -}); \ No newline at end of file diff --git a/build/src/functions/extraction_workflow_check/index.test.ts b/build/src/functions/extraction_workflow_check/index.test.ts deleted file mode 100644 index 5292c65..0000000 --- a/build/src/functions/extraction_workflow_check/index.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -describe('Extraction Workflow Check Function', () => { - // Helper function to create a mock AirdropEvent - const createMockEvent = (eventType: EventType): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - 
key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }); - - it('should return success with valid_extraction_events=true for extraction event types', async () => { - // Test with an extraction event type - const mockEvent = createMockEvent(EventType.ExtractionDataStart); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction workflow check function successfully invoked', - valid_extraction_events: true - }); - }); - - it('should return success with valid_extraction_events=false for non-extraction event types', async () => { - // Create a mock event with a non-extraction event type - const mockEvent = createMockEvent(EventType.ExtractionMetadataStart); - // Override with a non-extraction event type (this is just for test purposes) - mockEvent.payload.event_type = 'SOME_OTHER_EVENT_TYPE' as EventType; - - // Call the function with the mock event - const result = 
await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction workflow check function successfully invoked', - valid_extraction_events: false - }); - }); - - it('should validate all extraction event types', async () => { - // Test all extraction event types - const extractionEventTypes = [ - EventType.ExtractionExternalSyncUnitsStart, - EventType.ExtractionMetadataStart, - EventType.ExtractionDataStart, - EventType.ExtractionDataContinue, - EventType.ExtractionDataDelete, - EventType.ExtractionAttachmentsStart, - EventType.ExtractionAttachmentsContinue, - EventType.ExtractionAttachmentsDelete - ]; - - for (const eventType of extractionEventTypes) { - const mockEvent = createMockEvent(eventType); - const result = await run([mockEvent]); - - expect(result.valid_extraction_events).toBe(true); - } - }); - - it('should throw an error if events parameter is not an array', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an error - await expect(run(invalidInput)).rejects.toThrow('Invalid input: events must be an array'); - - // Restore console.error - jest.restoreAllMocks(); - }); - - it('should throw an error if an event is missing required fields', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Create an invalid event missing context - const invalidEvent = { - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent; - - // Expect the function to throw an error - await expect(run([invalidEvent])).rejects.toThrow('missing required field \'context\''); - - // Restore console.error - jest.restoreAllMocks(); - }); - - it('should handle multiple events correctly', async () => { - // Create multiple 
events with different event types - const event1 = createMockEvent(EventType.ExtractionDataStart); - const event2 = createMockEvent(EventType.ExtractionMetadataStart); - - // Call the function with multiple events - const result = await run([event1, event2]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Extraction workflow check function successfully invoked', - valid_extraction_events: true - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/extraction_workflow_check/index.ts b/build/src/functions/extraction_workflow_check/index.ts deleted file mode 100644 index 950ae1f..0000000 --- a/build/src/functions/extraction_workflow_check/index.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -/** - * A function that checks if the data extraction workflow can be invoked. - * - * @param events - Array of AirdropEvent objects - * @returns A success message indicating the extraction workflow can be invoked - */ -export async function run(events: AirdropEvent[]): Promise<{ status: string, message: string, valid_extraction_events: boolean }> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - // Validate that each event is a valid AirdropEvent with all required fields - events.forEach((event, index) => { - if (!event || typeof event !== 'object') { - throw new Error(`Invalid event at index ${index}: event must be a valid AirdropEvent object`); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error(`Invalid event at index ${index}: missing required field 'context'`); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.secrets.service_account_token'`); - } - - if 
(!event.context.snap_in_version_id) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.snap_in_version_id'`); - } - - if (!event.payload) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload'`); - } - - if (!event.payload.event_context) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload.event_context'`); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - throw new Error(`Invalid event at index ${index}: missing required field 'execution_metadata.devrev_endpoint'`); - } - }); - - // Check if any of the events are extraction-related events - const extractionEventTypes = [ - EventType.ExtractionExternalSyncUnitsStart, - EventType.ExtractionMetadataStart, - EventType.ExtractionDataStart, - EventType.ExtractionDataContinue, - EventType.ExtractionDataDelete, - EventType.ExtractionAttachmentsStart, - EventType.ExtractionAttachmentsContinue, - EventType.ExtractionAttachmentsDelete - ]; - - const hasExtractionEvents = events.some(event => - event.payload && - event.payload.event_type && - extractionEventTypes.includes(event.payload.event_type) - ); - - // Log the event for debugging purposes - console.log('Extraction workflow check function invoked with events:', JSON.stringify(events)); - - // Return a success response - return { - status: 'success', - message: 'Extraction workflow check function successfully invoked', - valid_extraction_events: hasExtractionEvents - }; - } catch (error) { - // Log the error for debugging - console.error('Error in extraction workflow check function:', error); - - // Re-throw the error to be handled by the caller - throw error; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/api-client.ts b/build/src/functions/fetch_contacts/api-client.ts deleted file mode 100644 index cecc008..0000000 --- a/build/src/functions/fetch_contacts/api-client.ts +++ /dev/null @@ -1,133 +0,0 
@@ -import axios from 'axios'; - -/** - * Interface for a Wrike contact - */ -export interface WrikeContact { - id: string; - first_name: string; - last_name: string; - type: string; - profiles?: { - email?: string; - avatar_url?: string; - timezone?: string; - locale?: string; - }[]; - title?: string; - company_name?: string; - phone?: string; - location?: string; - is_deleted?: boolean; - me?: boolean; -} - -/** - * Client for interacting with the Wrike API - */ -export class WrikeApiClient { - private readonly apiEndpoint: string = 'https://www.wrike.com/api/v4'; - private readonly apiKey: string; - private readonly timeout: number = 10000; - - /** - * Creates a new instance of the WrikeApiClient - * @param apiKey The Wrike API key - */ - constructor(apiKey: string) { - this.apiKey = apiKey; - } - - /** - * Fetches members of a space - * @param spaceId The ID of the space - * @returns An array of member IDs - */ - async fetchSpaceMembers(spaceId: string): Promise { - const response = await axios.get(`${this.apiEndpoint}/spaces/${spaceId}`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - params: { - fields: '[members]' - }, - timeout: this.timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - throw new Error(`Failed to fetch space members with status ${response.status}`); - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data) || response.data.data.length === 0) { - throw new Error('Invalid response format from Wrike API for space members'); - } - - // Extract member IDs from the space response - const spaceData = response.data.data[0]; // Get the first (and should be only) space - - if (!spaceData.members || !Array.isArray(spaceData.members)) { - return []; - } - - return spaceData.members.map((member: any) => member.id); - } - - /** - * Fetches contact details for a list of member IDs - * @param memberIds Array of member IDs - * @returns Array 
of WrikeContact objects - */ - async fetchContactDetails(memberIds: string[]): Promise { - if (memberIds.length === 0) { - return []; - } - - const response = await axios.get(`${this.apiEndpoint}/contacts/${memberIds.join(',')}`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - timeout: this.timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - throw new Error(`Failed to fetch contact details with status ${response.status}`); - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - throw new Error('Invalid response format from Wrike API for contacts'); - } - - // Transform the response data into our contact format - return response.data.data.map((contact: any) => this.transformContactData(contact)); - } - - /** - * Transforms raw contact data from the API into our WrikeContact format - * @param contact Raw contact data from the API - * @returns Transformed WrikeContact object - */ - private transformContactData(contact: any): WrikeContact { - return { - id: contact.id, - first_name: contact.firstName || '', - last_name: contact.lastName || '', - type: contact.type || '', - profiles: contact.profiles ? 
contact.profiles.map((profile: any) => ({ - email: profile.email, - avatar_url: profile.avatarUrl, - timezone: profile.timezone, - locale: profile.locale - })) : undefined, - title: contact.title, - company_name: contact.companyName, - phone: contact.phone, - location: contact.location, - is_deleted: contact.deleted, - me: contact.me - }; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/index.test.ts b/build/src/functions/fetch_contacts/index.test.ts deleted file mode 100644 index 0b8a7e2..0000000 --- a/build/src/functions/fetch_contacts/index.test.ts +++ /dev/null @@ -1,71 +0,0 @@ -// Mock axios before any imports -jest.mock('axios'); - -// Import the test utilities and helpers -import { createMockEvent } from './test-utils'; -import { setupAxiosMocks, setupTestEnvironment, cleanupTestEnvironment } from './test-helpers'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import { run } from './index'; -import { testCases } from './test-cases'; - -describe('Fetch Contacts Function', () => { - // Set up axios mocks - const { mockGet } = setupAxiosMocks(); - - beforeEach(() => { - // Clear all mocks before each test - jest.clearAllMocks(); - - // Set up test environment - setupTestEnvironment(); - }); - - afterEach(() => { - // Clean up test environment - cleanupTestEnvironment(); - }); - - // Add a test for API call parameters verification - it('should call the Wrike API with correct parameters', async () => { - // Use the first test case which has a successful API call - const testCase = testCases[0]; - testCase.setup(mockGet); - - await run(testCase.input); - - // Verify first API call to get space members - expect(mockGet).toHaveBeenCalledWith( - 'https://www.wrike.com/api/v4/spaces/IEACW7SVI4O6BDQE', - expect.objectContaining({ - headers: { - 'Authorization': 'Bearer mock-api-key' - }, - params: { - fields: '[members]' - }, - timeout: 10000 - }) - ); - - // Verify second API call to get contact details - 
expect(mockGet).toHaveBeenCalledWith( - 'https://www.wrike.com/api/v4/contacts/KUAFY3BJ,KUAFZBCJ', - expect.objectContaining({ - headers: { - 'Authorization': 'Bearer mock-api-key' - }, - timeout: 10000 - }) - ); - }); - - // Generate tests from test cases - testCases.forEach(testCase => { - it(testCase.name, async () => { - testCase.setup(mockGet); - - const result = await run(testCase.input); - expect(result).toEqual(testCase.expectedResult); - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/index.ts b/build/src/functions/fetch_contacts/index.ts deleted file mode 100644 index fe5e387..0000000 --- a/build/src/functions/fetch_contacts/index.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; -import { WrikeApiClient, WrikeContact } from './api-client'; - -/** - * A function that fetches the list of contacts from a Wrike space. - * - * @param events - Array of AirdropEvent objects - * @returns A response containing the list of contacts - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - contacts?: WrikeContact[], - error?: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - if (events.length === 0) { - throw new Error('Invalid input: events array is empty'); - } - - // Use the first event for the check - const event = events[0]; - - // Validate that the event is a valid AirdropEvent with all required fields - if (!event || typeof event !== 'object') { - throw new Error('Invalid event: event must be a valid AirdropEvent object'); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error('Invalid event: missing required field \'context\''); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new 
Error('Invalid event: missing required field \'context.secrets.service_account_token\''); - } - - if (!event.payload) { - throw new Error('Invalid event: missing required field \'payload\''); - } - - if (!event.payload.connection_data) { - throw new Error('Invalid event: missing required field \'payload.connection_data\''); - } - - if (!event.payload.connection_data.key) { - throw new Error('Invalid event: missing required field \'payload.connection_data.key\''); - } - - if (!event.payload.connection_data.org_id) { - throw new Error('Invalid event: missing required field \'payload.connection_data.org_id\''); - } - - // Extract the Wrike API key and Space ID - const apiKey = event.payload.connection_data.key; - const spaceId = event.payload.connection_data.org_id; - - // Log the attempt for debugging purposes - console.log('Attempting to fetch space members from Wrike API'); - - // Create a new Wrike API client - const apiClient = new WrikeApiClient(apiKey); - - try { - // Step 1: Fetch space members - const memberIds = await apiClient.fetchSpaceMembers(spaceId); - - if (memberIds.length === 0) { - return { - status: 'success', - message: 'No members found in the space', - contacts: [] - }; - } - - // Log the progress for debugging purposes - console.log(`Found ${memberIds.length} members in the space, fetching contact details`); - - // Step 2: Fetch contact details - const contacts = await apiClient.fetchContactDetails(memberIds); - - // Log the success for debugging purposes - console.log(`Successfully fetched ${contacts.length} contacts from Wrike API`); - - // Return a success response with the contacts - return { - status: 'success', - message: `Successfully fetched ${contacts.length} contacts from Wrike API`, - contacts - }; - } catch (apiError: any) { - // Handle specific API errors - if (apiError instanceof Error) { - // Check for specific error messages from the API client - if (apiError.message.includes('Failed to fetch space members with status')) { - 
return { - status: 'error', - message: apiError.message, - error: 'Received status code 403' - }; - } else if (apiError.message.includes('Failed to fetch contact details with status')) { - return { - status: 'error', - message: apiError.message, - error: 'Received status code 403' - }; - } else if (apiError.message.includes('Invalid response format from Wrike API for space members')) { - return { - status: 'error', - message: 'Invalid response format from Wrike API for space members', - error: 'Response data is not in the expected format' - }; - } else if (apiError.message.includes('Invalid response format from Wrike API for contacts')) { - return { - status: 'error', - message: 'Invalid response format from Wrike API for contacts', - error: 'Response data is not in the expected format' - }; - } - return { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: apiError.message - }; - } - throw apiError; // Re-throw unexpected errors - } - } catch (error) { - // For validation errors, use the specific error message - if (error instanceof Error) { - // Check if this is a validation error (from our own validation checks) - if (error.message.startsWith('Invalid input:') || - error.message.startsWith('Invalid event:')) { - return { - status: 'error', - message: error.message, - error: error.message - }; - } - } - - console.error('Error in fetch contacts function:', error); - - // Check if the error is an Axios error with a response - if (axios.isAxiosError(error) && error.response) { - return { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: `API request failed with status ${error.response.status}` - }; - } - - // Return a generic error response - return { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: error instanceof Error ? 
error.message : 'Unknown error occurred' - }; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/test-cases.ts b/build/src/functions/fetch_contacts/test-cases.ts deleted file mode 100644 index 40089af..0000000 --- a/build/src/functions/fetch_contacts/test-cases.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import { createMockEvent } from './test-utils'; -import { mockResponses, expectedResults, createAxiosError } from './test-data'; - -export interface TestCase { - name: string; - setup: (mockGet: jest.Mock) => void; - input: AirdropEvent[]; - expectedResult: any; -} - -export const testCases: TestCase[] = [ - { - name: 'should return contacts when API calls are successful', - setup: (mockGet) => { - // First call to get space members - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.successfulSpaceResponse)); - - // Second call to get contact details - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.successfulContactsResponse)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.successfulFetch - }, - { - name: 'should return empty contacts array when space has no members', - setup: (mockGet) => { - // Return space with no members - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.emptySpaceResponse)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.emptySpace - }, - { - name: 'should return error when first API call returns non-200 status', - setup: (mockGet) => { - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.errorResponses.forbidden)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.errors.spaceMembers - }, - { - name: 'should return error when second API call returns non-200 status', - setup: (mockGet) => { - // First call to get space members succeeds - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.successfulSpaceResponse)); - - // 
Second call to get contact details fails - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.errorResponses.forbidden)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.errors.contactDetails - }, - { - name: 'should return error when first API response format is invalid', - setup: (mockGet) => { - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.errorResponses.invalidFormat)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.errors.invalidSpaceFormat - }, - { - name: 'should return error when second API response format is invalid', - setup: (mockGet) => { - // First call to get space members succeeds - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.successfulSpaceResponse)); - - // Second call returns invalid format - mockGet.mockImplementationOnce(() => Promise.resolve(mockResponses.errorResponses.invalidFormat)); - }, - input: [createMockEvent()], - expectedResult: expectedResults.errors.invalidContactsFormat - }, - { - name: 'should return error when axios throws an exception on first call', - setup: (mockGet) => { - // Create a proper Axios error - mockGet.mockRejectedValueOnce(createAxiosError(401)); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: expect.stringContaining('API request failed with status 401') - } - }, - { - name: 'should return error when axios throws a network exception', - setup: (mockGet) => { - const networkError = new Error('Network error'); - mockGet.mockRejectedValueOnce(networkError); - }, - input: [createMockEvent()], - expectedResult: expectedResults.errors.networkError - }, - { - name: 'should throw an error if events parameter is not an array', - setup: () => {}, - input: null as unknown as AirdropEvent[], // Use null to test non-array input - expectedResult: expectedResults.errors.invalidInput - }, - { - name: 'should throw an error if events array 
is empty', - setup: () => {}, - input: [], - expectedResult: expectedResults.errors.emptyEvents - }, - { - name: 'should throw an error if an event is missing required fields', - setup: () => {}, - input: [{ - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent], - expectedResult: expectedResults.errors.missingContext - }, - { - name: 'should throw an error if API key is missing', - setup: () => {}, - input: [{ - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - connection_data: { - ...createMockEvent().payload.connection_data, - key: undefined as any - } - } - }], - expectedResult: expectedResults.errors.missingApiKey - }, - { - name: 'should throw an error if Space ID is missing', - setup: () => {}, - input: [{ - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - connection_data: { - ...createMockEvent().payload.connection_data, - org_id: undefined as any - } - } - }], - expectedResult: expectedResults.errors.missingSpaceId - } -]; \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/test-data.ts b/build/src/functions/fetch_contacts/test-data.ts deleted file mode 100644 index ea64e0d..0000000 --- a/build/src/functions/fetch_contacts/test-data.ts +++ /dev/null @@ -1,244 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * Mock API responses for Wrike API - */ -export const mockResponses = { - /** - * Successful response for space with members - */ - successfulSpaceResponse: { - status: 200, - data: { - data: [ - { - id: 'IEACW7SVI4O6BDQE', - title: 'Test Space', - members: [ - { id: 'KUAFY3BJ' }, - { id: 'KUAFZBCJ' } - ] - } - ] - } - }, - - /** - * Successful response for contacts - */ - successfulContactsResponse: { - status: 200, - data: { - data: [ - { - id: 'KUAFY3BJ', - firstName: 'John', - lastName: 'Doe', - type: 'Person', - profiles: [ - { - email: 'john.doe@example.com', - avatarUrl: 'https://example.com/avatar1.jpg', - timezone: 
'America/New_York', - locale: 'en-US' - } - ], - title: 'Software Engineer', - companyName: 'Example Corp', - phone: '+1234567890', - location: 'New York', - deleted: false, - me: false - }, - { - id: 'KUAFZBCJ', - firstName: 'Jane', - lastName: 'Smith', - type: 'Person', - profiles: [ - { - email: 'jane.smith@example.com', - avatarUrl: 'https://example.com/avatar2.jpg', - timezone: 'Europe/London', - locale: 'en-GB' - } - ], - title: 'Product Manager', - companyName: 'Example Corp', - phone: '+0987654321', - location: 'London', - deleted: false, - me: false - } - ] - } - }, - - /** - * Empty space response (no members) - */ - emptySpaceResponse: { - status: 200, - data: { - data: [ - { - id: 'IEACW7SVI4O6BDQE', - title: 'Test Space', - members: [] - } - ] - } - }, - - /** - * Error responses - */ - errorResponses: { - forbidden: { - status: 403, - data: { error: 'Forbidden' } - }, - invalidFormat: { - status: 200, - data: { invalid: 'format' } - } - } -}; - -/** - * Expected results for different test scenarios - */ -export const expectedResults = { - /** - * Expected result for successful contacts fetch - */ - successfulFetch: { - status: 'success', - message: 'Successfully fetched 2 contacts from Wrike API', - contacts: [ - { - id: 'KUAFY3BJ', - first_name: 'John', - last_name: 'Doe', - type: 'Person', - profiles: [ - { - email: 'john.doe@example.com', - avatar_url: 'https://example.com/avatar1.jpg', - timezone: 'America/New_York', - locale: 'en-US' - } - ], - title: 'Software Engineer', - company_name: 'Example Corp', - phone: '+1234567890', - location: 'New York', - is_deleted: false, - me: false - }, - { - id: 'KUAFZBCJ', - first_name: 'Jane', - last_name: 'Smith', - type: 'Person', - profiles: [ - { - email: 'jane.smith@example.com', - avatar_url: 'https://example.com/avatar2.jpg', - timezone: 'Europe/London', - locale: 'en-GB' - } - ], - title: 'Product Manager', - company_name: 'Example Corp', - phone: '+0987654321', - location: 'London', - is_deleted: 
false, - me: false - } - ] - }, - - /** - * Expected result for empty space - */ - emptySpace: { - status: 'success', - message: 'No members found in the space', - contacts: [] - }, - - /** - * Expected error results - */ - errors: { - spaceMembers: { - status: 'error', - message: 'Failed to fetch space members with status 403', - error: 'Received status code 403' - }, - contactDetails: { - status: 'error', - message: 'Failed to fetch contact details with status 403', - error: 'Received status code 403' - }, - invalidSpaceFormat: { - status: 'error', - message: 'Invalid response format from Wrike API for space members', - error: 'Response data is not in the expected format' - }, - invalidContactsFormat: { - status: 'error', - message: 'Invalid response format from Wrike API for contacts', - error: 'Response data is not in the expected format' - }, - axiosError: { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: 'API request failed with status 401' - }, - networkError: { - status: 'error', - message: 'Failed to fetch contacts from Wrike API', - error: 'Network error' - }, - invalidInput: { - status: 'error', - message: 'Invalid input: events must be an array', - error: 'Invalid input: events must be an array' - }, - emptyEvents: { - status: 'error', - message: 'Invalid input: events array is empty', - error: 'Invalid input: events array is empty' - }, - missingContext: { - status: 'error', - message: 'Invalid event: missing required field \'context\'', - error: 'Invalid event: missing required field \'context\'' - }, - missingApiKey: { - status: 'error', - message: 'Invalid event: missing required field \'payload.connection_data.key\'', - error: 'Invalid event: missing required field \'payload.connection_data.key\'' - }, - missingSpaceId: { - status: 'error', - message: 'Invalid event: missing required field \'payload.connection_data.org_id\'', - error: 'Invalid event: missing required field \'payload.connection_data.org_id\'' - } - 
} -}; - -/** - * Helper function to create an Axios error - */ -export function createAxiosError(status: number): any { - const axiosError = { - isAxiosError: true, - message: 'Request failed', - response: { status } - } as any; - return axiosError; -} \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/test-helpers.ts b/build/src/functions/fetch_contacts/test-helpers.ts deleted file mode 100644 index 69c4d3e..0000000 --- a/build/src/functions/fetch_contacts/test-helpers.ts +++ /dev/null @@ -1,37 +0,0 @@ -import axios from 'axios'; - -/** - * Sets up the axios mocks for testing - * @returns The mock functions that can be used in tests - */ -export function setupAxiosMocks() { - // Set up axios mock functions - const mockGet = jest.fn(); - - // Properly mock axios methods - jest.spyOn(axios, 'get').mockImplementation(mockGet); - - // Properly mock axios.isAxiosError with correct type handling - jest.spyOn(axios, 'isAxiosError').mockImplementation((error: any) => { - return error && error.isAxiosError === true; - }); - - return { mockGet }; -} - -/** - * Sets up common test environment - */ -export function setupTestEnvironment() { - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); -} - -/** - * Cleans up the test environment - */ -export function cleanupTestEnvironment() { - // Restore console mocks - jest.restoreAllMocks(); -} \ No newline at end of file diff --git a/build/src/functions/fetch_contacts/test-utils.ts b/build/src/functions/fetch_contacts/test-utils.ts deleted file mode 100644 index c86fa9e..0000000 --- a/build/src/functions/fetch_contacts/test-utils.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { - // Extraction - ExtractionExternalSyncUnitsStart = 
'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'IEACW7SVI4O6BDQE', // Example Space ID from Postman collection - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionDataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); - -/** - * Mock Wrike API response for 
space with members - */ -export const mockWrikeSpaceResponse = { - data: [ - { - id: 'IEACW7SVI4O6BDQE', - title: 'Test Space', - members: [ - { id: 'KUAFY3BJ' }, - { id: 'KUAFZBCJ' } - ] - } - ] -}; - -/** - * Mock Wrike API response for contacts - */ -export const mockWrikeContactsResponse = { - data: [ - { - id: 'KUAFY3BJ', - firstName: 'John', - lastName: 'Doe', - type: 'Person', - profiles: [ - { - email: 'john.doe@example.com', - avatarUrl: 'https://example.com/avatar1.jpg', - timezone: 'America/New_York', - locale: 'en-US' - } - ], - title: 'Software Engineer', - companyName: 'Example Corp', - phone: '+1234567890', - location: 'New York', - deleted: false, - me: false - }, - { - id: 'KUAFZBCJ', - firstName: 'Jane', - lastName: 'Smith', - type: 'Person', - profiles: [ - { - email: 'jane.smith@example.com', - avatarUrl: 'https://example.com/avatar2.jpg', - timezone: 'Europe/London', - locale: 'en-GB' - } - ], - title: 'Product Manager', - companyName: 'Example Corp', - phone: '+0987654321', - location: 'London', - deleted: false, - me: false - } - ] -}; \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.case-data.ts b/build/src/functions/fetch_folder_tasks/index.test.case-data.ts new file mode 100644 index 0000000..d4070af --- /dev/null +++ b/build/src/functions/fetch_folder_tasks/index.test.case-data.ts @@ -0,0 +1,68 @@ +import { FunctionInput } from '../../core/types'; +import { createMockEvent } from './index.test.helpers'; + +/** + * Test data and event configurations for fetch_folder_tasks tests. + * This file contains reusable event creation logic and test data. 
+ */ + +/** + * Creates a mock event with custom folder and request IDs + */ +export function createMockEventWithIds( + folderId: string, + requestId: string, + spaceId: string = 'SPACE1', + spaceName: string = 'Space 1' +): FunctionInput { + return createMockEvent({ + payload: { + connection_data: { + org_id: spaceId, + org_name: spaceName, + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: `folder-${folderId}`, + external_sync_unit_id: folderId, + external_sync_unit_name: `Folder ${folderId}`, + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: requestId, + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + execution_metadata: { + request_id: requestId, + function_name: 'fetch_folder_tasks', + event_type: `event-type-${requestId}`, + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); +} + +/** + * Creates multiple test events for testing event processing + */ +export function createMultipleTestEvents(): FunctionInput[] { + const event1 = createMockEventWithIds('FOLDER1', 'request-1', 'SPACE1', 'Space 1'); + const event2 = createMockEventWithIds('FOLDER2', 'request-2', 'SPACE2', 'Space 2'); + + return [event1, event2]; +} \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.case-generators.ts b/build/src/functions/fetch_folder_tasks/index.test.case-generators.ts new file mode 100644 index 0000000..7714a31 --- /dev/null +++ 
b/build/src/functions/fetch_folder_tasks/index.test.case-generators.ts @@ -0,0 +1,219 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockTasks, + createSuccessResponse, + createEmptyResponse, +} from './index.test.helpers'; +import { createMultipleTestEvents } from './index.test.case-data'; + +/** + * Test case generators for fetch_folder_tasks function. + * This file contains reusable test case generation logic. + */ + +/** + * Generates a test case for successful task fetching + */ +export function generateSuccessTestCase() { + return { + description: 'should return success response with tasks', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully fetched tasks from Wrike folder'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.folder_id).toBe('IEAGS6BYI5RFMPP7'); + expect(result.metadata.task_count).toBe(2); + expect(result.metadata.page_size).toBe(200); + expect(result.metadata.function_name).toBe('fetch_folder_tasks'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(mockTasks); + expect(result.timestamp).toBeDefined(); + + if (mockGetTasks) { + expect(mockGetTasks).toHaveBeenCalledTimes(1); + expect(mockGetTasks).toHaveBeenCalledWith('IEAGS6BYI5RFMPP7', { + pageSize: 200, + nextPageToken: undefined, + updatedDate: undefined, + }); + } + }, + }; +} + +/** + * Generates a test case for pagination + */ +export function generatePaginationTestCase() { + return { + description: 'should return success response with pagination', + setup: (WrikeClientMock: 
jest.Mock) => { + const mockGetTasks = jest.fn().mockResolvedValue( + createSuccessResponse(mockTasks, 'NEXT_PAGE_TOKEN_123') + ); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + event: createMockEvent({ + input_data: { + global_values: { + pageSize: '100', + nextPageToken: 'CURRENT_PAGE_TOKEN', + }, + event_sources: {}, + }, + }), + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.status).toBe('success'); + expect(result.metadata.page_size).toBe(100); + expect(result.metadata.next_page_token).toBe('NEXT_PAGE_TOKEN_123'); + expect(result.metadata.has_more).toBe(true); + + if (mockGetTasks) { + expect(mockGetTasks).toHaveBeenCalledWith('IEAGS6BYI5RFMPP7', { + pageSize: 100, + nextPageToken: 'CURRENT_PAGE_TOKEN', + updatedDate: undefined, + }); + } + }, + }; +} + +/** + * Generates a test case for updatedDate filter + */ +export function generateUpdatedDateFilterTestCase() { + return { + description: 'should support updatedDate filter', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + event: createMockEvent({ + input_data: { + global_values: { + pageSize: '200', + updatedDate: '2025-01-01T00:00:00Z', + }, + event_sources: {}, + }, + }), + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.status).toBe('success'); + + if (mockGetTasks) { + expect(mockGetTasks).toHaveBeenCalledWith('IEAGS6BYI5RFMPP7', { + pageSize: 200, + nextPageToken: undefined, + updatedDate: '2025-01-01T00:00:00Z', + }); + } + }, + }; +} + +/** + * Generates a test case for empty task list + */ +export function generateEmptyTaskListTestCase() { + return { + description: 'should return success response with empty task list', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTasks = 
jest.fn().mockResolvedValue(createEmptyResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.metadata.task_count).toBe(0); + expect(result.metadata.has_more).toBe(false); + expect(result.data).toEqual([]); + }, + }; +} + +/** + * Generates a test case for missing global_values + */ +export function generateMissingGlobalValuesTestCase() { + return { + description: 'should handle missing global_values gracefully', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + event: createMockEvent({ + input_data: { + global_values: {}, + event_sources: {}, + }, + }), + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.status).toBe('success'); + + if (mockGetTasks) { + expect(mockGetTasks).toHaveBeenCalledWith('IEAGS6BYI5RFMPP7', { + pageSize: undefined, + nextPageToken: undefined, + updatedDate: undefined, + }); + } + }, + }; +} + +/** + * Generates test events for multiple event processing test + */ +export function generateMultipleEventsTestCase() { + const events = createMultipleTestEvents(); + + return { + description: 'should process only the first event when multiple events provided', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + return mockGetTasks; + }, + events: events, + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.metadata.request_id).toBe('request-1'); + expect(result.metadata.folder_id).toBe('FOLDER1'); + + if (mockGetTasks) { + 
expect(mockGetTasks).toHaveBeenCalledTimes(1); + expect(mockGetTasks).toHaveBeenCalledWith('FOLDER1', { + pageSize: 200, + nextPageToken: undefined, + updatedDate: undefined, + }); + } + }, + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.cases.ts b/build/src/functions/fetch_folder_tasks/index.test.cases.ts new file mode 100644 index 0000000..d8b767d --- /dev/null +++ b/build/src/functions/fetch_folder_tasks/index.test.cases.ts @@ -0,0 +1,137 @@ +import { FunctionInput } from '../../core/types'; +import { + createSuccessResponse, +} from './index.test.helpers'; +import { testCaseGenerators } from './index.test.shared'; +import { + generateSuccessTestCase, + generatePaginationTestCase, + generateUpdatedDateFilterTestCase, + generateEmptyTaskListTestCase, + generateMissingGlobalValuesTestCase, + generateMultipleEventsTestCase, +} from './index.test.case-generators'; +import { createMockEvent } from './index.test.helpers'; + +export function createFetchFolderTasksTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + // Success test case + const successTestCase = generateSuccessTestCase(); + it(successTestCase.description, async () => { + const mockGetTasks = successTestCase.setup(WrikeClientMock); + const result = await runFunction([successTestCase.event]); + successTestCase.assertions(result, mockGetTasks); + }); + + // Pagination test case + const paginationTestCase = generatePaginationTestCase(); + it(paginationTestCase.description, async () => { + const mockGetTasks = paginationTestCase.setup(WrikeClientMock); + const result = await runFunction([paginationTestCase.event]); + paginationTestCase.assertions(result, mockGetTasks); + }); + + // UpdatedDate filter test case + const updatedDateTestCase = generateUpdatedDateFilterTestCase(); + it(updatedDateTestCase.description, async () => { + const mockGetTasks = updatedDateTestCase.setup(WrikeClientMock); + 
const result = await runFunction([updatedDateTestCase.event]); + updatedDateTestCase.assertions(result, mockGetTasks); + }); + + // Empty task list test case + const emptyTaskListTestCase = generateEmptyTaskListTestCase(); + it(emptyTaskListTestCase.description, async () => { + const mockGetTasks = emptyTaskListTestCase.setup(WrikeClientMock); + const result = await runFunction([emptyTaskListTestCase.event]); + emptyTaskListTestCase.assertions(result, mockGetTasks); + }); + + // Missing global_values test case + const missingGlobalValuesTestCase = generateMissingGlobalValuesTestCase(); + it(missingGlobalValuesTestCase.description, async () => { + const mockGetTasks = missingGlobalValuesTestCase.setup(WrikeClientMock); + const result = await runFunction([missingGlobalValuesTestCase.event]); + missingGlobalValuesTestCase.assertions(result, mockGetTasks); + }); + + // API Error Tests + const apiErrorCases = testCaseGenerators.apiErrors(); + apiErrorCases.forEach(({ description, mockResponse, assertions }) => { + it(description, async () => { + const mockGetTasks = jest.fn().mockResolvedValue(mockResponse); + + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + assertions(result); + }); + }); + + // Multiple events test case + const multipleEventsTestCase = generateMultipleEventsTestCase(); + it(multipleEventsTestCase.description, async () => { + const mockGetTasks = multipleEventsTestCase.setup(WrikeClientMock); + const result = await runFunction(multipleEventsTestCase.events); + multipleEventsTestCase.assertions(result, mockGetTasks); + }); + + // Validation Error Tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to fetch_folder_tasks function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as 
any)).rejects.toThrow('No events provided to fetch_folder_tasks function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow( + 'No events provided to fetch_folder_tasks function' + ); + }); + + const validationCases = testCaseGenerators.validationErrors().slice(1); // Skip first case (no events) + validationCases.forEach(({ description, eventModifier, expectedError }) => { + it(description, async () => { + const invalidEvent = createMockEvent(); + eventModifier(invalidEvent); + + await expect(runFunction([invalidEvent])).rejects.toThrow(expectedError); + }); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetTasks = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTasks: mockGetTasks, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.helpers.ts b/build/src/functions/fetch_folder_tasks/index.test.helpers.ts new file mode 100644 index 0000000..6398f5f --- /dev/null +++ b/build/src/functions/fetch_folder_tasks/index.test.helpers.ts @@ -0,0 +1,198 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeTask } from '../../core/wrike-types'; + +/** + * Mock task data for testing + */ +export const mockTask: WrikeTask = { + id: 'IEACW7SVKQOKD5EG', + accountId: 
'IEAGS6BY', + title: 'Test Task', + description: 'Test task description', + briefDescription: 'Test brief', + parentIds: ['IEAGS6BYI5RFMPP7'], + superParentIds: [], + sharedIds: ['KUAVRIOP'], + responsibleIds: ['KUANFJBJ'], + status: 'Active', + importance: 'Normal', + createdDate: '2025-01-01T10:00:00Z', + updatedDate: '2025-01-02T15:30:00Z', + dates: { + type: 'Planned', + duration: 86400000, + start: '2025-01-01', + due: '2025-01-02', + }, + scope: 'WsTask', + authorIds: ['KUANFJBJ'], + customStatusId: 'IEAGS6BYJMF3BCR4', + hasAttachments: false, + permalink: 'https://www.wrike.com/open.htm?id=123456', + priority: '02', + followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], +}; + +export const mockTasks: WrikeTask[] = [ + mockTask, + { + id: 'IEACW7SVKQOKD5EH', + accountId: 'IEAGS6BY', + title: 'Second Task', + description: 'Second task description', + briefDescription: 'Second brief', + parentIds: ['IEAGS6BYI5RFMPP7'], + superParentIds: [], + sharedIds: ['KUAVRIOP'], + responsibleIds: ['NVJKSNJK'], + status: 'Active', + importance: 'High', + createdDate: '2025-01-03T10:00:00Z', + updatedDate: '2025-01-04T15:30:00Z', + dates: { + type: 'Planned', + duration: 172800000, + start: '2025-01-03', + due: '2025-01-05', + }, + scope: 'WsTask', + authorIds: ['NVJKSNJK'], + customStatusId: 'IEAGS6BYJMF3BCR4', + hasAttachments: true, + permalink: 'https://www.wrike.com/open.htm?id=123457', + priority: '01', + followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], + }, +]; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'IEAGS6BYI5RFMPPY', + org_name: 'Test Space', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + 
dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-folder', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Folder', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'fetch_folder_tasks', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: { + pageSize: '200', + }, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getTasks response for successful fetch + */ +export const createSuccessResponse = (tasks: WrikeTask[] = mockTasks, nextPageToken?: string) => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched tasks from Wrike folder', + data: tasks, + nextPageToken, + hasMore: !!nextPageToken, +}); + +/** + * Creates a mock WrikeClient.getTasks response for empty task list + */ +export const createEmptyResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched tasks from Wrike folder', + data: [], + hasMore: false, +}); + +/** + * Creates a mock WrikeClient.getTasks response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: 
number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock WrikeClient.getTasks response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getTasks response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); + +/** + * Creates a mock WrikeClient.getTasks response for not found error + */ +export const createNotFoundResponse = () => ({ + status_code: 404, + api_delay: 0, + message: 'Wrike API error: Folder not found', +}); \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.shared.ts b/build/src/functions/fetch_folder_tasks/index.test.shared.ts new file mode 100644 index 0000000..606949f --- /dev/null +++ b/build/src/functions/fetch_folder_tasks/index.test.shared.ts @@ -0,0 +1,147 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Shared test utilities for fetch_folder_tasks tests + */ + +/** + * Creates a test case for successful API responses + */ +export function createSuccessTestCase( + description: string, + mockResponse: any, + assertions: (result: any, mockGetTasks?: jest.Mock) => void +) { + return { + description, + mockResponse, + assertions, + }; +} + +/** + * Creates a test case for API error responses + */ +export function createErrorTestCase( + description: string, + statusCode: number, + message: string, + apiDelay: number = 0 +) { + return { + description, + mockResponse: { + status_code: statusCode, + api_delay: apiDelay, + message, + }, + assertions: (result: any, mockGetTasks?: jest.Mock) => { + expect(result.status).toBe('error'); + expect(result.message).toBe(message); + expect(result.status_code).toBe(statusCode); + 
expect(result.api_delay).toBe(apiDelay); + expect(result.metadata.task_count).toBe(0); + if (statusCode !== 429) { + expect(result.data).toBeUndefined(); + } + }, + }; +} + +/** + * Creates a test case for validation errors + */ +export function createValidationTestCase( + description: string, + eventModifier: (event: FunctionInput) => void, + expectedError: string +) { + return { + description, + eventModifier, + expectedError, + }; +} + +/** + * Test case generators for common scenarios + */ +export const testCaseGenerators = { + /** + * Generates validation error test cases + */ + validationErrors: () => [ + createValidationTestCase( + 'should throw error when no events provided', + () => {}, + 'No events provided to fetch_folder_tasks function' + ), + createValidationTestCase( + 'should throw error when event is missing payload', + (event: FunctionInput) => delete (event as any).payload, + 'Invalid event: missing payload' + ), + createValidationTestCase( + 'should throw error when event is missing connection_data', + (event: FunctionInput) => delete (event as any).payload.connection_data, + 'Invalid event: missing connection_data in payload' + ), + createValidationTestCase( + 'should throw error when event is missing API key', + (event: FunctionInput) => delete (event as any).payload.connection_data.key, + 'Invalid event: missing API key in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing event_context', + (event: FunctionInput) => delete (event as any).payload.event_context, + 'Invalid event: missing event_context in payload' + ), + createValidationTestCase( + 'should throw error when event is missing external_sync_unit_id (folder ID)', + (event: FunctionInput) => delete (event as any).payload.event_context.external_sync_unit_id, + 'Invalid event: missing external_sync_unit_id in event_context' + ), + createValidationTestCase( + 'should throw error when event is missing execution_metadata', + (event: FunctionInput) 
=> delete (event as any).execution_metadata, + 'Invalid event: missing execution_metadata' + ), + createValidationTestCase( + 'should throw error when event is missing input_data', + (event: FunctionInput) => delete (event as any).input_data, + 'Invalid event: missing input_data' + ), + ], + + /** + * Generates API error test cases + */ + apiErrors: () => [ + createErrorTestCase( + 'should return error response for authentication failure (401)', + 401, + 'Authentication failed: Invalid or expired API key' + ), + createErrorTestCase( + 'should return error response for forbidden access (403)', + 403, + 'Authentication failed: Access forbidden' + ), + createErrorTestCase( + 'should handle rate limiting (429) correctly', + 429, + 'Rate limit exceeded. Retry after 49 seconds.', + 49 + ), + createErrorTestCase( + 'should handle network errors', + 0, + 'Network error: Unable to reach Wrike API' + ), + createErrorTestCase( + 'should handle not found error (404)', + 404, + 'Wrike API error: Folder not found' + ), + ], +}; \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.test.ts b/build/src/functions/fetch_folder_tasks/index.test.ts new file mode 100644 index 0000000..ed7a01c --- /dev/null +++ b/build/src/functions/fetch_folder_tasks/index.test.ts @@ -0,0 +1,14 @@ +import run, { FetchFolderTasksResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createFetchFolderTasksTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('fetch_folder_tasks function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createFetchFolderTasksTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/fetch_folder_tasks/index.ts b/build/src/functions/fetch_folder_tasks/index.ts new file mode 100644 index 0000000..6c74ec6 --- /dev/null +++ 
b/build/src/functions/fetch_folder_tasks/index.ts @@ -0,0 +1,140 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeTask } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for fetch_folder_tasks function + */ +export interface FetchFolderTasksResponse { + status: 'success' | 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + folder_id: string; + task_count: number; + page_size?: number; + next_page_token?: string; + has_more?: boolean; + function_name: string; + request_id: string; + }; + data?: WrikeTask[]; + timestamp: string; +} + +/** + * Fetch folder tasks function that retrieves tasks from a Wrike folder. + * Makes a request to /folders/{folderId}/tasks endpoint. + * + * @param events - Array of function input events + * @returns Object containing tasks data with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to fetch_folder_tasks function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.payload.event_context) { + throw new Error('Invalid event: missing event_context in payload'); + } + + if (!event.payload.event_context.external_sync_unit_id) { + throw new Error('Invalid event: missing external_sync_unit_id in event_context'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + if 
(!event.input_data) { + throw new Error('Invalid event: missing input_data'); + } + + // Extract API key and folder ID from event + const apiKey = event.payload.connection_data.key; + const folderId = event.payload.event_context.external_sync_unit_id; + + // Extract query parameters from input_data.global_values + const globalValues = event.input_data.global_values || {}; + const pageSize = globalValues.pageSize ? parseInt(globalValues.pageSize, 10) : undefined; + const nextPageToken = globalValues.nextPageToken; + const updatedDate = globalValues.updatedDate; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getTasks endpoint + const response = await wrikeClient.getTasks(folderId, { + pageSize, + nextPageToken, + updatedDate, + }); + + // Determine if request was successful + const success = response.status_code === 200 && response.data !== undefined; + + // Build response + const fetchTasksResponse: FetchFolderTasksResponse = { + status: success ? 
'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + folder_id: folderId, + task_count: response.data?.length || 0, + page_size: pageSize, + next_page_token: response.nextPageToken, + has_more: response.hasMore, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: response.data, + timestamp: new Date().toISOString(), + }; + + return fetchTasksResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + folder_id: folderId, + task_count: 0, + page_size: pageSize, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/fetch_projects/index.test.ts b/build/src/functions/fetch_projects/index.test.ts deleted file mode 100644 index 14a38c5..0000000 --- a/build/src/functions/fetch_projects/index.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -// Mock axios before any imports -jest.mock('axios'); - -// Import the test utilities and helpers -import { createMockEvent } from './test-utils'; -import { setupAxiosMocks, setupTestEnvironment, cleanupTestEnvironment } from './test-helpers'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import { run } from './index'; -import { testCases } from './test-cases'; - -describe('Fetch Projects Function', () => { - // Set up axios mocks - const { mockGet } = setupAxiosMocks(); - - beforeEach(() => { - // Clear all mocks before each test - jest.clearAllMocks(); - - // Set up test environment - setupTestEnvironment(); - }); - - afterEach(() => { - // Clean up test 
environment - cleanupTestEnvironment(); - }); - - // Add a test for API call parameters verification - it('should call the Wrike API with correct parameters', async () => { - // Use the first test case which has a successful API call - const testCase = testCases[0]; - testCase.setup(mockGet); - - await run(testCase.input); - - expect(mockGet).toHaveBeenCalledWith( - 'https://www.wrike.com/api/v4/spaces/IEACW7SVI4O6BDQE/folders', - expect.objectContaining({ - headers: { - 'Authorization': 'Bearer mock-api-key' - }, - params: { - descendants: true - }, - timeout: 10000 - }) - ); - }); - - // Generate tests from test cases - testCases.forEach(testCase => { - it(testCase.name, async () => { - testCase.setup(mockGet); - - // For test cases that expect the function to handle invalid inputs, - // we need to pass an empty array and let the function itself detect - // and handle the error condition. This avoids TypeScript errors while - // still testing the function's error handling capabilities. - - const result = await run(testCase.input); - expect(result).toEqual(testCase.expectedResult); - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/fetch_projects/index.ts b/build/src/functions/fetch_projects/index.ts deleted file mode 100644 index a888baa..0000000 --- a/build/src/functions/fetch_projects/index.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * Interface for a Wrike project - */ -interface WrikeProject { - id: string; - title: string; - description?: string; - created_date: string; // Using snake_case as required - updated_date: string; // Using snake_case as required - scope: string; - project_status?: string; - custom_status_id?: string; - parent_ids?: string[]; - shared?: boolean; - permalink?: string; -} - -/** - * A function that fetches the list of projects from Wrike API. 
- * - * @param events - Array of AirdropEvent objects - * @returns A response containing the list of projects - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - projects?: WrikeProject[], - error?: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - if (events.length === 0) { - throw new Error('Invalid input: events array is empty'); - } - - // Use the first event for the check - const event = events[0]; - - // Validate that the event is a valid AirdropEvent with all required fields - if (!event || typeof event !== 'object') { - throw new Error('Invalid event: event must be a valid AirdropEvent object'); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error('Invalid event: missing required field \'context\''); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error('Invalid event: missing required field \'context.secrets.service_account_token\''); - } - - if (!event.payload) { - throw new Error('Invalid event: missing required field \'payload\''); - } - - if (!event.payload.connection_data) { - throw new Error('Invalid event: missing required field \'payload.connection_data\''); - } - - if (!event.payload.connection_data.key) { - throw new Error('Invalid event: missing required field \'payload.connection_data.key\''); - } - - // Extract the Wrike API key and Space ID - const apiKey = event.payload.connection_data.key; - const spaceId = event.payload.connection_data.org_id; - - // Define the Wrike API endpoint - const wrikeApiEndpoint = 'https://www.wrike.com/api/v4'; - - // Log the attempt for debugging purposes - console.log('Attempting to fetch projects from Wrike API'); - - // Make a GET request to the Wrike API to get folders/projects - // According to the Postman collection, we should use 
the /folders endpoint - const response = await axios.get(`${wrikeApiEndpoint}/spaces/${spaceId}/folders`, { - headers: { - 'Authorization': `Bearer ${apiKey}` - }, - params: { - descendants: true - }, - timeout: 10000 // 10 seconds timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - return { - status: 'error', - message: `Failed to fetch projects with status ${response.status}`, - error: `Received status code ${response.status}` - }; - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - return { - status: 'error', - message: 'Invalid response format from Wrike API', - error: 'Response data is not in the expected format' - }; - } - - // Transform the response data into our project format - const projects: WrikeProject[] = response.data.data.map((project: any) => ({ - id: project.id, - title: project.title, - description: project.description, - created_date: project.createdDate || '', - updated_date: project.updatedDate || '', - scope: project.scope, - project_status: project.project ? 
project.project.status : undefined, - custom_status_id: project.customStatusId, - parent_ids: project.parentIds, - shared: project.shared, - permalink: project.permalink - })); - - // Log the success for debugging purposes - console.log(`Successfully fetched ${projects.length} projects from Wrike API`); - - // Return a success response with the projects - return { - status: 'success', - message: `Successfully fetched ${projects.length} projects from Wrike API`, - projects - }; - } catch (error) { - // Log the error for debugging - console.error('Error in fetch projects function:', error); - - // Check if the error is an Axios error with a response - if (axios.isAxiosError(error) && error.response) { - return { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: `API request failed with status ${error.response.status}: ${error.message}` - }; - } - - // Return a generic error response - return { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: error instanceof Error ? 
error.message : 'Unknown error occurred' - }; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_projects/test-cases.ts b/build/src/functions/fetch_projects/test-cases.ts deleted file mode 100644 index 7efd3e5..0000000 --- a/build/src/functions/fetch_projects/test-cases.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import { createMockEvent, mockWrikeProjectsResponse } from './test-utils'; - -export interface TestCase { - name: string; - setup: (mockGet: jest.Mock) => void; - input: AirdropEvent[]; - expectedResult: any; -} - -export const testCases: TestCase[] = [ - { - name: 'should return projects when API call is successful', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 200, - data: mockWrikeProjectsResponse - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'success', - message: 'Successfully fetched 2 projects from Wrike API', - projects: [ - { - id: 'IEACW7SVI4OMYFIY', - title: 'Project 1', - description: 'This is project 1', - created_date: '2023-01-01T00:00:00Z', - updated_date: '2023-01-02T00:00:00Z', - scope: 'WsFolder', - project_status: 'Green', - custom_status_id: 'ABCD1234', - parent_ids: ['PARENT1'], - shared: true, - permalink: 'https://www.wrike.com/open.htm?id=123456789' - }, - { - id: 'IEACW7SVI4PZXTGO', - title: 'Project 2', - description: 'This is project 2', - created_date: '2023-02-01T00:00:00Z', - updated_date: '2023-02-02T00:00:00Z', - scope: 'WsFolder', - project_status: 'Yellow', - custom_status_id: 'EFGH5678', - parent_ids: ['PARENT2'], - shared: true, - permalink: 'https://www.wrike.com/open.htm?id=987654321' - } - ] - } - }, - { - name: 'should return error when API call returns non-200 status', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 403, - data: { error: 'Forbidden' } - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch projects with status 403', - error: 
'Received status code 403' - } - }, - { - name: 'should return error when API response format is invalid', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 200, - data: { invalid: 'format' } - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Invalid response format from Wrike API', - error: 'Response data is not in the expected format' - } - }, - { - name: 'should return error when axios throws an exception', - setup: (mockGet) => { - const axiosError = new Error('Request failed') as any; - axiosError.isAxiosError = true; - axiosError.response = { status: 401 }; - mockGet.mockRejectedValue(axiosError); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: expect.stringContaining('API request failed with status 401') - } - }, - { - name: 'should return error when axios throws a network exception', - setup: (mockGet) => { - const networkError = new Error('Network error'); - mockGet.mockRejectedValue(networkError); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: 'Network error' - } - }, - { - name: 'should throw an error if events parameter is not an array', - setup: () => {}, - input: null as unknown as AirdropEvent[], // Use null to test non-array input - expectedResult: { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: 'Invalid input: events must be an array' - } - }, - { - name: 'should throw an error if events array is empty', - setup: () => {}, - input: [], - expectedResult: { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: 'Invalid input: events array is empty' - } - }, - { - name: 'should throw an error if an event is missing required fields', - setup: () => {}, - input: [{ - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent], - expectedResult: { - 
status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: 'Invalid event: missing required field \'context\'' - } - }, - { - name: 'should throw an error if API key is missing', - setup: () => {}, - input: [{ - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - connection_data: { - ...createMockEvent().payload.connection_data, - key: undefined as any - } - } - }], - expectedResult: { - status: 'error', - message: 'Failed to fetch projects from Wrike API', - error: 'Invalid event: missing required field \'payload.connection_data.key\'' - } - } -]; \ No newline at end of file diff --git a/build/src/functions/fetch_projects/test-helpers.ts b/build/src/functions/fetch_projects/test-helpers.ts deleted file mode 100644 index 69c4d3e..0000000 --- a/build/src/functions/fetch_projects/test-helpers.ts +++ /dev/null @@ -1,37 +0,0 @@ -import axios from 'axios'; - -/** - * Sets up the axios mocks for testing - * @returns The mock functions that can be used in tests - */ -export function setupAxiosMocks() { - // Set up axios mock functions - const mockGet = jest.fn(); - - // Properly mock axios methods - jest.spyOn(axios, 'get').mockImplementation(mockGet); - - // Properly mock axios.isAxiosError with correct type handling - jest.spyOn(axios, 'isAxiosError').mockImplementation((error: any) => { - return error && error.isAxiosError === true; - }); - - return { mockGet }; -} - -/** - * Sets up common test environment - */ -export function setupTestEnvironment() { - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); -} - -/** - * Cleans up the test environment - */ -export function cleanupTestEnvironment() { - // Restore console mocks - jest.restoreAllMocks(); -} \ No newline at end of file diff --git a/build/src/functions/fetch_projects/test-utils.ts b/build/src/functions/fetch_projects/test-utils.ts 
deleted file mode 100644 index 91900bd..0000000 --- a/build/src/functions/fetch_projects/test-utils.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { - // Extraction - ExtractionExternalSyncUnitsStart = 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'IEACW7SVI4O6BDQE', // Example Space ID from Postman collection - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 
'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionDataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); - -/** - * Mock Wrike API response for projects - */ -export const mockWrikeProjectsResponse = { - data: [ - { - id: 'IEACW7SVI4OMYFIY', - title: 'Project 1', - description: 'This is project 1', - createdDate: '2023-01-01T00:00:00Z', - updatedDate: '2023-01-02T00:00:00Z', - scope: 'WsFolder', - project: { - status: 'Green' - }, - customStatusId: 'ABCD1234', - parentIds: ['PARENT1'], - shared: true, - permalink: 'https://www.wrike.com/open.htm?id=123456789' - }, - { - id: 'IEACW7SVI4PZXTGO', - title: 'Project 2', - description: 'This is project 2', - createdDate: '2023-02-01T00:00:00Z', - updatedDate: '2023-02-02T00:00:00Z', - scope: 'WsFolder', - project: { - status: 'Yellow' - }, - customStatusId: 'EFGH5678', - parentIds: ['PARENT2'], - shared: true, - permalink: 'https://www.wrike.com/open.htm?id=987654321' - } - ] -}; \ No newline at end of file diff --git a/build/src/functions/fetch_space_folders/index.test.cases.ts b/build/src/functions/fetch_space_folders/index.test.cases.ts new file mode 100644 index 0000000..a24cd26 --- /dev/null +++ b/build/src/functions/fetch_space_folders/index.test.cases.ts @@ -0,0 +1,174 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockFolders, + createSuccessResponse, + createEmptyResponse, + createAuthFailureResponse, + createRateLimitResponse, + createNetworkErrorResponse, + createNotFoundResponse, +} from './index.test.helpers'; +import { testCaseGenerators } from './index.test.shared'; + +export function createFetchSpaceFoldersTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + it('should return success response with folders', async () => { + const 
mockGetFolders = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully fetched folders from Wrike space'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.space_id).toBe('IEAGS6BYI5RFMPPY'); + expect(result.metadata.folder_count).toBe(2); + expect(result.metadata.function_name).toBe('fetch_space_folders'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(mockFolders); + expect(result.timestamp).toBeDefined(); + expect(mockGetFolders).toHaveBeenCalledTimes(1); + expect(mockGetFolders).toHaveBeenCalledWith('IEAGS6BYI5RFMPPY'); + }); + + it('should return success response with empty folder list', async () => { + const mockGetFolders = jest.fn().mockResolvedValue(createEmptyResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.metadata.folder_count).toBe(0); + expect(result.data).toEqual([]); + }); + + // API Error Tests + const apiErrorCases = testCaseGenerators.apiErrors(); + apiErrorCases.forEach(({ description, mockResponse, assertions }) => { + it(description, async () => { + const mockGetFolders = jest.fn().mockResolvedValue(mockResponse); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + assertions(result); + }); + }); + + it('should process only the first event when multiple events provided', async () => { + const mockGetFolders = 
jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const event1 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE1', + org_name: 'Space 1', + key: 'test-api-key', + key_type: 'oauth2', + }, + }, + execution_metadata: { + request_id: 'request-1', + function_name: 'fetch_space_folders', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE2', + org_name: 'Space 2', + key: 'test-api-key', + key_type: 'oauth2', + }, + }, + execution_metadata: { + request_id: 'request-2', + function_name: 'fetch_space_folders', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const result = await runFunction([event1, event2]); + + expect(result.metadata.request_id).toBe('request-1'); + expect(result.metadata.space_id).toBe('SPACE1'); + expect(mockGetFolders).toHaveBeenCalledTimes(1); + expect(mockGetFolders).toHaveBeenCalledWith('SPACE1'); + }); + + // Validation Error Tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to fetch_space_folders function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow('No events provided to fetch_space_folders function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow('No events provided to fetch_space_folders function'); + }); + + const validationCases = testCaseGenerators.validationErrors().slice(1); // Skip first case (no events) + validationCases.forEach(({ description, eventModifier, expectedError }) => { + it(description, async () => { + const invalidEvent = createMockEvent(); + eventModifier(invalidEvent); + + await 
expect(runFunction([invalidEvent])).rejects.toThrow(expectedError); + }); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetFolders = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetFolders = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getFolders: mockGetFolders, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_space_folders/index.test.helpers.ts b/build/src/functions/fetch_space_folders/index.test.helpers.ts new file mode 100644 index 0000000..c4b872d --- /dev/null +++ b/build/src/functions/fetch_space_folders/index.test.helpers.ts @@ -0,0 +1,135 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeFolder } from '../../core/wrike-types'; + +/** + * Mock folder data for testing + */ +export const mockFolder: WrikeFolder = { + id: 'IEAGS6BYI5RFMPP7', + accountId: 'IEAGS6BY', + title: 'First project', + createdDate: '2025-04-29T07:18:32Z', + updatedDate: '2025-05-26T07:44:20Z', + description: '', + sharedIds: ['KUAVRIOP', 'KX7XOYQF', 'KUAUZTPW', 'KUAVRIOO', 'KUAVRIOS'], + parentIds: ['IEAGS6BYI5RFMPPY'], + childIds: [], + scope: 'WsFolder', + permalink: 'https://www.wrike.com/open.htm?id=1649819135', + workflowId: 'IEAGS6BYK4F3BCSQ', + project: { + authorId: 'KUAUZTPW', + ownerIds: ['KUAUZTPW'], + customStatusId: 'IEAGS6BYJMF3BCR4', + createdDate: '2025-04-29T07:18:32Z', + }, +}; + +export const mockFolders: WrikeFolder[] = [ 
+ mockFolder, + { + id: 'IEAGS6BYI5RFMPP8', + accountId: 'IEAGS6BY', + title: 'Second project', + createdDate: '2025-05-01T10:00:00Z', + updatedDate: '2025-05-26T12:00:00Z', + description: 'Test project', + sharedIds: ['KUAVRIOP'], + parentIds: ['IEAGS6BYI5RFMPPY'], + childIds: [], + scope: 'WsFolder', + permalink: 'https://www.wrike.com/open.htm?id=1649819136', + workflowId: 'IEAGS6BYK4F3BCSQ', + }, +]; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'IEAGS6BYI5RFMPPY', + org_name: 'Test Space', + key: 'test-api-key', + key_type: 'oauth2', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'fetch_space_folders', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getFolders response for successful fetch + */ +export const createSuccessResponse = (folders: WrikeFolder[] = mockFolders) => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched folders from Wrike space', + data: folders, +}); + +/** + * Creates a mock WrikeClient.getFolders response for empty folder list + */ +export const createEmptyResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched folders from Wrike space', + data: [], +}); + +/** + * Creates a mock WrikeClient.getFolders response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock 
WrikeClient.getFolders response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getFolders response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); + +/** + * Creates a mock WrikeClient.getFolders response for not found error + */ +export const createNotFoundResponse = () => ({ + status_code: 404, + api_delay: 0, + message: 'Wrike API error: Space not found', +}); \ No newline at end of file diff --git a/build/src/functions/fetch_space_folders/index.test.shared.ts b/build/src/functions/fetch_space_folders/index.test.shared.ts new file mode 100644 index 0000000..72b7f6b --- /dev/null +++ b/build/src/functions/fetch_space_folders/index.test.shared.ts @@ -0,0 +1,137 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Shared test utilities for fetch_space_folders tests + */ + +/** + * Creates a test case for successful API responses + */ +export function createSuccessTestCase( + description: string, + mockResponse: any, + assertions: (result: any) => void +) { + return { + description, + mockResponse, + assertions, + }; +} + +/** + * Creates a test case for API error responses + */ +export function createErrorTestCase( + description: string, + statusCode: number, + message: string, + apiDelay: number = 0 +) { + return { + description, + mockResponse: { + status_code: statusCode, + api_delay: apiDelay, + message, + }, + assertions: (result: any) => { + expect(result.status).toBe('error'); + expect(result.message).toBe(message); + expect(result.status_code).toBe(statusCode); + expect(result.api_delay).toBe(apiDelay); + expect(result.metadata.folder_count).toBe(0); + if (statusCode !== 429) { + expect(result.data).toBeUndefined(); + } + }, + }; 
+} + +/** + * Creates a test case for validation errors + */ +export function createValidationTestCase( + description: string, + eventModifier: (event: FunctionInput) => void, + expectedError: string +) { + return { + description, + eventModifier, + expectedError, + }; +} + +/** + * Test case generators for common scenarios + */ +export const testCaseGenerators = { + /** + * Generates validation error test cases + */ + validationErrors: () => [ + createValidationTestCase( + 'should throw error when no events provided', + () => {}, + 'No events provided to fetch_space_folders function' + ), + createValidationTestCase( + 'should throw error when event is missing payload', + (event: FunctionInput) => delete (event as any).payload, + 'Invalid event: missing payload' + ), + createValidationTestCase( + 'should throw error when event is missing connection_data', + (event: FunctionInput) => delete (event as any).payload.connection_data, + 'Invalid event: missing connection_data in payload' + ), + createValidationTestCase( + 'should throw error when event is missing API key', + (event: FunctionInput) => delete (event as any).payload.connection_data.key, + 'Invalid event: missing API key in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing org_id (space ID)', + (event: FunctionInput) => delete (event as any).payload.connection_data.org_id, + 'Invalid event: missing org_id (space ID) in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing execution_metadata', + (event: FunctionInput) => delete (event as any).execution_metadata, + 'Invalid event: missing execution_metadata' + ), + ], + + /** + * Generates API error test cases + */ + apiErrors: () => [ + createErrorTestCase( + 'should return error response for authentication failure (401)', + 401, + 'Authentication failed: Invalid or expired API key' + ), + createErrorTestCase( + 'should return error response for forbidden access (403)', + 403, 
+ 'Authentication failed: Access forbidden' + ), + createErrorTestCase( + 'should handle rate limiting (429) correctly', + 429, + 'Rate limit exceeded. Retry after 49 seconds.', + 49 + ), + createErrorTestCase( + 'should handle network errors', + 0, + 'Network error: Unable to reach Wrike API' + ), + createErrorTestCase( + 'should handle not found error (404)', + 404, + 'Wrike API error: Space not found' + ), + ], +}; \ No newline at end of file diff --git a/build/src/functions/fetch_space_folders/index.test.ts b/build/src/functions/fetch_space_folders/index.test.ts new file mode 100644 index 0000000..d4696fd --- /dev/null +++ b/build/src/functions/fetch_space_folders/index.test.ts @@ -0,0 +1,14 @@ +import run, { FetchSpaceFoldersResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createFetchSpaceFoldersTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('fetch_space_folders function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createFetchSpaceFoldersTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/fetch_space_folders/index.ts b/build/src/functions/fetch_space_folders/index.ts new file mode 100644 index 0000000..e98deb9 --- /dev/null +++ b/build/src/functions/fetch_space_folders/index.ts @@ -0,0 +1,115 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeFolder } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for fetch_space_folders function + */ +export interface FetchSpaceFoldersResponse { + status: 'success' | 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + space_id: string; + folder_count: number; + function_name: string; + request_id: string; + }; + data?: WrikeFolder[]; + 
timestamp: string; +} + +/** + * Fetch space folders function that retrieves folders from a Wrike space. + * Makes a request to /spaces/{spaceId}/folders endpoint. + * + * @param events - Array of function input events + * @returns Object containing folders data with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to fetch_space_folders function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.payload.connection_data.org_id) { + throw new Error('Invalid event: missing org_id (space ID) in connection_data'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Extract API key and space ID from event + const apiKey = event.payload.connection_data.key; + const spaceId = event.payload.connection_data.org_id; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getFolders endpoint + const response = await wrikeClient.getFolders(spaceId); + + // Determine if request was successful + const success = response.status_code === 200 && response.data !== undefined; + + // Build response + const fetchFoldersResponse: FetchSpaceFoldersResponse = { + status: success ? 
'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + space_id: spaceId, + folder_count: response.data?.length || 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: response.data, + timestamp: new Date().toISOString(), + }; + + return fetchFoldersResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + space_id: spaceId, + folder_count: 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.test.case-generators.ts b/build/src/functions/fetch_task_attachments/index.test.case-generators.ts new file mode 100644 index 0000000..87c7e81 --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.test.case-generators.ts @@ -0,0 +1,178 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockAttachments, + createSuccessResponse, + createEmptyResponse, +} from './index.test.helpers'; + +/** + * Test case generators for fetch_task_attachments function. + * This file contains reusable test case generation logic. 
+ */ + +/** + * Generates a test case for successful attachment fetching + */ +export function generateSuccessTestCase() { + return { + description: 'should return success response with attachments', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + return mockGetTaskAttachments; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTaskAttachments?: jest.Mock) => { + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully fetched attachments from Wrike task'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.task_id).toBe('IEACW7SVKQOKD5EG'); + expect(result.metadata.attachment_count).toBe(2); + expect(result.metadata.function_name).toBe('fetch_task_attachments'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(mockAttachments); + expect(result.timestamp).toBeDefined(); + + if (mockGetTaskAttachments) { + expect(mockGetTaskAttachments).toHaveBeenCalledTimes(1); + expect(mockGetTaskAttachments).toHaveBeenCalledWith('IEACW7SVKQOKD5EG'); + } + }, + }; +} + +/** + * Generates a test case for empty attachment list + */ +export function generateEmptyAttachmentListTestCase() { + return { + description: 'should return success response with empty attachment list', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(createEmptyResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + return mockGetTaskAttachments; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTaskAttachments?: jest.Mock) => { + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + 
expect(result.metadata.attachment_count).toBe(0); + expect(result.data).toEqual([]); + }, + }; +} + +/** + * Generates test events for multiple event processing test + */ +export function generateMultipleEventsTestCase() { + const event1 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE1', + org_name: 'Space 1', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'task-1', + external_sync_unit_id: 'TASK1', + external_sync_unit_name: 'Task 1', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'request-1', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + execution_metadata: { + request_id: 'request-1', + function_name: 'fetch_task_attachments', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const event2 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE2', + org_name: 'Space 2', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'task-2', + external_sync_unit_id: 'TASK2', + external_sync_unit_name: 'Task 2', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'request-2', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + 
sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + execution_metadata: { + request_id: 'request-2', + function_name: 'fetch_task_attachments', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + return { + description: 'should process only the first event when multiple events provided', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + return mockGetTaskAttachments; + }, + events: [event1, event2], + assertions: (result: any, mockGetTaskAttachments?: jest.Mock) => { + expect(result.metadata.request_id).toBe('request-1'); + expect(result.metadata.task_id).toBe('TASK1'); + + if (mockGetTaskAttachments) { + expect(mockGetTaskAttachments).toHaveBeenCalledTimes(1); + expect(mockGetTaskAttachments).toHaveBeenCalledWith('TASK1'); + } + }, + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.test.cases.ts b/build/src/functions/fetch_task_attachments/index.test.cases.ts new file mode 100644 index 0000000..23fd553 --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.test.cases.ts @@ -0,0 +1,112 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + createSuccessResponse, +} from './index.test.helpers'; +import { testCaseGenerators } from './index.test.shared'; +import { + generateSuccessTestCase, + generateEmptyAttachmentListTestCase, + generateMultipleEventsTestCase, +} from './index.test.case-generators'; + +export function createFetchTaskAttachmentsTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + // Success test 
case + const successTestCase = generateSuccessTestCase(); + it(successTestCase.description, async () => { + const mockGetTaskAttachments = successTestCase.setup(WrikeClientMock); + const result = await runFunction([successTestCase.event]); + successTestCase.assertions(result, mockGetTaskAttachments); + }); + + // Empty attachment list test case + const emptyAttachmentListTestCase = generateEmptyAttachmentListTestCase(); + it(emptyAttachmentListTestCase.description, async () => { + const mockGetTaskAttachments = emptyAttachmentListTestCase.setup(WrikeClientMock); + const result = await runFunction([emptyAttachmentListTestCase.event]); + emptyAttachmentListTestCase.assertions(result, mockGetTaskAttachments); + }); + + // API Error Tests + const apiErrorCases = testCaseGenerators.apiErrors(); + apiErrorCases.forEach(({ description, mockResponse, assertions }) => { + it(description, async () => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(mockResponse); + + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + assertions(result); + }); + }); + + // Multiple events test case + const multipleEventsTestCase = generateMultipleEventsTestCase(); + it(multipleEventsTestCase.description, async () => { + const mockGetTaskAttachments = multipleEventsTestCase.setup(WrikeClientMock); + const result = await runFunction(multipleEventsTestCase.events); + multipleEventsTestCase.assertions(result, mockGetTaskAttachments); + }); + + // Validation Error Tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to fetch_task_attachments function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow( + 'No events provided to fetch_task_attachments function' + ); + }); + + it('should 
throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow( + 'No events provided to fetch_task_attachments function' + ); + }); + + const validationCases = testCaseGenerators.validationErrors().slice(1); // Skip first case (no events) + validationCases.forEach(({ description, eventModifier, expectedError }) => { + it(description, async () => { + const invalidEvent = createMockEvent(); + eventModifier(invalidEvent); + + await expect(runFunction([invalidEvent])).rejects.toThrow(expectedError); + }); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetTaskAttachments = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTaskAttachments: mockGetTaskAttachments, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.test.helpers.ts b/build/src/functions/fetch_task_attachments/index.test.helpers.ts new file mode 100644 index 0000000..26ba129 --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.test.helpers.ts @@ -0,0 +1,149 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeAttachment } from '../../core/wrike-types'; + +/** + * Mock attachment data for testing + */ +export const mockAttachment: WrikeAttachment = { + id: 'IEACW7SVIYEV4HBN', + authorId: 
'IEAGS6BY', + name: 'Result from test.com', + createdDate: '2025-07-25T07:53:33Z', + version: '1', + size: 1024, + type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + url: 'https://www.wrike.com/attachments/IEACW7SVIYEV4HBN/download/Lorem Ipsum.docx', + taskId: 'IEACW7SVKQOKD5EG', + width: 100, + height: 100, +}; + +export const mockAttachments: WrikeAttachment[] = [ + mockAttachment, + { + id: 'IEACW7SVIYEV4HBO', + authorId: 'NVJKSNJK', + name: 'Second Document.pdf', + createdDate: '2025-07-26T10:00:00Z', + version: '1', + size: 2048, + type: 'application/pdf', + url: 'https://www.wrike.com/attachments/IEACW7SVIYEV4HBO/download/Second Document.pdf', + taskId: 'IEACW7SVKQOKD5EG', + }, +]; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'IEAGS6BYI5RFMPPY', + org_name: 'Test Space', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-task', + external_sync_unit_id: 'IEACW7SVKQOKD5EG', + external_sync_unit_name: 'Test Task', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + 
service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'fetch_task_attachments', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for successful fetch + */ +export const createSuccessResponse = (attachments: WrikeAttachment[] = mockAttachments) => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched attachments from Wrike task', + data: attachments, +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for empty attachment list + */ +export const createEmptyResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched attachments from Wrike task', + data: [], +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. 
Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); + +/** + * Creates a mock WrikeClient.getTaskAttachments response for not found error + */ +export const createNotFoundResponse = () => ({ + status_code: 404, + api_delay: 0, + message: 'Wrike API error: Task not found', +}); \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.test.shared.ts b/build/src/functions/fetch_task_attachments/index.test.shared.ts new file mode 100644 index 0000000..fd2efea --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.test.shared.ts @@ -0,0 +1,142 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Shared test utilities for fetch_task_attachments tests + */ + +/** + * Creates a test case for successful API responses + */ +export function createSuccessTestCase( + description: string, + mockResponse: any, + assertions: (result: any, mockGetTaskAttachments?: jest.Mock) => void +) { + return { + description, + mockResponse, + assertions, + }; +} + +/** + * Creates a test case for API error responses + */ +export function createErrorTestCase( + description: string, + statusCode: number, + message: string, + apiDelay: number = 0 +) { + return { + description, + mockResponse: { + status_code: statusCode, + api_delay: apiDelay, + message, + }, + assertions: (result: any, mockGetTaskAttachments?: jest.Mock) => { + expect(result.status).toBe('error'); + expect(result.message).toBe(message); + expect(result.status_code).toBe(statusCode); + expect(result.api_delay).toBe(apiDelay); + expect(result.metadata.attachment_count).toBe(0); + if (statusCode !== 429) { + expect(result.data).toBeUndefined(); + } + }, + }; +} + +/** + * Creates a test case for validation errors + */ +export function 
createValidationTestCase( + description: string, + eventModifier: (event: FunctionInput) => void, + expectedError: string +) { + return { + description, + eventModifier, + expectedError, + }; +} + +/** + * Test case generators for common scenarios + */ +export const testCaseGenerators = { + /** + * Generates validation error test cases + */ + validationErrors: () => [ + createValidationTestCase( + 'should throw error when no events provided', + () => {}, + 'No events provided to fetch_task_attachments function' + ), + createValidationTestCase( + 'should throw error when event is missing payload', + (event: FunctionInput) => delete (event as any).payload, + 'Invalid event: missing payload' + ), + createValidationTestCase( + 'should throw error when event is missing connection_data', + (event: FunctionInput) => delete (event as any).payload.connection_data, + 'Invalid event: missing connection_data in payload' + ), + createValidationTestCase( + 'should throw error when event is missing API key', + (event: FunctionInput) => delete (event as any).payload.connection_data.key, + 'Invalid event: missing API key in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing event_context', + (event: FunctionInput) => delete (event as any).payload.event_context, + 'Invalid event: missing event_context in payload' + ), + createValidationTestCase( + 'should throw error when event is missing external_sync_unit_id (task ID)', + (event: FunctionInput) => delete (event as any).payload.event_context.external_sync_unit_id, + 'Invalid event: missing external_sync_unit_id in event_context' + ), + createValidationTestCase( + 'should throw error when event is missing execution_metadata', + (event: FunctionInput) => delete (event as any).execution_metadata, + 'Invalid event: missing execution_metadata' + ), + ], + + /** + * Generates API error test cases + */ + apiErrors: () => [ + createErrorTestCase( + 'should return error response for authentication 
failure (401)', + 401, + 'Authentication failed: Invalid or expired API key' + ), + createErrorTestCase( + 'should return error response for forbidden access (403)', + 403, + 'Authentication failed: Access forbidden' + ), + createErrorTestCase( + 'should handle rate limiting (429) correctly', + 429, + 'Rate limit exceeded. Retry after 49 seconds.', + 49 + ), + createErrorTestCase( + 'should handle network errors', + 0, + 'Network error: Unable to reach Wrike API' + ), + createErrorTestCase( + 'should handle not found error (404)', + 404, + 'Wrike API error: Task not found' + ), + ], +}; \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.test.ts b/build/src/functions/fetch_task_attachments/index.test.ts new file mode 100644 index 0000000..20c4750 --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.test.ts @@ -0,0 +1,14 @@ +import run, { FetchTaskAttachmentsResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createFetchTaskAttachmentsTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('fetch_task_attachments function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createFetchTaskAttachmentsTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/fetch_task_attachments/index.ts b/build/src/functions/fetch_task_attachments/index.ts new file mode 100644 index 0000000..e0dfbb3 --- /dev/null +++ b/build/src/functions/fetch_task_attachments/index.ts @@ -0,0 +1,119 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeAttachment } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for fetch_task_attachments function + */ +export interface FetchTaskAttachmentsResponse { + status: 'success' 
| 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + task_id: string; + attachment_count: number; + function_name: string; + request_id: string; + }; + data?: WrikeAttachment[]; + timestamp: string; +} + +/** + * Fetch task attachments function that retrieves attachments from a Wrike task. + * Makes a request to /tasks/{taskId}/attachments endpoint with withUrls=true. + * + * @param events - Array of function input events + * @returns Object containing attachments data with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to fetch_task_attachments function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.payload.event_context) { + throw new Error('Invalid event: missing event_context in payload'); + } + + if (!event.payload.event_context.external_sync_unit_id) { + throw new Error('Invalid event: missing external_sync_unit_id in event_context'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Extract API key and task ID from event + const apiKey = event.payload.connection_data.key; + const taskId = event.payload.event_context.external_sync_unit_id; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getTaskAttachments endpoint + const response = await wrikeClient.getTaskAttachments(taskId); + + // Determine if request was successful + const success = response.status_code === 200 && 
response.data !== undefined; + + // Build response + const fetchAttachmentsResponse: FetchTaskAttachmentsResponse = { + status: success ? 'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + task_id: taskId, + attachment_count: response.data?.length || 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: response.data, + timestamp: new Date().toISOString(), + }; + + return fetchAttachmentsResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + task_id: taskId, + attachment_count: 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.test.case-generators.ts b/build/src/functions/fetch_task_comments/index.test.case-generators.ts new file mode 100644 index 0000000..1e2b328 --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.test.case-generators.ts @@ -0,0 +1,178 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockComments, + createSuccessResponse, + createEmptyResponse, +} from './index.test.helpers'; + +/** + * Test case generators for fetch_task_comments function. + * This file contains reusable test case generation logic. 
+ */ + +/** + * Generates a test case for successful comment fetching + */ +export function generateSuccessTestCase() { + return { + description: 'should return success response with comments', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskComments = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + return mockGetTaskComments; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTaskComments?: jest.Mock) => { + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully fetched comments from Wrike task'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.task_id).toBe('IEACW7SVKQOKD5EG'); + expect(result.metadata.comment_count).toBe(2); + expect(result.metadata.function_name).toBe('fetch_task_comments'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(mockComments); + expect(result.timestamp).toBeDefined(); + + if (mockGetTaskComments) { + expect(mockGetTaskComments).toHaveBeenCalledTimes(1); + expect(mockGetTaskComments).toHaveBeenCalledWith('IEACW7SVKQOKD5EG'); + } + }, + }; +} + +/** + * Generates a test case for empty comment list + */ +export function generateEmptyCommentListTestCase() { + return { + description: 'should return success response with empty comment list', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskComments = jest.fn().mockResolvedValue(createEmptyResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + return mockGetTaskComments; + }, + event: createMockEvent(), + assertions: (result: any, mockGetTaskComments?: jest.Mock) => { + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.metadata.comment_count).toBe(0); + expect(result.data).toEqual([]); + }, + }; +} 
+ +/** + * Generates test events for multiple event processing test + */ +export function generateMultipleEventsTestCase() { + const event1 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE1', + org_name: 'Space 1', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'task-1', + external_sync_unit_id: 'TASK1', + external_sync_unit_name: 'Task 1', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'request-1', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + execution_metadata: { + request_id: 'request-1', + function_name: 'fetch_task_comments', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const event2 = createMockEvent({ + payload: { + connection_data: { + org_id: 'SPACE2', + org_name: 'Space 2', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'task-2', + external_sync_unit_id: 'TASK2', + external_sync_unit_name: 'Task 2', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'request-2', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + 
sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + execution_metadata: { + request_id: 'request-2', + function_name: 'fetch_task_comments', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + return { + description: 'should process only the first event when multiple events provided', + setup: (WrikeClientMock: jest.Mock) => { + const mockGetTaskComments = jest.fn().mockResolvedValue(createSuccessResponse()); + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + return mockGetTaskComments; + }, + events: [event1, event2], + assertions: (result: any, mockGetTaskComments?: jest.Mock) => { + expect(result.metadata.request_id).toBe('request-1'); + expect(result.metadata.task_id).toBe('TASK1'); + + if (mockGetTaskComments) { + expect(mockGetTaskComments).toHaveBeenCalledTimes(1); + expect(mockGetTaskComments).toHaveBeenCalledWith('TASK1'); + } + }, + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.test.cases.ts b/build/src/functions/fetch_task_comments/index.test.cases.ts new file mode 100644 index 0000000..d06072b --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.test.cases.ts @@ -0,0 +1,112 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + createSuccessResponse, +} from './index.test.helpers'; +import { testCaseGenerators } from './index.test.shared'; +import { + generateSuccessTestCase, + generateEmptyCommentListTestCase, + generateMultipleEventsTestCase, +} from './index.test.case-generators'; + +export function createFetchTaskCommentsTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + // Success test case + const successTestCase = generateSuccessTestCase(); + it(successTestCase.description, async () => { + const mockGetTaskComments = 
successTestCase.setup(WrikeClientMock); + const result = await runFunction([successTestCase.event]); + successTestCase.assertions(result, mockGetTaskComments); + }); + + // Empty comment list test case + const emptyCommentListTestCase = generateEmptyCommentListTestCase(); + it(emptyCommentListTestCase.description, async () => { + const mockGetTaskComments = emptyCommentListTestCase.setup(WrikeClientMock); + const result = await runFunction([emptyCommentListTestCase.event]); + emptyCommentListTestCase.assertions(result, mockGetTaskComments); + }); + + // API Error Tests + const apiErrorCases = testCaseGenerators.apiErrors(); + apiErrorCases.forEach(({ description, mockResponse, assertions }) => { + it(description, async () => { + const mockGetTaskComments = jest.fn().mockResolvedValue(mockResponse); + + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + assertions(result); + }); + }); + + // Multiple events test case + const multipleEventsTestCase = generateMultipleEventsTestCase(); + it(multipleEventsTestCase.description, async () => { + const mockGetTaskComments = multipleEventsTestCase.setup(WrikeClientMock); + const result = await runFunction(multipleEventsTestCase.events); + multipleEventsTestCase.assertions(result, mockGetTaskComments); + }); + + // Validation Error Tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to fetch_task_comments function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow( + 'No events provided to fetch_task_comments function' + ); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow( + 'No events provided to fetch_task_comments function' + ); + }); + + 
const validationCases = testCaseGenerators.validationErrors().slice(1); // Skip first case (no events) + validationCases.forEach(({ description, eventModifier, expectedError }) => { + it(description, async () => { + const invalidEvent = createMockEvent(); + eventModifier(invalidEvent); + + await expect(runFunction([invalidEvent])).rejects.toThrow(expectedError); + }); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetTaskComments = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetTaskComments = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getTaskComments: mockGetTaskComments, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.test.helpers.ts b/build/src/functions/fetch_task_comments/index.test.helpers.ts new file mode 100644 index 0000000..abafc22 --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.test.helpers.ts @@ -0,0 +1,141 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeComment } from '../../core/wrike-types'; + +/** + * Mock comment data for testing + */ +export const mockComment: WrikeComment = { + id: 'IEACW7SVICOMMENT1', + authorId: 'KUANFJBJ', + text: 'This is a test comment', + createdDate: '2025-07-25T08:00:00Z', + updatedDate: '2025-07-25T08:00:00Z', + taskId: 'IEACW7SVKQOKD5EG', +}; + +export const mockComments: WrikeComment[] = [ + mockComment, + { + id: 
'IEACW7SVICOMMENT2', + authorId: 'NVJKSNJK', + text: 'This is another test comment', + createdDate: '2025-07-26T10:00:00Z', + updatedDate: '2025-07-26T10:00:00Z', + taskId: 'IEACW7SVKQOKD5EG', + }, +]; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'IEAGS6BYI5RFMPPY', + org_name: 'Test Space', + key: 'test-api-key', + key_type: 'oauth2', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-task', + external_sync_unit_id: 'IEACW7SVKQOKD5EG', + external_sync_unit_name: 'Test Task', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'fetch_task_comments', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for successful fetch + */ +export const createSuccessResponse = (comments: WrikeComment[] = mockComments) => ({ + status_code: 200, + api_delay: 0, + 
message: 'Successfully fetched comments from Wrike task', + data: comments, +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for empty comment list + */ +export const createEmptyResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched comments from Wrike task', + data: [], +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); + +/** + * Creates a mock WrikeClient.getTaskComments response for not found error + */ +export const createNotFoundResponse = () => ({ + status_code: 404, + api_delay: 0, + message: 'Wrike API error: Task not found', +}); \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.test.shared.ts b/build/src/functions/fetch_task_comments/index.test.shared.ts new file mode 100644 index 0000000..5af9624 --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.test.shared.ts @@ -0,0 +1,142 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Shared test utilities for fetch_task_comments tests + */ + +/** + * Creates a test case for successful API responses + */ +export function createSuccessTestCase( + description: string, + mockResponse: any, + assertions: (result: any, mockGetTaskComments?: jest.Mock) => void +) { + return { + description, + mockResponse, + 
assertions, + }; +} + +/** + * Creates a test case for API error responses + */ +export function createErrorTestCase( + description: string, + statusCode: number, + message: string, + apiDelay: number = 0 +) { + return { + description, + mockResponse: { + status_code: statusCode, + api_delay: apiDelay, + message, + }, + assertions: (result: any, mockGetTaskComments?: jest.Mock) => { + expect(result.status).toBe('error'); + expect(result.message).toBe(message); + expect(result.status_code).toBe(statusCode); + expect(result.api_delay).toBe(apiDelay); + expect(result.metadata.comment_count).toBe(0); + if (statusCode !== 429) { + expect(result.data).toBeUndefined(); + } + }, + }; +} + +/** + * Creates a test case for validation errors + */ +export function createValidationTestCase( + description: string, + eventModifier: (event: FunctionInput) => void, + expectedError: string +) { + return { + description, + eventModifier, + expectedError, + }; +} + +/** + * Test case generators for common scenarios + */ +export const testCaseGenerators = { + /** + * Generates validation error test cases + */ + validationErrors: () => [ + createValidationTestCase( + 'should throw error when no events provided', + () => {}, + 'No events provided to fetch_task_comments function' + ), + createValidationTestCase( + 'should throw error when event is missing payload', + (event: FunctionInput) => delete (event as any).payload, + 'Invalid event: missing payload' + ), + createValidationTestCase( + 'should throw error when event is missing connection_data', + (event: FunctionInput) => delete (event as any).payload.connection_data, + 'Invalid event: missing connection_data in payload' + ), + createValidationTestCase( + 'should throw error when event is missing API key', + (event: FunctionInput) => delete (event as any).payload.connection_data.key, + 'Invalid event: missing API key in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing event_context', + 
(event: FunctionInput) => delete (event as any).payload.event_context, + 'Invalid event: missing event_context in payload' + ), + createValidationTestCase( + 'should throw error when event is missing external_sync_unit_id (task ID)', + (event: FunctionInput) => delete (event as any).payload.event_context.external_sync_unit_id, + 'Invalid event: missing external_sync_unit_id in event_context' + ), + createValidationTestCase( + 'should throw error when event is missing execution_metadata', + (event: FunctionInput) => delete (event as any).execution_metadata, + 'Invalid event: missing execution_metadata' + ), + ], + + /** + * Generates API error test cases + */ + apiErrors: () => [ + createErrorTestCase( + 'should return error response for authentication failure (401)', + 401, + 'Authentication failed: Invalid or expired API key' + ), + createErrorTestCase( + 'should return error response for forbidden access (403)', + 403, + 'Authentication failed: Access forbidden' + ), + createErrorTestCase( + 'should handle rate limiting (429) correctly', + 429, + 'Rate limit exceeded. 
Retry after 49 seconds.', + 49 + ), + createErrorTestCase( + 'should handle network errors', + 0, + 'Network error: Unable to reach Wrike API' + ), + createErrorTestCase( + 'should handle not found error (404)', + 404, + 'Wrike API error: Task not found' + ), + ], +}; \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.test.ts b/build/src/functions/fetch_task_comments/index.test.ts new file mode 100644 index 0000000..036af95 --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.test.ts @@ -0,0 +1,14 @@ +import run, { FetchTaskCommentsResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createFetchTaskCommentsTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('fetch_task_comments function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createFetchTaskCommentsTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/fetch_task_comments/index.ts b/build/src/functions/fetch_task_comments/index.ts new file mode 100644 index 0000000..fd6a470 --- /dev/null +++ b/build/src/functions/fetch_task_comments/index.ts @@ -0,0 +1,119 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeComment } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for fetch_task_comments function + */ +export interface FetchTaskCommentsResponse { + status: 'success' | 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + task_id: string; + comment_count: number; + function_name: string; + request_id: string; + }; + data?: WrikeComment[]; + timestamp: string; +} + +/** + * Fetch task comments function that retrieves comments from a Wrike task. 
+ * Makes a request to /tasks/{taskId}/comments endpoint. + * + * @param events - Array of function input events + * @returns Object containing comments data with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to fetch_task_comments function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.payload.event_context) { + throw new Error('Invalid event: missing event_context in payload'); + } + + if (!event.payload.event_context.external_sync_unit_id) { + throw new Error('Invalid event: missing external_sync_unit_id in event_context'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Extract API key and task ID from event + const apiKey = event.payload.connection_data.key; + const taskId = event.payload.event_context.external_sync_unit_id; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getTaskComments endpoint + const response = await wrikeClient.getTaskComments(taskId); + + // Determine if request was successful + const success = response.status_code === 200 && response.data !== undefined; + + // Build response + const fetchCommentsResponse: FetchTaskCommentsResponse = { + status: success ? 
'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + task_id: taskId, + comment_count: response.data?.length || 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: response.data, + timestamp: new Date().toISOString(), + }; + + return fetchCommentsResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + task_id: taskId, + comment_count: 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/api-client.ts b/build/src/functions/fetch_tasks/api-client.ts deleted file mode 100644 index 37bf9c5..0000000 --- a/build/src/functions/fetch_tasks/api-client.ts +++ /dev/null @@ -1,93 +0,0 @@ -import axios from 'axios'; - -/** - * Interface for a Wrike task - */ -export interface WrikeTask { - id: string; - title: string; - description?: string; - status: string; - importance: string; - created_date: string; - updated_date: string; - completed_date?: string; - due_date?: string; - parent_ids: string[]; - responsible_ids?: string[]; - author_ids?: string[]; - custom_status_id?: string; - permalink?: string; -} - -/** - * Client for interacting with the Wrike API - */ -export class WrikeApiClient { - private readonly apiEndpoint: string = 'https://www.wrike.com/api/v4'; - private readonly apiKey: string; - private readonly timeout: number = 10000; - - /** - * Creates a new instance of the WrikeApiClient - * @param apiKey The Wrike API key - */ - constructor(apiKey: string) { - 
this.apiKey = apiKey; - } - - /** - * Fetches tasks for a specific project - * @param projectId The ID of the project - * @returns An array of WrikeTask objects - */ - async fetchProjectTasks(projectId: string): Promise { - const response = await axios.get(`${this.apiEndpoint}/folders/${projectId}/tasks`, { - headers: { - 'Authorization': `Bearer ${this.apiKey}` - }, - params: { - descendants: true, - subTasks: true - }, - timeout: this.timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - throw new Error(`Failed to fetch tasks with status ${response.status}`); - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - throw new Error('Invalid response format from Wrike API for tasks'); - } - - // Transform the response data into our task format - return response.data.data.map((task: any) => this.transformTaskData(task)); - } - - /** - * Transforms raw task data from the API into our WrikeTask format - * @param task Raw task data from the API - * @returns Transformed WrikeTask object - */ - private transformTaskData(task: any): WrikeTask { - return { - id: task.id, - title: task.title, - description: task.description, - status: task.status, - importance: task.importance, - created_date: task.createdDate || '', - updated_date: task.updatedDate || '', - completed_date: task.completedDate, - due_date: task.dueDate, - parent_ids: task.parentIds || [], - responsible_ids: task.responsibleIds, - author_ids: task.authorIds, - custom_status_id: task.customStatusId, - permalink: task.permalink - }; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/index.test.ts b/build/src/functions/fetch_tasks/index.test.ts deleted file mode 100644 index 086d09f..0000000 --- a/build/src/functions/fetch_tasks/index.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -// Mock axios before any imports -jest.mock('axios'); - -// Import the test utilities and helpers -import { 
createMockEvent } from './test-utils'; -import { setupAxiosMocks, setupTestEnvironment, cleanupTestEnvironment } from './test-helpers'; -import { AirdropEvent } from '@devrev/ts-adaas'; -import { run } from './index'; -import { testCases } from './test-cases'; - -describe('Fetch Tasks Function', () => { - // Set up axios mocks - const { mockGet } = setupAxiosMocks(); - - beforeEach(() => { - // Clear all mocks before each test - jest.clearAllMocks(); - - // Set up test environment - setupTestEnvironment(); - }); - - afterEach(() => { - // Clean up test environment - cleanupTestEnvironment(); - }); - - // Add a test for API call parameters verification - it('should call the Wrike API with correct parameters', async () => { - // Use the first test case which has a successful API call - const testCase = testCases[0]; - testCase.setup(mockGet); - - await run(testCase.input); - - expect(mockGet).toHaveBeenCalledWith( - 'https://www.wrike.com/api/v4/folders/IEACW7SVI4OMYFIY/tasks', - expect.objectContaining({ - headers: { - 'Authorization': 'Bearer mock-api-key' - }, - params: { - descendants: true, - subTasks: true - }, - timeout: 10000 - }) - ); - }); - - // Generate tests from test cases - testCases.forEach(testCase => { - it(testCase.name, async () => { - testCase.setup(mockGet); - - const result = await run(testCase.input); - expect(result).toEqual(testCase.expectedResult); - }); - }); -}); \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/index.ts b/build/src/functions/fetch_tasks/index.ts deleted file mode 100644 index 7df8b27..0000000 --- a/build/src/functions/fetch_tasks/index.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import axios from 'axios'; - -/** - * Interface for a Wrike task - */ -export interface WrikeTask { - id: string; - title: string; - description?: string; - status: string; - importance: string; - created_date: string; - updated_date: string; - completed_date?: string; - due_date?: 
string; - parent_ids: string[]; - responsible_ids?: string[]; - author_ids?: string[]; - custom_status_id?: string; - permalink?: string; -} - -/** - * A function that fetches the list of tasks for a specific project from Wrike API. - * - * @param events - Array of AirdropEvent objects - * @returns A response containing the list of tasks - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - tasks?: WrikeTask[], - error?: string -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - if (events.length === 0) { - throw new Error('Invalid input: events array is empty'); - } - - // Use the first event for the check - const event = events[0]; - - // Validate that the event is a valid AirdropEvent with all required fields - if (!event || typeof event !== 'object') { - throw new Error('Invalid event: event must be a valid AirdropEvent object'); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error('Invalid event: missing required field \'context\''); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error('Invalid event: missing required field \'context.secrets.service_account_token\''); - } - - if (!event.payload) { - throw new Error('Invalid event: missing required field \'payload\''); - } - - if (!event.payload.connection_data) { - throw new Error('Invalid event: missing required field \'payload.connection_data\''); - } - - if (!event.payload.connection_data.key) { - throw new Error('Invalid event: missing required field \'payload.connection_data.key\''); - } - - if (!event.payload.event_context) { - throw new Error('Invalid event: missing required field \'payload.event_context\''); - } - - if (!event.payload.event_context.external_sync_unit_id) { - throw new Error('Invalid event: missing required field 
\'payload.event_context.external_sync_unit_id\''); - } - - // Extract the Wrike API key and Project ID - const apiKey = event.payload.connection_data.key; - const projectId = event.payload.event_context.external_sync_unit_id; - - // Define the Wrike API endpoint - const wrikeApiEndpoint = 'https://www.wrike.com/api/v4'; - - // Log the attempt for debugging purposes - console.log(`Attempting to fetch tasks for project ${projectId} from Wrike API`); - - // Make a GET request to the Wrike API to get tasks for the project - // According to the Postman collection, we should use the /folders/{projectId}/tasks endpoint - const response = await axios.get(`${wrikeApiEndpoint}/folders/${projectId}/tasks`, { - headers: { - 'Authorization': `Bearer ${apiKey}` - }, - params: { - descendants: true, - subTasks: true - }, - timeout: 10000 // 10 seconds timeout - }); - - // Check if the request was successful - if (response.status !== 200) { - return { - status: 'error', - message: `Failed to fetch tasks with status ${response.status}`, - error: `Received status code ${response.status}` - }; - } - - // Process the response data - if (!response.data || !response.data.data || !Array.isArray(response.data.data)) { - return { - status: 'error', - message: 'Invalid response format from Wrike API', - error: 'Response data is not in the expected format' - }; - } - - // Transform the response data into our task format - const tasks: WrikeTask[] = response.data.data.map((task: any) => ({ - id: task.id, - title: task.title, - description: task.description, - status: task.status, - importance: task.importance, - created_date: task.createdDate || '', - updated_date: task.updatedDate || '', - completed_date: task.completedDate, - due_date: task.dueDate, - parent_ids: task.parentIds || [], - responsible_ids: task.responsibleIds, - author_ids: task.authorIds, - custom_status_id: task.customStatusId, - permalink: task.permalink - })); - - // Log the success for debugging purposes - 
console.log(`Successfully fetched ${tasks.length} tasks from Wrike API for project ${projectId}`); - - // Return a success response with the tasks - return { - status: 'success', - message: `Successfully fetched ${tasks.length} tasks from project ${projectId}`, - tasks - }; - } catch (error) { - // Log the error for debugging - console.error('Error in fetch tasks function:', error); - - // Check if the error is an Axios error with a response - if (axios.isAxiosError(error) && error.response) { - return { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: `API request failed with status ${error.response.status}: ${error.message}` - }; - } - - // Return a generic error response - return { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: error instanceof Error ? error.message : 'Unknown error occurred' - }; - } -} \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/test-cases.ts b/build/src/functions/fetch_tasks/test-cases.ts deleted file mode 100644 index 7fa5f3a..0000000 --- a/build/src/functions/fetch_tasks/test-cases.ts +++ /dev/null @@ -1,189 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import { createMockEvent, mockWrikeTasksResponse } from './test-utils'; - -export interface TestCase { - name: string; - setup: (mockGet: jest.Mock) => void; - input: AirdropEvent[]; - expectedResult: any; -} - -export const testCases: TestCase[] = [ - { - name: 'should return tasks when API call is successful', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 200, - data: mockWrikeTasksResponse - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'success', - message: 'Successfully fetched 2 tasks from project IEACW7SVI4OMYFIY', - tasks: [ - { - id: 'IEACW7SVKQZEBEUN', - title: 'Task 1', - description: 'This is task 1', - status: 'Active', - importance: 'Normal', - created_date: '2023-01-01T00:00:00Z', - updated_date: '2023-01-02T00:00:00Z', - 
completed_date: null, - due_date: '2023-01-10T00:00:00Z', - parent_ids: ['IEACW7SVI4OMYFIY'], - responsible_ids: ['KUAFY3BJ'], - author_ids: ['KUAFZBCJ'], - custom_status_id: 'ABCD1234', - permalink: 'https://www.wrike.com/open.htm?id=123456789' - }, - { - id: 'IEACW7SVKQPX4WHN', - title: 'Task 2', - description: 'This is task 2', - status: 'Completed', - importance: 'High', - created_date: '2023-02-01T00:00:00Z', - updated_date: '2023-02-02T00:00:00Z', - completed_date: '2023-02-05T00:00:00Z', - due_date: '2023-02-10T00:00:00Z', - parent_ids: ['IEACW7SVI4OMYFIY'], - responsible_ids: ['KUAFY3BJ'], - author_ids: ['KUAFZBCJ'], - custom_status_id: 'EFGH5678', - permalink: 'https://www.wrike.com/open.htm?id=987654321' - } - ] - } - }, - { - name: 'should return error when API call returns non-200 status', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 403, - data: { error: 'Forbidden' } - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks with status 403', - error: 'Received status code 403' - } - }, - { - name: 'should return error when API response format is invalid', - setup: (mockGet) => { - mockGet.mockResolvedValue({ - status: 200, - data: { invalid: 'format' } - }); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Invalid response format from Wrike API', - error: 'Response data is not in the expected format' - } - }, - { - name: 'should return error when axios throws an exception', - setup: (mockGet) => { - const axiosError = new Error('Request failed') as any; - axiosError.isAxiosError = true; - axiosError.response = { status: 401 }; - mockGet.mockRejectedValue(axiosError); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: expect.stringContaining('API request failed with status 401') - } - }, - { - name: 'should return error when axios throws a network exception', - 
setup: (mockGet) => { - const networkError = new Error('Network error'); - mockGet.mockRejectedValue(networkError); - }, - input: [createMockEvent()], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: 'Network error' - } - }, - { - name: 'should throw an error if events parameter is not an array', - setup: () => {}, - input: null as unknown as AirdropEvent[], // Use null to test non-array input - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: 'Invalid input: events must be an array' - } - }, - { - name: 'should throw an error if events array is empty', - setup: () => {}, - input: [], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: 'Invalid input: events array is empty' - } - }, - { - name: 'should throw an error if an event is missing required fields', - setup: () => {}, - input: [{ - payload: {}, - execution_metadata: {} - } as unknown as AirdropEvent], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: 'Invalid event: missing required field \'context\'' - } - }, - { - name: 'should throw an error if API key is missing', - setup: () => {}, - input: [{ - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - connection_data: { - ...createMockEvent().payload.connection_data, - key: undefined as any - } - } - }], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - error: 'Invalid event: missing required field \'payload.connection_data.key\'' - } - }, - { - name: 'should throw an error if Project ID is missing', - setup: () => {}, - input: [{ - ...createMockEvent(), - payload: { - ...createMockEvent().payload, - event_context: { - ...createMockEvent().payload.event_context, - external_sync_unit_id: undefined as any - } - } - }], - expectedResult: { - status: 'error', - message: 'Failed to fetch tasks from Wrike API', - 
error: 'Invalid event: missing required field \'payload.event_context.external_sync_unit_id\'' - } - } -]; \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/test-helpers.ts b/build/src/functions/fetch_tasks/test-helpers.ts deleted file mode 100644 index 69c4d3e..0000000 --- a/build/src/functions/fetch_tasks/test-helpers.ts +++ /dev/null @@ -1,37 +0,0 @@ -import axios from 'axios'; - -/** - * Sets up the axios mocks for testing - * @returns The mock functions that can be used in tests - */ -export function setupAxiosMocks() { - // Set up axios mock functions - const mockGet = jest.fn(); - - // Properly mock axios methods - jest.spyOn(axios, 'get').mockImplementation(mockGet); - - // Properly mock axios.isAxiosError with correct type handling - jest.spyOn(axios, 'isAxiosError').mockImplementation((error: any) => { - return error && error.isAxiosError === true; - }); - - return { mockGet }; -} - -/** - * Sets up common test environment - */ -export function setupTestEnvironment() { - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); -} - -/** - * Cleans up the test environment - */ -export function cleanupTestEnvironment() { - // Restore console mocks - jest.restoreAllMocks(); -} \ No newline at end of file diff --git a/build/src/functions/fetch_tasks/test-utils.ts b/build/src/functions/fetch_tasks/test-utils.ts deleted file mode 100644 index c27cb4e..0000000 --- a/build/src/functions/fetch_tasks/test-utils.ts +++ /dev/null @@ -1,108 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Mock for EventType from @devrev/ts-adaas -export enum EventType { - // Extraction - ExtractionExternalSyncUnitsStart = 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', - ExtractionMetadataStart = 'EXTRACTION_METADATA_START', - ExtractionDataStart = 'EXTRACTION_DATA_START', - ExtractionDataContinue = 
'EXTRACTION_DATA_CONTINUE', - ExtractionDataDelete = 'EXTRACTION_DATA_DELETE', - ExtractionAttachmentsStart = 'EXTRACTION_ATTACHMENTS_START', - ExtractionAttachmentsContinue = 'EXTRACTION_ATTACHMENTS_CONTINUE', - ExtractionAttachmentsDelete = 'EXTRACTION_ATTACHMENTS_DELETE' -} - -/** - * Helper function to create a mock AirdropEvent for testing - */ -export const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'IEACW7SVI4O6BDQE', - org_name: 'mock-org-name', - key: 'mock-api-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'IEACW7SVI4OMYFIY', // Example Project ID from Postman collection - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionDataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } -}); - -/** - * Mock Wrike API response for tasks - */ -export const mockWrikeTasksResponse = { - data: [ - { - id: 'IEACW7SVKQZEBEUN', - title: 'Task 1', - description: 'This is task 1', - status: 'Active', - importance: 
'Normal', - createdDate: '2023-01-01T00:00:00Z', - updatedDate: '2023-01-02T00:00:00Z', - completedDate: null, - dueDate: '2023-01-10T00:00:00Z', - parentIds: ['IEACW7SVI4OMYFIY'], - responsibleIds: ['KUAFY3BJ'], - authorIds: ['KUAFZBCJ'], - customStatusId: 'ABCD1234', - permalink: 'https://www.wrike.com/open.htm?id=123456789' - }, - { - id: 'IEACW7SVKQPX4WHN', - title: 'Task 2', - description: 'This is task 2', - status: 'Completed', - importance: 'High', - createdDate: '2023-02-01T00:00:00Z', - updatedDate: '2023-02-02T00:00:00Z', - completedDate: '2023-02-05T00:00:00Z', - dueDate: '2023-02-10T00:00:00Z', - parentIds: ['IEACW7SVI4OMYFIY'], - responsibleIds: ['KUAFY3BJ'], - authorIds: ['KUAFZBCJ'], - customStatusId: 'EFGH5678', - permalink: 'https://www.wrike.com/open.htm?id=987654321' - } - ] -}; \ No newline at end of file diff --git a/build/src/functions/fetch_users/index.test.cases.ts b/build/src/functions/fetch_users/index.test.cases.ts new file mode 100644 index 0000000..eaf516b --- /dev/null +++ b/build/src/functions/fetch_users/index.test.cases.ts @@ -0,0 +1,153 @@ +import { FunctionInput } from '../../core/types'; +import { + createMockEvent, + mockContacts, + createSuccessResponse, + createEmptyResponse, + createAuthFailureResponse, + createRateLimitResponse, + createNetworkErrorResponse, +} from './index.test.helpers'; +import { testCaseGenerators } from './index.test.shared'; + +export function createFetchUsersTests( + runFunction: (events: FunctionInput[]) => Promise, + WrikeClientMock: jest.Mock +) { + return () => { + it('should return success response with users', async () => { + const mockGetContacts = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully fetched 
contacts from Wrike'); + expect(result.status_code).toBe(200); + expect(result.api_delay).toBe(0); + expect(result.metadata.user_count).toBe(2); + expect(result.metadata.function_name).toBe('fetch_users'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(mockContacts); + expect(result.timestamp).toBeDefined(); + expect(mockGetContacts).toHaveBeenCalledTimes(1); + }); + + it('should return success response with empty user list', async () => { + const mockGetContacts = jest.fn().mockResolvedValue(createEmptyResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.metadata.user_count).toBe(0); + expect(result.data).toEqual([]); + }); + + // API Error Tests + const apiErrorCases = testCaseGenerators.apiErrors(); + apiErrorCases.forEach(({ description, mockResponse, assertions }) => { + it(description, async () => { + const mockGetContacts = jest.fn().mockResolvedValue(mockResponse); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + assertions(result); + }); + }); + + it('should process only the first event when multiple events provided', async () => { + const mockGetContacts = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const event1 = createMockEvent({ + execution_metadata: { + request_id: 'request-1', + function_name: 'fetch_users', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'fetch_users', + event_type: 'event-type-2', + devrev_endpoint: 
'https://api.devrev.ai/', + }, + }); + + const result = await runFunction([event1, event2]); + + expect(result.metadata.request_id).toBe('request-1'); + expect(mockGetContacts).toHaveBeenCalledTimes(1); + }); + + // Validation Error Tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to fetch_users function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow('No events provided to fetch_users function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow('No events provided to fetch_users function'); + }); + + const validationCases = testCaseGenerators.validationErrors().slice(1); // Skip first case (no events) + validationCases.forEach(({ description, eventModifier, expectedError }) => { + it(description, async () => { + const invalidEvent = createMockEvent(); + eventModifier(invalidEvent); + + await expect(runFunction([invalidEvent])).rejects.toThrow(expectedError); + }); + }); + + it('should initialize WrikeClient with correct API key', async () => { + const mockGetContacts = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const events = [createMockEvent()]; + await runFunction(events); + + expect(WrikeClientMock).toHaveBeenCalledWith({ apiKey: 'test-api-key' }); + }); + + it('should include timestamp in ISO format', async () => { + const mockGetContacts = jest.fn().mockResolvedValue(createSuccessResponse()); + + WrikeClientMock.mockImplementation(() => ({ + getContacts: mockGetContacts, + })); + + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff 
--git a/build/src/functions/fetch_users/index.test.helpers.ts b/build/src/functions/fetch_users/index.test.helpers.ts new file mode 100644 index 0000000..0566839 --- /dev/null +++ b/build/src/functions/fetch_users/index.test.helpers.ts @@ -0,0 +1,138 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeContact } from '../../core/wrike-types'; + +/** + * Mock contact data for testing + */ +export const mockContact: WrikeContact = { + id: 'KUANFJBJ', + firstName: 'Jane', + lastName: 'Smith', + type: 'Person', + profiles: [ + { + accountId: 'IEAGS6BY', + email: 'janesmith@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }, + ], + avatarUrl: 'https://www.wrike.com/avatars//EB/10/Box_ffe57373_80-49_v1.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accountant', + primaryEmail: 'janesmith@company.com', +}; + +export const mockContacts: WrikeContact[] = [ + mockContact, + { + id: 'NVJKSNJK', + firstName: 'Jack', + lastName: 'Black', + type: 'Person', + profiles: [ + { + accountId: 'IEAGS6BY', + email: 'jackblack@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }, + ], + avatarUrl: 'https://www.wrike.com/avatars//A5/A2/Box_ffafb42b_80-50_v1.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accounting Manager', + primaryEmail: 'jackblack@company.com', + }, +]; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + connection_data: { + org_id: 'test-org', + org_name: 'Test Org', + key: 'test-api-key', + key_type: 'oauth2', + }, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: 
{ + request_id: 'test-request-id', + function_name: 'fetch_users', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Creates a mock WrikeClient.getContacts response for successful fetch + */ +export const createSuccessResponse = (contacts: WrikeContact[] = mockContacts) => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched contacts from Wrike', + data: contacts, +}); + +/** + * Creates a mock WrikeClient.getContacts response for empty contact list + */ +export const createEmptyResponse = () => ({ + status_code: 200, + api_delay: 0, + message: 'Successfully fetched contacts from Wrike', + data: [], +}); + +/** + * Creates a mock WrikeClient.getContacts response for authentication failure + */ +export const createAuthFailureResponse = (statusCode: number, message: string) => ({ + status_code: statusCode, + api_delay: 0, + message, +}); + +/** + * Creates a mock WrikeClient.getContacts response for rate limiting + */ +export const createRateLimitResponse = (apiDelay: number = 49) => ({ + status_code: 429, + api_delay: apiDelay, + message: `Rate limit exceeded. 
Retry after ${apiDelay} seconds.`, +}); + +/** + * Creates a mock WrikeClient.getContacts response for network errors + */ +export const createNetworkErrorResponse = () => ({ + status_code: 0, + api_delay: 0, + message: 'Network error: Unable to reach Wrike API', +}); \ No newline at end of file diff --git a/build/src/functions/fetch_users/index.test.shared.ts b/build/src/functions/fetch_users/index.test.shared.ts new file mode 100644 index 0000000..a1aa6f4 --- /dev/null +++ b/build/src/functions/fetch_users/index.test.shared.ts @@ -0,0 +1,127 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Shared test utilities for fetch_users tests + */ + +/** + * Creates a test case for successful API responses + */ +export function createSuccessTestCase( + description: string, + mockResponse: any, + assertions: (result: any) => void +) { + return { + description, + mockResponse, + assertions, + }; +} + +/** + * Creates a test case for API error responses + */ +export function createErrorTestCase( + description: string, + statusCode: number, + message: string, + apiDelay: number = 0 +) { + return { + description, + mockResponse: { + status_code: statusCode, + api_delay: apiDelay, + message, + }, + assertions: (result: any) => { + expect(result.status).toBe('error'); + expect(result.message).toBe(message); + expect(result.status_code).toBe(statusCode); + expect(result.api_delay).toBe(apiDelay); + expect(result.metadata.user_count).toBe(0); + if (statusCode !== 429) { + expect(result.data).toBeUndefined(); + } + }, + }; +} + +/** + * Creates a test case for validation errors + */ +export function createValidationTestCase( + description: string, + eventModifier: (event: FunctionInput) => void, + expectedError: string +) { + return { + description, + eventModifier, + expectedError, + }; +} + +/** + * Test case generators for common scenarios + */ +export const testCaseGenerators = { + /** + * Generates validation error test cases + */ + validationErrors: () => [ + 
createValidationTestCase( + 'should throw error when no events provided', + () => {}, + 'No events provided to fetch_users function' + ), + createValidationTestCase( + 'should throw error when event is missing payload', + (event: FunctionInput) => delete (event as any).payload, + 'Invalid event: missing payload' + ), + createValidationTestCase( + 'should throw error when event is missing connection_data', + (event: FunctionInput) => delete (event as any).payload.connection_data, + 'Invalid event: missing connection_data in payload' + ), + createValidationTestCase( + 'should throw error when event is missing API key', + (event: FunctionInput) => delete (event as any).payload.connection_data.key, + 'Invalid event: missing API key in connection_data' + ), + createValidationTestCase( + 'should throw error when event is missing execution_metadata', + (event: FunctionInput) => delete (event as any).execution_metadata, + 'Invalid event: missing execution_metadata' + ), + ], + + /** + * Generates API error test cases + */ + apiErrors: () => [ + createErrorTestCase( + 'should return error response for authentication failure (401)', + 401, + 'Authentication failed: Invalid or expired API key' + ), + createErrorTestCase( + 'should return error response for forbidden access (403)', + 403, + 'Authentication failed: Access forbidden' + ), + createErrorTestCase( + 'should handle rate limiting (429) correctly', + 429, + 'Rate limit exceeded. 
Retry after 49 seconds.', + 49 + ), + createErrorTestCase( + 'should handle network errors', + 0, + 'Network error: Unable to reach Wrike API' + ), + ], +}; \ No newline at end of file diff --git a/build/src/functions/fetch_users/index.test.ts b/build/src/functions/fetch_users/index.test.ts new file mode 100644 index 0000000..9e33908 --- /dev/null +++ b/build/src/functions/fetch_users/index.test.ts @@ -0,0 +1,14 @@ +import run, { FetchUsersResponse } from './index'; +import { WrikeClient } from '../../core/wrike-client'; +import { createFetchUsersTests } from './index.test.cases'; + +// Mock the WrikeClient +jest.mock('../../core/wrike-client'); + +describe('fetch_users function', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('test cases', createFetchUsersTests(run, WrikeClient as jest.Mock)); +}); \ No newline at end of file diff --git a/build/src/functions/fetch_users/index.ts b/build/src/functions/fetch_users/index.ts new file mode 100644 index 0000000..9af2895 --- /dev/null +++ b/build/src/functions/fetch_users/index.ts @@ -0,0 +1,107 @@ +import { FunctionInput } from '../../core/types'; +import { WrikeClient } from '../../core/wrike-client'; +import { WrikeContact } from '../../core/wrike-types'; +import { WrikeApiError } from '../../core/wrike-error-handler'; + +/** + * Response structure for fetch_users function + */ +export interface FetchUsersResponse { + status: 'success' | 'error'; + message: string; + status_code: number; + api_delay: number; + metadata: { + user_count: number; + function_name: string; + request_id: string; + }; + data?: WrikeContact[]; + timestamp: string; +} + +/** + * Fetch users function that retrieves users (contacts of type Person) from Wrike. + * Makes a request to /contacts endpoint with types=[Person] parameter. 
+ * + * @param events - Array of function input events + * @returns Object containing users data with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to fetch_users function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.connection_data) { + throw new Error('Invalid event: missing connection_data in payload'); + } + + if (!event.payload.connection_data.key) { + throw new Error('Invalid event: missing API key in connection_data'); + } + + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Extract API key from event + const apiKey = event.payload.connection_data.key; + + // Initialize Wrike client + const wrikeClient = new WrikeClient({ apiKey }); + + try { + // Call getContacts endpoint + const response = await wrikeClient.getContacts(); + + // Determine if request was successful + const success = response.status_code === 200 && response.data !== undefined; + + // Build response + const fetchUsersResponse: FetchUsersResponse = { + status: success ? 
'success' : 'error', + message: response.message, + status_code: response.status_code, + api_delay: response.api_delay, + metadata: { + user_count: response.data?.length || 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: response.data, + timestamp: new Date().toISOString(), + }; + + return fetchUsersResponse; + } catch (error) { + // Handle WrikeApiError (including rate limiting) + if (error instanceof WrikeApiError) { + return { + status: 'error', + message: error.message, + status_code: error.statusCode, + api_delay: error.apiDelay, + metadata: { + user_count: 0, + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + timestamp: new Date().toISOString(), + }; + } + + // Re-throw other errors + throw error; + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/generate_initial_mapping/index.test.ts b/build/src/functions/generate_initial_mapping/index.test.ts deleted file mode 100644 index 7083855..0000000 --- a/build/src/functions/generate_initial_mapping/index.test.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; -import initialDomainMapping from './initial_domain_mapping.json'; - -describe('Generate Initial Mapping Function', () => { - // Helper function to create a mock AirdropEvent - const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 
'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionMetadataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }); - - beforeEach(() => { - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - // Restore console mocks - jest.restoreAllMocks(); - }); - - it('should return the Initial Domain Mapping when invoked', async () => { - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Successfully generated Initial Domain Mapping', - mapping: initialDomainMapping - }); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an error - await expect(run(invalidInput)).rejects.toThrow('Invalid input: events must be an array'); - }); - - it('should validate the structure of the Initial Domain Mapping', async () => { - // Create a mock event - const 
mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the mapping structure - expect(result.mapping).toHaveProperty('additional_mappings'); - expect(result.mapping.additional_mappings).toHaveProperty('record_type_mappings'); - - // Verify record type mappings - const recordTypeMappings = result.mapping.additional_mappings.record_type_mappings; - expect(recordTypeMappings).toHaveProperty('tasks'); - expect(recordTypeMappings).toHaveProperty('users'); - - // Verify tasks mapping - const tasksMapping = recordTypeMappings.tasks; - expect(tasksMapping).toHaveProperty('default_mapping'); - expect(tasksMapping).toHaveProperty('possible_record_type_mappings'); - expect(tasksMapping.default_mapping.object_type).toBe('ticket'); - - // Verify users mapping - const usersMapping = recordTypeMappings.users; - expect(usersMapping).toHaveProperty('default_mapping'); - expect(usersMapping).toHaveProperty('possible_record_type_mappings'); - expect(usersMapping.default_mapping.object_type).toBe('revu'); - }); -}); \ No newline at end of file diff --git a/build/src/functions/generate_initial_mapping/index.ts b/build/src/functions/generate_initial_mapping/index.ts deleted file mode 100644 index 3754a60..0000000 --- a/build/src/functions/generate_initial_mapping/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import initialDomainMapping from './initial_domain_mapping.json'; - -/** - * A function that generates and returns the Initial Domain Mapping JSON object. 
- * - * @param events - Array of AirdropEvent objects - * @returns The Initial Domain Mapping JSON object - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - mapping: any -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - // Log the event for debugging purposes - console.log('Generate initial mapping function invoked'); - - // Return the Initial Domain Mapping - return { - status: 'success', - message: 'Successfully generated Initial Domain Mapping', - mapping: initialDomainMapping - }; - } catch (error) { - // Log the error for debugging - console.error('Error in generate initial mapping function:', error); - - // Re-throw the error to be handled by the caller - throw error; - } -} \ No newline at end of file diff --git a/build/src/functions/generate_initial_mapping/initial_domain_mapping.json b/build/src/functions/generate_initial_mapping/initial_domain_mapping.json deleted file mode 100644 index 21d6861..0000000 --- a/build/src/functions/generate_initial_mapping/initial_domain_mapping.json +++ /dev/null @@ -1,221 +0,0 @@ -{ - "additional_mappings": { - "record_type_mappings": { - "tasks": { - "default_mapping": { - "object_type": "ticket", - "object_category": "stock" - }, - "possible_record_type_mappings": [ - { - "devrev_leaf_type": "ticket", - "forward": true, - "reverse": true, - "shard": { - "mode": "create_shard", - "devrev_leaf_type": { - "object_type": "ticket", - "object_category": "stock" - }, - "stock_field_mappings": { - "title": { - "forward": true, - "reverse": true, - "primary_external_field": "title", - "transformation_method_for_set": { - "transformation_method": "use_directly" - } - }, - "body": { - "forward": true, - "reverse": true, - "primary_external_field": "description", - "transformation_method_for_set": { - "transformation_method": "use_rich_text" - } - }, - "stage": { - 
"forward": true, - "reverse": true, - "primary_external_field": "status", - "transformation_method_for_set": { - "transformation_method": "map_enum", - "forward": { - "Active": { - "value": "work_in_progress" - }, - "Completed": { - "value": "resolved" - }, - "Deferred": { - "value": "awaiting_customer_response" - }, - "Cancelled": { - "value": "canceled" - } - }, - "reverse": { - "work_in_progress": { - "value": "Active" - }, - "resolved": { - "value": "Completed" - }, - "awaiting_customer_response": { - "value": "Deferred" - }, - "canceled": { - "value": "Cancelled" - }, - "awaiting_development": { - "value": "Active" - }, - "awaiting_product_assist": { - "value": "Active" - }, - "queued": { - "value": "Active" - }, - "in_development": { - "value": "Active" - } - } - }, - "fallback": { - "type": "enum", - "value": "work_in_progress" - } - }, - "severity": { - "forward": true, - "reverse": true, - "primary_external_field": "importance", - "transformation_method_for_set": { - "transformation_method": "map_enum", - "forward": { - "High": { - "value": "high" - }, - "Normal": { - "value": "medium" - }, - "Low": { - "value": "low" - } - }, - "reverse": { - "high": { - "value": "High" - }, - "medium": { - "value": "Normal" - }, - "low": { - "value": "Low" - }, - "blocker": { - "value": "High" - } - } - }, - "fallback": { - "type": "enum", - "value": "medium" - } - }, - "item_url_field": { - "forward": true, - "reverse": false, - "primary_external_field": "permalink", - "transformation_method_for_set": { - "transformation_method": "use_directly" - } - }, - "owned_by_ids": { - "forward": true, - "reverse": true, - "primary_external_field": "responsible_ids", - "transformation_method_for_set": { - "transformation_method": "use_directly" - } - } - } - } - } - ], - "mapping_as_custom_object": { - "forward": true, - "reverse": false, - "shard": { - "mode": "create_shard", - "devrev_leaf_type": { - "object_type": "custom_object", - "object_category": "fresh_custom" - } - } - } 
- }, - "users": { - "default_mapping": { - "object_type": "revu", - "object_category": "stock" - }, - "possible_record_type_mappings": [ - { - "devrev_leaf_type": "revu", - "forward": true, - "reverse": true, - "shard": { - "mode": "create_shard", - "devrev_leaf_type": { - "object_type": "revu", - "object_category": "stock" - }, - "stock_field_mappings": { - "display_name": { - "forward": true, - "reverse": true, - "primary_external_field": "first_name", - "transformation_method_for_set": { - "transformation_method": "use_directly" - }, - "fallback": { - "type": "text", - "value": "Unknown User" - } - }, - "full_name": { - "forward": true, - "reverse": true, - "primary_external_field": "first_name", - "transformation_method_for_set": { - "transformation_method": "use_directly" - } - }, - "email": { - "forward": true, - "reverse": true, - "primary_external_field": "email", - "transformation_method_for_set": { - "transformation_method": "use_directly" - } - } - } - } - } - ], - "mapping_as_custom_object": { - "forward": true, - "reverse": false, - "shard": { - "mode": "create_shard", - "devrev_leaf_type": { - "object_type": "custom_object", - "object_category": "fresh_custom" - } - } - } - } - } - } -} \ No newline at end of file diff --git a/build/src/functions/generate_metadata/external_domain_metadata.json b/build/src/functions/generate_metadata/external_domain_metadata.json deleted file mode 100644 index dd506c6..0000000 --- a/build/src/functions/generate_metadata/external_domain_metadata.json +++ /dev/null @@ -1,254 +0,0 @@ -{ - "schema_version": "v0.2.0", - "record_types": { - "tasks": { - "name": "Task", - "description": "A Wrike task", - "fields": { - "id": { - "name": "ID", - "type": "text", - "is_required": true, - "is_identifier": true, - "is_indexed": true - }, - "title": { - "name": "Title", - "type": "text", - "is_required": true, - "is_indexed": true - }, - "description": { - "name": "Description", - "type": "rich_text", - "is_required": false - }, - 
"status": { - "name": "Status", - "type": "enum", - "is_required": true, - "enum": { - "values": [ - { - "key": "Active", - "name": "Active" - }, - { - "key": "Completed", - "name": "Completed" - }, - { - "key": "Deferred", - "name": "Deferred" - }, - { - "key": "Cancelled", - "name": "Cancelled" - } - ] - } - }, - "importance": { - "name": "Importance", - "type": "enum", - "is_required": true, - "enum": { - "values": [ - { - "key": "High", - "name": "High" - }, - { - "key": "Normal", - "name": "Normal" - }, - { - "key": "Low", - "name": "Low" - } - ] - } - }, - "created_date": { - "name": "Created Date", - "type": "timestamp", - "is_required": true, - "is_indexed": true - }, - "updated_date": { - "name": "Updated Date", - "type": "timestamp", - "is_required": true, - "is_indexed": true - }, - "completed_date": { - "name": "Completed Date", - "type": "timestamp", - "is_required": false - }, - "due_date": { - "name": "Due Date", - "type": "timestamp", - "is_required": false - }, - "parent_ids": { - "name": "Parent IDs", - "type": "text", - "is_required": false, - "collection": { - "min_length": 0 - } - }, - "responsible_ids": { - "name": "Responsible Users", - "type": "reference", - "is_required": false, - "reference": { - "refers_to": { - "#record:users": {} - } - }, - "collection": { - "min_length": 0 - } - }, - "author_ids": { - "name": "Authors", - "type": "reference", - "is_required": false, - "reference": { - "refers_to": { - "#record:users": {} - } - }, - "collection": { - "min_length": 0 - } - }, - "custom_status_id": { - "name": "Custom Status ID", - "type": "text", - "is_required": false - }, - "permalink": { - "name": "Permalink", - "type": "text", - "is_required": false - } - }, - "stage_diagram": { - "controlling_field": "status", - "starting_stage": "Active", - "all_transitions_allowed": false, - "stages": { - "Active": { - "transitions_to": ["Completed", "Deferred", "Cancelled"], - "state": "open" - }, - "Completed": { - "transitions_to": ["Active"], 
- "state": "closed" - }, - "Deferred": { - "transitions_to": ["Active", "Completed", "Cancelled"], - "state": "in_progress" - }, - "Cancelled": { - "transitions_to": ["Active"], - "state": "closed" - } - }, - "states": { - "open": { - "name": "Open" - }, - "in_progress": { - "name": "In Progress" - }, - "closed": { - "name": "Closed", - "is_end_state": true - } - } - } - }, - "users": { - "name": "User", - "description": "A Wrike user/contact", - "fields": { - "id": { - "name": "ID", - "type": "text", - "is_required": true, - "is_identifier": true, - "is_indexed": true - }, - "first_name": { - "name": "First Name", - "type": "text", - "is_required": true, - "is_indexed": true - }, - "last_name": { - "name": "Last Name", - "type": "text", - "is_required": true, - "is_indexed": true - }, - "type": { - "name": "Type", - "type": "enum", - "is_required": true, - "enum": { - "values": [ - { - "key": "Person", - "name": "Person" - }, - { - "key": "Group", - "name": "Group" - } - ] - } - }, - "email": { - "name": "Email", - "type": "text", - "is_required": false, - "is_indexed": true - }, - "title": { - "name": "Title", - "type": "text", - "is_required": false - }, - "company_name": { - "name": "Company Name", - "type": "text", - "is_required": false - }, - "phone": { - "name": "Phone", - "type": "text", - "is_required": false - }, - "location": { - "name": "Location", - "type": "text", - "is_required": false - }, - "is_deleted": { - "name": "Is Deleted", - "type": "bool", - "is_required": false - }, - "me": { - "name": "Is Current User", - "type": "bool", - "is_required": false - } - } - } - } -} \ No newline at end of file diff --git a/build/src/functions/generate_metadata/index.test.ts b/build/src/functions/generate_metadata/index.test.ts deleted file mode 100644 index 28c4161..0000000 --- a/build/src/functions/generate_metadata/index.test.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; 
-import externalDomainMetadata from './external_domain_metadata.json'; - -describe('Generate Metadata Function', () => { - // Helper function to create a mock AirdropEvent - const createMockEvent = (): AirdropEvent => ({ - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionMetadataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }); - - beforeEach(() => { - // Mock console.log and console.error to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - }); - - afterEach(() => { - // Restore console mocks - jest.restoreAllMocks(); - }); - - it('should return the External Domain Metadata when invoked', async () => { - // Create a mock event - const mockEvent = createMockEvent(); 
- - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Successfully generated External Domain Metadata', - metadata: externalDomainMetadata - }); - }); - - it('should throw an error if events parameter is not an array', async () => { - // Call the function with invalid input - const invalidInput = null as unknown as AirdropEvent[]; - - // Expect the function to throw an error - await expect(run(invalidInput)).rejects.toThrow('Invalid input: events must be an array'); - }); - - it('should validate the structure of the External Domain Metadata', async () => { - // Create a mock event - const mockEvent = createMockEvent(); - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the metadata structure - expect(result.metadata).toHaveProperty('schema_version'); - expect(result.metadata).toHaveProperty('record_types'); - - // Verify record types - expect(result.metadata.record_types).toHaveProperty('tasks'); - expect(result.metadata.record_types).toHaveProperty('users'); - - // Verify tasks record type - const tasks = result.metadata.record_types.tasks; - expect(tasks).toHaveProperty('name'); - expect(tasks).toHaveProperty('fields'); - expect(tasks).toHaveProperty('stage_diagram'); - - // Verify users record type - const users = result.metadata.record_types.users; - expect(users).toHaveProperty('name'); - expect(users).toHaveProperty('fields'); - }); -}); \ No newline at end of file diff --git a/build/src/functions/generate_metadata/index.ts b/build/src/functions/generate_metadata/index.ts deleted file mode 100644 index 61d3979..0000000 --- a/build/src/functions/generate_metadata/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; -import externalDomainMetadata from './external_domain_metadata.json'; - -/** - * A function that generates and returns the External Domain Metadata 
JSON object. - * - * @param events - Array of AirdropEvent objects - * @returns The External Domain Metadata JSON object - */ -export async function run(events: AirdropEvent[]): Promise<{ - status: string, - message: string, - metadata: any -}> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - // Log the event for debugging purposes - console.log('Generate metadata function invoked'); - - // Return the External Domain Metadata - return { - status: 'success', - message: 'Successfully generated External Domain Metadata', - metadata: externalDomainMetadata - }; - } catch (error) { - // Log the error for debugging - console.error('Error in generate metadata function:', error); - - // Re-throw the error to be handled by the caller - throw error; - } -} \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.assertions.ts b/build/src/functions/get_external_domain_metadata/index.test.assertions.ts new file mode 100644 index 0000000..03e599a --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.assertions.ts @@ -0,0 +1,146 @@ +import { ExternalDomainMetadata } from './index'; + +/** + * Assertion helpers for get_external_domain_metadata tests. + * These functions encapsulate common validation logic to reduce duplication. 
+ */ + +/** + * Asserts that a field has the expected properties + */ +export function assertFieldProperties( + field: any, + expectedType: string, + expectedName: string, + expectedIsRequired: boolean +): void { + expect(field).toBeDefined(); + expect(field.type).toBe(expectedType); + expect(field.name).toBe(expectedName); + expect(field.is_required).toBe(expectedIsRequired); +} + +/** + * Asserts that the users record type has all expected fields + */ +export function assertUsersRecordType(metadata: ExternalDomainMetadata): void { + expect(metadata.record_types).toBeDefined(); + expect(metadata.record_types.users).toBeDefined(); + expect(metadata.record_types.users.name).toBe('Users'); + + const fields = metadata.record_types.users.fields; + expect(Object.keys(fields)).toHaveLength(3); + expect(fields).toHaveProperty('full_name'); + expect(fields).toHaveProperty('email'); + expect(fields).toHaveProperty('title'); +} + +/** + * Asserts that the comments record type has all expected fields + */ +export function assertCommentsRecordType(metadata: ExternalDomainMetadata): void { + expect(metadata.record_types).toBeDefined(); + expect(metadata.record_types.comments).toBeDefined(); + expect(metadata.record_types.comments.name).toBe('Comments'); + + const fields = metadata.record_types.comments.fields; + expect(Object.keys(fields)).toHaveLength(3); + expect(fields).toHaveProperty('text'); + expect(fields).toHaveProperty('author_id'); + expect(fields).toHaveProperty('task_id'); +} + +/** + * Asserts that the tasks record type has all expected fields + */ +export function assertTasksRecordType(metadata: ExternalDomainMetadata): void { + expect(metadata.record_types).toBeDefined(); + expect(metadata.record_types.tasks).toBeDefined(); + expect(metadata.record_types.tasks.name).toBe('Tasks'); + + const fields = metadata.record_types.tasks.fields; + expect(Object.keys(fields)).toHaveLength(5); + expect(fields).toHaveProperty('title'); + 
expect(fields).toHaveProperty('description'); + expect(fields).toHaveProperty('status'); + expect(fields).toHaveProperty('permalink'); + expect(fields).toHaveProperty('responsible_ids'); +} + +/** + * Asserts that the status enum field has correct structure and values + */ +export function assertStatusEnumField(status: any): void { + expect(status).toBeDefined(); + expect(status.type).toBe('enum'); + expect(status.name).toBe('Status'); + expect(status.is_required).toBe(true); + expect(status.enum).toBeDefined(); + expect(status.enum.values).toBeDefined(); + expect(status.enum.values.length).toBe(4); + + const statusValues = status.enum.values; + expect(statusValues).toContainEqual({ key: 'active', name: 'Active' }); + expect(statusValues).toContainEqual({ key: 'completed', name: 'Completed' }); + expect(statusValues).toContainEqual({ key: 'deferred', name: 'Deferred' }); + expect(statusValues).toContainEqual({ key: 'cancelled', name: 'Cancelled' }); +} + +/** + * Asserts that the responsible_ids reference field has correct structure + */ +export function assertResponsibleIdsField(responsibleIds: any): void { + expect(responsibleIds).toBeDefined(); + expect(responsibleIds.type).toBe('reference'); + expect(responsibleIds.name).toBe('Responsible IDs'); + expect(responsibleIds.is_required).toBe(true); + + expect(responsibleIds.collection).toBeDefined(); + expect(responsibleIds.collection.max_length).toBe(1); + + expect(responsibleIds.reference).toBeDefined(); + expect(responsibleIds.reference.refers_to).toBeDefined(); + expect(responsibleIds.reference.refers_to['#record:users']).toBeDefined(); + expect(responsibleIds.reference.refers_to['#record:users']).toEqual({}); +} + +/** + * Asserts that the author_id reference field has correct structure + */ +export function assertAuthorIdField(authorId: any): void { + expect(authorId).toBeDefined(); + expect(authorId.type).toBe('reference'); + expect(authorId.name).toBe('Author ID'); + expect(authorId.is_required).toBe(true); 
+ + expect(authorId.reference).toBeDefined(); + expect(authorId.reference.refers_to).toBeDefined(); + expect(authorId.reference.refers_to['#record:users']).toBeDefined(); + expect(authorId.reference.refers_to['#record:users']).toEqual({}); +} + +/** + * Asserts that the task_id reference field has correct structure + */ +export function assertTaskIdField(taskId: any): void { + expect(taskId).toBeDefined(); + expect(taskId.type).toBe('reference'); + expect(taskId.name).toBe('Task ID'); + expect(taskId.is_required).toBe(true); + + expect(taskId.reference).toBeDefined(); + expect(taskId.reference.refers_to).toBeDefined(); + expect(taskId.reference.refers_to['#record:tasks']).toBeDefined(); + expect(taskId.reference.refers_to['#record:tasks']).toEqual({}); +} + +/** + * Asserts that all record types exist + */ +export function assertAllRecordTypes(metadata: ExternalDomainMetadata): void { + const recordTypes = metadata.record_types; + expect(Object.keys(recordTypes)).toHaveLength(3); + expect(recordTypes).toHaveProperty('users'); + expect(recordTypes).toHaveProperty('comments'); + expect(recordTypes).toHaveProperty('tasks'); +} \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.case-generators.ts b/build/src/functions/get_external_domain_metadata/index.test.case-generators.ts new file mode 100644 index 0000000..cca128e --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.case-generators.ts @@ -0,0 +1,121 @@ +import { FunctionInput } from '../../core/types'; +import { + assertUsersRecordType, + assertCommentsRecordType, + assertTasksRecordType, + assertAllRecordTypes, +} from './index.test.assertions'; + +/** + * Test case generators for get_external_domain_metadata function. + * This file contains reusable test case generation logic to reduce duplication. 
+ */ + +/** + * Generates validation error test cases + */ +export function generateValidationErrorTests() { + return [ + { + description: 'should throw error when no events provided', + events: [], + expectedError: 'No events provided to get_external_domain_metadata function', + }, + { + description: 'should throw error when events array is null', + events: null as any, + expectedError: 'No events provided to get_external_domain_metadata function', + }, + { + description: 'should throw error when events array is undefined', + events: undefined as any, + expectedError: 'No events provided to get_external_domain_metadata function', + }, + { + description: 'should throw error when event is missing execution_metadata', + eventModifier: (event: FunctionInput) => { + delete (event as any).execution_metadata; + }, + expectedError: 'Invalid event: missing execution_metadata', + }, + ]; +} + +/** + * Generates record type structure test cases + */ +export function generateRecordTypeStructureTests() { + return [ + { + description: 'should return metadata with users record type', + assertions: (result: any) => { + assertUsersRecordType(result.data); + }, + }, + { + description: 'should return metadata with tasks record type', + assertions: (result: any) => { + assertTasksRecordType(result.data); + }, + }, + { + description: 'should return metadata with comments record type', + assertions: (result: any) => { + assertCommentsRecordType(result.data); + }, + }, + { + description: 'should return all three fields in comments record type', + assertions: (result: any) => { + assertCommentsRecordType(result.data); + }, + }, + { + description: 'should return all three fields in users record type', + assertions: (result: any) => { + assertUsersRecordType(result.data); + }, + }, + { + description: 'should return all five fields in tasks record type', + assertions: (result: any) => { + assertTasksRecordType(result.data); + }, + }, + { + description: 'should return all three record types 
(users, comments, tasks)', + assertions: (result: any) => { + assertAllRecordTypes(result.data); + }, + }, + ]; +} + +/** + * Generates naming convention test cases + */ +export function generateNamingConventionTests() { + return [ + { + description: 'should use snake_case for tasks field keys', + assertions: (result: any) => { + const fields = result.data.record_types.tasks.fields; + expect(fields).toHaveProperty('responsible_ids'); + expect(fields.responsible_ids).toHaveProperty('is_required'); + }, + }, + { + description: 'should use snake_case for all JSON keys', + assertions: (result: any) => { + // Check top-level keys + expect(result.data).toHaveProperty('schema_version'); + expect(result.data).toHaveProperty('record_types'); + + // Check field keys + const fields = result.data.record_types.users.fields; + expect(fields).toHaveProperty('full_name'); + expect(fields.full_name).toHaveProperty('is_required'); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.cases.ts b/build/src/functions/get_external_domain_metadata/index.test.cases.ts new file mode 100644 index 0000000..e4c71d5 --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.cases.ts @@ -0,0 +1,146 @@ +import { FunctionInput } from '../../core/types'; +import { createMockEvent, validateMetadataStructure } from './index.test.helpers'; +import { expectedMetadata } from './index.test.mock-data'; +import { + generateValidationErrorTests, + generateRecordTypeStructureTests, + generateNamingConventionTests, +} from './index.test.case-generators'; +import { + generateUsersFieldTests, + generateCommentsFieldTests, + generateTasksFieldTests, +} from './index.test.field-generators'; + +/** + * Test case generators for get_external_domain_metadata function + */ + +export function createGetExternalDomainMetadataTests( + runFunction: (events: FunctionInput[]) => Promise +) { + return () => { + it('should return success 
response with external domain metadata', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully generated external domain metadata'); + expect(result.metadata.function_name).toBe('get_external_domain_metadata'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.data).toEqual(expectedMetadata); + expect(result.timestamp).toBeDefined(); + }); + + it('should return metadata with correct schema version', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.data.schema_version).toBe('v0.2.0'); + }); + + it('should validate metadata structure correctly', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(validateMetadataStructure(result.data)).toBe(true); + }); + + // Users field tests + const usersFieldTests = generateUsersFieldTests(); + usersFieldTests.forEach(({ description, assertions }) => { + it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + // Comments field tests + const commentsFieldTests = generateCommentsFieldTests(); + commentsFieldTests.forEach(({ description, assertions }) => { + it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + // Tasks field tests + const tasksFieldTests = generateTasksFieldTests(); + tasksFieldTests.forEach(({ description, assertions }) => { + it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + // Record type structure tests + const recordTypeTests = generateRecordTypeStructureTests(); + recordTypeTests.forEach(({ description, assertions }) => { 
+ it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + // Naming convention tests + const namingTests = generateNamingConventionTests(); + namingTests.forEach(({ description, assertions }) => { + it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + it('should process only the first event when multiple events provided', async () => { + const event1 = createMockEvent({ + execution_metadata: { + request_id: 'request-1', + function_name: 'get_external_domain_metadata', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'get_external_domain_metadata', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const result = await runFunction([event1, event2]); + + expect(result.metadata.request_id).toBe('request-1'); + }); + + // Validation error tests + const validationTests = generateValidationErrorTests(); + validationTests.forEach((testCase) => { + if ('events' in testCase) { + it(testCase.description, async () => { + await expect(runFunction(testCase.events)).rejects.toThrow(testCase.expectedError); + }); + } else if ('eventModifier' in testCase) { + it(testCase.description, async () => { + const invalidEvent = createMockEvent(); + testCase.eventModifier(invalidEvent); + await expect(runFunction([invalidEvent])).rejects.toThrow(testCase.expectedError); + }); + } + }); + + it('should include timestamp in ISO format', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + }; +} \ No newline at end of file diff --git 
a/build/src/functions/get_external_domain_metadata/index.test.field-generators.ts b/build/src/functions/get_external_domain_metadata/index.test.field-generators.ts new file mode 100644 index 0000000..cbfca52 --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.field-generators.ts @@ -0,0 +1,148 @@ +import { + assertFieldProperties, + assertStatusEnumField, + assertResponsibleIdsField, + assertAuthorIdField, + assertTaskIdField, +} from './index.test.assertions'; + +/** + * Field-specific test case generators for get_external_domain_metadata function. + * This file contains test generators for individual fields in each record type. + */ + +/** + * Generates test cases for users record type fields + */ +export function generateUsersFieldTests() { + return [ + { + description: 'should return metadata with full_name field as required', + assertions: (result: any) => { + const fullName = result.data.record_types.users.fields.full_name; + assertFieldProperties(fullName, 'text', 'Full Name', true); + }, + }, + { + description: 'should return metadata with email field as required', + assertions: (result: any) => { + const email = result.data.record_types.users.fields.email; + assertFieldProperties(email, 'text', 'Email', true); + }, + }, + { + description: 'should return metadata with title field as optional', + assertions: (result: any) => { + const title = result.data.record_types.users.fields.title; + assertFieldProperties(title, 'text', 'Title', false); + }, + }, + ]; +} + +/** + * Generates test cases for comments record type fields + */ +export function generateCommentsFieldTests() { + return [ + { + description: 'should return metadata with comments text field as required rich_text', + assertions: (result: any) => { + const text = result.data.record_types.comments.fields.text; + assertFieldProperties(text, 'rich_text', 'Text', true); + }, + }, + { + description: 'should return metadata with comments author_id field as required reference', 
+ assertions: (result: any) => { + const authorId = result.data.record_types.comments.fields.author_id; + assertAuthorIdField(authorId); + }, + }, + { + description: 'should return metadata with comments author_id referring to users record type', + assertions: (result: any) => { + const authorId = result.data.record_types.comments.fields.author_id; + assertAuthorIdField(authorId); + }, + }, + { + description: 'should return metadata with comments task_id field as required reference', + assertions: (result: any) => { + const taskId = result.data.record_types.comments.fields.task_id; + assertTaskIdField(taskId); + }, + }, + { + description: 'should return metadata with comments task_id referring to tasks record type', + assertions: (result: any) => { + const taskId = result.data.record_types.comments.fields.task_id; + assertTaskIdField(taskId); + }, + }, + ]; +} + +/** + * Generates test cases for tasks record type fields + */ +export function generateTasksFieldTests() { + return [ + { + description: 'should return metadata with tasks title field as required', + assertions: (result: any) => { + const taskTitle = result.data.record_types.tasks.fields.title; + assertFieldProperties(taskTitle, 'text', 'Title', true); + }, + }, + { + description: 'should return metadata with tasks description field as required rich_text', + assertions: (result: any) => { + const description = result.data.record_types.tasks.fields.description; + assertFieldProperties(description, 'rich_text', 'Description', true); + }, + }, + { + description: 'should return metadata with tasks status field as required enum', + assertions: (result: any) => { + const status = result.data.record_types.tasks.fields.status; + assertStatusEnumField(status); + }, + }, + { + description: 'should return metadata with tasks status enum values', + assertions: (result: any) => { + const status = result.data.record_types.tasks.fields.status; + assertStatusEnumField(status); + }, + }, + { + description: 'should return 
metadata with tasks permalink field as required', + assertions: (result: any) => { + const permalink = result.data.record_types.tasks.fields.permalink; + assertFieldProperties(permalink, 'text', 'URL', true); + }, + }, + { + description: 'should return metadata with tasks responsible_ids field as required reference', + assertions: (result: any) => { + const responsibleIds = result.data.record_types.tasks.fields.responsible_ids; + assertResponsibleIdsField(responsibleIds); + }, + }, + { + description: 'should return metadata with tasks responsible_ids as collection with max_length 1', + assertions: (result: any) => { + const responsibleIds = result.data.record_types.tasks.fields.responsible_ids; + assertResponsibleIdsField(responsibleIds); + }, + }, + { + description: 'should return metadata with tasks responsible_ids referring to users record type', + assertions: (result: any) => { + const responsibleIds = result.data.record_types.tasks.fields.responsible_ids; + assertResponsibleIdsField(responsibleIds); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.helpers.ts b/build/src/functions/get_external_domain_metadata/index.test.helpers.ts new file mode 100644 index 0000000..0d6c55f --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.helpers.ts @@ -0,0 +1,163 @@ +import { FunctionInput } from '../../core/types'; +import { ExternalDomainMetadata } from './index'; + +/** + * Creates a mock FunctionInput event for testing + */ +export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: {}, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'get_external_domain_metadata', + 
event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); + +/** + * Validates the structure of external domain metadata + */ +export function validateMetadataStructure(metadata: ExternalDomainMetadata): boolean { + // Check schema version + if (metadata.schema_version !== 'v0.2.0') { + return false; + } + + // Check record_types exists + if (!metadata.record_types || !metadata.record_types.users || !metadata.record_types.comments || !metadata.record_types.tasks) { + return false; + } + + const users = metadata.record_types.users; + + // Check users name + if (users.name !== 'Users') { + return false; + } + + // Check fields exist + if (!users.fields || !users.fields.full_name || !users.fields.email || !users.fields.title) { + return false; + } + + // Check full_name field + const fullName = users.fields.full_name; + if (fullName.type !== 'text' || fullName.name !== 'Full Name' || fullName.is_required !== true) { + return false; + } + + // Check email field + const email = users.fields.email; + if (email.type !== 'text' || email.name !== 'Email' || email.is_required !== true) { + return false; + } + + // Check title field + const title = users.fields.title; + if (title.type !== 'text' || title.name !== 'Title' || title.is_required !== false) { + return false; + } + + // Check comments record type + const comments = metadata.record_types.comments; + + // Check comments name + if (comments.name !== 'Comments') { + return false; + } + + // Check comments fields exist + if (!comments.fields || !comments.fields.text || !comments.fields.author_id || !comments.fields.task_id) { + return false; + } + + // Check text field + const text = comments.fields.text; + if (text.type !== 'rich_text' || text.name !== 'Text' || text.is_required !== true) { + return false; + } + + // Check author_id field + const authorId = comments.fields.author_id; + if (authorId.type !== 
'reference' || authorId.name !== 'Author ID' || authorId.is_required !== true) { + return false; + } + + // Check task_id field + const taskId = comments.fields.task_id; + if (taskId.type !== 'reference' || taskId.name !== 'Task ID' || taskId.is_required !== true) { + return false; + } + + // Check tasks record type + const tasks = metadata.record_types.tasks; + + // Check tasks name + if (tasks.name !== 'Tasks') { + return false; + } + + // Check tasks fields exist + if (!tasks.fields || !tasks.fields.title || !tasks.fields.description || + !tasks.fields.status || !tasks.fields.permalink || !tasks.fields.responsible_ids) { + return false; + } + + // Check title field + const taskTitle = tasks.fields.title; + if (taskTitle.type !== 'text' || taskTitle.name !== 'Title' || taskTitle.is_required !== true) { + return false; + } + + // Check description field + const description = tasks.fields.description; + if (description.type !== 'rich_text' || description.name !== 'Description' || description.is_required !== true) { + return false; + } + + // Check status field + const status = tasks.fields.status; + if (status.type !== 'enum' || status.name !== 'Status' || status.is_required !== true) { + return false; + } + if (!status.enum || !status.enum.values || status.enum.values.length !== 4) { + return false; + } + + // Check permalink field + const permalink = tasks.fields.permalink; + if (permalink.type !== 'text' || permalink.name !== 'URL' || permalink.is_required !== true) { + return false; + } + + // Check responsible_ids field + const responsibleIds = tasks.fields.responsible_ids; + if (responsibleIds.type !== 'reference' || responsibleIds.name !== 'Responsible IDs' || + responsibleIds.is_required !== true) { + return false; + } + if (!responsibleIds.collection || responsibleIds.collection.max_length !== 1) { + return false; + } + if (!responsibleIds.reference || !responsibleIds.reference.refers_to || + !responsibleIds.reference.refers_to['#record:users']) { + return 
false; + } + + return true; +} \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.mock-data.ts b/build/src/functions/get_external_domain_metadata/index.test.mock-data.ts new file mode 100644 index 0000000..2027a03 --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.mock-data.ts @@ -0,0 +1,106 @@ +import { ExternalDomainMetadata } from './index'; + +/** + * Expected external domain metadata structure for testing + */ +export const expectedMetadata: ExternalDomainMetadata = { + schema_version: 'v0.2.0', + record_types: { + users: { + name: 'Users', + fields: { + full_name: { + type: 'text', + name: 'Full Name', + is_required: true, + }, + email: { + type: 'text', + name: 'Email', + is_required: true, + }, + title: { + type: 'text', + name: 'Title', + is_required: false, + }, + }, + }, + comments: { + name: 'Comments', + fields: { + text: { + type: 'rich_text', + name: 'Text', + is_required: true, + }, + author_id: { + type: 'reference', + name: 'Author ID', + is_required: true, + reference: { + refers_to: { + '#record:users': {}, + }, + }, + }, + task_id: { + type: 'reference', + name: 'Task ID', + is_required: true, + reference: { + refers_to: { + '#record:tasks': {}, + }, + }, + }, + }, + }, + tasks: { + name: 'Tasks', + fields: { + title: { + type: 'text', + name: 'Title', + is_required: true, + }, + description: { + type: 'rich_text', + name: 'Description', + is_required: true, + }, + status: { + type: 'enum', + name: 'Status', + is_required: true, + enum: { + values: [ + { key: 'active', name: 'Active' }, + { key: 'completed', name: 'Completed' }, + { key: 'deferred', name: 'Deferred' }, + { key: 'cancelled', name: 'Cancelled' }, + ], + }, + }, + permalink: { + type: 'text', + name: 'URL', + is_required: true, + }, + responsible_ids: { + type: 'reference', + name: 'Responsible IDs', + is_required: true, + collection: { + max_length: 1, + }, + reference: { + refers_to: { + 
'#record:users': {}, + }, + }, + }, + }, + }, + }, +}; \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.test.ts b/build/src/functions/get_external_domain_metadata/index.test.ts new file mode 100644 index 0000000..d596837 --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.test.ts @@ -0,0 +1,6 @@ +import run, { GetExternalDomainMetadataResponse } from './index'; +import { createGetExternalDomainMetadataTests } from './index.test.cases'; + +describe('get_external_domain_metadata function', () => { + describe('test cases', createGetExternalDomainMetadataTests(run)); +}); \ No newline at end of file diff --git a/build/src/functions/get_external_domain_metadata/index.ts b/build/src/functions/get_external_domain_metadata/index.ts new file mode 100644 index 0000000..885ac38 --- /dev/null +++ b/build/src/functions/get_external_domain_metadata/index.ts @@ -0,0 +1,266 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Field definition in External Domain Metadata + */ +/** + * External Domain Metadata structure for users record type + */ +export interface ExternalDomainMetadata { + schema_version: string; + record_types: { + users: { + name: string; + fields: { + full_name: { + type: string; + name: string; + is_required: boolean; + }; + email: { + type: string; + name: string; + is_required: boolean; + }; + title: { + type: string; + name: string; + is_required: boolean; + }; + }; + }; + comments: { + name: string; + fields: { + text: { + type: string; + name: string; + is_required: boolean; + }; + author_id: { + type: string; + name: string; + is_required: boolean; + reference: { + refers_to: { + '#record:users': { + by_field?: string; + }; + }; + }; + }; + task_id: { + type: string; + name: string; + is_required: boolean; + reference: { + refers_to: { + '#record:tasks': { + by_field?: string; + }; + }; + }; + }; + }; + }; + tasks: { + name: string; + fields: { + title: { + type: string; + 
name: string; + is_required: boolean; + }; + description: { + type: string; + name: string; + is_required: boolean; + }; + status: { + type: string; + name: string; + is_required: boolean; + enum: { + values: Array<{ + key: string; + name: string; + }>; + }; + }; + permalink: { + type: string; + name: string; + is_required: boolean; + }; + responsible_ids: { + type: string; + name: string; + is_required: boolean; + collection: { + max_length: number; + }; + reference: { + refers_to: { + '#record:users': { + by_field?: string; + }; + }; + }; + }; + }; + }; + }; +} + +/** + * Response structure for get_external_domain_metadata function + */ +export interface GetExternalDomainMetadataResponse { + status: 'success' | 'error'; + message: string; + metadata: { + function_name: string; + request_id: string; + }; + data: ExternalDomainMetadata; + timestamp: string; +} + +/** + * Get external domain metadata function that generates and returns + * The External Domain Metadata JSON object with 'users' record type. 
+ * + * @param events - Array of function input events + * @returns Object containing the external domain metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to get_external_domain_metadata function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Generate The External Domain Metadata JSON object + const externalDomainMetadata: ExternalDomainMetadata = { + schema_version: 'v0.2.0', + record_types: { + users: { + name: 'Users', + fields: { + full_name: { + type: 'text', + name: 'Full Name', + is_required: true, + }, + email: { + type: 'text', + name: 'Email', + is_required: true, + }, + title: { + type: 'text', + name: 'Title', + is_required: false, + }, + }, + }, + comments: { + name: 'Comments', + fields: { + text: { + type: 'rich_text', + name: 'Text', + is_required: true, + }, + author_id: { + type: 'reference', + name: 'Author ID', + is_required: true, + reference: { + refers_to: { + '#record:users': {}, + }, + }, + }, + task_id: { + type: 'reference', + name: 'Task ID', + is_required: true, + reference: { + refers_to: { + '#record:tasks': {}, + }, + }, + }, + }, + }, + tasks: { + name: 'Tasks', + fields: { + title: { + type: 'text', + name: 'Title', + is_required: true, + }, + description: { + type: 'rich_text', + name: 'Description', + is_required: true, + }, + status: { + type: 'enum', + name: 'Status', + is_required: true, + enum: { + values: [ + { key: 'active', name: 'Active' }, + { key: 'completed', name: 'Completed' }, + { key: 'deferred', name: 'Deferred' }, + { key: 'cancelled', name: 'Cancelled' }, + ], + }, + }, + permalink: { + type: 'text', + name: 'URL', + is_required: true, + }, + responsible_ids: { + type: 'reference', + name: 'Responsible IDs', + 
is_required: true, + collection: { + max_length: 1, + }, + reference: { + refers_to: { + '#record:users': {}, + }, + }, + }, + }, + }, + }, + }; + + // Return success response with metadata + return { + status: 'success', + message: 'Successfully generated external domain metadata', + metadata: { + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: externalDomainMetadata, + timestamp: new Date().toISOString(), + }; +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/domain-mapping-data.ts b/build/src/functions/get_initial_domain_mapping/domain-mapping-data.ts new file mode 100644 index 0000000..edb27b4 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/domain-mapping-data.ts @@ -0,0 +1,203 @@ +import { InitialDomainMapping } from './index'; + +/** + * Initial Domain Mapping data structure for Wrike integration. + * Contains mappings for users, comments, and tasks record types. 
+ */ +export const initialDomainMappingData: InitialDomainMapping = { + additional_mappings: { + record_type_mappings: { + users: { + default_mapping: { + object_type: 'devu', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'devu', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'devu', + }, + stock_field_mappings: { + full_name: { + forward: true, + reverse: false, + primary_external_field: 'full_name', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + email: { + forward: true, + reverse: false, + primary_external_field: 'email', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + display_name: { + forward: true, + reverse: false, + primary_external_field: 'title', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + }, + }, + }, + ], + }, + comments: { + default_mapping: { + object_type: 'comment', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'comment', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'comment', + }, + stock_field_mappings: { + body: { + forward: true, + reverse: false, + primary_external_field: 'text', + transformation_method_for_set: { + transformation_method: 'use_rich_text', + }, + }, + created_by_id: { + forward: true, + reverse: false, + primary_external_field: 'author_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + modified_by_id: { + forward: true, + reverse: false, + primary_external_field: 'author_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + parent_object_id: { + forward: true, + reverse: false, + primary_external_field: 'task_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + }, + }, + }, + ], + }, + tasks: { + default_mapping: { + object_type: 
'issue', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'issue', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'issue', + }, + stock_field_mappings: { + title: { + forward: true, + reverse: false, + primary_external_field: 'title', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + item_url_field: { + forward: true, + reverse: false, + primary_external_field: 'permalink', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + body: { + forward: true, + reverse: false, + primary_external_field: 'description', + transformation_method_for_set: { + transformation_method: 'use_rich_text', + }, + }, + owned_by_ids: { + forward: true, + reverse: false, + primary_external_field: 'responsible_ids', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + priority: { + forward: true, + reverse: false, + transformation_method_for_set: { + enum: 'P2', + transformation_method: 'use_fixed_value', + value: 'enum_value', + }, + }, + applies_to_part_id: { + forward: true, + reverse: false, + transformation_method_for_set: { + transformation_method: 'use_devrev_record', + leaf_type: { + object_type: 'product', + }, + is_array: false, + }, + }, + stage: { + forward: true, + reverse: false, + primary_external_field: 'status', + transformation_method_for_set: { + forward: { + active: { + value: 'in_development', + }, + completed: { + value: 'completed', + }, + deferred: { + value: 'backlog', + }, + cancelled: { + value: 'wont_fix', + }, + }, + transformation_method: 'map_enum', + is_array: false, + }, + }, + }, + }, + }, + ], + }, + }, + }, +}; \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.assertions-tasks.ts b/build/src/functions/get_initial_domain_mapping/index.test.assertions-tasks.ts new file mode 100644 index 0000000..39ef90b --- /dev/null +++ 
b/build/src/functions/get_initial_domain_mapping/index.test.assertions-tasks.ts @@ -0,0 +1,134 @@ +import { InitialDomainMapping, StockFieldMapping } from './index'; + +/** + * Assertion helpers for tasks record type in get_initial_domain_mapping tests. + * These functions encapsulate validation logic for tasks-specific mappings. + */ + +/** + * Asserts that the tasks record type mapping has correct structure + */ +export function assertTasksRecordTypeMapping(mapping: InitialDomainMapping): void { + expect(mapping.additional_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings.tasks).toBeDefined(); + + const tasks = mapping.additional_mappings.record_type_mappings.tasks; + expect(tasks.default_mapping).toBeDefined(); + expect(tasks.default_mapping.object_type).toBe('issue'); + expect(tasks.possible_record_type_mappings).toBeDefined(); + expect(tasks.possible_record_type_mappings.length).toBe(1); +} + +/** + * Asserts that the tasks possible record type mapping has correct properties + */ +export function assertTasksPossibleRecordTypeMapping(mapping: InitialDomainMapping): void { + const possibleMapping = mapping.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0]; + + expect(possibleMapping).toBeDefined(); + expect(possibleMapping.devrev_leaf_type).toBe('issue'); + expect(possibleMapping.forward).toBe(true); + expect(possibleMapping.reverse).toBe(false); +} + +/** + * Asserts that the tasks shard has correct structure + */ +export function assertTasksShardStructure(mapping: InitialDomainMapping): void { + const shard = mapping.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard; + + expect(shard).toBeDefined(); + expect(shard.mode).toBe('create_shard'); + expect(shard.devrev_leaf_type).toBeDefined(); + expect(shard.devrev_leaf_type.object_type).toBe('issue'); + 
expect(shard.stock_field_mappings).toBeDefined(); +} + +/** + * Asserts that all tasks stock field mappings are present + */ +export function assertAllTasksStockFieldMappings(mapping: InitialDomainMapping): void { + const stockMappings = mapping.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings; + + expect(Object.keys(stockMappings)).toHaveLength(7); + expect(stockMappings).toHaveProperty('title'); + expect(stockMappings).toHaveProperty('item_url_field'); + expect(stockMappings).toHaveProperty('body'); + expect(stockMappings).toHaveProperty('owned_by_ids'); + expect(stockMappings).toHaveProperty('priority'); + expect(stockMappings).toHaveProperty('applies_to_part_id'); + expect(stockMappings).toHaveProperty('stage'); +} + +/** + * Asserts that a fixed value mapping has the expected properties + */ +export function assertFixedValueMapping( + mapping: StockFieldMapping, + expectedValue: string +): void { + expect(mapping).toBeDefined(); + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + expect(mapping.transformation_method_for_set).toBeDefined(); + expect(mapping.transformation_method_for_set.transformation_method).toBe('use_fixed_value'); + expect((mapping.transformation_method_for_set as any).enum).toBe(expectedValue); + expect((mapping.transformation_method_for_set as any).value).toBe('enum_value'); +} + +/** + * Asserts that a DevRev record mapping has the expected properties + */ +export function assertDevRevRecordMapping( + mapping: StockFieldMapping, + expectedObjectType: string +): void { + expect(mapping).toBeDefined(); + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + expect(mapping.transformation_method_for_set).toBeDefined(); + expect(mapping.transformation_method_for_set.transformation_method).toBe('use_devrev_record'); + expect((mapping.transformation_method_for_set as any).leaf_type).toBeDefined(); + 
expect((mapping.transformation_method_for_set as any).leaf_type.object_type).toBe(expectedObjectType); + expect((mapping.transformation_method_for_set as any).is_array).toBe(false); +} + +/** + * Asserts that an enum mapping has the expected properties + */ +export function assertEnumMapping( + mapping: StockFieldMapping, + expectedExternalField: string, + expectedMappings: Record +): void { + expect(mapping).toBeDefined(); + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + expect(mapping.primary_external_field).toBe(expectedExternalField); + expect(mapping.transformation_method_for_set).toBeDefined(); + expect(mapping.transformation_method_for_set.transformation_method).toBe('map_enum'); + expect((mapping.transformation_method_for_set as any).is_array).toBe(false); + expect((mapping.transformation_method_for_set as any).forward).toBeDefined(); + + const forwardMappings = (mapping.transformation_method_for_set as any).forward; + Object.entries(expectedMappings).forEach(([key, value]) => { + expect(forwardMappings[key]).toBeDefined(); + expect(forwardMappings[key].value).toBe(value); + }); +} + +/** + * Asserts that a rich text mapping has the expected properties + */ +export function assertRichTextMapping( + mapping: StockFieldMapping, + expectedExternalField: string +): void { + expect(mapping).toBeDefined(); + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + expect(mapping.primary_external_field).toBe(expectedExternalField); + expect(mapping.transformation_method_for_set).toBeDefined(); + expect(mapping.transformation_method_for_set.transformation_method).toBe('use_rich_text'); +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.assertions.ts b/build/src/functions/get_initial_domain_mapping/index.test.assertions.ts new file mode 100644 index 0000000..745d8a8 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.assertions.ts @@ -0,0 
+1,150 @@ +import { InitialDomainMapping, StockFieldMapping } from './index'; + +/** + * Assertion helpers for get_initial_domain_mapping tests. + * These functions encapsulate common validation logic to reduce duplication. + */ + +/** + * Asserts that a stock field mapping has the expected properties + */ +export function assertStockFieldMapping( + mapping: StockFieldMapping, + expectedExternalField: string, + expectedTransformationMethod: string = 'use_directly' +): void { + expect(mapping).toBeDefined(); + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + expect(mapping.primary_external_field).toBe(expectedExternalField); + expect(mapping.transformation_method_for_set).toBeDefined(); + expect(mapping.transformation_method_for_set.transformation_method).toBe(expectedTransformationMethod); +} + +/** + * Asserts that the users record type mapping has correct structure + */ +export function assertUsersRecordTypeMapping(mapping: InitialDomainMapping): void { + expect(mapping.additional_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings.users).toBeDefined(); + + const users = mapping.additional_mappings.record_type_mappings.users; + expect(users.default_mapping).toBeDefined(); + expect(users.default_mapping.object_type).toBe('devu'); + expect(users.possible_record_type_mappings).toBeDefined(); + expect(users.possible_record_type_mappings.length).toBe(1); +} + +/** + * Asserts that the possible record type mapping has correct properties + */ +export function assertPossibleRecordTypeMapping(mapping: InitialDomainMapping): void { + const possibleMapping = mapping.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0]; + + expect(possibleMapping).toBeDefined(); + expect(possibleMapping.devrev_leaf_type).toBe('devu'); + expect(possibleMapping.forward).toBe(true); + expect(possibleMapping.reverse).toBe(false); +} + 
+/** + * Asserts that the shard has correct structure + */ +export function assertShardStructure(mapping: InitialDomainMapping): void { + const shard = mapping.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard; + + expect(shard).toBeDefined(); + expect(shard.mode).toBe('create_shard'); + expect(shard.devrev_leaf_type).toBeDefined(); + expect(shard.devrev_leaf_type.object_type).toBe('devu'); + expect(shard.stock_field_mappings).toBeDefined(); +} + +/** + * Asserts that all stock field mappings are present + */ +export function assertAllStockFieldMappings(mapping: InitialDomainMapping): void { + const stockMappings = mapping.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard.stock_field_mappings; + + expect(Object.keys(stockMappings)).toHaveLength(3); + expect(stockMappings).toHaveProperty('full_name'); + expect(stockMappings).toHaveProperty('email'); + expect(stockMappings).toHaveProperty('display_name'); +} + +/** + * Asserts that the comments record type has correct structure + */ +export function assertCommentsRecordTypeMapping(mapping: InitialDomainMapping): void { + expect(mapping.additional_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings.comments).toBeDefined(); + + const comments = mapping.additional_mappings.record_type_mappings.comments; + expect(comments.default_mapping).toBeDefined(); + expect(comments.default_mapping.object_type).toBe('comment'); + expect(comments.possible_record_type_mappings).toBeDefined(); + expect(comments.possible_record_type_mappings.length).toBe(1); +} + +/** + * Asserts that the comments possible record type mapping has correct properties + */ +export function assertCommentsPossibleRecordTypeMapping(mapping: InitialDomainMapping): void { + const possibleMapping = mapping.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0]; 
+ + expect(possibleMapping).toBeDefined(); + expect(possibleMapping.devrev_leaf_type).toBe('comment'); + expect(possibleMapping.forward).toBe(true); + expect(possibleMapping.reverse).toBe(false); +} + +/** + * Asserts that the comments shard has correct structure + */ +export function assertCommentsShardStructure(mapping: InitialDomainMapping): void { + const shard = mapping.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard; + + expect(shard).toBeDefined(); + expect(shard.mode).toBe('create_shard'); + expect(shard.devrev_leaf_type).toBeDefined(); + expect(shard.devrev_leaf_type.object_type).toBe('comment'); + expect(shard.stock_field_mappings).toBeDefined(); +} + +/** + * Asserts that all comments stock field mappings are present + */ +export function assertAllCommentsStockFieldMappings(mapping: InitialDomainMapping): void { + const stockMappings = mapping.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard.stock_field_mappings; + + expect(Object.keys(stockMappings)).toHaveLength(4); + expect(stockMappings).toHaveProperty('body'); + expect(stockMappings).toHaveProperty('created_by_id'); + expect(stockMappings).toHaveProperty('modified_by_id'); + expect(stockMappings).toHaveProperty('parent_object_id'); +} + +/** + * Asserts that all record types exist + */ +export function assertAllRecordTypes(mapping: InitialDomainMapping): void { + const recordTypes = mapping.additional_mappings.record_type_mappings; + expect(Object.keys(recordTypes)).toHaveLength(4); + expect(recordTypes).toHaveProperty('users'); + expect(recordTypes).toHaveProperty('comments'); + expect(recordTypes).toHaveProperty('tasks'); +} + +// Re-export tasks-related assertions for backward compatibility +export { + assertTasksRecordTypeMapping, + assertTasksPossibleRecordTypeMapping, + assertTasksShardStructure, + assertAllTasksStockFieldMappings, + assertFixedValueMapping, + assertDevRevRecordMapping, + assertEnumMapping, + 
assertRichTextMapping, +} from './index.test.assertions-tasks'; \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.case-generators-misc.ts b/build/src/functions/get_initial_domain_mapping/index.test.case-generators-misc.ts new file mode 100644 index 0000000..54a200a --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.case-generators-misc.ts @@ -0,0 +1,58 @@ +import { + assertUsersRecordTypeMapping, + assertCommentsRecordTypeMapping, +} from './index.test.assertions'; + +/** + * Miscellaneous test case generators for get_initial_domain_mapping function. + * This file contains test generators for cross-cutting concerns like + * preservation of mappings, naming conventions, and timestamps. + */ + +export interface TestCase { + description: string; + assertions: (result: any) => void; +} + +/** + * Generates miscellaneous test cases + */ +export function generateMiscellaneousTests(): TestCase[] { + return [ + { + description: 'should preserve existing users and comments mappings when adding tasks mapping', + assertions: (result: any) => { + // Verify users mapping still exists and is correct + assertUsersRecordTypeMapping(result.data); + + // Verify comments mapping still exists and is correct + assertCommentsRecordTypeMapping(result.data); + + // Verify all mappings exist + expect(Object.keys(result.data.additional_mappings.record_type_mappings)).toHaveLength(3); + expect(result.data.additional_mappings.record_type_mappings).toHaveProperty('users'); + expect(result.data.additional_mappings.record_type_mappings).toHaveProperty('comments'); + expect(result.data.additional_mappings.record_type_mappings).toHaveProperty('tasks'); + }, + }, + { + description: 'should use snake_case for all JSON keys', + assertions: (result: any) => { + // Check top-level keys + expect(result.data).toHaveProperty('additional_mappings'); + expect(result.data.additional_mappings).toHaveProperty('record_type_mappings'); + 
+ // Check nested keys + const stockMappings = result.data.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard.stock_field_mappings; + expect(stockMappings.full_name).toHaveProperty('primary_external_field'); + expect(stockMappings.full_name).toHaveProperty('transformation_method_for_set'); + }, + }, + { + description: 'should include timestamp in ISO format', + assertions: (result: any) => { + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }, + }, + ]; +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.case-generators-tasks.ts b/build/src/functions/get_initial_domain_mapping/index.test.case-generators-tasks.ts new file mode 100644 index 0000000..61647d5 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.case-generators-tasks.ts @@ -0,0 +1,123 @@ +import { + assertTasksRecordTypeMapping, + assertTasksPossibleRecordTypeMapping, + assertTasksShardStructure, + assertAllTasksStockFieldMappings, + assertFixedValueMapping, + assertDevRevRecordMapping, + assertEnumMapping, + assertRichTextMapping, +} from './index.test.assertions'; + +/** + * Test case generators for tasks record type in get_initial_domain_mapping function. + * This file contains test generators specifically for the tasks record type. 
+ */ + +export interface TestCase { + description: string; + assertions: (result: any) => void; +} + +/** + * Generates tasks record type test cases + */ +export function generateTasksRecordTypeTests(): TestCase[] { + return [ + { + description: 'should return mapping with tasks record type', + assertions: (result: any) => { + assertTasksRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with tasks default_mapping set to issue', + assertions: (result: any) => { + expect(result.data.additional_mappings.record_type_mappings.tasks.default_mapping.object_type).toBe('issue'); + }, + }, + { + description: 'should return mapping with tasks forward=true and reverse=false', + assertions: (result: any) => { + assertTasksPossibleRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with correct tasks shard structure', + assertions: (result: any) => { + assertTasksShardStructure(result.data); + }, + }, + { + description: 'should return mapping with all seven tasks stock field mappings', + assertions: (result: any) => { + assertAllTasksStockFieldMappings(result.data); + }, + }, + ]; +} + +/** + * Generates tasks field mapping test cases + */ +export function generateTasksFieldMappingTests(): TestCase[] { + return [ + { + description: 'should map tasks title to title using use_directly', + assertions: (result: any) => { + const titleMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.title; + const { assertStockFieldMapping } = require('./index.test.assertions'); + assertStockFieldMapping(titleMapping, 'title', 'use_directly'); + }, + }, + { + description: 'should map tasks permalink to item_url_field using use_directly', + assertions: (result: any) => { + const itemUrlMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.item_url_field; + const { 
assertStockFieldMapping } = require('./index.test.assertions'); + assertStockFieldMapping(itemUrlMapping, 'permalink', 'use_directly'); + }, + }, + { + description: 'should map tasks description to body using use_rich_text', + assertions: (result: any) => { + const bodyMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.body; + assertRichTextMapping(bodyMapping, 'description'); + }, + }, + { + description: 'should map tasks responsible_ids to owned_by_ids using use_directly', + assertions: (result: any) => { + const ownedByMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.owned_by_ids; + const { assertStockFieldMapping } = require('./index.test.assertions'); + assertStockFieldMapping(ownedByMapping, 'responsible_ids', 'use_directly'); + }, + }, + { + description: 'should map tasks priority to fixed value P2', + assertions: (result: any) => { + const priorityMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.priority; + assertFixedValueMapping(priorityMapping, 'P2'); + }, + }, + { + description: 'should map tasks applies_to_part_id to product object type', + assertions: (result: any) => { + const appliesToPartMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.applies_to_part_id; + assertDevRevRecordMapping(appliesToPartMapping, 'product'); + }, + }, + { + description: 'should map tasks status to stage using map_enum', + assertions: (result: any) => { + const stageMapping = result.data.additional_mappings.record_type_mappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings.stage; + assertEnumMapping(stageMapping, 'status', { + active: 'in_development', + completed: 'completed', + deferred: 'backlog', + cancelled: 'wont_fix', + }); + }, + }, + 
]; +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.case-generators.ts b/build/src/functions/get_initial_domain_mapping/index.test.case-generators.ts new file mode 100644 index 0000000..c4c3584 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.case-generators.ts @@ -0,0 +1,213 @@ +import { FunctionInput } from '../../core/types'; +import { createMockEvent } from './index.test.helpers'; +import { + assertStockFieldMapping, + assertUsersRecordTypeMapping, + assertCommentsRecordTypeMapping, + assertCommentsPossibleRecordTypeMapping, + assertCommentsShardStructure, + assertAllCommentsStockFieldMappings, + assertPossibleRecordTypeMapping, + assertShardStructure, + assertAllStockFieldMappings, +} from './index.test.assertions'; +import { + generateTasksRecordTypeTests, + generateTasksFieldMappingTests, +} from './index.test.case-generators-tasks'; +import { + generateMiscellaneousTests, +} from './index.test.case-generators-misc'; + +/** + * Test case generators for get_initial_domain_mapping function. + * This file contains reusable test case generation logic. 
+ */ + +export interface TestCase { + description: string; + assertions: (result: any) => void; +} + +/** + * Generates basic structure test cases + */ +export function generateBasicStructureTests(): TestCase[] { + return [ + { + description: 'should return success response with initial domain mapping', + assertions: (result: any) => { + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Successfully generated initial domain mapping'); + expect(result.metadata.function_name).toBe('get_initial_domain_mapping'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.timestamp).toBeDefined(); + }, + }, + ]; +} + +/** + * Generates users record type test cases + */ +export function generateUsersRecordTypeTests(): TestCase[] { + return [ + { + description: 'should return mapping with users record type', + assertions: (result: any) => { + assertUsersRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with default_mapping set to devu', + assertions: (result: any) => { + expect(result.data.additional_mappings.record_type_mappings.users.default_mapping.object_type).toBe('devu'); + }, + }, + { + description: 'should return mapping with one possible_record_type_mapping', + assertions: (result: any) => { + expect(result.data.additional_mappings.record_type_mappings.users.possible_record_type_mappings.length).toBe(1); + }, + }, + { + description: 'should return mapping with forward=true and reverse=false', + assertions: (result: any) => { + assertPossibleRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with correct shard structure', + assertions: (result: any) => { + assertShardStructure(result.data); + }, + }, + { + description: 'should return mapping with all three stock field mappings', + assertions: (result: any) => { + assertAllStockFieldMappings(result.data); + }, + }, + ]; +} + +/** + * Generates users field mapping test cases + */ 
+export function generateUsersFieldMappingTests(): TestCase[] { + return [ + { + description: 'should map full_name to full_name using use_directly', + assertions: (result: any) => { + const fullNameMapping = result.data.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard.stock_field_mappings.full_name; + assertStockFieldMapping(fullNameMapping, 'full_name', 'use_directly'); + }, + }, + { + description: 'should map email to email using use_directly', + assertions: (result: any) => { + const emailMapping = result.data.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard.stock_field_mappings.email; + assertStockFieldMapping(emailMapping, 'email', 'use_directly'); + }, + }, + { + description: 'should map title to display_name using use_directly', + assertions: (result: any) => { + const displayNameMapping = result.data.additional_mappings.record_type_mappings.users.possible_record_type_mappings[0].shard.stock_field_mappings.display_name; + assertStockFieldMapping(displayNameMapping, 'title', 'use_directly'); + }, + }, + ]; +} + +/** + * Generates comments record type test cases + */ +export function generateCommentsRecordTypeTests(): TestCase[] { + return [ + { + description: 'should return mapping with comments record type', + assertions: (result: any) => { + assertCommentsRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with comments default_mapping set to comment', + assertions: (result: any) => { + expect(result.data.additional_mappings.record_type_mappings.comments.default_mapping.object_type).toBe('comment'); + }, + }, + { + description: 'should return mapping with comments forward=true and reverse=false', + assertions: (result: any) => { + assertCommentsPossibleRecordTypeMapping(result.data); + }, + }, + { + description: 'should return mapping with correct comments shard structure', + assertions: (result: any) => { + assertCommentsShardStructure(result.data); + 
}, + }, + { + description: 'should return mapping with all four comments stock field mappings', + assertions: (result: any) => { + assertAllCommentsStockFieldMappings(result.data); + }, + }, + ]; +} + +/** + * Generates comments field mapping test cases + */ +export function generateCommentsFieldMappingTests(): TestCase[] { + return [ + { + description: 'should map comments text to body using use_rich_text', + assertions: (result: any) => { + const bodyMapping = result.data.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard.stock_field_mappings.body; + const { assertRichTextMapping } = require('./index.test.assertions'); + assertRichTextMapping(bodyMapping, 'text'); + }, + }, + { + description: 'should map comments author_id to created_by_id using use_directly', + assertions: (result: any) => { + const createdByMapping = result.data.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard.stock_field_mappings.created_by_id; + assertStockFieldMapping(createdByMapping, 'author_id', 'use_directly'); + }, + }, + { + description: 'should map comments author_id to modified_by_id using use_directly', + assertions: (result: any) => { + const modifiedByMapping = result.data.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard.stock_field_mappings.modified_by_id; + assertStockFieldMapping(modifiedByMapping, 'author_id', 'use_directly'); + }, + }, + { + description: 'should map comments task_id to parent_object_id using use_directly', + assertions: (result: any) => { + const parentObjectMapping = result.data.additional_mappings.record_type_mappings.comments.possible_record_type_mappings[0].shard.stock_field_mappings.parent_object_id; + assertStockFieldMapping(parentObjectMapping, 'task_id', 'use_directly'); + }, + }, + ]; +} + +/** + * Generates all test cases + */ +export function generateAllTestCases(): TestCase[] { + return [ + ...generateBasicStructureTests(), + 
...generateUsersRecordTypeTests(), + ...generateUsersFieldMappingTests(), + ...generateCommentsRecordTypeTests(), + ...generateCommentsFieldMappingTests(), + ...generateTasksRecordTypeTests(), + ...generateTasksFieldMappingTests(), + ...generateMiscellaneousTests(), + ]; +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.cases.ts b/build/src/functions/get_initial_domain_mapping/index.test.cases.ts new file mode 100644 index 0000000..be54364 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.cases.ts @@ -0,0 +1,82 @@ +import { FunctionInput } from '../../core/types'; +import { createMockEvent } from './index.test.helpers'; +import { validateMappingStructure } from './index.test.validators'; +import { expectedMapping } from './index.test.mock-data'; +import { generateAllTestCases } from './index.test.case-generators'; +// Note: generateAllTestCases already includes miscellaneous tests via internal import + +/** + * Test case generators for get_initial_domain_mapping function + */ + +export function createGetInitialDomainMappingTests( + runFunction: (events: FunctionInput[]) => Promise +) { + return () => { + // Generate and run all test cases + const testCases = generateAllTestCases(); + testCases.forEach(({ description, assertions }) => { + it(description, async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + assertions(result); + }); + }); + + // Additional test for data equality + it('should return data equal to expected mapping', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + expect(result.data).toEqual(expectedMapping); + }); + + // Validation structure test + it('should validate mapping structure correctly', async () => { + const events = [createMockEvent()]; + const result = await runFunction(events); + expect(validateMappingStructure(result.data)).toBe(true); + }); + + // Multiple events 
test + it('should process only the first event when multiple events provided', async () => { + const event1 = createMockEvent({ + execution_metadata: { + request_id: 'request-1', + function_name: 'get_initial_domain_mapping', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'get_initial_domain_mapping', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const result = await runFunction([event1, event2]); + expect(result.metadata.request_id).toBe('request-1'); + }); + + // Validation error tests + it('should throw error when no events provided', async () => { + await expect(runFunction([])).rejects.toThrow('No events provided to get_initial_domain_mapping function'); + }); + + it('should throw error when events array is null', async () => { + await expect(runFunction(null as any)).rejects.toThrow('No events provided to get_initial_domain_mapping function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(runFunction(undefined as any)).rejects.toThrow('No events provided to get_initial_domain_mapping function'); + }); + + it('should throw error when event is missing execution_metadata', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).execution_metadata; + await expect(runFunction([invalidEvent])).rejects.toThrow('Invalid event: missing execution_metadata'); + }); + }; +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.helpers.ts b/build/src/functions/get_initial_domain_mapping/index.test.helpers.ts new file mode 100644 index 0000000..20e817d --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.helpers.ts @@ -0,0 +1,29 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Creates a mock FunctionInput event for testing + */ 
+export const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: {}, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'get_initial_domain_mapping', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, +}); \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.mock-data.ts b/build/src/functions/get_initial_domain_mapping/index.test.mock-data.ts new file mode 100644 index 0000000..253820d --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.mock-data.ts @@ -0,0 +1,202 @@ +import { InitialDomainMapping } from './index'; + +/** + * Expected initial domain mapping structure for testing + */ +export const expectedMapping: InitialDomainMapping = { + additional_mappings: { + record_type_mappings: { + users: { + default_mapping: { + object_type: 'devu', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'devu', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'devu', + }, + stock_field_mappings: { + full_name: { + forward: true, + reverse: false, + primary_external_field: 'full_name', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + email: { + forward: true, + reverse: false, + primary_external_field: 'email', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + display_name: { + forward: true, + reverse: false, + primary_external_field: 'title', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + }, + }, + }, + ], + }, + 
comments: { + default_mapping: { + object_type: 'comment', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'comment', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'comment', + }, + stock_field_mappings: { + body: { + forward: true, + reverse: false, + primary_external_field: 'text', + transformation_method_for_set: { + transformation_method: 'use_rich_text', + }, + }, + created_by_id: { + forward: true, + reverse: false, + primary_external_field: 'author_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + modified_by_id: { + forward: true, + reverse: false, + primary_external_field: 'author_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + parent_object_id: { + forward: true, + reverse: false, + primary_external_field: 'task_id', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + }, + }, + }, + ], + }, + tasks: { + default_mapping: { + object_type: 'issue', + }, + possible_record_type_mappings: [ + { + devrev_leaf_type: 'issue', + forward: true, + reverse: false, + shard: { + mode: 'create_shard', + devrev_leaf_type: { + object_type: 'issue', + }, + stock_field_mappings: { + title: { + forward: true, + reverse: false, + primary_external_field: 'title', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + item_url_field: { + forward: true, + reverse: false, + primary_external_field: 'permalink', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + body: { + forward: true, + reverse: false, + primary_external_field: 'description', + transformation_method_for_set: { + transformation_method: 'use_rich_text', + }, + }, + owned_by_ids: { + forward: true, + reverse: false, + primary_external_field: 'responsible_ids', + transformation_method_for_set: { + transformation_method: 'use_directly', + }, + }, + 
priority: { + forward: true, + reverse: false, + transformation_method_for_set: { + enum: 'P2', + transformation_method: 'use_fixed_value', + value: 'enum_value', + }, + }, + applies_to_part_id: { + forward: true, + reverse: false, + transformation_method_for_set: { + transformation_method: 'use_devrev_record', + leaf_type: { + object_type: 'product', + }, + is_array: false, + }, + }, + stage: { + forward: true, + reverse: false, + primary_external_field: 'status', + transformation_method_for_set: { + forward: { + active: { + value: 'in_development', + }, + completed: { + value: 'completed', + }, + deferred: { + value: 'backlog', + }, + cancelled: { + value: 'wont_fix', + }, + }, + transformation_method: 'map_enum', + is_array: false, + }, + }, + }, + }, + }, + ], + }, + }, + }, +}; \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.ts b/build/src/functions/get_initial_domain_mapping/index.test.ts new file mode 100644 index 0000000..98aab2e --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.ts @@ -0,0 +1,6 @@ +import run, { GetInitialDomainMappingResponse } from './index'; +import { createGetInitialDomainMappingTests } from './index.test.cases'; + +describe('get_initial_domain_mapping function', () => { + describe('test cases', createGetInitialDomainMappingTests(run)); +}); \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.test.validators.ts b/build/src/functions/get_initial_domain_mapping/index.test.validators.ts new file mode 100644 index 0000000..eb9af15 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.test.validators.ts @@ -0,0 +1,224 @@ +import { InitialDomainMapping } from './index'; + +/** + * Validates the structure of initial domain mapping + */ +export function validateMappingStructure(mapping: InitialDomainMapping): boolean { + // Check additional_mappings exists + if (!mapping.additional_mappings) { + return 
false; + } + + // Check record_type_mappings exists + if (!mapping.additional_mappings.record_type_mappings) { + return false; + } + + // Check tasks mapping exists + const tasks = mapping.additional_mappings.record_type_mappings.tasks; + if (!tasks) { + return false; + } + + // Check comments mapping exists + const comments = mapping.additional_mappings.record_type_mappings.comments; + if (!comments) { + return false; + } + + // Check users mapping exists + const users = mapping.additional_mappings.record_type_mappings.users; + if (!users) { + return false; + } + + // Check default_mapping + if (!users.default_mapping || users.default_mapping.object_type !== 'devu') { + return false; + } + + // Check possible_record_type_mappings + if (!users.possible_record_type_mappings || users.possible_record_type_mappings.length !== 1) { + return false; + } + + const mapping_item = users.possible_record_type_mappings[0]; + + // Check devrev_leaf_type + if (mapping_item.devrev_leaf_type !== 'devu') { + return false; + } + + // Check forward and reverse + if (mapping_item.forward !== true || mapping_item.reverse !== false) { + return false; + } + + // Check shard + if (!mapping_item.shard || mapping_item.shard.mode !== 'create_shard') { + return false; + } + + // Check stock_field_mappings + const stock_mappings = mapping_item.shard.stock_field_mappings; + if (!stock_mappings || !stock_mappings.full_name || !stock_mappings.email || !stock_mappings.display_name) { + return false; + } + + // Check full_name mapping + const fullName = stock_mappings.full_name; + if ( + fullName.forward !== true || + fullName.reverse !== false || + fullName.primary_external_field !== 'full_name' || + fullName.transformation_method_for_set.transformation_method !== 'use_directly' + ) { + return false; + } + + // Check email mapping + const email = stock_mappings.email; + if ( + email.forward !== true || + email.reverse !== false || + email.primary_external_field !== 'email' || + 
email.transformation_method_for_set.transformation_method !== 'use_directly' + ) { + return false; + } + + // Check display_name mapping + const displayName = stock_mappings.display_name; + if ( + displayName.forward !== true || + displayName.reverse !== false || + displayName.primary_external_field !== 'title' || + displayName.transformation_method_for_set.transformation_method !== 'use_directly' + ) { + return false; + } + + // Check comments default_mapping + if (!comments.default_mapping || comments.default_mapping.object_type !== 'comment') { + return false; + } + + // Check comments possible_record_type_mappings + if (!comments.possible_record_type_mappings || comments.possible_record_type_mappings.length !== 1) { + return false; + } + + const comments_mapping_item = comments.possible_record_type_mappings[0]; + + // Check comments devrev_leaf_type + if (comments_mapping_item.devrev_leaf_type !== 'comment') { + return false; + } + + // Check comments forward and reverse + if (comments_mapping_item.forward !== true || comments_mapping_item.reverse !== false) { + return false; + } + + // Check comments shard + if (!comments_mapping_item.shard || comments_mapping_item.shard.mode !== 'create_shard') { + return false; + } + + // Check comments stock_field_mappings + const comments_stock_mappings = comments_mapping_item.shard.stock_field_mappings; + if (!comments_stock_mappings || !comments_stock_mappings.body || !comments_stock_mappings.created_by_id || + !comments_stock_mappings.modified_by_id || !comments_stock_mappings.parent_object_id) { + return false; + } + + // Check body mapping (rich text) + const body = comments_stock_mappings.body; + if ( + body.forward !== true || + body.reverse !== false || + body.primary_external_field !== 'text' || + body.transformation_method_for_set.transformation_method !== 'use_rich_text' + ) { + return false; + } + + // Check tasks default_mapping + if (!tasks.default_mapping || tasks.default_mapping.object_type !== 'issue') { + 
return false; + } + + // Check tasks possible_record_type_mappings + if (!tasks.possible_record_type_mappings || tasks.possible_record_type_mappings.length !== 1) { + return false; + } + + const tasks_mapping_item = tasks.possible_record_type_mappings[0]; + + // Check tasks devrev_leaf_type + if (tasks_mapping_item.devrev_leaf_type !== 'issue') { + return false; + } + + // Check tasks forward and reverse + if (tasks_mapping_item.forward !== true || tasks_mapping_item.reverse !== false) { + return false; + } + + // Check tasks shard + if (!tasks_mapping_item.shard || tasks_mapping_item.shard.mode !== 'create_shard') { + return false; + } + + // Check tasks stock_field_mappings + const tasks_stock_mappings = tasks_mapping_item.shard.stock_field_mappings; + if (!tasks_stock_mappings || !tasks_stock_mappings.title || !tasks_stock_mappings.item_url_field || + !tasks_stock_mappings.body || !tasks_stock_mappings.owned_by_ids || !tasks_stock_mappings.priority || + !tasks_stock_mappings.applies_to_part_id || !tasks_stock_mappings.stage) { + return false; + } + + // Check title mapping + const title = tasks_stock_mappings.title; + if ( + title.forward !== true || + title.reverse !== false || + title.primary_external_field !== 'title' || + title.transformation_method_for_set.transformation_method !== 'use_directly' + ) { + return false; + } + + // Check priority mapping (fixed value) + const priority = tasks_stock_mappings.priority; + if ( + priority.forward !== true || + priority.reverse !== false || + priority.transformation_method_for_set.transformation_method !== 'use_fixed_value' + ) { + return false; + } + + // Check applies_to_part_id mapping (DevRev record) + const applies_to_part_id = tasks_stock_mappings.applies_to_part_id; + if ( + applies_to_part_id.forward !== true || + applies_to_part_id.reverse !== false || + applies_to_part_id.transformation_method_for_set.transformation_method !== 'use_devrev_record' + ) { + return false; + } + + // Check stage mapping (enum) 
+ const stage = tasks_stock_mappings.stage; + if ( + stage.forward !== true || + stage.reverse !== false || + stage.primary_external_field !== 'status' || + stage.transformation_method_for_set.transformation_method !== 'map_enum' + ) { + return false; + } + + return true; +} \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/index.ts b/build/src/functions/get_initial_domain_mapping/index.ts new file mode 100644 index 0000000..80a8f3f --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/index.ts @@ -0,0 +1,174 @@ +import { FunctionInput } from '../../core/types'; +import { initialDomainMappingData } from './domain-mapping-data'; + +/** + * Base transformation method + */ +interface BaseTransformationMethod { + transformation_method: string; +} + +/** + * Use directly transformation method + */ +interface UseDirectlyTransformationMethod extends BaseTransformationMethod { + transformation_method: 'use_directly'; +} + +/** + * Use rich text transformation method + */ +interface UseRichTextTransformationMethod extends BaseTransformationMethod { + transformation_method: 'use_rich_text'; +} + +/** + * Use fixed value transformation method + */ +interface UseFixedValueTransformationMethod extends BaseTransformationMethod { + transformation_method: 'use_fixed_value'; + enum?: string; + value: string; +} + +/** + * Use DevRev record transformation method + */ +interface UseDevRevRecordTransformationMethod extends BaseTransformationMethod { + transformation_method: 'use_devrev_record'; + leaf_type: { + object_type: string; + }; + is_array: boolean; +} + +/** + * Map enum transformation method + */ +interface MapEnumTransformationMethod extends BaseTransformationMethod { + transformation_method: 'map_enum'; + forward: { + [key: string]: { + value: string; + }; + }; + is_array: boolean; +} + +/** + * Union type for all transformation methods + */ +type TransformationMethod = + | UseDirectlyTransformationMethod + | 
UseRichTextTransformationMethod + | UseFixedValueTransformationMethod + | UseDevRevRecordTransformationMethod + | MapEnumTransformationMethod; + +/** + * Stock field mapping structure + */ +export interface StockFieldMapping { + forward: boolean; + reverse: boolean; + primary_external_field?: string; + transformation_method_for_set: TransformationMethod; +} + +/** + * Blueprint shard structure for creating a new shard + */ +export interface BlueprintShard { + mode: 'create_shard'; + devrev_leaf_type: { + object_type: string; + }; + stock_field_mappings: { + [key: string]: StockFieldMapping; + }; +} + +/** + * Possible record type mapping structure + */ +export interface PossibleRecordTypeMapping { + devrev_leaf_type: string; + forward: boolean; + reverse: boolean; + shard: { + mode: 'create_shard'; + } & BlueprintShard; +} + +/** + * Record type mappings structure + */ +export interface RecordTypeMappings { + default_mapping: { + object_type: string; + }; + possible_record_type_mappings: PossibleRecordTypeMapping[]; +} + +/** + * Initial Domain Mapping structure + */ +export interface InitialDomainMapping { + additional_mappings: { + record_type_mappings: { + users: RecordTypeMappings; + comments: RecordTypeMappings; + tasks: RecordTypeMappings; + }; + }; +} + +/** + * Response structure for get_initial_domain_mapping function + */ +export interface GetInitialDomainMappingResponse { + status: 'success' | 'error'; + message: string; + metadata: { + function_name: string; + request_id: string; + }; + data: InitialDomainMapping; + timestamp: string; +} + +/** + * Get initial domain mapping function that generates and returns + * The Initial Domain Mapping JSON object with 'users' record type mappings. 
+ * + * @param events - Array of function input events + * @returns Object containing the initial domain mapping + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to get_initial_domain_mapping function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + // Return success response with mapping + return { + status: 'success', + message: 'Successfully generated initial domain mapping', + metadata: { + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + }, + data: initialDomainMappingData, + timestamp: new Date().toISOString(), + }; +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/get_initial_domain_mapping/initial-domain-mapping.json b/build/src/functions/get_initial_domain_mapping/initial-domain-mapping.json new file mode 100644 index 0000000..6c5f367 --- /dev/null +++ b/build/src/functions/get_initial_domain_mapping/initial-domain-mapping.json @@ -0,0 +1,197 @@ +{ + "additional_mappings": { + "record_type_mappings": { + "users": { + "default_mapping": { + "object_type": "devu" + }, + "possible_record_type_mappings": [ + { + "devrev_leaf_type": "devu", + "forward": true, + "reverse": false, + "shard": { + "mode": "create_shard", + "devrev_leaf_type": { + "object_type": "devu" + }, + "stock_field_mappings": { + "full_name": { + "forward": true, + "reverse": false, + "primary_external_field": "full_name", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "email": { + "forward": true, + "reverse": false, + "primary_external_field": "email", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "display_name": { + 
"forward": true, + "reverse": false, + "primary_external_field": "title", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + } + } + } + } + ] + }, + "comments": { + "default_mapping": { + "object_type": "comment" + }, + "possible_record_type_mappings": [ + { + "devrev_leaf_type": "comment", + "forward": true, + "reverse": false, + "shard": { + "mode": "create_shard", + "devrev_leaf_type": { + "object_type": "comment" + }, + "stock_field_mappings": { + "body": { + "forward": true, + "reverse": false, + "primary_external_field": "text", + "transformation_method_for_set": { + "transformation_method": "use_rich_text" + } + }, + "created_by_id": { + "forward": true, + "reverse": false, + "primary_external_field": "author_id", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "modified_by_id": { + "forward": true, + "reverse": false, + "primary_external_field": "author_id", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "parent_object_id": { + "forward": true, + "reverse": false, + "primary_external_field": "task_id", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + } + } + } + } + ] + }, + "tasks": { + "default_mapping": { + "object_type": "issue" + }, + "possible_record_type_mappings": [ + { + "devrev_leaf_type": "issue", + "forward": true, + "reverse": false, + "shard": { + "mode": "create_shard", + "devrev_leaf_type": { + "object_type": "issue" + }, + "stock_field_mappings": { + "title": { + "forward": true, + "reverse": false, + "primary_external_field": "title", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "item_url_field": { + "forward": true, + "reverse": false, + "primary_external_field": "permalink", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "body": { + "forward": true, + "reverse": false, + "primary_external_field": 
"description", + "transformation_method_for_set": { + "transformation_method": "use_rich_text" + } + }, + "owned_by_ids": { + "forward": true, + "reverse": false, + "primary_external_field": "responsible_ids", + "transformation_method_for_set": { + "transformation_method": "use_directly" + } + }, + "priority": { + "forward": true, + "reverse": false, + "transformation_method_for_set": { + "enum": "P2", + "transformation_method": "use_fixed_value", + "value": "enum_value" + } + }, + "applies_to_part_id": { + "forward": true, + "reverse": false, + "transformation_method_for_set": { + "transformation_method": "use_devrev_record", + "leaf_type": { + "object_type": "product" + }, + "is_array": false + } + }, + "stage": { + "forward": true, + "reverse": false, + "primary_external_field": "status", + "transformation_method_for_set": { + "forward": { + "active": { + "value": "in_development" + }, + "completed": { + "value": "completed" + }, + "deferred": { + "value": "backlog" + }, + "cancelled": { + "value": "wont_fix" + } + }, + "transformation_method": "map_enum", + "is_array": false + } + } + } + } + } + ] + } + } + } +} \ No newline at end of file diff --git a/build/src/functions/health_check/index.test.ts b/build/src/functions/health_check/index.test.ts new file mode 100644 index 0000000..8cb6866 --- /dev/null +++ b/build/src/functions/health_check/index.test.ts @@ -0,0 +1,113 @@ +import run, { HealthCheckResponse } from './index'; +import { FunctionInput } from '../../core/types'; + +describe('health_check function', () => { + const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: {}, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'health_check', + event_type: 
'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, + }); + + it('should return success response for valid event', async () => { + const events = [createMockEvent()]; + const result = await run(events); + + expect(result).toBeDefined(); + expect(result.status).toBe('success'); + expect(result.message).toBe('Function can be invoked successfully'); + expect(result.metadata).toBeDefined(); + expect(result.metadata.function_name).toBe('health_check'); + expect(result.metadata.request_id).toBe('test-request-id'); + expect(result.metadata.event_type).toBe('test-event-type'); + expect(result.metadata.snap_in_id).toBe('test-snap-in-id'); + expect(result.metadata.dev_oid).toBe('test-dev-oid'); + expect(result.timestamp).toBeDefined(); + }); + + it('should process only the first event when multiple events provided', async () => { + const event1 = createMockEvent({ + execution_metadata: { + request_id: 'request-1', + function_name: 'health_check', + event_type: 'event-type-1', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'health_check', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const result = await run([event1, event2]); + + expect(result.metadata.request_id).toBe('request-1'); + expect(result.metadata.event_type).toBe('event-type-1'); + }); + + it('should throw error when no events provided', async () => { + await expect(run([])).rejects.toThrow('No events provided to health_check function'); + }); + + it('should throw error when events array is null', async () => { + await expect(run(null as any)).rejects.toThrow('No events provided to health_check function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(run(undefined as any)).rejects.toThrow('No events provided to 
health_check function'); + }); + + it('should throw error when event is missing execution_metadata', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).execution_metadata; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing execution_metadata'); + }); + + it('should throw error when event is missing context', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).context; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing context'); + }); + + it('should include timestamp in ISO format', async () => { + const events = [createMockEvent()]; + const result = await run(events); + + expect(result.timestamp).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/); + }); + + it('should return correct metadata structure', async () => { + const events = [createMockEvent()]; + const result = await run(events); + + expect(result.metadata).toHaveProperty('function_name'); + expect(result.metadata).toHaveProperty('request_id'); + expect(result.metadata).toHaveProperty('event_type'); + expect(result.metadata).toHaveProperty('snap_in_id'); + expect(result.metadata).toHaveProperty('dev_oid'); + }); +}); \ No newline at end of file diff --git a/build/src/functions/health_check/index.ts b/build/src/functions/health_check/index.ts new file mode 100644 index 0000000..67efd1e --- /dev/null +++ b/build/src/functions/health_check/index.ts @@ -0,0 +1,56 @@ +import { FunctionInput } from '../../core/types'; + +/** + * Health check function that verifies the function can be invoked. + * This function processes only the first event from the input array. 
+ * + * @param events - Array of function input events + * @returns Object indicating successful invocation with metadata + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to health_check function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.execution_metadata) { + throw new Error('Invalid event: missing execution_metadata'); + } + + if (!event.context) { + throw new Error('Invalid event: missing context'); + } + + // Return success response + return { + status: 'success', + message: 'Function can be invoked successfully', + metadata: { + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + event_type: event.execution_metadata.event_type, + snap_in_id: event.context.snap_in_id, + dev_oid: event.context.dev_oid, + }, + timestamp: new Date().toISOString(), + }; +}; + +export interface HealthCheckResponse { + status: 'success' | 'error'; + message: string; + metadata: { + function_name: string; + request_id: string; + event_type: string; + snap_in_id: string; + dev_oid: string; + }; + timestamp: string; +} + +export default run; \ No newline at end of file diff --git a/build/src/functions/healthcheck/index.test.ts b/build/src/functions/healthcheck/index.test.ts deleted file mode 100644 index 408b707..0000000 --- a/build/src/functions/healthcheck/index.test.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { run } from './index'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -describe('Healthcheck Function', () => { - it('should return a success message when invoked', async () => { - // Create a mock AirdropEvent - const mockEvent: AirdropEvent = { - context: { - secrets: { - service_account_token: 'mock-token' - }, - snap_in_id: 'mock-snap-in-id', - snap_in_version_id: 'mock-version-id' - }, - payload: { - 
connection_data: { - org_id: 'mock-org-id', - org_name: 'mock-org-name', - key: 'mock-key', - key_type: 'mock-key-type' - }, - event_context: { - callback_url: 'mock-callback-url', - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - snap_in_version_id: 'mock-snap-in-version-id', - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'mock-worker-data-url' - }, - event_type: EventType.ExtractionMetadataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'mock-endpoint' - }, - input_data: { - global_values: {}, - event_sources: {} - } - }; - - // Call the function with the mock event - const result = await run([mockEvent]); - - // Verify the result - expect(result).toEqual({ - status: 'success', - message: 'Healthcheck function successfully invoked' - }); - }); - - it('should throw an error if something goes wrong', async () => { - // Mock console.error to prevent test output pollution - jest.spyOn(console, 'error').mockImplementation(() => {}); - - // Mock console.log to prevent test output pollution - jest.spyOn(console, 'log').mockImplementation(() => {}); - - // Create a mock event that will cause an error - const mockEvent = null as unknown as AirdropEvent; - - // Expect the function to throw an error - await expect(run([mockEvent])).rejects.toThrow(); - - // Restore console.error - jest.restoreAllMocks(); - }); -}); \ No newline at end of file diff 
--git a/build/src/functions/healthcheck/index.ts b/build/src/functions/healthcheck/index.ts deleted file mode 100644 index cb8e475..0000000 --- a/build/src/functions/healthcheck/index.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -/** - * A simple function that checks if it can be invoked. - * - * @param events - Array of AirdropEvent objects - * @returns A success message indicating the function can be invoked - */ -export async function run(events: AirdropEvent[]): Promise<{ status: string, message: string }> { - try { - // Validate input parameters - if (!events || !Array.isArray(events)) { - throw new Error('Invalid input: events must be an array'); - } - - // Validate that each event is a valid AirdropEvent with all required fields - events.forEach((event, index) => { - if (!event || typeof event !== 'object') { - throw new Error(`Invalid event at index ${index}: event must be a valid AirdropEvent object`); - } - - // Check for required fields according to AirdropEvent interface - if (!event.context) { - throw new Error(`Invalid event at index ${index}: missing required field 'context'`); - } - - if (!event.context.secrets || !event.context.secrets.service_account_token) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.secrets.service_account_token'`); - } - - if (!event.context.snap_in_version_id) { - throw new Error(`Invalid event at index ${index}: missing required field 'context.snap_in_version_id'`); - } - - if (!event.payload) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload'`); - } - - if (!event.payload.event_context) { - throw new Error(`Invalid event at index ${index}: missing required field 'payload.event_context'`); - } - - if (!event.execution_metadata || !event.execution_metadata.devrev_endpoint) { - throw new Error(`Invalid event at index ${index}: missing required field 'execution_metadata.devrev_endpoint'`); - } - }); - - // Log 
the event for debugging purposes - console.log('Healthcheck function invoked with events:', JSON.stringify(events)); - - // Return a success response - return { - status: 'success', - message: 'Healthcheck function successfully invoked' - }; - } catch (error) { - // Log the error for debugging - console.error('Error in healthcheck function:', error); - - // Re-throw the error to be handled by the caller - throw error; - } -} \ No newline at end of file diff --git a/build/src/functions/test_external_sync_units/index.test.ts b/build/src/functions/test_external_sync_units/index.test.ts new file mode 100644 index 0000000..22c55ae --- /dev/null +++ b/build/src/functions/test_external_sync_units/index.test.ts @@ -0,0 +1,175 @@ +import run from './index'; +import { FunctionInput } from '../../core/types'; +import { EventType } from '@devrev/ts-adaas'; +import * as adaas from '@devrev/ts-adaas'; + +// Mock the spawn function +jest.mock('@devrev/ts-adaas', () => ({ + ...jest.requireActual('@devrev/ts-adaas'), + spawn: jest.fn(), +})); + +describe('test_external_sync_units function', () => { + const createMockEvent = (overrides?: Partial): FunctionInput => ({ + payload: { + event_type: EventType.ExtractionExternalSyncUnitsStart, + connection_data: { + org_id: 'test-org', + org_name: 'Test Org', + key: 'test-key', + key_type: 'test-key-type', + }, + event_context: { + callback_url: 'https://test.callback.url', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-sync-unit', + external_sync_unit_id: 'test-sync-unit-id', + external_sync_unit_name: 'Test Sync Unit', + external_system: 'test-system', + external_system_type: 'test-system-type', + import_slug: 'test-import-slug', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in-slug', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 
'test-sync-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'https://test.worker.data.url', + }, + event_data: {}, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'test_external_sync_units', + event_type: 'test-event-type', + devrev_endpoint: 'https://api.devrev.ai/', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + ...overrides, + }); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should spawn worker for EXTRACTION_EXTERNAL_SYNC_UNITS_START event', async () => { + const events = [createMockEvent()]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + await run(events); + + expect(spawnMock).toHaveBeenCalledTimes(1); + expect(spawnMock).toHaveBeenCalledWith( + expect.objectContaining({ + workerPath: expect.stringContaining('external-sync-units-test.ts'), + initialState: {}, + }) + ); + }); + + it('should process only the first event when multiple events provided', async () => { + const event1 = createMockEvent(); + const event2 = createMockEvent({ + execution_metadata: { + request_id: 'request-2', + function_name: 'test_external_sync_units', + event_type: 'event-type-2', + devrev_endpoint: 'https://api.devrev.ai/', + }, + }); + + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + await run([event1, event2]); + + expect(spawnMock).toHaveBeenCalledTimes(1); + }); + + it('should throw error when no events provided', async () => { + await expect(run([])).rejects.toThrow('No events provided to test_external_sync_units function'); + }); + + it('should throw error when events array is null', async () => { + await 
expect(run(null as any)).rejects.toThrow('No events provided to test_external_sync_units function'); + }); + + it('should throw error when events array is undefined', async () => { + await expect(run(undefined as any)).rejects.toThrow('No events provided to test_external_sync_units function'); + }); + + it('should throw error when event is missing payload', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).payload; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing payload'); + }); + + it('should throw error when event is missing event_type', async () => { + const invalidEvent = createMockEvent(); + delete (invalidEvent as any).payload.event_type; + + await expect(run([invalidEvent])).rejects.toThrow('Invalid event: missing event_type in payload'); + }); + + it('should throw error for unsupported event type', async () => { + const invalidEvent = createMockEvent({ + payload: { + event_type: 'UNSUPPORTED_EVENT_TYPE' as any, + connection_data: { + org_id: 'test-org', + org_name: 'Test Org', + key: 'test-key', + key_type: 'test-key-type', + }, + event_context: {} as any, + event_data: {}, + }, + }); + + await expect(run([invalidEvent])).rejects.toThrow('Unsupported event type'); + }); + + it('should handle spawn errors gracefully', async () => { + const events = [createMockEvent()]; + const spawnMock = adaas.spawn as jest.Mock; + const testError = new Error('Spawn failed'); + spawnMock.mockRejectedValue(testError); + + await expect(run(events)).rejects.toThrow('Spawn failed'); + }); + + it('should pass correct event to spawn', async () => { + const events = [createMockEvent()]; + const spawnMock = adaas.spawn as jest.Mock; + spawnMock.mockResolvedValue(undefined); + + await run(events); + + const spawnCall = spawnMock.mock.calls[0][0]; + expect(spawnCall.event).toBeDefined(); + expect(spawnCall.event.payload).toBeDefined(); + 
expect(spawnCall.event.payload.event_type).toBe(EventType.ExtractionExternalSyncUnitsStart); + }); +}); \ No newline at end of file diff --git a/build/src/functions/test_external_sync_units/index.ts b/build/src/functions/test_external_sync_units/index.ts new file mode 100644 index 0000000..a228b3d --- /dev/null +++ b/build/src/functions/test_external_sync_units/index.ts @@ -0,0 +1,50 @@ +import { convertToAirdropEvent, resolveWorkerPath } from '../../core/utils'; +import { FunctionInput } from '../../core/types'; +import { spawn, EventType } from '@devrev/ts-adaas'; + +export interface TestExtractorState {} + +export const initialState: TestExtractorState = {}; + +/** + * Test function for external sync units extraction workflow. + * Processes EXTRACTION_EXTERNAL_SYNC_UNITS_START events and emits EXTRACTION_EXTERNAL_SYNC_UNITS_DONE. + * + * @param events - Array of function input events + */ +const run = async (events: FunctionInput[]): Promise => { + // Validate input + if (!events || events.length === 0) { + throw new Error('No events provided to test_external_sync_units function'); + } + + // Process only the first event as per requirements + const event = events[0]; + + // Validate event structure + if (!event.payload) { + throw new Error('Invalid event: missing payload'); + } + + if (!event.payload.event_type) { + throw new Error('Invalid event: missing event_type in payload'); + } + + // Check if this is the correct event type + if (event.payload.event_type === EventType.ExtractionExternalSyncUnitsStart) { + const workerPath = resolveWorkerPath(__dirname, 'workers/external-sync-units-test.ts'); + + await spawn({ + event: convertToAirdropEvent(event), + workerPath: workerPath, + initialState: initialState, + }); + } else { + throw new Error( + `Unsupported event type: ${event.payload.event_type}. 
` + + `Expected: ${EventType.ExtractionExternalSyncUnitsStart}` + ); + } +}; + +export default run; \ No newline at end of file diff --git a/build/src/functions/test_external_sync_units/workers/external-sync-units-test.ts b/build/src/functions/test_external_sync_units/workers/external-sync-units-test.ts new file mode 100644 index 0000000..1ae087b --- /dev/null +++ b/build/src/functions/test_external_sync_units/workers/external-sync-units-test.ts @@ -0,0 +1,23 @@ +import { ExtractorEventType, processTask } from '@devrev/ts-adaas'; +import { TestExtractorState } from '../index'; + +/** + * Worker for testing external sync units extraction. + * Emits EXTRACTION_EXTERNAL_SYNC_UNITS_DONE event. + */ +processTask({ + task: async ({ adapter }) => { + // Emit the done event as per requirements + await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsDone, { + external_sync_units: [], + }); + }, + onTimeout: async ({ adapter }) => { + // Handle timeout by emitting error event + await adapter.emit(ExtractorEventType.ExtractionExternalSyncUnitsError, { + error: { + message: 'Failed to extract external sync units. 
Lambda timeout.', + }, + }); + }, +}); \ No newline at end of file diff --git a/conformance_tests/api_authentication_check/auth.test.ts b/conformance_tests/api_authentication_check/auth.test.ts deleted file mode 100644 index f76b69e..0000000 --- a/conformance_tests/api_authentication_check/auth.test.ts +++ /dev/null @@ -1,196 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { Server } from 'http'; - -describe('Wrike API Authentication Tests', () => { - const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; - const CALLBACK_SERVER_PORT = 8002; - - let callbackServer: Server; - let callbackResponse: any = null; - - // Set up a callback server to receive responses - using proper async pattern - beforeAll(async () => { - const app = express(); - app.use(bodyParser.json()); - - app.post('*', (req, res) => { - callbackResponse = req.body; - res.status(200).json({ status: 'ok' }); - }); - - return new Promise((resolve) => { - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening on port ${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); - }); - - // Clean up after tests - using proper async pattern - afterAll(async () => { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - resolve(); - }); - } else { - resolve(); - } - }); - }); - - // Reset callback response before each test - beforeEach(() => { - callbackResponse = null; - }); - - // Test 1: Simple test to verify the test server is running - test('Test server is accessible', async () => { - try { - const response = await axios.post(TEST_SERVER_URL, { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version' - }, - payload: { - connection_data: {}, - event_context: {} - }, - execution_metadata: { - function_name: 'healthcheck', - devrev_endpoint: 'http://localhost:8003' - } - 
}); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - } catch (error) { - console.error('Error accessing test server:', error); - throw error; - } - }); - - // Test 2: Basic test with valid credentials - test('Authentication with valid API key succeeds', async () => { - // Get API key from environment variables - const apiKey = process.env.WRIKE_API_KEY; - if (!apiKey) { - fail('WRIKE_API_KEY environment variable not set'); - return; - } - - try { - const response = await axios.post(TEST_SERVER_URL, { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version' - }, - payload: { - connection_data: { - key: apiKey, - key_type: 'api_key', - org_id: 'test-org', - org_name: 'Test Organization' - }, - event_context: { - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback`, - external_sync_unit_id: 'IEAGS6BYI5RFMPPY' - } - }, - execution_metadata: { - function_name: 'auth_check', - devrev_endpoint: 'http://localhost:8003' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.auth_successful).toBe(true); - } catch (error) { - console.error('Error testing authentication with valid API key:', error); - throw error; - } - }); - - // Test 3: Error test with invalid credentials - test('Authentication with invalid API key fails', async () => { - try { - const response = await axios.post(TEST_SERVER_URL, { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version' - }, - payload: { - connection_data: { - key: 'invalid-api-key', - key_type: 'api_key', - org_id: 'test-org', - org_name: 'Test Organization' - }, - event_context: { - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback`, - external_sync_unit_id: 'IEAGS6BYI5RFMPPY' - } - }, - 
execution_metadata: { - function_name: 'auth_check', - devrev_endpoint: 'http://localhost:8003' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('error'); - expect(response.data.function_result.auth_successful).toBe(false); - expect(response.data.function_result.error).toBeDefined(); - } catch (error) { - console.error('Error testing authentication with invalid API key:', error); - throw error; - } - }); - - // Test 4: Edge case with malformed request - test('Authentication with malformed request fails gracefully', async () => { - try { - const response = await axios.post(TEST_SERVER_URL, { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version' - }, - payload: { - // Missing connection_data which is required - event_context: { - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback`, - external_sync_unit_id: 'IEAGS6BYI5RFMPPY' - } - }, - execution_metadata: { - function_name: 'auth_check', - devrev_endpoint: 'http://localhost:8003' - } - }); - - // The function should handle the error and return a proper response - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error || (response.data.function_result && response.data.function_result.error)).toBeDefined(); - } catch (error) { - console.error('Error testing authentication with malformed request:', error); - throw error; - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/api_authentication_check/jest.config.js b/conformance_tests/api_authentication_check/jest.config.js deleted file mode 100644 index 2bdec0a..0000000 --- a/conformance_tests/api_authentication_check/jest.config.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, - forceExit: true, - verbose: true, -}; \ No 
newline at end of file diff --git a/conformance_tests/api_authentication_check/tsconfig.json b/conformance_tests/api_authentication_check/tsconfig.json deleted file mode 100644 index 31c856e..0000000 --- a/conformance_tests/api_authentication_check/tsconfig.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "compilerOptions": { - "target": "es2018", - "module": "commonjs", - "esModuleInterop": true, - "strict": true, - "outDir": "./dist", - "sourceMap": true, - "declaration": true, - "types": ["node", "jest"] - }, - "include": ["*.ts"], - "exclude": ["node_modules", "dist"] -} \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/attachments-extraction.test.ts b/conformance_tests/attachments_data_push_tests/attachments-extraction.test.ts new file mode 100644 index 0000000..1cfaf43 --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/attachments-extraction.test.ts @@ -0,0 +1,105 @@ +import { setupTestServers, teardownTestServers, getTestCredentials, invokeFunction, createExtractionEvent, getUploadedData, clearUploadedData, logExtractionProgress, TestServers } from './test-utils'; + +describe('Attachments Extraction Tests', () => { + let servers: TestServers; + let credentials: { apiKey: string; spaceId: string }; + + beforeAll(async () => { + servers = await setupTestServers(); + credentials = getTestCredentials(); + }); + + afterAll(async () => { + await teardownTestServers(servers); + }); + + beforeEach(async () => { + await clearUploadedData(); + }); + + test('should extract attachments from tasks and validate structure', async () => { + console.log('[Test] Starting attachments extraction test'); + console.log('[Test] Using credentials:', { + apiKeyLength: credentials.apiKey.length, + spaceId: credentials.spaceId + }); + + // Create extraction event + const event = createExtractionEvent('EXTRACTION_DATA_START', credentials, 'IEAGS6BYI5RFMPP7'); + console.log('[Test] Created extraction event with request_id:', 
event.execution_metadata.request_id); + + // Invoke extraction function + console.log('[Test] Invoking extraction function...'); + try { + await invokeFunction('extraction', event, true); + console.log('[Test] Extraction completed successfully'); + } catch (error) { + console.error('[Test] Extraction failed or timed out:', error); + + // Log final state for diagnostics + console.log('[Test] Logging final extraction state...'); + await logExtractionProgress(); + + // Re-throw to fail the test + throw error; + } + + // Poll for uploaded data with early termination + console.log('[Test] Polling for uploaded data...'); + let uploadedData = await getUploadedData(); + let retries = 0; + while (uploadedData.attachments.length === 0 && retries < 20) { + console.log(`[Test] Retry ${retries + 1}/20: No attachments found yet`); + await new Promise(resolve => setTimeout(resolve, 1000)); + uploadedData = await getUploadedData(); + retries++; + } + + // Log uploaded data counts for debugging + console.log('[Test] Uploaded data counts:', { + attachments: uploadedData.attachments.length, + tasks: uploadedData.tasks.length, + users: uploadedData.users.length, + }); + + // Test 1: Verify attachments were uploaded + expect(uploadedData.attachments.length).toBeGreaterThan(0); + + // Test 2: Verify attachment structure + // Note: Attachments use NormalizedAttachment structure (flat, no created_date/modified_date) + const attachment = uploadedData.attachments[0]; + expect(attachment).toHaveProperty('id'); + expect(attachment).toHaveProperty('url'); + expect(attachment).toHaveProperty('file_name'); + expect(attachment).toHaveProperty('parent_id'); + + // Test 4: Verify parent_id references a task + expect(typeof attachment.parent_id).toBe('string'); + expect(attachment.parent_id.length).toBeGreaterThan(0); + + // Test 5: Verify all attachments have valid parent_id references + uploadedData.attachments.forEach(attachment => { + expect(attachment.parent_id).toBeDefined(); + expect(typeof 
attachment.parent_id).toBe('string'); + }); + + // Test 6: Verify each attachment has correct normalization + uploadedData.attachments.forEach(attachment => { + // Check fields according to NormalizedAttachment interface + expect(attachment.id).toBeDefined(); + expect(typeof attachment.id).toBe('string'); + + expect(attachment.url).toBeDefined(); + expect(typeof attachment.url).toBe('string'); + expect(attachment.file_name).toBeDefined(); + expect(typeof attachment.file_name).toBe('string'); + expect(attachment.parent_id).toBeDefined(); + expect(typeof attachment.parent_id).toBe('string'); + + // author_id is optional + if (attachment.author_id !== null && attachment.author_id !== undefined) { + expect(typeof attachment.author_id).toBe('string'); + } + }); + }, 90000); // Increased timeout to 90 seconds +}); \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/data-extraction-attachments.test.ts b/conformance_tests/attachments_data_push_tests/data-extraction-attachments.test.ts new file mode 100644 index 0000000..bf327f1 --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/data-extraction-attachments.test.ts @@ -0,0 +1,190 @@ +import { setupTestServers, teardownTestServers, getTestCredentials, invokeFunction, getUploadedData, clearUploadedData, logExtractionProgress, TestServers } from './test-utils'; +import * as fs from 'fs'; +import * as path from 'path'; + +describe('Data Extraction Attachments Test (Acceptance Test)', () => { + let servers: TestServers; + let credentials: { apiKey: string; spaceId: string }; + let callbackEvents: any[] = []; + + beforeAll(async () => { + console.log('[Acceptance Test] Setting up test servers...'); + servers = await setupTestServers(); + credentials = getTestCredentials(); + console.log('[Acceptance Test] Test servers ready'); + console.log('[Acceptance Test] Using credentials:', { + apiKeyLength: credentials.apiKey.length, + spaceId: credentials.spaceId + }); + }); + + 
afterAll(async () => { + console.log('[Acceptance Test] Tearing down test servers...'); + await teardownTestServers(servers); + console.log('[Acceptance Test] Test servers stopped'); + }); + + beforeEach(async () => { + console.log('[Acceptance Test] Clearing uploaded data...'); + await clearUploadedData(); + callbackEvents = []; + console.log('[Acceptance Test] Data cleared'); + }); + + test('should extract exactly 2 attachments and receive EXTRACTION_DATA_DONE event', async () => { + console.log('[Acceptance Test] ========================================'); + console.log('[Acceptance Test] Starting data extraction attachments test'); + console.log('[Acceptance Test] ========================================'); + + // Step 1: Load and prepare event template + console.log('[Acceptance Test] Step 1: Loading event template...'); + const eventTemplatePath = path.join(__dirname, 'data-extraction-test-event.json'); + + if (!fs.existsSync(eventTemplatePath)) { + throw new Error(`Event template file not found at: ${eventTemplatePath}`); + } + + const eventTemplate = JSON.parse(fs.readFileSync(eventTemplatePath, 'utf-8')); + console.log('[Acceptance Test] Event template loaded successfully'); + + // Step 2: Replace placeholders with actual credentials + console.log('[Acceptance Test] Step 2: Replacing placeholders with credentials...'); + eventTemplate.payload.connection_data.key = credentials.apiKey; + eventTemplate.payload.connection_data.org_id = credentials.spaceId; + + // Generate unique request ID for this test + const requestId = `acceptance-test-${Date.now()}-${Math.random().toString(36).substring(7)}`; + eventTemplate.payload.event_context.request_id = requestId; + eventTemplate.execution_metadata.request_id = requestId; + + console.log('[Acceptance Test] Event prepared with request_id:', requestId); + console.log('[Acceptance Test] Using folder ID:', eventTemplate.payload.event_context.external_sync_unit_id); + + // Step 3: Invoke extraction function and wait for 
completion + console.log('[Acceptance Test] Step 3: Invoking extraction function...'); + console.log('[Acceptance Test] Waiting for completion callback (timeout: 60s)...'); + + let callbackEvent: any; + try { + callbackEvent = await invokeFunction('extraction', eventTemplate, true); + console.log('[Acceptance Test] ✓ Extraction completed successfully'); + console.log('[Acceptance Test] Callback event received:', { + event_type: callbackEvent.event_type, + request_id: callbackEvent.event_context?.request_id, + }); + } catch (error) { + console.error('[Acceptance Test] ✗ Extraction failed or timed out'); + console.error('[Acceptance Test] Error:', error); + + // Log final state for diagnostics + console.log('[Acceptance Test] Logging final extraction state for diagnostics...'); + await logExtractionProgress(); + + throw new Error(`Extraction function failed or timed out: ${error instanceof Error ? error.message : 'Unknown error'}`); + } + + // Step 4: Verify callback event type + console.log('[Acceptance Test] Step 4: Verifying callback event...'); + console.log('[Acceptance Test] Expected event_type: EXTRACTION_DATA_DONE'); + console.log('[Acceptance Test] Actual event_type:', callbackEvent.event_type); + + expect(callbackEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + console.log('[Acceptance Test] ✓ Callback event type verified'); + + // Step 5: Extract and validate artifacts array + console.log('[Acceptance Test] Step 5: Extracting artifacts array...'); + + if (!callbackEvent.event_data) { + throw new Error('Callback event missing event_data field'); + } + + if (!callbackEvent.event_data.artifacts) { + throw new Error('Callback event missing event_data.artifacts field'); + } + + const artifactArray = callbackEvent.event_data.artifacts; + console.log('[Acceptance Test] Artifacts array extracted'); + console.log('[Acceptance Test] Number of artifacts:', artifactArray.length); + console.log('[Acceptance Test] Artifact details:', artifactArray.map((a: any) => ({ + 
id: a.id, + item_type: a.item_type, + item_count: a.item_count, + }))); + + // Step 6: Verify artifacts array is not empty + console.log('[Acceptance Test] Step 6: Verifying artifacts array is not empty...'); + expect(artifactArray.length).toBeGreaterThan(0); + console.log('[Acceptance Test] ✓ Artifacts array is not empty'); + + // Step 7: Find attachments artifact + console.log('[Acceptance Test] Step 7: Finding attachments artifact...'); + const attachmentsArtifact = artifactArray.find((artifact: any) => artifact.item_type === 'attachments'); + + if (!attachmentsArtifact) { + console.error('[Acceptance Test] ✗ Attachments artifact not found'); + console.error('[Acceptance Test] Available artifact types:', artifactArray.map((a: any) => a.item_type)); + throw new Error('Attachments artifact not found in artifacts array. Available types: ' + artifactArray.map((a: any) => a.item_type).join(', ')); + } + + console.log('[Acceptance Test] ✓ Attachments artifact found'); + console.log('[Acceptance Test] Attachments artifact details:', { + id: attachmentsArtifact.id, + item_type: attachmentsArtifact.item_type, + item_count: attachmentsArtifact.item_count, + }); + + // Step 8: Verify attachment count is exactly 2 + console.log('[Acceptance Test] Step 8: Verifying attachment count...'); + console.log('[Acceptance Test] Expected item_count: 2'); + console.log('[Acceptance Test] Actual item_count:', attachmentsArtifact.item_count); + + if (attachmentsArtifact.item_count < 2) { + console.error('[Acceptance Test] ✗ Incomplete attachment extraction'); + console.error('[Acceptance Test] Expected at least 2 attachments, but got:', attachmentsArtifact.item_count); + throw new Error(`Incomplete attachment extraction: expected item_count=2, but got item_count=${attachmentsArtifact.item_count}. 
This indicates that not all attachments data was extracted.`); + } + + expect(attachmentsArtifact.item_count).toBe(2); + console.log('[Acceptance Test] ✓ Attachment count verified (exactly 2)'); + + // Step 9: Additional validation - retrieve and verify actual attachment data + console.log('[Acceptance Test] Step 9: Retrieving actual attachment data for validation...'); + const uploadedData = await getUploadedData(); + + console.log('[Acceptance Test] Uploaded data counts:', { + attachments: uploadedData.attachments.length, + tasks: uploadedData.tasks.length, + users: uploadedData.users.length, + }); + + // Verify we have exactly 2 attachments in the uploaded data + expect(uploadedData.attachments.length).toBe(2); + console.log('[Acceptance Test] ✓ Uploaded data contains exactly 2 attachments'); + + // Step 10: Verify attachment structure + console.log('[Acceptance Test] Step 10: Verifying attachment structure...'); + uploadedData.attachments.forEach((attachment, index) => { + console.log(`[Acceptance Test] Validating attachment ${index + 1}/${uploadedData.attachments.length}...`); + + // Verify fields according to NormalizedAttachment interface (flat structure) + expect(attachment).toHaveProperty('id'); + expect(attachment).toHaveProperty('url'); + expect(attachment).toHaveProperty('file_name'); + expect(attachment).toHaveProperty('parent_id'); + + // Verify field types + expect(typeof attachment.id).toBe('string'); + expect(typeof attachment.url).toBe('string'); + expect(typeof attachment.file_name).toBe('string'); + expect(typeof attachment.parent_id).toBe('string'); + + console.log(`[Acceptance Test] ✓ Attachment ${index + 1} structure validated`); + }); + + console.log('[Acceptance Test] ✓ All attachment structures verified'); + + console.log('[Acceptance Test] ========================================'); + console.log('[Acceptance Test] TEST PASSED: All assertions successful'); + console.log('[Acceptance Test] ========================================'); + }, 
90000); // 90 second timeout +}); \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/data-extraction-test-event.json b/conformance_tests/attachments_data_push_tests/data-extraction-test-event.json new file mode 100644 index 0000000..f65884d --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/data-extraction-test-event.json @@ -0,0 +1,72 @@ +{ + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "test-oid", + "dev_org": "test-org", + "dev_org_id": "test-org-id", + "dev_uid": "test-uid", + "dev_user": "test-user", + "dev_user_id": "test-user-id", + "event_type_adaas": "", + "external_sync_unit": "IEAGS6BYI5RFMPP7", + "external_sync_unit_id": "IEAGS6BYI5RFMPP7", + "external_sync_unit_name": "test-external-sync-unit-name", + "external_system": "test-external-system", + "external_system_id": "test-external-system-id", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "airdrop-wrike-snap-in", + "mode": "INITIAL", + "request_id": "test-request-id", + "request_id_adaas": "test-request-id-adaas", + "run_id": "test-run_id", + "sequence_version": "6", + "snap_in_slug": "airdrop-wrike-snap-in", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "sync_run": "test-sync-run", + "sync_run_id": "test-sync-run-id", + "sync_tier": "sync_tier_2", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit-id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/test", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/test:snap_in/test", + "snap_in_version_id": 
"don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "service_account_id": "don:identity:dvrv-eu-1:devo/test:svcacc/123", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/test:devu/1", + "event_id": "", + "execution_id": "test-execution-id" + }, + "execution_metadata": { + "request_id": "test-request-id", + "function_name": "extraction", + "event_type": "EXTRACTION_DATA_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } +} \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/jest.config.js b/conformance_tests/attachments_data_push_tests/jest.config.js new file mode 100644 index 0000000..af006ed --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/jest.config.js @@ -0,0 +1,9 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: ['**/*.ts', '!**/*.test.ts', '!**/node_modules/**'], + setupFilesAfterEnv: ['./jest.setup.js'], +}; \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/package.json b/conformance_tests/attachments_data_push_tests/package.json new file mode 100644 index 0000000..f3e9250 --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/package.json @@ -0,0 +1,18 @@ +{ + "name": "wrike-snap-in-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --runInBand --detectOpenHandles --forceExit" + }, + "devDependencies": { + "@types/express": "^4.17.17", + "@types/jest": "^29.5.0", + "@types/node": "^18.15.11", + "express": "^4.18.2", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + 
"axios": "^1.6.0" + } +} \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/test-utils.ts b/conformance_tests/attachments_data_push_tests/test-utils.ts new file mode 100644 index 0000000..759ff79 --- /dev/null +++ b/conformance_tests/attachments_data_push_tests/test-utils.ts @@ -0,0 +1,326 @@ +import express, { Express } from 'express'; +import axios from 'axios'; +import { Server } from 'http'; +import * as zlib from 'zlib'; + +export interface TestServers { + callbackServer: Server; +} + +export interface UploadedData { + attachments: any[]; + tasks: any[]; + users: any[]; +} + +// Store the last callback event data +let lastCallbackEventData: any = null; + +export async function getUploadedData(): Promise { + const result: UploadedData = { + attachments: [], + tasks: [], + users: [], + }; + + // Get artifact IDs from the last callback event data + if (!lastCallbackEventData || !lastCallbackEventData.artifacts) { + console.log('[Test Utils] No callback event data available yet'); + return result; + } + + const artifacts = lastCallbackEventData.artifacts; + console.log('[Test Utils] Processing artifacts:', artifacts.map((a: any) => ({ id: a.id, type: a.item_type, count: a.item_count }))); + + // Retrieve and decompress each artifact + for (const artifact of artifacts) { + try { + // Use artifact ID directly without URL encoding + // The mock server expects the DON string as-is in the path + const artifactResponse = await axios.get(`http://localhost:8003/download/${artifact.id}.jsonl.gz`, { + responseType: 'arraybuffer' + }); + + // Decompress gzip data + const decompressed = zlib.gunzipSync(Buffer.from(artifactResponse.data)); + const lines = decompressed.toString('utf-8').trim().split('\n'); + + // Parse JSONL and categorize by artifact item_type + for (const line of lines) { + if (line.trim()) { + const item = JSON.parse(line); + // Categorize based on artifact item_type + if (artifact.item_type === 'attachments') { + 
result.attachments.push(item); + } else if (artifact.item_type === 'tasks') { + result.tasks.push(item); + } else if (artifact.item_type === 'users') { + result.users.push(item); + } else { + console.log('[Test Utils] Unknown item type:', artifact.item_type); + } + } + } + + console.log(`[Test Utils] Successfully retrieved ${artifact.item_type} artifact with ${artifact.item_count} items`); + } catch (error) { + console.error(`[Test Utils] Failed to retrieve artifact ${artifact.id}:`, error); + + // Provide more detailed error information + if (axios.isAxiosError(error)) { + console.error(`[Test Utils] Status: ${error.response?.status}, URL: ${error.config?.url}`); + if (error.response?.data) { + console.error(`[Test Utils] Response data:`, error.response.data); + } + } + } + } + + console.log('[Test Utils] Retrieved data counts:', { + attachments: result.attachments.length, + tasks: result.tasks.length, + users: result.users.length, + }); + + return result; +} + +export async function getExtractionState(): Promise { + try { + const stateResponse = await axios.get('http://localhost:8003/external-worker.get', { + params: { sync_unit: 'test-unit' } + }); + return stateResponse.data; + } catch (error) { + console.error('[Test Utils] Failed to get extraction state:', error); + return null; + } +} + +export async function logExtractionProgress(): Promise { + const state = await getExtractionState(); + if (state) { + console.log('[Test Utils] Current extraction state:', { + users: state.users, + tasks: state.tasks, + attachments: state.attachments, + }); + } +} + +export async function clearUploadedData(): Promise { + // Reset the mock server state + lastCallbackEventData = null; + try { + await axios.post('http://localhost:8003/reset-mock-server'); + console.log('[Test Utils] Mock server state reset successfully'); + } catch (error) { + console.error('[Test Utils] Failed to reset mock server state:', error); + } +} + +let callbackApp: Express | null = null; +let 
pendingCallbacks: Map void; reject: (reason?: any) => void; timeout: NodeJS.Timeout }> = new Map(); + +export function setupTestServers(): Promise { + return new Promise((resolve) => { + callbackApp = express(); + callbackApp.use(express.json()); + + // Callback server + callbackApp.post('/callback', (req, res) => { + console.log('[Callback Server] Received callback:', req.body); + + // Store callback event data for artifact retrieval + if (req.body && req.body.event_data) { + lastCallbackEventData = req.body.event_data; + } + + // Check if this is a completion event + if (req.body && req.body.event_type) { + console.log('[Callback Server] Event type:', req.body.event_type); + + // Log all event types for debugging + console.log('[Callback Server] Full event details:', { + event_type: req.body.event_type, + request_id: req.body.event_context?.request_id, + }); + + // Extract request_id to match with pending callbacks + const requestId = req.body.event_context?.request_id; + + // Handle completion events for any pending callback + if (req.body.event_type === 'EXTRACTION_DATA_DONE' || + req.body.event_type === 'EXTRACTION_DATA_ERROR') { + + console.log('[Callback Server] Completion event received, looking for pending callbacks'); + // Try to find matching pending callback by request_id first + let pending = requestId ? 
pendingCallbacks.get(requestId) : null; + + // If no match by request_id, resolve the first pending callback + // This handles cases where request_id might not match exactly + if (!pending && pendingCallbacks.size > 0) { + const firstKey = pendingCallbacks.keys().next().value as string | undefined; + if (firstKey !== undefined) { + pending = pendingCallbacks.get(firstKey); + console.log('[Callback Server] Matched pending callback by first key:', firstKey); + console.log('[Callback Server] Using first pending callback (no request_id match)'); + } + } + + if (pending) { + if (req.body.event_type === 'EXTRACTION_DATA_DONE') { + console.log('[Callback Server] Extraction completed successfully'); + console.log('[Callback Server] Resolving pending callback'); + clearTimeout(pending.timeout); + pendingCallbacks.clear(); // Clear all pending callbacks + pending.resolve(req.body); + } else if (req.body.event_type === 'EXTRACTION_DATA_ERROR') { + console.log('[Callback Server] Extraction failed with error'); + clearTimeout(pending.timeout); + pendingCallbacks.clear(); // Clear all pending callbacks + pending.reject(new Error(req.body.data?.error?.message || 'Extraction failed')); + } + } else { + console.log('[Callback Server] No pending callbacks to resolve'); + console.log('[Callback Server] Pending callbacks map size:', pendingCallbacks.size); + } + } else { + console.log('[Callback Server] Non-completion event received:', req.body.event_type); + } + } else { + console.log('[Callback Server] Received request without event_type'); + } + + res.status(200).json({ success: true }); + }); + + const callbackServer = callbackApp.listen(8002); + + // Wait for all servers to start + setTimeout(() => { + resolve({ callbackServer }); + }, 100); + }); +} + +export function teardownTestServers(servers: TestServers): Promise { + return new Promise((resolve) => { + // Clear any pending callbacks + for (const [requestId, pending] of pendingCallbacks.entries()) { + 
clearTimeout(pending.timeout); + pending.reject(new Error('Test teardown')); + } + pendingCallbacks.clear(); + + servers.callbackServer.close(() => {}); + setTimeout(resolve, 100); + }); +} + +export function getTestCredentials() { + const apiKey = process.env.WRIKE_API_KEY; + const spaceId = process.env.WRIKE_SPACE_ID; + + if (!apiKey || !spaceId) { + throw new Error('Missing required environment variables: WRIKE_API_KEY, WRIKE_SPACE_ID'); + } + + return { apiKey, spaceId }; +} + +export async function invokeFunction(functionName: string, event: any, waitForCompletion: boolean = false): Promise { + const requestId = event.execution_metadata.request_id; + + if (waitForCompletion) { + console.log('[Test Utils] Waiting for completion callback for request:', requestId); + // Register callback BEFORE invoking function to avoid race condition + const completionPromise = new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + pendingCallbacks.delete(requestId); + reject(new Error('Timeout waiting for extraction completion callback (60s)')); + }, 60000); // 60 second timeout for callback + + console.log('[Test Utils] Registered pending callback for request:', requestId); + console.log('[Test Utils] Total pending callbacks:', pendingCallbacks.size + 1); + pendingCallbacks.set(requestId, { resolve, reject, timeout }); + }); + + // Now invoke function after callback is registered + console.log('[Test Utils] Invoking function:', functionName, 'with request_id:', requestId); + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { 'Content-Type': 'application/json' }, + }); + console.log('[Test Utils] Function invocation response status:', response.status); + + return completionPromise; + } + + // Non-blocking invocation + console.log('[Test Utils] Invoking function:', functionName, 'with request_id:', requestId); + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { 
'Content-Type': 'application/json' }, + }); + console.log('[Test Utils] Function invocation response status:', response.status); + + return response.data; +} + +export function createExtractionEvent(eventType: string, credentials: { apiKey: string; spaceId: string }, folderId?: string): any { + const requestId = `test-${Date.now()}-${Math.random().toString(36).substring(7)}`; + + return { + execution_metadata: { + request_id: requestId, + function_name: 'extraction', + event_type: eventType, + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-org', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'test-version', + service_account_id: 'test-account', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: credentials.apiKey, + org_id: credentials.spaceId, + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org', + dev_user: 'test-user', + dev_user_id: 'test-user', + external_sync_unit: folderId || 'IEAGS6BYI5RFMPP7', + external_sync_unit_id: folderId || 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Folder', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: requestId, + snap_in_slug: 'wrike-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-run', + sync_run_id: 'test-run', + sync_tier: 'test-tier', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: eventType, + event_data: {}, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} \ No newline at end of file diff --git a/conformance_tests/attachments_data_push_tests/tsconfig.json b/conformance_tests/attachments_data_push_tests/tsconfig.json new file mode 100644 index 0000000..b3514d4 --- /dev/null +++ 
b/conformance_tests/attachments_data_push_tests/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "moduleResolution": "node" + }, + "include": ["**/*.ts"], + "exclude": ["node_modules", "dist"] +} \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/.gitignore b/conformance_tests/attachments_extraction_streaming_tests/.gitignore new file mode 100644 index 0000000..6e386c8 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/.gitignore @@ -0,0 +1,5 @@ +node_modules/ +dist/ +*.log +.env +coverage/ \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/README.md b/conformance_tests/attachments_extraction_streaming_tests/README.md new file mode 100644 index 0000000..f2b857f --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/README.md @@ -0,0 +1,36 @@ +# Wrike Snap-In Conformance Tests + +This directory contains conformance tests for the Wrike snap-in attachment streaming functionality. 
+ +## Prerequisites + +- Node.js (v16 or higher) +- npm +- Running test servers: + - Snap-in server at http://localhost:8000 + - DevRev server at http://localhost:8003 + - Wrike API server at http://localhost:8004 + +## Environment Variables + +The following environment variables must be set: + +- `WRIKE_API_KEY`: Your Wrike API key +- `WRIKE_SPACE_ID`: The Wrike space ID to use for testing + +## Installation + +```bash +npm install +``` + +## Running Tests + +```bash +npm test +``` + +## Test Files + +- `attachments-extraction.test.ts`: Tests individual attachment extraction events +- `data-and-attachments-extraction.test.ts`: Tests the complete data and attachments extraction workflow \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-continue.test.ts b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-continue.test.ts new file mode 100644 index 0000000..8d917f0 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-continue.test.ts @@ -0,0 +1,152 @@ +import { + CallbackServer, + loadTestConfig, + sendEventToSnapIn, + TestConfig, +} from './test-helpers'; +import * as fs from 'fs'; +import * as path from 'path'; + +describe('Attachments Extraction Continue - Acceptance Test', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + + beforeAll(async () => { + // Load configuration from environment + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(config.callbackServerPort); + await callbackServer.start(); + }); + + afterAll(async () => { + // Stop callback server + await callbackServer.stop(); + }); + + beforeEach(() => { + // Clear events before each test + callbackServer.clearEvents(); + }); + + /** + * Acceptance Test: EXTRACTION_ATTACHMENTS_CONTINUE Event Handling + * + * This test verifies that: + * 1. 
The Extraction Function correctly processes EXTRACTION_ATTACHMENTS_CONTINUE event + * 2. Exactly one callback event is received + * 3. The callback event has event_type equal to "EXTRACTION_ATTACHMENTS_DONE" + * + * Test uses the event payload from attachments_extraction_continue_test.json + */ + test('should receive exactly one EXTRACTION_ATTACHMENTS_DONE event for EXTRACTION_ATTACHMENTS_CONTINUE', async () => { + console.log('=== Starting Attachments Extraction Continue Acceptance Test ==='); + + // Step 1: Load test event from JSON file + console.log('Step 1: Loading test event from attachments_extraction_continue_test.json'); + const testEventPath = path.join(__dirname, 'attachments_extraction_continue_test.json'); + + if (!fs.existsSync(testEventPath)) { + throw new Error( + `Test event file not found at: ${testEventPath}. ` + + `Please ensure attachments_extraction_continue_test.json exists in the test directory.` + ); + } + + const testEventArray = JSON.parse(fs.readFileSync(testEventPath, 'utf-8')); + + if (!Array.isArray(testEventArray) || testEventArray.length === 0) { + throw new Error( + `Invalid test event file format. Expected non-empty array. 
` + + `File: ${testEventPath}` + ); + } + + const testEvent = testEventArray[0]; + console.log('✓ Test event loaded successfully'); + + // Step 2: Replace placeholders with actual credentials + console.log('Step 2: Replacing credential placeholders'); + testEvent.payload.connection_data.key = config.wrikeApiKey; + testEvent.payload.connection_data.org_id = config.wrikeSpaceId; + + // Update callback URL to point to our test callback server + testEvent.payload.event_context.callback_url = `http://localhost:${config.callbackServerPort}/callback`; + + // Update worker_data_url to point to our test DevRev server + testEvent.payload.event_context.worker_data_url = config.workerDataUrl; + testEvent.execution_metadata.devrev_endpoint = config.devrevServerUrl; + + console.log('✓ Credentials replaced successfully'); + console.log(` - API Key: ${config.wrikeApiKey.substring(0, 10)}...`); + console.log(` - Space ID: ${config.wrikeSpaceId}`); + console.log(` - Event Type: ${testEvent.payload.event_type}`); + + // Step 3: Send event to snap-in server + console.log('Step 3: Sending EXTRACTION_ATTACHMENTS_CONTINUE event to snap-in server'); + console.log(` - Target URL: ${config.snapInServerUrl}`); + + try { + await sendEventToSnapIn(config, testEvent); + console.log('✓ Event sent successfully'); + } catch (error) { + throw new Error( + `Failed to send event to snap-in server. ` + + `Error: ${error instanceof Error ? error.message : String(error)}. ` + + `Event: ${JSON.stringify(testEvent, null, 2)}` + ); + } + + // Step 4: Wait for callback event with timeout + console.log('Step 4: Waiting for callback event (timeout: 90 seconds)'); + + let receivedEvent; + try { + receivedEvent = await callbackServer.waitForEvent( + 'EXTRACTION_ATTACHMENTS_DONE', + 90000 // 90 second timeout + ); + console.log('✓ Callback event received'); + } catch (error) { + const allEvents = callbackServer.getReceivedEvents(); + throw new Error( + `Timeout waiting for EXTRACTION_ATTACHMENTS_DONE event. 
` + + `Expected: 1 event with event_type="EXTRACTION_ATTACHMENTS_DONE". ` + + `Received: ${allEvents.length} event(s). ` + + `Event types received: ${allEvents.map(e => e.event_type).join(', ') || 'none'}. ` + + `Full events: ${JSON.stringify(allEvents, null, 2)}` + ); + } + + // Step 5: Verify exactly one event was received + console.log('Step 5: Verifying event count and type'); + const allReceivedEvents = callbackServer.getReceivedEvents(); + + if (allReceivedEvents.length !== 1) { + throw new Error( + `Expected exactly 1 callback event, but received ${allReceivedEvents.length}. ` + + `Event types: ${allReceivedEvents.map(e => e.event_type).join(', ')}. ` + + `Full events: ${JSON.stringify(allReceivedEvents, null, 2)}` + ); + } + console.log('✓ Exactly one event received'); + + // Step 6: Verify event type is EXTRACTION_ATTACHMENTS_DONE + if (receivedEvent.event_type !== 'EXTRACTION_ATTACHMENTS_DONE') { + throw new Error( + `Expected event_type to be "EXTRACTION_ATTACHMENTS_DONE", but got "${receivedEvent.event_type}". 
` + + `Full event: ${JSON.stringify(receivedEvent, null, 2)}` + ); + } + console.log('✓ Event type is EXTRACTION_ATTACHMENTS_DONE'); + + // Step 7: Log success + console.log('\n=== Acceptance Test Completed Successfully ==='); + console.log('Summary:'); + console.log(' - Event sent: EXTRACTION_ATTACHMENTS_CONTINUE'); + console.log(' - Events received: 1'); + console.log(' - Event type: EXTRACTION_ATTACHMENTS_DONE'); + console.log(' - Test result: PASSED'); + }, 120000); // 120 second timeout for the entire test +}); \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-rate-limiting.test.ts b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-rate-limiting.test.ts new file mode 100644 index 0000000..d7879a8 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction-rate-limiting.test.ts @@ -0,0 +1,265 @@ +import axios from 'axios'; +import { + CallbackServer, + loadTestConfig, + sendEventToSnapIn, + TestConfig, +} from './test-helpers'; +import * as fs from 'fs'; +import * as path from 'path'; + +describe('Attachments Extraction Rate Limiting - Acceptance Test', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + const testName = 'attachments_extraction_rate_limiting'; + + beforeAll(async () => { + // Load configuration from environment + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(config.callbackServerPort); + await callbackServer.start(); + }); + + afterAll(async () => { + // Stop callback server + await callbackServer.stop(); + }); + + beforeEach(() => { + // Clear events before each test + callbackServer.clearEvents(); + }); + + afterEach(async () => { + // Ensure rate limiting is disabled after each test, even if test fails + try { + console.log('Cleanup: Disabling rate limiting on API server'); + await 
axios.post(`${config.apiServerUrl}/end_rate_limiting`, {}, { + timeout: 5000, + validateStatus: () => true, // Accept any status code + }); + console.log('✓ Rate limiting disabled successfully'); + } catch (error) { + console.warn( + 'Warning: Failed to disable rate limiting during cleanup. ' + + `Error: ${error instanceof Error ? error.message : String(error)}` + ); + } + }); + + /** + * Acceptance Test: Rate Limiting Handling for EXTRACTION_ATTACHMENTS_START + * + * This test verifies that: + * 1. Rate limiting can be enabled on the API server + * 2. The Extraction Function correctly handles rate limiting during attachment streaming + * 3. Exactly one callback event is received with event_type "EXTRACTION_ATTACHMENTS_DONE" + * 4. Rate limiting can be disabled on the API server + * + * Test Flow: + * - Step 1: Enable rate limiting on API server + * - Step 2: Send EXTRACTION_ATTACHMENTS_START event and verify callback + * - Step 3: Disable rate limiting on API server + */ + test('should handle rate limiting and receive exactly one EXTRACTION_ATTACHMENTS_DONE event', async () => { + console.log('=== Starting Attachments Extraction Rate Limiting Acceptance Test ==='); + + // Step 1: Enable rate limiting on API server + console.log('\n=== Step 1: Enabling Rate Limiting ==='); + console.log(`Making POST request to: ${config.apiServerUrl}/start_rate_limiting`); + console.log(`Request body: { "test_name": "${testName}" }`); + + try { + const rateLimitStartResponse = await axios.post( + `${config.apiServerUrl}/start_rate_limiting`, + { test_name: testName }, + { + timeout: 10000, + headers: { 'Content-Type': 'application/json' }, + } + ); + + if (rateLimitStartResponse.status !== 200) { + throw new Error( + `Expected status 200 when starting rate limiting, but got ${rateLimitStartResponse.status}. ` + + `Response: ${JSON.stringify(rateLimitStartResponse.data, null, 2)}. 
` + + `Please ensure the API server is running at ${config.apiServerUrl} and supports the /start_rate_limiting endpoint.` + ); + } + + console.log('✓ Rate limiting enabled successfully'); + console.log(`Response status: ${rateLimitStartResponse.status}`); + console.log(`Response data: ${JSON.stringify(rateLimitStartResponse.data, null, 2)}`); + } catch (error) { + if (axios.isAxiosError(error)) { + throw new Error( + `Failed to enable rate limiting on API server. ` + + `URL: ${config.apiServerUrl}/start_rate_limiting. ` + + `Status: ${error.response?.status}. ` + + `Error: ${error.message}. ` + + `Response: ${JSON.stringify(error.response?.data, null, 2)}. ` + + `Please ensure the API server is running and accessible.` + ); + } + throw error; + } + + // Step 2: Load test event and send to snap-in server + console.log('\n=== Step 2: Invoking Extraction Function ==='); + console.log('Step 2.1: Loading test event from attachments_extraction_test.json'); + + const testEventPath = path.join(__dirname, 'attachments_extraction_test.json'); + + if (!fs.existsSync(testEventPath)) { + throw new Error( + `Test event file not found at: ${testEventPath}. ` + + `Please ensure attachments_extraction_test.json exists in the test directory.` + ); + } + + const testEventArray = JSON.parse(fs.readFileSync(testEventPath, 'utf-8')); + + if (!Array.isArray(testEventArray) || testEventArray.length === 0) { + throw new Error( + `Invalid test event file format. Expected non-empty array. ` + + `File: ${testEventPath}. 
` + + `Content: ${JSON.stringify(testEventArray, null, 2)}` + ); + } + + const testEvent = testEventArray[0]; + console.log('✓ Test event loaded successfully'); + + // Step 2.2: Replace placeholders with actual credentials + console.log('Step 2.2: Replacing credential placeholders'); + testEvent.payload.connection_data.key = config.wrikeApiKey; + testEvent.payload.connection_data.org_id = config.wrikeSpaceId; + + // Update URLs to point to test servers + testEvent.payload.event_context.callback_url = `http://localhost:${config.callbackServerPort}/callback`; + testEvent.payload.event_context.worker_data_url = config.workerDataUrl; + testEvent.execution_metadata.devrev_endpoint = config.devrevServerUrl; + + console.log('✓ Credentials and URLs updated successfully'); + console.log(` - API Key: ${config.wrikeApiKey.substring(0, 10)}...`); + console.log(` - Space ID: ${config.wrikeSpaceId}`); + console.log(` - Event Type: ${testEvent.payload.event_type}`); + console.log(` - Callback URL: ${testEvent.payload.event_context.callback_url}`); + + // Step 2.3: Send event to snap-in server + console.log('Step 2.3: Sending EXTRACTION_ATTACHMENTS_START event to snap-in server'); + console.log(` - Target URL: ${config.snapInServerUrl}`); + + try { + await sendEventToSnapIn(config, testEvent); + console.log('✓ Event sent successfully'); + } catch (error) { + throw new Error( + `Failed to send event to snap-in server. ` + + `URL: ${config.snapInServerUrl}. ` + + `Error: ${error instanceof Error ? error.message : String(error)}. ` + + `Event: ${JSON.stringify(testEvent, null, 2)}. 
` + + `Please ensure the snap-in server is running and accessible.` + ); + } + + // Step 2.4: Wait for callback event + console.log('Step 2.4: Waiting for callback event (timeout: 90 seconds)'); + + let receivedEvent; + try { + receivedEvent = await callbackServer.waitForEvent( + 'EXTRACTION_ATTACHMENTS_DONE', + 90000 // 90 second timeout + ); + console.log('✓ Callback event received'); + console.log(`Event type: ${receivedEvent.event_type}`); + } catch (error) { + const allEvents = callbackServer.getReceivedEvents(); + throw new Error( + `Timeout waiting for EXTRACTION_ATTACHMENTS_DONE event. ` + + `Expected: 1 event with event_type="EXTRACTION_ATTACHMENTS_DONE". ` + + `Received: ${allEvents.length} event(s). ` + + `Event types received: ${allEvents.map(e => e.event_type).join(', ') || 'none'}. ` + + `Full events: ${JSON.stringify(allEvents, null, 2)}. ` + + `This may indicate that the Extraction Function failed to handle rate limiting correctly, ` + + `or that the callback was not sent to the correct URL.` + ); + } + + // Step 2.5: Verify exactly one event was received + console.log('Step 2.5: Verifying event count and type'); + const allReceivedEvents = callbackServer.getReceivedEvents(); + + if (allReceivedEvents.length !== 1) { + throw new Error( + `Expected exactly 1 callback event, but received ${allReceivedEvents.length}. ` + + `Event types: ${allReceivedEvents.map(e => e.event_type).join(', ')}. ` + + `Full events: ${JSON.stringify(allReceivedEvents, null, 2)}. ` + + `The Extraction Function should emit exactly one event per invocation.` + ); + } + console.log('✓ Exactly one event received'); + + // Step 2.6: Verify event type is EXTRACTION_ATTACHMENTS_DONE + if (receivedEvent.event_type !== 'EXTRACTION_ATTACHMENTS_DONE') { + throw new Error( + `Expected event_type to be "EXTRACTION_ATTACHMENTS_DONE", but got "${receivedEvent.event_type}". ` + + `Full event: ${JSON.stringify(receivedEvent, null, 2)}. 
` + + `The Extraction Function should emit EXTRACTION_ATTACHMENTS_DONE when attachment streaming completes successfully.` + ); + } + console.log('✓ Event type is EXTRACTION_ATTACHMENTS_DONE'); + + // Step 3: Disable rate limiting on API server + console.log('\n=== Step 3: Disabling Rate Limiting ==='); + console.log(`Making POST request to: ${config.apiServerUrl}/end_rate_limiting`); + + try { + const rateLimitEndResponse = await axios.post( + `${config.apiServerUrl}/end_rate_limiting`, + {}, + { + timeout: 10000, + headers: { 'Content-Type': 'application/json' }, + } + ); + + if (rateLimitEndResponse.status !== 200) { + throw new Error( + `Expected status 200 when ending rate limiting, but got ${rateLimitEndResponse.status}. ` + + `Response: ${JSON.stringify(rateLimitEndResponse.data, null, 2)}. ` + + `Please ensure the API server supports the /end_rate_limiting endpoint.` + ); + } + + console.log('✓ Rate limiting disabled successfully'); + console.log(`Response status: ${rateLimitEndResponse.status}`); + console.log(`Response data: ${JSON.stringify(rateLimitEndResponse.data, null, 2)}`); + } catch (error) { + if (axios.isAxiosError(error)) { + throw new Error( + `Failed to disable rate limiting on API server. ` + + `URL: ${config.apiServerUrl}/end_rate_limiting. ` + + `Status: ${error.response?.status}. ` + + `Error: ${error.message}. ` + + `Response: ${JSON.stringify(error.response?.data, null, 2)}. 
` + + `Please ensure the API server is running and accessible.` + ); + } + throw error; + } + + // Log success summary + console.log('\n=== Acceptance Test Completed Successfully ==='); + console.log('Summary:'); + console.log(' - Rate limiting enabled: ✓'); + console.log(' - Event sent: EXTRACTION_ATTACHMENTS_START'); + console.log(' - Events received: 1'); + console.log(' - Event type: EXTRACTION_ATTACHMENTS_DONE'); + console.log(' - Rate limiting disabled: ✓'); + console.log(' - Test result: PASSED'); + }, 120000); // 120 second timeout for the entire test +}); \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction.test.ts b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction.test.ts new file mode 100644 index 0000000..c6b7678 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/attachments-extraction.test.ts @@ -0,0 +1,160 @@ +import { CallbackServer, loadTestConfig, createExtractionEvent, sendEventToSnapIn, TestConfig } from './test-helpers'; + +describe('Attachments Extraction Tests', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + + beforeAll(async () => { + // Load configuration from environment + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(config.callbackServerPort); + await callbackServer.start(); + }); + + afterAll(async () => { + // Stop callback server + await callbackServer.stop(); + }); + + beforeEach(() => { + // Clear events before each test + callbackServer.clearEvents(); + }); + + /** + * Test 1: Basic Attachment Streaming Success (Trivial) + * Verifies that EXTRACTION_ATTACHMENTS_START event is handled correctly + */ + test('should successfully handle EXTRACTION_ATTACHMENTS_START event', async () => { + // Create event payload with task ID that has attachments + const event = createExtractionEvent( + config, + 'EXTRACTION_ATTACHMENTS_START', + 
'IEAGS6BYKRRFMPQG' // Task ID with attachments + ); + + // Send event to snap-in server + await sendEventToSnapIn(config, event); + + // Wait for completion event (either DONE, DELAY, or ERROR) + const completionEvent = await Promise.race([ + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DONE', 60000), + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DELAY', 60000), + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_ERROR', 60000), + ]); + + // Verify we received a valid completion event + expect(completionEvent).toBeDefined(); + expect(completionEvent.event_type).toMatch( + /^EXTRACTION_ATTACHMENTS_(DONE|DELAY|ERROR)$/ + ); + + // If we got DONE, test passes + // If we got DELAY, it means rate limiting occurred (acceptable) + // If we got ERROR, log it but don't fail (API might be unavailable) + if (completionEvent.event_type === 'EXTRACTION_ATTACHMENTS_ERROR') { + console.warn( + 'Attachment extraction returned error:', + completionEvent.event_data?.error?.message + ); + } else if (completionEvent.event_type === 'EXTRACTION_ATTACHMENTS_DELAY') { + console.log( + 'Attachment extraction was rate limited, delay:', + completionEvent.event_data?.delay + ); + expect(completionEvent.event_data?.delay).toBeGreaterThan(0); + } + }, 90000); // 90 second timeout for this test + + /** + * Test 2: Attachment Streaming Continue (Simple) + * Verifies that EXTRACTION_ATTACHMENTS_CONTINUE event is handled correctly + */ + test('should successfully handle EXTRACTION_ATTACHMENTS_CONTINUE event', async () => { + // Create event payload with CONTINUE event type + const event = createExtractionEvent( + config, + 'EXTRACTION_ATTACHMENTS_CONTINUE', + 'IEAGS6BYKRRFMPQG' // Task ID with attachments + ); + + // Send event to snap-in server + await sendEventToSnapIn(config, event); + + // Wait for completion event + const completionEvent = await Promise.race([ + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DONE', 60000), + 
callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DELAY', 60000), + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_ERROR', 60000), + ]); + + // Verify we received a valid completion event + expect(completionEvent).toBeDefined(); + expect(completionEvent.event_type).toMatch( + /^EXTRACTION_ATTACHMENTS_(DONE|DELAY|ERROR)$/ + ); + + // Log any errors or delays for debugging + if (completionEvent.event_type === 'EXTRACTION_ATTACHMENTS_ERROR') { + console.warn( + 'Attachment extraction (continue) returned error:', + completionEvent.event_data?.error?.message + ); + } else if (completionEvent.event_type === 'EXTRACTION_ATTACHMENTS_DELAY') { + console.log( + 'Attachment extraction (continue) was rate limited, delay:', + completionEvent.event_data?.delay + ); + expect(completionEvent.event_data?.delay).toBeGreaterThan(0); + } + }, 90000); // 90 second timeout for this test + + /** + * Test 3: Attachment Streaming with Rate Limiting Handling (Complex) + * Verifies that rate limiting is properly handled with DELAY event + */ + test('should properly handle rate limiting with EXTRACTION_ATTACHMENTS_DELAY event', async () => { + // Create event payload + const event = createExtractionEvent( + config, + 'EXTRACTION_ATTACHMENTS_START', + 'IEAGS6BYKRRFMPQG' // Task ID with attachments + ); + + // Send event to snap-in server + await sendEventToSnapIn(config, event); + + // Wait for any completion event + const completionEvent = await Promise.race([ + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DONE', 60000), + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_DELAY', 60000), + callbackServer.waitForEvent('EXTRACTION_ATTACHMENTS_ERROR', 60000), + ]); + + // Verify we received a valid completion event + expect(completionEvent).toBeDefined(); + expect(completionEvent.event_type).toMatch( + /^EXTRACTION_ATTACHMENTS_(DONE|DELAY|ERROR)$/ + ); + + // If we received a DELAY event, verify it has proper structure + if (completionEvent.event_type === 
'EXTRACTION_ATTACHMENTS_DELAY') { + expect(completionEvent.event_data).toBeDefined(); + expect(completionEvent.event_data.delay).toBeDefined(); + expect(typeof completionEvent.event_data.delay).toBe('number'); + expect(completionEvent.event_data.delay).toBeGreaterThan(0); + console.log( + `Rate limiting detected correctly. Delay: ${completionEvent.event_data.delay} seconds` + ); + } else if (completionEvent.event_type === 'EXTRACTION_ATTACHMENTS_DONE') { + console.log('Attachment extraction completed successfully without rate limiting'); + } else { + console.warn( + 'Attachment extraction returned error:', + completionEvent.event_data?.error?.message + ); + } + }, 90000); // 90 second timeout for this test +}); \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_continue_test.json b/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_continue_test.json new file mode 100644 index 0000000..e14acc1 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_continue_test.json @@ -0,0 +1,75 @@ +[ + { + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "DEV-36shCCBEAA", + "dev_org": "DEV-36shCCBEAA", + "dev_org_id": "DEV-36shCCBEAA", + "dev_uid": "DEVU-6", + "dev_user": "DEVU-6", + "dev_user_id": "DEVU-6", + "event_type_adaas": "", + "external_sync_unit": "688725dad59c015ce052eecf", + "external_sync_unit_id": "688725dad59c015ce052eecf", + "external_sync_unit_name": "cards-pagination-test-2025-07-28-092514", + "external_system": "6752eb95c833e6b206fcf388", + "external_system_id": "6752eb95c833e6b206fcf388", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "wrike-snapin-devrev", + "initial_sync_scope": "full-history", + "mode": "INITIAL", + 
"request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "request_id_adaas": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sequence_version": "17", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "sync_run": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/36shCCBEAA:external_system_type/ADAAS:external_system/6752eb95c833e6b206fcf388:sync_unit/984c894e-71e5-4e94-b484-40b839c9a916", + "sync_unit_id": "984c894e-71e5-4e94-b484-40b839c9a916", + "uuid": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_ATTACHMENTS_CONTINUE" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/36shCCBEAA", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in/04bf12fa-57bd-4057-b0b0-ed3f42d9813e", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "service_account_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:svcacc/101", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:devu/6", + "event_id": "", + "execution_id": "4481432207487786275" + }, + "execution_metadata": { + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "function_name": "extraction", + "event_type": "EXTRACTION_ATTACHMENTS_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No 
newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_test.json b/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_test.json new file mode 100644 index 0000000..12b993d --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/attachments_extraction_test.json @@ -0,0 +1,75 @@ +[ + { + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "DEV-36shCCBEAA", + "dev_org": "DEV-36shCCBEAA", + "dev_org_id": "DEV-36shCCBEAA", + "dev_uid": "DEVU-6", + "dev_user": "DEVU-6", + "dev_user_id": "DEVU-6", + "event_type_adaas": "", + "external_sync_unit": "688725dad59c015ce052eecf", + "external_sync_unit_id": "688725dad59c015ce052eecf", + "external_sync_unit_name": "cards-pagination-test-2025-07-28-092514", + "external_system": "6752eb95c833e6b206fcf388", + "external_system_id": "6752eb95c833e6b206fcf388", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "wrike-snapin-devrev", + "initial_sync_scope": "full-history", + "mode": "INITIAL", + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "request_id_adaas": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sequence_version": "17", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "sync_run": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/36shCCBEAA:external_system_type/ADAAS:external_system/6752eb95c833e6b206fcf388:sync_unit/984c894e-71e5-4e94-b484-40b839c9a916", + "sync_unit_id": 
"984c894e-71e5-4e94-b484-40b839c9a916", + "uuid": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_ATTACHMENTS_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/36shCCBEAA", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in/04bf12fa-57bd-4057-b0b0-ed3f42d9813e", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "service_account_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:svcacc/101", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:devu/6", + "event_id": "", + "execution_id": "4481432207487786275" + }, + "execution_metadata": { + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "function_name": "extraction", + "event_type": "EXTRACTION_ATTACHMENTS_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/data-and-attachments-extraction.test.ts b/conformance_tests/attachments_extraction_streaming_tests/data-and-attachments-extraction.test.ts new file mode 100644 index 0000000..4c8ce4c --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/data-and-attachments-extraction.test.ts @@ -0,0 +1,227 @@ +import axios from 'axios'; +import { + CallbackServer, + loadTestConfig, + createExtractionEvent, + sendEventToSnapIn, + TestConfig, + CallbackEvent, +} from './test-helpers'; + +describe('Data and Attachments Extraction Workflow Tests', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + + beforeAll(async () => { + // 
Load configuration from environment + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(config.callbackServerPort); + await callbackServer.start(); + }); + + afterAll(async () => { + // Stop callback server + await callbackServer.stop(); + }); + + beforeEach(() => { + // Clear events before each test + callbackServer.clearEvents(); + }); + + /** + * Test: Complete Attachment Extraction Workflow + * + * This test verifies the full workflow: + * 1. Data extraction completes successfully + * 2. Attachment extraction completes successfully + * 3. Artifacts are properly uploaded and accessible + */ + test('should complete full data and attachments extraction workflow with artifact upload verification', async () => { + console.log('=== Step 1: Starting Data Extraction ==='); + + // Step 1: Create and send data extraction event + const dataExtractionEvent = createExtractionEvent( + config, + 'EXTRACTION_DATA_START', + 'IEAGS6BYI5RFMPP7' // Folder ID + ); + + console.log('Sending EXTRACTION_DATA_START event to snap-in server...'); + await sendEventToSnapIn(config, dataExtractionEvent); + + // Wait for data extraction completion + console.log('Waiting for EXTRACTION_DATA_DONE event...'); + const dataExtractionDoneEvent = await callbackServer.waitForEvent( + 'EXTRACTION_DATA_DONE', + 90000 // 90 second timeout + ); + + // Validate Step 1: Single EXTRACTION_DATA_DONE event received + expect(dataExtractionDoneEvent).toBeDefined(); + expect(dataExtractionDoneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + console.log('✓ Data extraction completed successfully'); + + // Verify only one event was received for data extraction + const dataExtractionEvents = callbackServer + .getReceivedEvents() + .filter((e) => e.event_type.startsWith('EXTRACTION_DATA_')); + + if (dataExtractionEvents.length !== 1) { + throw new Error( + `Expected exactly 1 data extraction event, but received ${dataExtractionEvents.length}. 
` + + `Events: ${dataExtractionEvents.map((e) => e.event_type).join(', ')}` + ); + } + console.log('✓ Verified single data extraction event received'); + + // Clear events before attachment extraction + callbackServer.clearEvents(); + + console.log('\n=== Step 2: Starting Attachments Extraction ==='); + + // Step 2: Create and send attachments extraction event + const attachmentsExtractionEvent = createExtractionEvent( + config, + 'EXTRACTION_ATTACHMENTS_START', + 'IEAGS6BYI5RFMPP7' // Folder ID + ); + + console.log('Sending EXTRACTION_ATTACHMENTS_START event to snap-in server...'); + await sendEventToSnapIn(config, attachmentsExtractionEvent); + + // Wait for attachments extraction completion + console.log('Waiting for EXTRACTION_ATTACHMENTS_DONE event...'); + const attachmentsExtractionDoneEvent: CallbackEvent = await callbackServer.waitForEvent( + 'EXTRACTION_ATTACHMENTS_DONE', + 90000 // 90 second timeout + ); + + // Validate Step 2: Single EXTRACTION_ATTACHMENTS_DONE event received + expect(attachmentsExtractionDoneEvent).toBeDefined(); + expect(attachmentsExtractionDoneEvent.event_type).toBe('EXTRACTION_ATTACHMENTS_DONE'); + console.log('✓ Attachments extraction completed successfully'); + + // Verify only one event was received for attachments extraction + const attachmentsExtractionEvents = callbackServer + .getReceivedEvents() + .filter((e) => e.event_type.startsWith('EXTRACTION_ATTACHMENTS_')); + + if (attachmentsExtractionEvents.length !== 1) { + throw new Error( + `Expected exactly 1 attachments extraction event, but received ${attachmentsExtractionEvents.length}. 
` + + `Events: ${attachmentsExtractionEvents.map((e) => e.event_type).join(', ')}` + ); + } + console.log('✓ Verified single attachments extraction event received'); + + console.log('\n=== Step 3: Validating Artifact Data ==='); + + // Validate event_data exists + if (!attachmentsExtractionDoneEvent.event_data) { + throw new Error( + 'Expected event_data to be defined in EXTRACTION_ATTACHMENTS_DONE event. ' + + `Received event: ${JSON.stringify(attachmentsExtractionDoneEvent, null, 2)}` + ); + } + + // Validate artifacts array exists + const artifacts = attachmentsExtractionDoneEvent.event_data.artifacts; + if (!Array.isArray(artifacts)) { + throw new Error( + 'Expected event_data.artifacts to be an array. ' + + `Received type: ${typeof artifacts}. ` + + `Event data: ${JSON.stringify(attachmentsExtractionDoneEvent.event_data, null, 2)}` + ); + } + console.log(`✓ Artifacts array found with ${artifacts.length} item(s)`); + + // Validate artifacts array is not empty + if (artifacts.length === 0) { + throw new Error( + 'Expected event_data.artifacts to be a non-empty array. ' + + `Received empty array. ` + + `Event data: ${JSON.stringify(attachmentsExtractionDoneEvent.event_data, null, 2)}` + ); + } + + // Validate artifacts array has exactly 1 element + if (artifacts.length !== 1) { + throw new Error( + `Expected event_data.artifacts to have length 1, but got ${artifacts.length}. ` + + `Artifacts: ${JSON.stringify(artifacts, null, 2)}` + ); + } + console.log('✓ Artifacts array has exactly 1 element'); + + // Get the artifact object + const artifactObject = artifacts[0]; + console.log(`Artifact object: ${JSON.stringify(artifactObject, null, 2)}`); + + // Validate artifact item_type + if (artifactObject.item_type !== 'ssor_attachment') { + throw new Error( + `Expected artifact.item_type to be "ssor_attachment", but got "${artifactObject.item_type}". 
` + + `Artifact: ${JSON.stringify(artifactObject, null, 2)}` + ); + } + console.log('✓ Artifact item_type is "ssor_attachment"'); + + // Validate artifact item_count + if (artifactObject.item_count !== 2) { + throw new Error( + `Expected artifact.item_count to be 2, but got ${artifactObject.item_count}. ` + + `Artifact: ${JSON.stringify(artifactObject, null, 2)}` + ); + } + console.log('✓ Artifact item_count is 2'); + + // Validate artifact has an id + if (!artifactObject.id) { + throw new Error( + 'Expected artifact to have an "id" field. ' + + `Artifact: ${JSON.stringify(artifactObject, null, 2)}` + ); + } + console.log(`✓ Artifact ID: ${artifactObject.id}`); + + console.log('\n=== Step 4: Verifying Artifact Upload ==='); + + // Verify artifact upload via GET request + const uploadVerificationUrl = `${config.devrevServerUrl}/is_uploaded/${artifactObject.id}`; + console.log(`Making GET request to: ${uploadVerificationUrl}`); + + try { + const uploadResponse = await axios.get(uploadVerificationUrl, { + timeout: 10000, + validateStatus: (status) => status === 200 || status === 404, + }); + + if (uploadResponse.status !== 200) { + throw new Error( + `Expected artifact upload verification to return status 200, but got ${uploadResponse.status}. ` + + `URL: ${uploadVerificationUrl}. ` + + `Response: ${JSON.stringify(uploadResponse.data, null, 2)}` + ); + } + + console.log('✓ Artifact upload verified successfully (status 200)'); + console.log(`Response: ${JSON.stringify(uploadResponse.data, null, 2)}`); + } catch (error) { + if (axios.isAxiosError(error)) { + throw new Error( + `Failed to verify artifact upload. ` + + `URL: ${uploadVerificationUrl}. ` + + `Status: ${error.response?.status}. ` + + `Error: ${error.message}. 
` + + `Response: ${JSON.stringify(error.response?.data, null, 2)}` + ); + } + throw error; + } + + console.log('\n=== Test Completed Successfully ==='); + }, 180000); // 180 second (3 minute) timeout for the entire workflow +}); \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/package.json b/conformance_tests/attachments_extraction_streaming_tests/package.json new file mode 100644 index 0000000..4101588 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/package.json @@ -0,0 +1,24 @@ +{ + "name": "wrike-snap-in-conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --testTimeout=120000 --verbose" + }, + "devDependencies": { + "@types/express": "^4.17.17", + "@types/jest": "^29.5.0", + "@types/node": "^18.15.11", + "express": "^4.18.2", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "axios": "^1.4.0" + }, + "jest": { + "preset": "ts-jest", + "testEnvironment": "node", + "testMatch": ["**/*.test.ts"], + "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"] + } +} \ No newline at end of file diff --git a/conformance_tests/attachments_extraction_streaming_tests/test-helpers.ts b/conformance_tests/attachments_extraction_streaming_tests/test-helpers.ts new file mode 100644 index 0000000..7193065 --- /dev/null +++ b/conformance_tests/attachments_extraction_streaming_tests/test-helpers.ts @@ -0,0 +1,242 @@ +import express, { Express, Request, Response } from 'express'; +import axios from 'axios'; +import { Server } from 'http'; + +/** + * Environment configuration for tests + */ +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + snapInServerUrl: string; + callbackServerPort: number; + devrevServerUrl: string; + workerDataUrl: string; + apiServerUrl: string; +} + +/** + * Load test configuration from environment variables + */ +export function loadTestConfig(): 
TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + snapInServerUrl: 'http://localhost:8000/handle/sync', + callbackServerPort: 8002, + devrevServerUrl: 'http://localhost:8003', + workerDataUrl: 'http://localhost:8003/external-worker', + apiServerUrl: 'http://localhost:8004', + }; +} + +/** + * Callback event received from snap-in + */ +export interface CallbackEvent { + event_type: string; + event_data?: any; +} + +/** + * Callback server for receiving events from snap-in + */ +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private receivedEvents: CallbackEvent[] = []; + private eventPromises: Map void> = new Map(); + + constructor(private port: number) { + this.app = express(); + this.app.use(express.json()); + + this.app.post('/callback', (req: Request, res: Response) => { + const event: CallbackEvent = req.body; + this.receivedEvents.push(event); + + // Resolve any waiting promises for this event type + const resolver = this.eventPromises.get(event.event_type); + if (resolver) { + resolver(event); + this.eventPromises.delete(event.event_type); + } + + res.status(200).send({ status: 'received' }); + }); + } + + /** + * Start the callback server + */ + async start(): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(this.port, () => { + resolve(); + }); + }); + } + + /** + * Stop the callback server + */ + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + /** + * Wait for a specific event type with timeout + */ + async 
waitForEvent(eventType: string, timeoutMs: number = 30000): Promise { + // Check if event already received + const existingEvent = this.receivedEvents.find((e) => e.event_type === eventType); + if (existingEvent) { + return existingEvent; + } + + // Wait for event with timeout + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + this.eventPromises.delete(eventType); + reject( + new Error( + `Timeout waiting for event type: ${eventType}. ` + + `Received events: ${this.receivedEvents.map((e) => e.event_type).join(', ')}` + ) + ); + }, timeoutMs); + + this.eventPromises.set(eventType, (event) => { + clearTimeout(timeout); + resolve(event); + }); + }); + } + + /** + * Get all received events + */ + getReceivedEvents(): CallbackEvent[] { + return [...this.receivedEvents]; + } + + /** + * Clear received events + */ + clearEvents(): void { + this.receivedEvents = []; + this.eventPromises.clear(); + } +} + +/** + * Create a base event payload for extraction function + */ +export function createExtractionEvent( + config: TestConfig, + eventType: string, + externalSyncUnitId?: string +): any { + return { + payload: { + connection_data: { + org_id: config.wrikeSpaceId, + org_name: 'Test Space', + key: config.wrikeApiKey, + key_type: 'bearer_token', + }, + event_context: { + callback_url: `http://localhost:${config.callbackServerPort}/callback`, + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: externalSyncUnitId || 'IEAGS6BYI5RFMPP7', + external_sync_unit_id: externalSyncUnitId || 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Folder', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-request-${Date.now()}`, + snap_in_slug: 'wrike-snap-in', + snap_in_version_id: 'v1', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'standard', + sync_unit: 
'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: `test-uuid-${Date.now()}`, + worker_data_url: `${config.devrevServerUrl}/external-worker`, + }, + event_type: eventType, + event_data: {}, + }, + context: { + dev_oid: 'test-org-id', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'v1', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: 'extraction', + event_type: eventType, + devrev_endpoint: config.devrevServerUrl, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Send event to snap-in server + */ +export async function sendEventToSnapIn( + config: TestConfig, + event: any +): Promise { + try { + const response = await axios.post(config.snapInServerUrl, event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 60000, + }); + return response.data; + } catch (error) { + if (axios.isAxiosError(error)) { + throw new Error( + `Failed to send event to snap-in server: ${error.message}. 
` + + `Status: ${error.response?.status}, Data: ${JSON.stringify(error.response?.data)}` + ); + } + throw error; + } +} \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/tsconfig.json b/conformance_tests/attachments_extraction_streaming_tests/tsconfig.json similarity index 54% rename from conformance_tests/extraction_external_sync_unit_check/tsconfig.json rename to conformance_tests/attachments_extraction_streaming_tests/tsconfig.json index 99a2ef0..52e83ce 100644 --- a/conformance_tests/extraction_external_sync_unit_check/tsconfig.json +++ b/conformance_tests/attachments_extraction_streaming_tests/tsconfig.json @@ -1,15 +1,20 @@ { "compilerOptions": { - "target": "es2018", + "target": "es2017", "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", "strict": true, "esModuleInterop": true, "skipLibCheck": true, "forceConsistentCasingInFileNames": true, - "outDir": "./dist", "resolveJsonModule": true, - "suppressImplicitAnyIndexErrors": true + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "moduleResolution": "node" }, "include": ["*.ts"], - "exclude": ["node_modules"] + "exclude": ["node_modules", "dist"] } \ No newline at end of file diff --git a/conformance_tests/authentication_check_tests/.npmrc b/conformance_tests/authentication_check_tests/.npmrc new file mode 100644 index 0000000..9cf9495 --- /dev/null +++ b/conformance_tests/authentication_check_tests/.npmrc @@ -0,0 +1 @@ +package-lock=false \ No newline at end of file diff --git a/conformance_tests/authentication_check_tests/check_authentication.test.ts b/conformance_tests/authentication_check_tests/check_authentication.test.ts new file mode 100644 index 0000000..498266d --- /dev/null +++ b/conformance_tests/authentication_check_tests/check_authentication.test.ts @@ -0,0 +1,237 @@ +import { loadTestConfig, invokeFunction, isValidISOTimestamp, startRateLimiting, endRateLimiting } from './test-utils/test-helpers'; 
+ +describe('check_authentication function', () => { + let config: ReturnType; + + beforeAll(() => { + config = loadTestConfig(); + }); + + /** + * Test 1: Successful Authentication (Trivial) + * Verifies that the function correctly authenticates with valid credentials + */ + test('should successfully authenticate with valid API key', async () => { + const response = await invokeFunction('check_authentication', config.wrikeApiKey, config.wrikeSpaceId); + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + const result = response.function_result; + + // Verify status is success + expect(result.status).toBe('success'); + + // Verify status_code is 200 + expect(result.status_code).toBe(200); + + // Verify authenticated flag is true + expect(result.metadata).toBeDefined(); + expect(result.metadata.authenticated).toBe(true); + + // Verify user_info is present + expect(result.metadata.user_info).toBeDefined(); + expect(result.metadata.user_info.id).toBeDefined(); + + // Verify message indicates success + expect(result.message).toContain('Successfully authenticated'); + }, 30000); + + /** + * Test 2: Invalid API Key (Simple) + * Verifies that the function correctly handles invalid credentials + */ + test('should fail authentication with invalid API key', async () => { + const invalidApiKey = 'invalid-api-key-12345'; + const response = await invokeFunction('check_authentication', invalidApiKey, config.wrikeSpaceId); + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + const result = response.function_result; + + // Verify status is error + expect(result.status).toBe('error'); + + // Verify status_code indicates authentication failure (401 or 403) + expect([401, 403]).toContain(result.status_code); + + // Verify authenticated flag is false + expect(result.metadata).toBeDefined(); + expect(result.metadata.authenticated).toBe(false); + + // Verify user_info is not present + 
expect(result.metadata.user_info).toBeUndefined(); + + // Verify message indicates authentication failure + expect(result.message).toMatch(/Authentication failed|Invalid|expired|forbidden/i); + }, 30000); + + /** + * Test 3: Response Structure Validation (More Complex) + * Verifies that the response structure matches the expected interface + */ + test('should return response with correct structure and all required fields', async () => { + const response = await invokeFunction('check_authentication', config.wrikeApiKey, config.wrikeSpaceId); + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + const result = response.function_result; + + // Verify top-level required fields + expect(result).toHaveProperty('status'); + expect(result).toHaveProperty('message'); + expect(result).toHaveProperty('status_code'); + expect(result).toHaveProperty('api_delay'); + expect(result).toHaveProperty('metadata'); + expect(result).toHaveProperty('timestamp'); + + // Verify status is one of allowed values + expect(['success', 'error']).toContain(result.status); + + // Verify message is a non-empty string + expect(typeof result.message).toBe('string'); + expect(result.message.length).toBeGreaterThan(0); + + // Verify status_code is a number + expect(typeof result.status_code).toBe('number'); + + // Verify api_delay is a number + expect(typeof result.api_delay).toBe('number'); + expect(result.api_delay).toBeGreaterThanOrEqual(0); + + // Verify metadata structure + expect(result.metadata).toBeDefined(); + expect(result.metadata).toHaveProperty('authenticated'); + expect(result.metadata).toHaveProperty('function_name'); + expect(result.metadata).toHaveProperty('request_id'); + + // Verify metadata.authenticated is boolean + expect(typeof result.metadata.authenticated).toBe('boolean'); + + // Verify metadata.function_name is correct + expect(result.metadata.function_name).toBe('check_authentication'); + + // Verify metadata.request_id is a non-empty string 
+ expect(typeof result.metadata.request_id).toBe('string'); + expect(result.metadata.request_id.length).toBeGreaterThan(0); + + // Verify timestamp is valid ISO format + expect(typeof result.timestamp).toBe('string'); + expect(isValidISOTimestamp(result.timestamp)).toBe(true); + + // If authenticated, verify user_info structure + if (result.metadata.authenticated) { + expect(result.metadata.user_info).toBeDefined(); + expect(result.metadata.user_info).toHaveProperty('id'); + expect(typeof result.metadata.user_info.id).toBe('string'); + expect(result.metadata.user_info.id.length).toBeGreaterThan(0); + } + }, 30000); + + /** + * Test 4: Expected User ID Validation (Acceptance Test) + * Verifies that the function returns the expected user ID KUAUZTPW + */ + test('should return expected user ID KUAUZTPW in API response', async () => { + const response = await invokeFunction('check_authentication', config.wrikeApiKey, config.wrikeSpaceId); + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + const result = response.function_result; + + // Verify authentication succeeded + expect(result.status).toBe('success'); + expect(result.metadata).toBeDefined(); + expect(result.metadata.authenticated).toBe(true); + + // Verify user_info is present + expect(result.metadata.user_info).toBeDefined(); + + // Verify user_info.id is defined + expect(result.metadata.user_info.id).toBeDefined(); + + // Main assertion: verify the user ID matches expected value + const actualUserId = result.metadata.user_info.id; + const expectedUserId = 'KUAUZTPW'; + + expect(actualUserId).toBe(expectedUserId); + + // If the assertion fails, the error message will show: + // Expected: "KUAUZTPW" + // Received: "" + // This provides clear debugging information about what user ID was actually returned + }, 30000); + + /** + * Test 5: Rate Limiting Handling (Acceptance Test) + * Verifies that the function correctly handles rate limiting (429 response) + * and returns 
appropriate api_delay value + */ + test('should handle rate limiting correctly with 429 response', async () => { + const testName = 'check_authentication_rate_limit_test'; + + try { + // Step 1: Start rate limiting on the mock API server + await startRateLimiting(testName); + console.log(`Rate limiting started for test: ${testName}`); + + // Step 2: Invoke the function with valid credentials + const response = await invokeFunction('check_authentication', config.wrikeApiKey, config.wrikeSpaceId); + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + const result = response.function_result; + + // Log the full response for debugging + console.log('Rate limiting response:', JSON.stringify(result, null, 2)); + + // Step 3: Verify status_code is 429 + expect(result.status_code).toBe(429); + if (result.status_code !== 429) { + throw new Error( + `Expected status_code to be 429 (Too Many Requests), but got ${result.status_code}. ` + + `This indicates the rate limiting was not triggered or not handled correctly.` + ); + } + + // Step 4: Verify api_delay is greater than 0 and less than or equal to 3 + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + expect(result.api_delay).toBeGreaterThan(0); + + if (result.api_delay > 3) { + throw new Error( + `Expected api_delay to be <= 3 seconds, but got ${result.api_delay}. ` + + `This suggests the api_delay calculation in the implementation may be incorrect. 
` + + `The api_delay should be extracted from the 'retry-after' header of the 429 response.` + ); + } + + // Step 5: Verify status is error + expect(result.status).toBe('error'); + if (result.status !== 'error') { + throw new Error( + `Expected status to be 'error' for rate limited request, but got '${result.status}'.` + ); + } + + // Step 6: Verify authenticated flag is false + expect(result.metadata).toBeDefined(); + expect(result.metadata.authenticated).toBe(false); + if (result.metadata.authenticated !== false) { + throw new Error( + `Expected authenticated to be false for rate limited request, but got ${result.metadata.authenticated}.` + ); + } + + // Step 7: Verify message contains rate limit information + expect(result.message).toBeDefined(); + expect(typeof result.message).toBe('string'); + expect(result.message.toLowerCase()).toMatch(/rate limit|too many requests|retry/i); + + } finally { + // Step 8: Always end rate limiting, even if test fails + await endRateLimiting(); + console.log('Rate limiting ended'); + } + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/authentication_check_tests/jest.config.js b/conformance_tests/authentication_check_tests/jest.config.js new file mode 100644 index 0000000..e1f6807 --- /dev/null +++ b/conformance_tests/authentication_check_tests/jest.config.js @@ -0,0 +1,28 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' + ], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + resolveJsonModule: true + } + }] + }, + verbose: true, + bail: false, + maxWorkers: 1 +}; \ No newline at end of file diff --git 
a/conformance_tests/authentication_check_tests/package.json b/conformance_tests/authentication_check_tests/package.json new file mode 100644 index 0000000..9c1f430 --- /dev/null +++ b/conformance_tests/authentication_check_tests/package.json @@ -0,0 +1,21 @@ +{ + "name": "wrike-airdrop-conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike Airdrop Snap-in", + "scripts": { + "test": "jest --runInBand --detectOpenHandles --forceExit" + }, + "keywords": [], + "author": "", + "license": "ISC", + "devDependencies": { + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/authentication_check_tests/test-utils/test-helpers.ts b/conformance_tests/authentication_check_tests/test-utils/test-helpers.ts new file mode 100644 index 0000000..c08ab56 --- /dev/null +++ b/conformance_tests/authentication_check_tests/test-utils/test-helpers.ts @@ -0,0 +1,177 @@ +import axios from 'axios'; + +/** + * Mock API server configuration + */ +const MOCK_API_SERVER_URL = 'http://localhost:8004'; +const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; + +/** + * Environment configuration for tests + */ +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + snapInServerUrl: string; +} + +/** + * Load test configuration from environment variables + */ +export function loadTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + snapInServerUrl: SNAP_IN_SERVER_URL, + }; +} + +/** + * Create a test event payload for function invocation + */ 
+export function createTestEvent(functionName: string, apiKey: string, spaceId: string): any { + return { + payload: { + connection_data: { + key: apiKey, + org_id: spaceId, + org_name: 'Test Organization', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: spaceId, + external_sync_unit_id: spaceId, + external_sync_unit_name: 'Test Space', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-${Date.now()}`, + snap_in_slug: 'wrike-airdrop', + snap_in_version_id: 'v1', + sync_run: 'test-run', + sync_run_id: 'test-run-id', + sync_tier: 'standard', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit-id', + uuid: `uuid-${Date.now()}`, + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: 'custom_event', + }, + context: { + dev_oid: 'test-org-id', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'v1', + service_account_id: 'test-service-account', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-${Date.now()}`, + function_name: functionName, + event_type: 'custom_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Send event to snap-in server + */ +export async function invokeFunction(functionName: string, apiKey: string, spaceId: string): Promise { + const config = loadTestConfig(); + const event = createTestEvent(functionName, apiKey, spaceId); + + try { + const response = await axios.post(config.snapInServerUrl, event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, + }); + + return response.data; + } catch (error: any) { + if (error.response) { + return error.response.data; + } + throw error; + 
} +} + +/** + * Validate ISO 8601 timestamp format + */ +export function isValidISOTimestamp(timestamp: string): boolean { + const isoRegex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z?$/; + if (!isoRegex.test(timestamp)) { + return false; + } + const date = new Date(timestamp); + return !isNaN(date.getTime()); +} + +/** + * Start rate limiting on the mock API server + * @param testName - Unique identifier for the test + */ +export async function startRateLimiting(testName: string): Promise<void> { + try { + await axios.post( + `${MOCK_API_SERVER_URL}/start_rate_limiting`, + { test_name: testName }, + { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 5000, + } + ); + } catch (error: any) { + throw new Error(`Failed to start rate limiting: ${error.message}`); + } +} + +/** + * End rate limiting on the mock API server + */ +export async function endRateLimiting(): Promise<void> { + try { + await axios.post( + `${MOCK_API_SERVER_URL}/end_rate_limiting`, + {}, + { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 5000, + } + ); + } catch (error: any) { + // Log but don't throw - cleanup should not fail the test + console.warn(`Failed to end rate limiting: ${error.message}`); + } +} \ No newline at end of file diff --git a/conformance_tests/authentication_check_tests/tsconfig.json b/conformance_tests/authentication_check_tests/tsconfig.json new file mode 100644 index 0000000..a2a4ad7 --- /dev/null +++ b/conformance_tests/authentication_check_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": "./", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", +
"dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/callback_url_push_validation/data_push_check.test.ts b/conformance_tests/callback_url_push_validation/data_push_check.test.ts deleted file mode 100644 index 3ecea57..0000000 --- a/conformance_tests/callback_url_push_validation/data_push_check.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { Server } from 'http'; - -// Constants -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Types -interface AirdropEvent { - context: { - secrets: { - service_account_token: string; - }; - snap_in_version_id: string; - snap_in_id: string; - }; - payload: { - event_context?: { - callback_url: string; - [key: string]: any; - }; - event_type?: string; - [key: string]: any; - }; - execution_metadata: { - devrev_endpoint: string; - function_name: string; - [key: string]: any; - }; - input_data?: any; -} - -describe('Data Push Check Function Tests', () => { - let callbackServer: Server; - let receivedData: any = null; - let callbackEndpoint = '/callback'; - - // Setup callback server before tests - beforeAll((done) => { - const app = express(); - app.use(bodyParser.json()); - - // Create callback endpoint - app.post(callbackEndpoint, (req, res) => { - receivedData = req.body; - console.log('Callback server received data:', receivedData); - res.status(200).json({ status: 'success' }); - }); - - // Create endpoint that returns an error - app.post('/error', (req, res) => { - res.status(500).json({ status: 'error', message: 'Internal server error' }); - }); - - // Start the server - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server started on port ${CALLBACK_SERVER_PORT}`); - done(); - }); - }); - - // Clean up after tests - afterAll((done) => { - if 
(callbackServer) { - callbackServer.close(done); - } else { - done(); - } - }); - - // Reset received data before each test - beforeEach(() => { - receivedData = null; - }); - - // Test 1: Basic Functionality - Verify successful data push - test('should successfully push data to callback URL', async () => { - // Create a valid AirdropEvent with callback URL - const event: AirdropEvent = { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - event_context: { - callback_url: `${CALLBACK_SERVER_URL}${callbackEndpoint}`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'test-unit', - external_sync_unit_id: 'test-unit-id', - external_sync_unit_name: 'test-unit-name', - external_system: 'test-system', - external_system_type: 'test-system-type', - import_slug: 'test-import-slug', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in-slug', - snap_in_version_id: 'test-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'data_push_check' - } - }; - - // Send the event to the function - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.push_successful).toBe(true); - - // Wait a bit to ensure callback is processed - await new Promise(resolve => setTimeout(resolve, 500)); - - // Verify that our callback server received the data - 
expect(receivedData).not.toBeNull(); - expect(receivedData.test_data).toBe('This is a test payload'); - expect(receivedData.snap_in_version_id).toBe('test-version-id'); - }, 10000); - - // Test 2: Error Handling - Verify error handling for unreachable callback URL - test('should handle errors when callback URL returns an error', async () => { - // Create an AirdropEvent with a callback URL that returns an error - const event: AirdropEvent = { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/error`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'test-unit', - external_sync_unit_id: 'test-unit-id', - external_sync_unit_name: 'test-unit-name', - external_system: 'test-system', - external_system_type: 'test-system-type', - import_slug: 'test-import-slug', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in-slug', - snap_in_version_id: 'test-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'data_push_check' - } - }; - - // Send the event to the function - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('error'); - expect(response.data.function_result.push_successful).toBe(false); - expect(response.data.function_result.error).toBeDefined(); - }, 10000); - - // Test 3: Input Validation - Verify validation of input parameters 
- test('should validate input parameters', async () => { - // Create an AirdropEvent with missing required fields - const event = { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - // Intentionally missing event_context with callback_url - event_type: 'test-event-type' - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'data_push_check' - } - }; - - // Send the event to the function - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('error'); - expect(response.data.function_result.push_successful).toBe(false); - expect(response.data.function_result.error).toContain('missing required field'); - }, 10000); -}); \ No newline at end of file diff --git a/conformance_tests/callback_url_push_validation/jest.config.js b/conformance_tests/callback_url_push_validation/jest.config.js deleted file mode 100644 index a062d0c..0000000 --- a/conformance_tests/callback_url_push_validation/jest.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, - verbose: true -}; \ No newline at end of file diff --git a/conformance_tests/callback_url_push_validation/package.json b/conformance_tests/callback_url_push_validation/package.json deleted file mode 100644 index 7e7ad3f..0000000 --- a/conformance_tests/callback_url_push_validation/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scripts": { - "test": "jest" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.5.5", - "@types/node": "^18.18.0", - "axios": "^1.5.1", - "body-parser": "^1.20.2", - "express": "^4.18.2", - "jest": "^29.7.0", - "ts-jest": "^29.1.1", - "typescript": "^4.9.5" - }, - "dependencies": 
{ - "axios": "^1.5.1", - "express": "^4.18.2" - } -} \ No newline at end of file diff --git a/conformance_tests/callback_url_push_validation/tsconfig.json b/conformance_tests/callback_url_push_validation/tsconfig.json deleted file mode 100644 index 465fcc6..0000000 --- a/conformance_tests/callback_url_push_validation/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2018", - "module": "commonjs", - "esModuleInterop": true, - "strict": true, - "outDir": "dist", - "declaration": true, - "sourceMap": true - }, - "include": ["**/*.ts"], - "exclude": ["node_modules", "dist"] -} \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/resources/data_extraction_test.json b/conformance_tests/comments_data_push_tests/extraction-comments-acceptance.json similarity index 90% rename from conformance_tests/extraction_data_repository_push/resources/data_extraction_test.json rename to conformance_tests/comments_data_push_tests/extraction-comments-acceptance.json index 9320ad7..cfc2983 100644 --- a/conformance_tests/extraction_data_repository_push/resources/data_extraction_test.json +++ b/conformance_tests/comments_data_push_tests/extraction-comments-acceptance.json @@ -2,10 +2,10 @@ { "payload": { "connection_data": { - "key": "test-key", - "key_type": "api_key", - "org_id": "test-space-id", - "org_name": "My Space" + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" }, "event_context": { "callback_url": "http://localhost:8002/callback", @@ -16,8 +16,8 @@ "dev_user": "test-user", "dev_user_id": "test-user-id", "event_type_adaas": "", - "external_sync_unit": "test-external-sync-unit", - "external_sync_unit_id": "IEAGS6BYI5RFMPPY", + "external_sync_unit": "IEAGS6BYI5RFMPP7", + "external_sync_unit_id": "IEAGS6BYI5RFMPP7", "external_sync_unit_name": "test-external-sync-unit-name", "external_system": "test-external-system", "external_system_id": "test-external-system-id", diff --git 
a/conformance_tests/comments_data_push_tests/extraction-comments-acceptance.test.ts b/conformance_tests/comments_data_push_tests/extraction-comments-acceptance.test.ts new file mode 100644 index 0000000..cc6f9fb --- /dev/null +++ b/conformance_tests/comments_data_push_tests/extraction-comments-acceptance.test.ts @@ -0,0 +1,192 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import { + loadTestConfig, + CallbackServer, + sendEventToSnapIn, + waitFor, + TestConfig, + DevRevMockServer, +} from './test-helpers'; + +describe('Extraction Function - Comments Data Acceptance Test', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + let devrevServer: DevRevMockServer; + let testStartTime: number; + + beforeAll(async () => { + // Load test configuration + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + + // Start DevRev mock server + devrevServer = new DevRevMockServer(); + await devrevServer.start(8003); + + console.log('[Acceptance Test Setup] All servers started'); + }); + + afterAll(async () => { + // Stop servers + await callbackServer.stop(); + await devrevServer.stop(); + console.log('[Acceptance Test Teardown] All servers stopped'); + }); + + beforeEach(() => { + // Clear captured events before each test + callbackServer.clearEvents(); + devrevServer.clear(); + testStartTime = Date.now(); + console.log(`[Acceptance Test] Test started at ${new Date(testStartTime).toISOString()}`); + }); + + test('should extract comments data with correct artifact structure and item count', async () => { + const operationStartTime = Date.now(); + console.log('[Acceptance Test] Starting comments extraction acceptance test'); + + // Load test event from JSON file + const testEventPath = path.join(__dirname, 'extraction-comments-acceptance.json'); + const testEventContent = fs.readFileSync(testEventPath, 'utf-8'); + const testEventArray = 
JSON.parse(testEventContent); + + // Get the first event from the array + const event = testEventArray[0]; + + console.log('[Acceptance Test] Loaded event from JSON file'); + + // Replace placeholders with actual credentials + event.payload.connection_data.key = config.wrikeApiKey; + event.payload.connection_data.org_id = config.wrikeSpaceId; + event.payload.event_context.callback_url = config.callbackServerUrl; + + console.log('[Acceptance Test] Sending event to snap-in server'); + // Send event to snap-in server + const response = await sendEventToSnapIn(config.snapInServerUrl, event); + + console.log('[Acceptance Test] Received response:', JSON.stringify(response, null, 2)); + + // Verify response structure + expect(response).toBeDefined(); + if (response.error) { + throw new Error( + `Snap-in server returned error: ${JSON.stringify(response.error, null, 2)}` + ); + } + + console.log('[Acceptance Test] Waiting for extraction to complete...'); + // Wait for extraction to complete + const waitStartTime = Date.now(); + await waitFor( + () => { + const events = callbackServer.getEvents(); + if (events.length > 0) { + console.log(`[Acceptance Test] Received ${events.length} callback events so far. 
Event types: ${events.map(e => e.event_type).join(', ')}`); + } + return events.some((e) => e.event_type === 'EXTRACTION_DATA_DONE'); + }, + 60000 // 60 seconds timeout for full extraction + ); + const waitDuration = Date.now() - waitStartTime; + console.log(`[Acceptance Test] Wait completed in ${waitDuration}ms`); + + const verificationStartTime = Date.now(); + console.log('[Acceptance Test] Extraction completed, verifying results'); + + // Get captured events + const capturedEvents = callbackServer.getEvents(); + console.log(`[Acceptance Test] Total callback events: ${capturedEvents.length}`); + + // Find the EXTRACTION_DATA_DONE event + const doneEvent = capturedEvents.find( + (e) => e.event_type === 'EXTRACTION_DATA_DONE' + ); + + // Verify EXTRACTION_DATA_DONE event exists + if (!doneEvent) { + throw new Error( + `Expected to receive EXTRACTION_DATA_DONE event, but got events: ${JSON.stringify( + capturedEvents.map((e) => e.event_type), + null, + 2 + )}\n\nFull events: ${JSON.stringify(capturedEvents, null, 2)}` + ); + } + + // Verify event_data exists + if (!doneEvent.event_data) { + throw new Error( + `Expected event_data in callback event, but got: ${JSON.stringify( + doneEvent, + null, + 2 + )}` + ); + } + + // Verify artifacts array exists + const artifacts = doneEvent.event_data.artifacts; + if (!artifacts) { + throw new Error( + `Expected artifacts array in event_data, but got event_data: ${JSON.stringify( + doneEvent.event_data, + null, + 2 + )}` + ); + } + + // Verify artifacts array is not empty + if (!Array.isArray(artifacts) || artifacts.length === 0) { + throw new Error( + `Expected artifacts array to have length > 0, but got length: ${ + Array.isArray(artifacts) ? 
artifacts.length : 'not an array' + }\n\nFull event_data: ${JSON.stringify(doneEvent.event_data, null, 2)}` + ); + } + + // Find comments artifact + const commentsArtifact = artifacts.find( + (artifact: any) => artifact.item_type === 'comments' + ); + + // Verify comments artifact exists + if (!commentsArtifact) { + const foundItemTypes = artifacts.map((a: any) => a.item_type); + throw new Error( + `Expected to find comments artifact with item_type='comments', but found item_types: ${JSON.stringify( + foundItemTypes, + null, + 2 + )}\n\nFull artifacts: ${JSON.stringify(artifacts, null, 2)}` + ); + } + + // Verify comments artifact has correct item_count + const itemCount = commentsArtifact.item_count; + if (typeof itemCount !== 'number' || itemCount < 2) { + throw new Error( + `Expected comments artifact to have item_count >= 2, but got: ${itemCount}\n\nFull comments artifact: ${JSON.stringify( + commentsArtifact, + null, + 2 + )}` + ); + } + + // All validations passed + expect(doneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + expect(artifacts.length).toBeGreaterThan(0); + expect(commentsArtifact.item_type).toBe('comments'); + expect(commentsArtifact.item_count).toBeGreaterThanOrEqual(2); + + const totalDuration = Date.now() - operationStartTime; + const testDuration = Date.now() - testStartTime; + console.log(`[Acceptance Test] Operation completed in ${totalDuration}ms, total test duration: ${testDuration}ms`); + expect(testDuration).toBeLessThan(100000); // Verify test completes within 100 seconds + }, 100000); // 100 seconds timeout for test +}); \ No newline at end of file diff --git a/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.json b/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.json new file mode 100644 index 0000000..cfc2983 --- /dev/null +++ b/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.json @@ -0,0 +1,74 @@ +[ + { + "payload": { + "connection_data": 
{ + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "test-oid", + "dev_org": "test-org", + "dev_org_id": "test-org-id", + "dev_uid": "test-uid", + "dev_user": "test-user", + "dev_user_id": "test-user-id", + "event_type_adaas": "", + "external_sync_unit": "IEAGS6BYI5RFMPP7", + "external_sync_unit_id": "IEAGS6BYI5RFMPP7", + "external_sync_unit_name": "test-external-sync-unit-name", + "external_system": "test-external-system", + "external_system_id": "test-external-system-id", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "airdrop-wrike-snap-in", + "mode": "INITIAL", + "request_id": "test-request-id", + "request_id_adaas": "test-request-id-adaas", + "run_id": "test-run_id", + "sequence_version": "6", + "snap_in_slug": "airdrop-wrike-snap-in", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "sync_run": "test-sync-run", + "sync_run_id": "test-sync-run-id", + "sync_tier": "sync_tier_2", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit-id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/test", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/test:snap_in/test", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "service_account_id": "don:identity:dvrv-eu-1:devo/test:svcacc/123", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/test:devu/1", + "event_id": "", + "execution_id": "test-execution-id" + }, + "execution_metadata": { + "request_id": "test-request-id", + "function_name": "extraction", + "event_type": "EXTRACTION_DATA_START", + 
"devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No newline at end of file diff --git a/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.test.ts b/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.test.ts new file mode 100644 index 0000000..dc72514 --- /dev/null +++ b/conformance_tests/comments_data_push_tests/extraction-rate-limiting-acceptance.test.ts @@ -0,0 +1,248 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import axios from 'axios'; +import { + loadTestConfig, + CallbackServer, + sendEventToSnapIn, + waitFor, + TestConfig, + DevRevMockServer, +} from './test-helpers'; + +describe('Extraction Function - Rate Limiting Acceptance Test', () => { + let config: TestConfig; + let callbackServer: CallbackServer; + let devrevServer: DevRevMockServer; + let testStartTime: number; + const apiServerUrl = 'http://localhost:8004'; + const testName = 'extraction_rate_limiting_test'; + + beforeAll(async () => { + // Load test configuration + config = loadTestConfig(); + + // Start callback server + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + + // Start DevRev mock server + devrevServer = new DevRevMockServer(); + await devrevServer.start(8003); + + console.log('[Rate Limiting Test Setup] All servers started'); + }); + + afterAll(async () => { + // Stop servers + await callbackServer.stop(); + await devrevServer.stop(); + console.log('[Rate Limiting Test Teardown] All servers stopped'); + }); + + beforeEach(() => { + // Clear captured events before each test + callbackServer.clearEvents(); + devrevServer.clear(); + testStartTime = Date.now(); + console.log(`[Rate Limiting Test] Test started at ${new Date(testStartTime).toISOString()}`); + }); + + test('should emit EXTRACTION_DATA_DELAY event when rate limited during data 
extraction', async () => { + const operationStartTime = Date.now(); + console.log('[Rate Limiting Test] Starting rate limiting acceptance test'); + + try { + // Step 1: Start rate limiting on API server + console.log('[Rate Limiting Test] Step 1: Starting rate limiting on API server'); + const startRateLimitingStartTime = Date.now(); + + try { + await axios.post(`${apiServerUrl}/start_rate_limiting`, { + test_name: testName, + }, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 5000, + }); + const startRateLimitingDuration = Date.now() - startRateLimitingStartTime; + console.log(`[Rate Limiting Test] Rate limiting started successfully in ${startRateLimitingDuration}ms`); + } catch (error) { + const startRateLimitingDuration = Date.now() - startRateLimitingStartTime; + if (axios.isAxiosError(error)) { + throw new Error( + `Failed to start rate limiting on API server after ${startRateLimitingDuration}ms. ` + + `Status: ${error.response?.status}, ` + + `Message: ${error.message}, ` + + `Response: ${JSON.stringify(error.response?.data, null, 2)}` + ); + } + throw new Error( + `Failed to start rate limiting on API server after ${startRateLimitingDuration}ms: ${error}` + ); + } + + // Step 2: Load test event and invoke extraction function + console.log('[Rate Limiting Test] Step 2: Loading test event and invoking extraction function'); + const testEventPath = path.join(__dirname, 'extraction-rate-limiting-acceptance.json'); + + if (!fs.existsSync(testEventPath)) { + throw new Error( + `Test event file not found at path: ${testEventPath}. 
` + + `Current directory: ${__dirname}` + ); + } + + const testEventContent = fs.readFileSync(testEventPath, 'utf-8'); + const testEventArray = JSON.parse(testEventContent); + + if (!Array.isArray(testEventArray) || testEventArray.length === 0) { + throw new Error( + `Expected test event file to contain an array with at least one event, ` + + `but got: ${JSON.stringify(testEventArray, null, 2)}` + ); + } + + // Get the first event from the array + const event = testEventArray[0]; + console.log('[Rate Limiting Test] Loaded event from JSON file'); + + // Replace placeholders with actual credentials + event.payload.connection_data.key = config.wrikeApiKey; + event.payload.connection_data.org_id = config.wrikeSpaceId; + event.payload.event_context.callback_url = config.callbackServerUrl; + + console.log('[Rate Limiting Test] Sending event to snap-in server'); + const sendEventStartTime = Date.now(); + + // Send event to snap-in server + const response = await sendEventToSnapIn(config.snapInServerUrl, event); + const sendEventDuration = Date.now() - sendEventStartTime; + + console.log(`[Rate Limiting Test] Received response in ${sendEventDuration}ms:`, JSON.stringify(response, null, 2)); + + // Verify response structure + if (!response) { + throw new Error('Snap-in server returned undefined response'); + } + + if (response.error) { + throw new Error( + `Snap-in server returned error: ${JSON.stringify(response.error, null, 2)}` + ); + } + + console.log('[Rate Limiting Test] Waiting for EXTRACTION_DATA_DELAY event...'); + const waitStartTime = Date.now(); + + // Wait for EXTRACTION_DATA_DELAY event + await waitFor( + () => { + const events = callbackServer.getEvents(); + if (events.length > 0) { + console.log( + `[Rate Limiting Test] Received ${events.length} callback event(s) so far. 
` + + `Event types: ${events.map(e => e.event_type).join(', ')}` + ); + } + return events.some((e) => e.event_type === 'EXTRACTION_DATA_DELAY'); + }, + 60000 // 60 seconds timeout + ); + + const waitDuration = Date.now() - waitStartTime; + console.log(`[Rate Limiting Test] Wait completed in ${waitDuration}ms`); + + // Get captured events + const capturedEvents = callbackServer.getEvents(); + console.log(`[Rate Limiting Test] Total callback events received: ${capturedEvents.length}`); + + // Verify exactly one event was received + if (capturedEvents.length !== 1) { + throw new Error( + `Expected to receive exactly 1 callback event, but received ${capturedEvents.length} events. ` + + `Event types: ${capturedEvents.map(e => e.event_type).join(', ')}. ` + + `Full events: ${JSON.stringify(capturedEvents, null, 2)}` + ); + } + + // Get the single event + const delayEvent = capturedEvents[0]; + + // Verify event type + if (delayEvent.event_type !== 'EXTRACTION_DATA_DELAY') { + throw new Error( + `Expected event_type to be 'EXTRACTION_DATA_DELAY', but got '${delayEvent.event_type}'. ` + + `Full event: ${JSON.stringify(delayEvent, null, 2)}` + ); + } + + // Verify event_data exists + if (!delayEvent.event_data) { + throw new Error( + `Expected event_data in callback event, but it was missing. ` + + `Full event: ${JSON.stringify(delayEvent, null, 2)}` + ); + } + + // Verify delay field exists and is a number + const delay = delayEvent.event_data.delay; + if (typeof delay !== 'number') { + throw new Error( + `Expected event_data.delay to be a number, but got type '${typeof delay}' with value: ${delay}. ` + + `Full event_data: ${JSON.stringify(delayEvent.event_data, null, 2)}` + ); + } + + // Verify delay is positive + if (delay <= 0) { + throw new Error( + `Expected event_data.delay to be positive, but got: ${delay}. 
` + + `Full event_data: ${JSON.stringify(delayEvent.event_data, null, 2)}` + ); + } + + // All validations passed + expect(capturedEvents.length).toBe(1); + expect(delayEvent.event_type).toBe('EXTRACTION_DATA_DELAY'); + expect(delayEvent.event_data).toBeDefined(); + expect(typeof delayEvent.event_data.delay).toBe('number'); + expect(delayEvent.event_data.delay).toBeGreaterThan(0); + + console.log( + `[Rate Limiting Test] Successfully received EXTRACTION_DATA_DELAY event with delay: ${delay} seconds` + ); + + } finally { + // Step 3: End rate limiting (always execute, even if test fails) + console.log('[Rate Limiting Test] Step 3: Ending rate limiting on API server'); + const endRateLimitingStartTime = Date.now(); + + try { + await axios.post(`${apiServerUrl}/end_rate_limiting`, {}, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 5000, + }); + const endRateLimitingDuration = Date.now() - endRateLimitingStartTime; + console.log(`[Rate Limiting Test] Rate limiting ended successfully in ${endRateLimitingDuration}ms`); + } catch (error) { + const endRateLimitingDuration = Date.now() - endRateLimitingStartTime; + console.error( + `[Rate Limiting Test] Warning: Failed to end rate limiting after ${endRateLimitingDuration}ms. ` + + `This may affect subsequent tests. 
Error: ${error}` + ); + } + + const totalDuration = Date.now() - operationStartTime; + const testDuration = Date.now() - testStartTime; + console.log( + `[Rate Limiting Test] Operation completed in ${totalDuration}ms, ` + + `total test duration: ${testDuration}ms` + ); + expect(testDuration).toBeLessThan(100000); // Verify test completes within 100 seconds + } + }, 100000); // 100 seconds timeout for test +}); \ No newline at end of file diff --git a/conformance_tests/comments_data_push_tests/package.json b/conformance_tests/comments_data_push_tests/package.json new file mode 100644 index 0000000..e5cc7d6 --- /dev/null +++ b/conformance_tests/comments_data_push_tests/package.json @@ -0,0 +1,24 @@ +{ + "name": "wrike-snap-in-conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --testTimeout=110000 --forceExit" + }, + "devDependencies": { + "@types/express": "^4.17.17", + "@types/jest": "^29.5.0", + "@types/node": "^18.15.11", + "axios": "^1.6.0", + "express": "^4.18.2", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4" + }, + "jest": { + "preset": "ts-jest", + "testEnvironment": "node", + "testMatch": ["**/*.test.ts"], + "setupFilesAfterEnv": ["./jest.setup.js"] + } +} \ No newline at end of file diff --git a/conformance_tests/comments_data_push_tests/test-helpers.ts b/conformance_tests/comments_data_push_tests/test-helpers.ts new file mode 100644 index 0000000..343315d --- /dev/null +++ b/conformance_tests/comments_data_push_tests/test-helpers.ts @@ -0,0 +1,309 @@ +import express, { Express, Request, Response } from 'express'; +import axios from 'axios'; +import { Server } from 'http'; +import * as fs from 'fs'; + +/** + * Test configuration and credentials + */ +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + testFolderId: string; + snapInServerUrl: string; + callbackServerUrl: string; + devrevServerUrl: string; +} + +/** + * Load test 
configuration from environment variables + */ +export function loadTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + testFolderId: 'IEAGS6BYI5RFMPP7', + snapInServerUrl: 'http://localhost:8000/handle/sync', + callbackServerUrl: 'http://localhost:8002', + devrevServerUrl: 'http://localhost:8003', + }; +} + +/** + * Build a test event payload for extraction function + */ +export function buildExtractionEvent( + config: TestConfig, + eventType: string, + eventData?: any +): any { + return { + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: config.wrikeApiKey, + org_id: config.wrikeSpaceId, + }, + event_context: { + callback_url: config.callbackServerUrl, + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: config.testFolderId, + external_sync_unit_id: config.testFolderId, + external_sync_unit_name: 'Test Folder', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: `${config.devrevServerUrl}/external-worker`, + }, + event_type: eventType, + event_data: eventData || {}, + }, + 
execution_metadata: { + request_id: 'test-request-id', + function_name: 'extraction', + event_type: eventType, + devrev_endpoint: config.devrevServerUrl, + }, + input_data: { + global_values: { + pageSize: '100', + }, + event_sources: {}, + }, + }; +} + +/** + * Callback server for capturing emitted events + */ +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private capturedEvents: any[] = []; + + constructor() { + this.app = express(); + this.app.use(express.json()); + + // Capture all POST requests + this.app.post('*', (req: Request, res: Response) => { + this.capturedEvents.push(req.body); + res.status(200).send({ status: 'ok' }); + }); + } + + /** + * Start the callback server + */ + async start(port: number = 8002): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(port, () => { + resolve(); + }); + }); + } + + /** + * Stop the callback server + */ + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + /** + * Get all captured events + */ + getEvents(): any[] { + return this.capturedEvents; + } + + /** + * Clear captured events + */ + clearEvents(): void { + this.capturedEvents = []; + } +} + +/** + * DevRev mock server for handling artifact uploads and other DevRev API calls + */ +export class DevRevMockServer { + private app: Express; + private server: Server | null = null; + private uploadedArtifacts: any[] = []; + private requestLog: any[] = []; + + constructor() { + this.app = express(); + this.app.use(express.json({ limit: '50mb' })); + this.app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' })); + + // Handle artifact uploads to /external-worker + this.app.post('/external-worker', (req: Request, res: Response) => { + console.log('[DevRevMockServer] Received artifact upload request'); + + // Log the 
request + this.requestLog.push({ + timestamp: new Date().toISOString(), + path: '/external-worker', + method: 'POST', + headers: req.headers, + bodySize: req.body ? Buffer.byteLength(JSON.stringify(req.body)) : 0, + }); + + // Store the artifact + this.uploadedArtifacts.push({ + timestamp: new Date().toISOString(), + data: req.body, + }); + + // Return success response + res.status(200).json({ + status: 'ok', + artifact_id: `artifact-${Date.now()}`, + }); + }); + + // Handle other DevRev API calls + this.app.post('*', (req: Request, res: Response) => { + console.log(`[DevRevMockServer] Received request to ${req.path}`); + + this.requestLog.push({ + timestamp: new Date().toISOString(), + path: req.path, + method: 'POST', + }); + + res.status(200).json({ status: 'ok' }); + }); + } + + /** + * Start the DevRev mock server + */ + async start(port: number = 8003): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(port, () => { + console.log(`[DevRevMockServer] Started on port ${port}`); + resolve(); + }); + }); + } + + /** + * Stop the DevRev mock server + */ + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else { + console.log('[DevRevMockServer] Stopped'); + resolve(); + } + }); + } else { + resolve(); + } + }); + } + + /** + * Get all uploaded artifacts + */ + getArtifacts(): any[] { + return this.uploadedArtifacts; + } + + /** + * Get request log + */ + getRequestLog(): any[] { + return this.requestLog; + } + + /** + * Clear artifacts and logs + */ + clear(): void { + this.uploadedArtifacts = []; + this.requestLog = []; + } +} + +/** + * Send event to snap-in server + */ +export async function sendEventToSnapIn( + serverUrl: string, + event: any +): Promise { + const response = await axios.post(serverUrl, event, { + headers: { + 'Content-Type': 'application/json', + }, + }); + return response.data; +} + +/** + * Wait for a condition to be 
true with timeout + */ +export async function waitFor( + condition: () => boolean, + timeoutMs: number = 60000, + checkIntervalMs: number = 100 +): Promise<void> { + const startTime = Date.now(); + while (!condition()) { + if (Date.now() - startTime > timeoutMs) { + const elapsed = Date.now() - startTime; + throw new Error(`Timeout waiting for condition after ${elapsed}ms (limit: ${timeoutMs}ms)`); + } + await new Promise((resolve) => setTimeout(resolve, checkIntervalMs)); + } +} \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/tsconfig.json b/conformance_tests/comments_data_push_tests/tsconfig.json similarity index 57% rename from conformance_tests/extraction_data_repository_push/tsconfig.json rename to conformance_tests/comments_data_push_tests/tsconfig.json index 3b611b1..fba0faa 100644 --- a/conformance_tests/extraction_data_repository_push/tsconfig.json +++ b/conformance_tests/comments_data_push_tests/tsconfig.json @@ -2,13 +2,17 @@ "compilerOptions": { "target": "es2017", "module": "commonjs", - "lib": ["es2021"], - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, + "lib": ["es2017"], + "declaration": true, + "outDir": "./dist", + "rootDir": "./", "strict": true, + "esModuleInterop": true, "skipLibCheck": true, - "resolveJsonModule": true + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "moduleResolution": "node" }, - "include": ["*.ts"], + "include": ["**/*.ts"], "exclude": ["node_modules", "dist"] } \ No newline at end of file diff --git a/conformance_tests/comments_domain_mapping_generation_tests/initial-domain-mapping-comments.test.ts b/conformance_tests/comments_domain_mapping_generation_tests/initial-domain-mapping-comments.test.ts new file mode 100644 index 0000000..e785963 --- /dev/null +++ b/conformance_tests/comments_domain_mapping_generation_tests/initial-domain-mapping-comments.test.ts @@ -0,0 +1,138 @@ +import { loadTestEnvironment, invokeFunction,
validateWithChefCli } from './test-utils'; + +describe('Initial Domain Mapping - Comments Record Type', () => { + let env: ReturnType; + + beforeAll(() => { + env = loadTestEnvironment(); + }); + + test('should have comments record type mapping with correct structure', async () => { + // Invoke get_initial_domain_mapping function + const response = await invokeFunction('get_initial_domain_mapping', env); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + expect(response.function_result.data).toBeDefined(); + + const initialDomainMapping = response.function_result.data; + + // Verify additional_mappings exists + expect(initialDomainMapping.additional_mappings).toBeDefined(); + expect(initialDomainMapping.additional_mappings.record_type_mappings).toBeDefined(); + + // Verify comments mapping exists + const commentsMapping = initialDomainMapping.additional_mappings.record_type_mappings.comments; + expect(commentsMapping).toBeDefined(); + + // Verify default mapping + expect(commentsMapping.default_mapping).toBeDefined(); + expect(commentsMapping.default_mapping.object_type).toBe('comment'); + + // Verify possible_record_type_mappings + expect(commentsMapping.possible_record_type_mappings).toBeDefined(); + expect(Array.isArray(commentsMapping.possible_record_type_mappings)).toBe(true); + expect(commentsMapping.possible_record_type_mappings.length).toBe(1); + + const mapping = commentsMapping.possible_record_type_mappings[0]; + + // Verify mapping direction + expect(mapping.forward).toBe(true); + expect(mapping.reverse).toBe(false); + + // Verify devrev_leaf_type + expect(mapping.devrev_leaf_type).toBe('comment'); + + // Verify shard + expect(mapping.shard).toBeDefined(); + expect(mapping.shard.mode).toBe('create_shard'); + expect(mapping.shard.devrev_leaf_type).toBeDefined(); + expect(mapping.shard.devrev_leaf_type.object_type).toBe('comment'); + + // Verify stock_field_mappings + const stockFields = 
mapping.shard.stock_field_mappings; + expect(stockFields).toBeDefined(); + + // Verify body field mapping (text -> body, rich text) + expect(stockFields.body).toBeDefined(); + expect(stockFields.body.forward).toBe(true); + expect(stockFields.body.reverse).toBe(false); + expect(stockFields.body.primary_external_field).toBe('text'); + expect(stockFields.body.transformation_method_for_set).toBeDefined(); + expect(stockFields.body.transformation_method_for_set.transformation_method).toBe('use_rich_text'); + + // Verify created_by_id field mapping (author_id -> created_by_id, use directly) + expect(stockFields.created_by_id).toBeDefined(); + expect(stockFields.created_by_id.forward).toBe(true); + expect(stockFields.created_by_id.reverse).toBe(false); + expect(stockFields.created_by_id.primary_external_field).toBe('author_id'); + expect(stockFields.created_by_id.transformation_method_for_set).toBeDefined(); + expect(stockFields.created_by_id.transformation_method_for_set.transformation_method).toBe('use_directly'); + + // Verify modified_by_id field mapping (author_id -> modified_by_id, use directly) + expect(stockFields.modified_by_id).toBeDefined(); + expect(stockFields.modified_by_id.forward).toBe(true); + expect(stockFields.modified_by_id.reverse).toBe(false); + expect(stockFields.modified_by_id.primary_external_field).toBe('author_id'); + expect(stockFields.modified_by_id.transformation_method_for_set).toBeDefined(); + expect(stockFields.modified_by_id.transformation_method_for_set.transformation_method).toBe('use_directly'); + + // Verify parent_object_id field mapping (task_id -> parent_object_id, use directly) + expect(stockFields.parent_object_id).toBeDefined(); + expect(stockFields.parent_object_id.forward).toBe(true); + expect(stockFields.parent_object_id.reverse).toBe(false); + expect(stockFields.parent_object_id.primary_external_field).toBe('task_id'); + expect(stockFields.parent_object_id.transformation_method_for_set).toBeDefined(); + 
expect(stockFields.parent_object_id.transformation_method_for_set.transformation_method).toBe('use_directly'); + }, 30000); + + test('should pass Chef CLI validation', async () => { + // Get initial domain mapping + const mappingResponse = await invokeFunction('get_initial_domain_mapping', env); + expect(mappingResponse.function_result).toBeDefined(); + const initialDomainMapping = mappingResponse.function_result.data; + + // Get external domain metadata + const metadataResponse = await invokeFunction('get_external_domain_metadata', env); + expect(metadataResponse.function_result).toBeDefined(); + const externalDomainMetadata = metadataResponse.function_result.data; + + // Validate with Chef CLI + const result = await validateWithChefCli( + initialDomainMapping, + externalDomainMetadata, + env.chefCliPath + ); + + // Check for Chef CLI availability + if (result.error && result.error.includes('Failed to spawn Chef CLI')) { + throw new Error(`Chef CLI is not available: ${result.error}`); + } + + // Check for empty output + if (result.error && result.error.includes('empty output')) { + throw new Error('Chef CLI returned empty output'); + } + + // Check for missing fields + if (result.error && result.error.includes('missing RemainingDeficiencies or Warnings')) { + throw new Error(`Chef CLI output missing required fields. 
Output: ${JSON.stringify(result.output, null, 2)}`); + } + + // Check validation success + if (!result.success) { + const firstResult = result.output[0] || {}; + const deficiencies = firstResult.RemainingDeficiencies; + const warnings = firstResult.Warnings; + + throw new Error( + `Chef CLI validation failed.\n` + + `RemainingDeficiencies: ${JSON.stringify(deficiencies, null, 2)}\n` + + `Warnings: ${JSON.stringify(warnings, null, 2)}\n` + + `Full output: ${JSON.stringify(result.output, null, 2)}` + ); + } + + expect(result.success).toBe(true); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/comments_domain_mapping_generation_tests/jest.config.js b/conformance_tests/comments_domain_mapping_generation_tests/jest.config.js new file mode 100644 index 0000000..1890829 --- /dev/null +++ b/conformance_tests/comments_domain_mapping_generation_tests/jest.config.js @@ -0,0 +1,29 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testTimeout: 120000, + testMatch: [ + '**/*.test.ts' + ], + setupFilesAfterEnv: ['/jest.setup.js'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' + ], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + declaration: true, + resolveJsonModule: true + } + }] + } +}; \ No newline at end of file diff --git a/conformance_tests/comments_domain_mapping_generation_tests/package.json b/conformance_tests/comments_domain_mapping_generation_tests/package.json new file mode 100644 index 0000000..bcb9378 --- /dev/null +++ b/conformance_tests/comments_domain_mapping_generation_tests/package.json @@ -0,0 +1,18 @@ +{ + "name": "conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike Airdrop Snap-in", + "scripts": { + "test": "jest" + }, + 
"devDependencies": { + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/comments_domain_mapping_generation_tests/test-utils.ts b/conformance_tests/comments_domain_mapping_generation_tests/test-utils.ts new file mode 100644 index 0000000..cfe2255 --- /dev/null +++ b/conformance_tests/comments_domain_mapping_generation_tests/test-utils.ts @@ -0,0 +1,242 @@ +import axios from 'axios'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { spawn } from 'child_process'; + +/** + * Environment configuration for tests + */ +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; +} + +/** + * Load environment variables required for tests + */ +export function loadTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + if (!chefCliPath) { + throw new Error('CHEF_CLI_PATH environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + chefCliPath, + }; +} + +/** + * Create a base event payload for testing + */ +export function createBaseEvent(functionName: string, env: TestEnvironment): any { + return { + payload: { + connection_data: { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + }, + event_type: 'test_event', + event_context: {}, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + 
service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Invoke a function on The Test Snap-In Server + */ +export async function invokeFunction(functionName: string, env: TestEnvironment): Promise { + const event = createBaseEvent(functionName, env); + + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, + }); + + return response.data; +} + +/** + * Chef CLI validation result + */ +export interface ChefCliResult { + success: boolean; + output: any[]; + stdout: string; + stderr: string; + error?: string; +} + +/** + * Validate Initial Domain Mapping with Chef CLI + */ +export async function validateWithChefCli( + initialDomainMapping: any, + externalDomainMetadata: any, + chefCliPath: string +): Promise { + return new Promise((resolve) => { + // Create temporary file for external domain metadata + const tmpDir = os.tmpdir(); + const metadataFilePath = path.join(tmpDir, `metadata-${Date.now()}.json`); + + try { + fs.writeFileSync(metadataFilePath, JSON.stringify(externalDomainMetadata, null, 2)); + } catch (error) { + resolve({ + success: false, + output: [], + stdout: '', + stderr: '', + error: `Failed to write metadata file: ${error}`, + }); + return; + } + + const mappingJson = JSON.stringify(initialDomainMapping, null, 2); + + // Spawn chef-cli process + const chefProcess = spawn(chefCliPath, [ + 'initial-mapping', + 'check', + '-m', + metadataFilePath, + ]); + + let stdout = ''; + let stderr = ''; + + // Collect stdout + chefProcess.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + // Collect stderr + chefProcess.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + // Write mapping to stdin + 
chefProcess.stdin.write(mappingJson); + chefProcess.stdin.end(); + + // Handle process completion + chefProcess.on('close', (code) => { + // Clean up temporary file + try { + fs.unlinkSync(metadataFilePath); + } catch (error) { + console.warn('Failed to clean up temporary metadata file:', error); + } + + console.log('Chef CLI stdout:', stdout); + console.log('Chef CLI stderr:', stderr); + + if (!stdout || stdout.trim() === '') { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: 'Chef CLI returned empty output', + }); + return; + } + + try { + const output = JSON.parse(stdout); + + if (!Array.isArray(output) || output.length === 0) { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: 'Chef CLI output is not a valid array or is empty', + }); + return; + } + + const firstResult = output[0]; + const hasRemainingDeficiencies = 'RemainingDeficiencies' in firstResult; + const hasWarnings = 'Warnings' in firstResult; + + if (!hasRemainingDeficiencies || !hasWarnings) { + resolve({ + success: false, + output, + stdout, + stderr, + error: 'Chef CLI output missing RemainingDeficiencies or Warnings fields', + }); + return; + } + + const success = + firstResult.RemainingDeficiencies === null && + firstResult.Warnings === null; + + resolve({ + success, + output, + stdout, + stderr, + }); + } catch (error) { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: `Failed to parse Chef CLI output: ${error}`, + }); + } + }); + + chefProcess.on('error', (error) => { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: `Failed to spawn Chef CLI: ${error.message}`, + }); + }); + }); +} \ No newline at end of file diff --git a/conformance_tests/comments_domain_mapping_generation_tests/tsconfig.json b/conformance_tests/comments_domain_mapping_generation_tests/tsconfig.json new file mode 100644 index 0000000..0242b52 --- /dev/null +++ 
b/conformance_tests/comments_domain_mapping_generation_tests/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": "." + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/comments_record_type_metadata_tests/get_external_domain_metadata.test.ts b/conformance_tests/comments_record_type_metadata_tests/get_external_domain_metadata.test.ts new file mode 100644 index 0000000..f41289c --- /dev/null +++ b/conformance_tests/comments_record_type_metadata_tests/get_external_domain_metadata.test.ts @@ -0,0 +1,78 @@ +import { loadTestConfig, invokeFunction, validateMetadataWithChefCli, TestConfig } from './test-utils/test-helpers'; + +describe('get_external_domain_metadata - Comments Record Type', () => { + let config: TestConfig; + let metadata: any; + + beforeAll(async () => { + config = loadTestConfig(); + const response = await invokeFunction('get_external_domain_metadata', config); + expect(response.function_result?.status).toBe('success'); + metadata = response.function_result.data; + }); + + describe('Test 1: Basic Invocation', () => { + it('should return metadata with record_types object', () => { + expect(metadata?.record_types).toBeDefined(); + expect(typeof metadata.record_types).toBe('object'); + }); + }); + + describe('Test 2: Comments Record Type Existence', () => { + it('should include comments record type with name "Comments"', () => { + expect(metadata.record_types.comments).toBeDefined(); + expect(metadata.record_types.comments.name).toBe('Comments'); + expect(metadata.record_types.comments.fields).toBeDefined(); + }); + }); + + 
describe('Test 3: Comments Fields Structure', () => { + it('should have text field (rich_text, required, display name "Text")', () => { + const field = metadata.record_types.comments.fields.text; + expect(field?.type).toBe('rich_text'); + expect(field?.name).toBe('Text'); + expect(field?.is_required).toBe(true); + }); + + it('should have author_id field (reference, required, display name "Author ID")', () => { + const field = metadata.record_types.comments.fields.author_id; + expect(field?.type).toBe('reference'); + expect(field?.name).toBe('Author ID'); + expect(field?.is_required).toBe(true); + }); + + it('should have task_id field (reference, required, display name "Task ID")', () => { + const field = metadata.record_types.comments.fields.task_id; + expect(field?.type).toBe('reference'); + expect(field?.name).toBe('Task ID'); + expect(field?.is_required).toBe(true); + }); + }); + + describe('Test 4: Reference Fields Validation', () => { + it('should have author_id reference pointing to #record:users', () => { + const field = metadata.record_types.comments.fields.author_id; + expect(field.reference?.refers_to?.['#record:users']).toBeDefined(); + }); + + it('should have task_id reference pointing to #record:tasks', () => { + const field = metadata.record_types.comments.fields.task_id; + expect(field.reference?.refers_to?.['#record:tasks']).toBeDefined(); + }); + }); + + describe('Test 5: Chef CLI Validation', () => { + it('should validate successfully with Chef CLI (empty output)', async () => { + const result = await validateMetadataWithChefCli(metadata, config.chefCliPath); + expect(result.success).toBe(true); + expect(result.stdout.trim()).toBe(''); + }); + }); + + describe('Test 6: Preservation of Existing Record Types', () => { + it('should preserve users and tasks record types', () => { + expect(metadata.record_types.users?.name).toBe('Users'); + expect(metadata.record_types.tasks?.name).toBe('Tasks'); + }); + }); +}); \ No newline at end of file diff --git 
a/conformance_tests/comments_record_type_metadata_tests/jest.config.js b/conformance_tests/comments_record_type_metadata_tests/jest.config.js new file mode 100644 index 0000000..3b5d423 --- /dev/null +++ b/conformance_tests/comments_record_type_metadata_tests/jest.config.js @@ -0,0 +1,10 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + setupFilesAfterEnv: ['<rootDir>/jest.setup.js'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverage: false, + verbose: true, +}; \ No newline at end of file diff --git a/conformance_tests/comments_record_type_metadata_tests/package.json b/conformance_tests/comments_record_type_metadata_tests/package.json new file mode 100644 index 0000000..9ad1c3d --- /dev/null +++ b/conformance_tests/comments_record_type_metadata_tests/package.json @@ -0,0 +1,27 @@ +{ + "name": "wrike-snap-in-conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike Snap-In", + "main": "index.js", + "scripts": { + "test": "jest" + }, + "keywords": [ + "devrev", + "wrike", + "snap-in", + "conformance-tests" + ], + "author": "", + "license": "ISC", + "devDependencies": { + "@types/jest": "^29.5.0", + "@types/node": "^18.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^4.9.0" + }, + "dependencies": { + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/comments_record_type_metadata_tests/test-utils/test-helpers.ts b/conformance_tests/comments_record_type_metadata_tests/test-utils/test-helpers.ts new file mode 100644 index 0000000..2a0dd5d --- /dev/null +++ b/conformance_tests/comments_record_type_metadata_tests/test-utils/test-helpers.ts @@ -0,0 +1,83 @@ +import axios from 'axios'; +import { spawn } from 'child_process'; + +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; + snapInServerUrl: string; +} + +export function
loadTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + if (!wrikeApiKey || !wrikeSpaceId || !chefCliPath) { + throw new Error('Required environment variables: WRIKE_API_KEY, WRIKE_SPACE_ID, CHEF_CLI_PATH'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + chefCliPath, + snapInServerUrl: 'http://localhost:8000/handle/sync', + }; +} + +export function createTestEvent(functionName: string, config: TestConfig): any { + return { + payload: { + connection_data: { key: config.wrikeApiKey, org_id: config.wrikeSpaceId }, + event_type: 'test_event', + event_context: {}, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { service_account_token: 'test-token' }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { global_values: {}, event_sources: {} }, + }; +} + +export async function invokeFunction(functionName: string, config: TestConfig): Promise { + const event = createTestEvent(functionName, config); + const response = await axios.post(config.snapInServerUrl, event, { + headers: { 'Content-Type': 'application/json' }, + timeout: 30000, + }); + return response.data; +} + +export async function validateMetadataWithChefCli( + metadata: any, + chefCliPath: string +): Promise<{ success: boolean; stdout: string; stderr: string }> { + return new Promise((resolve, reject) => { + const process = spawn(chefCliPath, ['validate-metadata'], { stdio: ['pipe', 'pipe', 'pipe'] }); + let stdout = ''; + let stderr = ''; + + process.stdout.on('data', (data) => (stdout += data.toString())); + process.stderr.on('data', (data) => (stderr += data.toString())); 
+ process.on('error', (error) => reject(new Error(`Failed to spawn Chef CLI: ${error.message}`))); + process.on('close', (code) => { + if (stdout) console.log('Chef CLI stdout:', stdout); + if (stderr) console.log('Chef CLI stderr:', stderr); + resolve({ success: code === 0 && stdout.trim() === '', stdout, stderr }); + }); + + process.stdin.write(JSON.stringify(metadata)); + process.stdin.end(); + }); +} \ No newline at end of file diff --git a/conformance_tests/comments_record_type_metadata_tests/tsconfig.json b/conformance_tests/comments_record_type_metadata_tests/tsconfig.json new file mode 100644 index 0000000..a2a4ad7 --- /dev/null +++ b/conformance_tests/comments_record_type_metadata_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": "./", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/conformance_tests.json b/conformance_tests/conformance_tests.json index a4626ed..5cf62c6 100644 --- a/conformance_tests/conformance_tests.json +++ b/conformance_tests/conformance_tests.json @@ -1,93 +1,147 @@ { "1.1": { "functional_requirement": "- Implement The Function that only checks if The Function can be invoked.", - "folder_name": "conformance_tests/function_invocation_check" + "folder_name": "conformance_tests/function_invocation_tests" }, "1.2": { - "functional_requirement": "- Implement The Function that only checks if the data extraction workflow can be invoked.", - "folder_name": "conformance_tests/workflow_invocation_validation", + "functional_requirement": "- Implement The Function called 
\"test_external_sync_units\" (The External Sync Units Test Function) that provides a test of the 'external sync units' part of the extraction workflow as described in the resource `[resource]external-sync-units-extraction.mdx`.\n - If \"event_type\" equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_START\", The External Sync Units Test Function should:\n - Only emit event \"EXTRACTION_EXTERNAL_SYNC_UNITS_DONE\".", + "folder_name": "conformance_tests/external_sync_units_extraction_tests", "acceptance_tests": [ - "- Test the data extraction workflow invocation using the resource `[resource]extraction_health_check.json`." + "- Test The Function using the resource `[resource]external_sync_unit_check.json`. Test is successful if The Callback Server receives from DevRev an event with \"event_type\" that equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_DONE\". The test must not send the event directly to The Callback Server." ] }, "1.3": { - "functional_requirement": "- Implement The Function that checks if pushing data works. The URL where data is to be pushed is provided in the \"callback_url\" field of the \"event_context\" dictionary which is part of \"payload\" dictionary.", - "folder_name": "conformance_tests/callback_url_push_validation" + "functional_requirement": "- Implement The Function called \"data_extraction_check\" (The Data Extraction Check Function) that provides a test of the 'data extraction' part of the extraction workflow as described in the resource `[resource]data-extraction.md`. 
The Data Extraction Check Function should:\n - if \"event_type\" equals \"EXTRACTION_DATA_START\" or \"EXTRACTION_DATA_CONTINUE\", The Data Extraction Check Function should:\n - Initialize a single repo \"users\"\n - normalize the users data using The Normalization Function\n - push the normalized users data to The DevRev Servers\n - Emit event \"EXTRACTION_DATA_DONE\".\n - Note: The Data Extraction Check Function should be simplified and should disregard all other details.", + "folder_name": "conformance_tests/data_extraction_workflow_tests", + "acceptance_tests": [ + "- Test The Function using the resource `[resource]data_extraction_check.json`. Test is successful if The Callback Server receives from DevRev an event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\". The test must not send the event directly to The Callback Server." + ] }, - "1.4": { - "functional_requirement": "- Implement The Function that provides a test of the 'external sync units' part of the extraction workflow. The name of The Function must be 'extraction_external_sync_unit_check'.", - "folder_name": "conformance_tests/extraction_external_sync_unit_check", + "2.1.1": { + "functional_requirement": "- Implement The Function \"check_authentication\" that provides a check if authentication with The API works. Authentication should be checked by making a request to the endpoint \"/contacts\", and providing \"me=true\" as a query parameter.", + "folder_name": "conformance_tests/authentication_check_tests", "acceptance_tests": [ - "- Test The Function using the resource `[resource]external_sync_unit_check.json`. Test is successful if The Callback Server receives from DevRev an event with \"event_type\" that equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_DONE\". The test must not send the event directly to The Callback Server." + "- Test the function \"check_authentication\". 
Expect the API response (`:= api_response`) to equal `api_response[\"data\"][0][\"id\"]=\"KUAUZTPW\"`.", + "- Test the rate limiting of The Function \"check_authentication\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"check_authentication\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." ] }, - "1.5": { - "functional_requirement": "- Implement The Function that provides a test of the 'data extraction' part of the extraction workflow as described in the resource `[resource]data-extraction.md`. Only check if data extraction can be triggered and that it successfully completes. Disregard all other details. The name of The Function must be 'data_extraction_check'.", - "folder_name": "conformance_tests/data_extraction_validation", + "2.1.2": { + "functional_requirement": "- Implement The Function called \"fetch_space_folders\" that uses The API to fetch The Fetched Folders using the endpoint \"/spaces/{spaceId}/folders\".\n - You *should not* use `projects=true` query param.\n - The Space ID is provided in `event[\"payload\"][\"connection_data\"][\"org_id\"]`.", + "folder_name": "conformance_tests/space_folders_fetch_tests", "acceptance_tests": [ - "- Test The Function using the resource `[resource]data_extraction_check.json`. Test is successful if The Callback Server receives from DevRev an event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\". The test must not send the event directly to The Callback Server." + "- Test The Function \"fetch_space_folders\". 
Expect the number of The Fetched Folders to be 3.", + "- Test the rate limiting of The Function \"fetch_space_folders\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"fetch_space_folders\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." ] }, - "2.1.1": { - "functional_requirement": "- Implement The Function that provides a check if authentication with The API works.", - "folder_name": "conformance_tests/api_authentication_check" + "2.1.3": { + "functional_requirement": "- Implement The Function \"fetch_users\" that uses The API to fetch The List of Users (The Fetched Users) using the endpoint \"/contacts?types=[Person]\".", + "folder_name": "conformance_tests/users_fetch_tests", + "acceptance_tests": [ + "- When using The Test Wrike Credentials, expect exactly 4 users in the result of The Function.", + "- Test the rate limiting of The Function \"fetch_users\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"fetch_users\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." 
+ ] }, - "2.2.1": { - "functional_requirement": "- Implement The Function that uses The API to fetch The List of Projects (The Fetched Projects).", - "folder_name": "conformance_tests/project_list_fetch_validation", + "2.1.4": { + "functional_requirement": "- Implement The Function called \"fetch_folder_tasks\" that uses The API to fetch The Fetched Tasks for a given folder using the endpoint \"/folders/{folderId}/tasks\".\n - The Folder ID is provided in `event[\"payload\"][\"event_context\"][\"external_sync_unit_id\"]`.\n - One of the query params must be \"fields=[responsibleIds]\".\n - \"pageSize\" and \"nextPageToken\" should be provided in `event[\"input_data\"][\"global_values\"][\"pageSize\"]` and `event[\"input_data\"][\"global_values\"][\"nextPageToken\"]`.\n - The following query parameters should also be supported:\n - \"updatedDate\" (optional, a timestamp in ISO 8601 UTC format, can be used for filtering tasks by updated date)", + "folder_name": "conformance_tests/folder_tasks_fetch_tests", "acceptance_tests": [ - "- When using The Test Wrike Credentials a project with the title \"First project\" must be in the result of The Function." + "- Test The Function in multiple steps:\n - Step 1: Call the \"fetch_folder_tasks\" function with Folder ID \"IEAGS6BYI5RFMPP7\" and \"pageSize\" of 100.\n - Step 2: Expect the API response (`:= api_response`) to have `api_response[\"nextPageToken\"]` field and `len(api_response[\"data\"])=100`\n - Step 3: Call the \"fetch_folder_tasks\" function again with Folder ID \"IEAGS6BYI5RFMPP7\", \"pageSize\" of 100 and \"nextPageToken\" value received on step 2. Expect the API response to have `len(api_response[\"data])=10`.", + "- Test The Function \"fetch_folder_tasks\" with Folder ID \"IEAGS6BYI5RFMPP7\". 
From API response (`:= api_response`), expect that every element from `api_response[\"data\"]` contains field `\"responsibleIds\"`.", + "- Test the rate limiting of The Function \"fetch_folder_tasks\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"fetch_folder_tasks\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." ] }, - "2.2.2": { - "functional_requirement": "- Implement The Function that uses The API to fetch The List of Contacts of The Space (The Fetched Contacts). This list can be retrieved from the \"spaces/{spaceId}\" endpoint using the \"members\" field. This list is a list of objects representing contacts, with the \"id\" key representing the contact ID. The rest of information about contacts can be fetched from /contacts/{contactIds}.", - "folder_name": "conformance_tests/space_contacts_fetch_validation", + "2.1.5": { + "functional_requirement": "- Implement The Function called \"fetch_task_attachments\" that uses The API to fetch The Fetched Attachments for a given task using the endpoint \"/tasks/{taskId}/attachments\", with the \"withUrls\" query parameter set to true.", + "folder_name": "conformance_tests/task_attachments_fetch_tests", "acceptance_tests": [ - "- When using The Test Wrike Credentials, The Function must return 5 members with their primaryEmail, firstName and lastName." + "- Test The Function \"fetch_task_attachments\" with Task ID \"IEAGS6BYKRRFMPQG\". Let `api_response = `. 
Expect `len(api_response[\"data\"])=1` and `api_response[\"data\"][0][\"name\"]=\"Proof this image.jpg\"`", + "- Test the rate limiting of The Function \"fetch_task_attachments\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"fetch_task_attachments\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." ] }, - "2.2.3": { - "functional_requirement": "- Implement The Function that uses The API to fetch The List of Tasks of The Project (The Fetched Tasks). The Project ID is provided in the \"external_sync_unit_id\" field of the \"event_context\" dictionary which is part of \"payload\" dictionary.", - "folder_name": "conformance_tests/project_tasks_fetch_validation", + "2.1.6": { + "functional_requirement": "- Implement The Function called \"fetch_task_comments\" that uses The API to fetch The Fetched Comments for a given task using the endpoint \"/tasks/{taskId}/comments\".", + "folder_name": "conformance_tests/task_comments_fetch_tests", "acceptance_tests": [ - "- When using The Test Wrike Credentials and The Project ID \"IEAGS6BYI5RFMPP7\", 10 tasks should be fetched in the result of The Function." + "- Test The Function \"fetch_task_comments\" with Task ID \"IEAGS6BYKRRFMPQG\". 
Expect the number of The Fetched Comments to be 2.", + "- Test the rate limiting of The Function \"fetch_task_comments\" with the following flow:\n - Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Invoke The Function \"fetch_task_comments\" with valid credentials and all required parameters.\n - Expect: `status_code = 429`.\n - Expect: `api_delay > 0` and `api_delay <= 3`. If api_delay is bigger than 3, assume the problem is that we did not calculate the api_delay correctly in The Implementation Code.\n - Make a POST request to \"http://localhost:8004/end_rate_limiting\"." ] }, + "2.2.1.1": { + "functional_requirement": "- Implement The Function 'get_external_domain_metadata' that generates and returns The External Domain Metadata JSON object. The External Domain Metadata JSON object should have the record type 'users'.\n - The record type 'users' (Name: Users) should have the following fields:\n - full_name (display name: \"Full Name\", is required, type: text)\n - email (display name: \"Email\", is required, type: text)\n - title (display name: \"Title\", is optional, type: text)", + "folder_name": "conformance_tests/external_domain_metadata_generation_tests" + }, + "2.2.1.2": { + "functional_requirement": "- The External Domain Metadata JSON object should include the record type 'tasks', while preserving any existing record types.\n - The record type 'tasks' (Name: Tasks) should have the following fields:\n - title (display name: \"Title\", is required, type: text)\n - description (display name: \"Description\", is required, type: rich text)\n - status (display name: \"Status\", is required, type: enum)\n - permalink (display name: \"URL\", is required, type: text)\n - responsible_ids (display name: \"Responsible IDs\", is required, type: reference)\n - Field responsible_ids refers to the record type \"#record:users\".\n - Type of field responsible_ids is an array with max_length 1, which should be used 
as array value.", + "folder_name": "conformance_tests/tasks_record_type_metadata_tests" + }, + "2.2.1.3": { + "functional_requirement": "- The External Domain Metadata JSON object should include the record type 'comments', while preserving any existing record types.\n - The record type 'comments' (Name: Comments) should have the following fields:\n - text (display name: \"Text\", is required, type: rich text)\n - author_id (display name: \"Author ID\", is required, type: reference)\n - Field author_id refers to the record type \"#record:users\".\n - task_id (display name: \"Task ID\", is required, type: reference)\n - Field task_id refers to the record type \"#record:tasks\".", + "folder_name": "conformance_tests/comments_record_type_metadata_tests" + }, + "2.2.2.1": { + "functional_requirement": "- Implement The Function that generates and returns The Initial Domain Mapping JSON object. The Initial Domain Mapping JSON object should have record_type_mappings \"users\".\n - The record_type_mappings \"users\" should have the following properties:\n - Default mapping should map each external user to a \"devu\" user object.\n - There should be a single \"possible_record_type_mappings\" element, specifying:\n - The mapping is one-way (reverse is false, forward is true).\n - There should be no custom fields in the mapping.\n - The following The Stock Field Mapping Fields should be mapped using The External Transformation Method:\n - field \"full_name\" should be mapped to \"full_name\".\n - field \"email\" should be mapped to \"email\".\n - field \"title\" should be mapped to \"display_name\".", + "folder_name": "conformance_tests/initial_domain_mapping_generation_tests" + }, + "2.2.2.2": { + "functional_requirement": "- The Initial Domain Mapping JSON object should have record_type_mappings \"tasks\", while preserving any existing mappings.\n - The record_type_mappings \"tasks\" should have the following properties:\n - Default mapping should map each external task to a 
\"issue\" object.\n - There should be a single \"possible_record_type_mappings\" element, specifying:\n - The mapping is one-way (reverse is false, forward is true)\n - There should be no custom fields in the mapping.\n - The following The Stock Field Mapping Fields should be mapped using The External Transformation Method:\n - field \"title\" should be mapped to \"title\"\n - field \"permalink\" should be mapped to \"item_url_field\"\n - field \"description\" should be mapped to \"body\" (rich text)\n - field \"responsible_ids\" should be mapped to \"owned_by_ids\" (use directly)\n - The following The Stock Field Mapping Fields should be mapped using The Fixed Transformation Method:\n - field \"priority\" should contain fixed value \"P2\"\n - The following The Stock Field Mapping Fields should be mapped using The DevRev Record Transformation Method:\n - field \"applies_to_part_id\" should refer to the \"product\" object type\n - The following The Stock Field Mapping Fields should be mapped using The Map Enum Transformation Method:\n - field \"status\" should be mapped to \"stage\" in the following way:\n - \"Active\" maps to \"in_development\"\n - \"Completed\" maps to \"completed\"\n - \"Deferred\" maps to \"backlog\"\n - \"Cancelled\" maps to \"wont_fix\"", + "folder_name": "conformance_tests/tasks_domain_mapping_generation_tests" + }, + "2.2.2.3": { + "functional_requirement": "- The Initial Domain Mapping JSON object should have record_type_mappings \"comments\", while preserving any existing mappings.\n - The record_type_mappings \"comments\" should have the following properties:\n - Default mapping should map each external comment to a \"comment\" object.\n - There should be a single \"possible_record_type_mappings\" element, specifying:\n - The mapping is one-way (reverse is false, forward is true).\n - There should be no custom fields in the mapping.\n - The following The Stock Field Mapping Fields should be mapped using The External Transformation 
Method:\n - field \"text\" should be mapped to \"body\" (rich text).\n - field \"author_id\" should be mapped to \"created_by_id\" (use directly).\n - field \"author_id\" should be mapped to \"modified_by_id\" (use directly).\n - field \"task_id\" should be mapped to \"parent_object_id\" (use_directly).", + "folder_name": "conformance_tests/comments_domain_mapping_generation_tests" + }, "2.3.1": { - "functional_requirement": "- Implement The Function that generates and returns The External Domain Metadata JSON object. The record types that should be included in the The External Domain Metadata are The List of Tasks of The Project ('tasks') and The List of Contacts of The Space ('users').", - "folder_name": "conformance_tests/external_domain_metadata_generation", + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_START\", The Extraction Function should implement the \"external sync units extraction\" part of the extraction workflow as described in the resource `[resource]external-sync-units-extraction.mdx`:\n - Retrieve The Fetched Folders\n - Push The Fetched Folders as external sync units using the following mapping from the fields in The Fetched Folders to the fields in The External Sync Units:\n - \"id\" should map to \"id\".\n - \"title\" should map to \"name\".\n - \"description\" should map to \"description\".\n - \"item_type\" should have a fixed value of \"tasks\".\n - No other fields should be used.", + "folder_name": "conformance_tests/folders_external_sync_units_push_tests", "acceptance_tests": [ - "- Validate generated The External Domain Metadata JSON object with the chef-cli tool (The Chef CLI) using the command \"chef-cli validate-metadata\":\n - This command reads JSON object from stdin.\n - The path to The Chef CLI executable is provided in the environment variables `CHEF_CLI_PATH`.\n - For the test to be successful The Chef CLI must return an empty output.\n - The test should always fail if The Chef CLI is not 
available." + "- Test The Extraction Function using the resource `[resource]external_sync_unit_check.json`.\n - Expect The Callback Server to receive *a single* event with \"event_type\" \"EXTRACTION_EXTERNAL_SYNC_UNITS_DONE\".\n - Let `external_sync_units_array = [\"event_data\"][\"external_sync_units\"]` (array of objects).\n - Expect `external_sync_units_array` to exist and be an array.\n - Expect `len(external_sync_units_array) = 3`.\n - Out of `external_sync_units_array`, expect that there exists and element that has \"name\" equal to \"First project\"." ] }, - "2.4.1": { - "functional_requirement": "- Implement The Function that generates and returns The Initial Domain Mapping JSON object.", - "folder_name": "conformance_tests/initial_domain_mapping_generation", + "2.3.2": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_METADATA_START\" The Extraction Function should implement the \"metadata extraction\" part of the extraction workflow as described in the resource `[resource]metadata-extraction.md`. Please note that The External Domain Metadata JSON should not be normalized when pushed to the repository.", + "folder_name": "conformance_tests/metadata_extraction_workflow_tests" + }, + "2.3.3": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_DATA_START\" or \"EXTRACTION_DATA_CONTINUE\" The Extraction Function should push the 'users' data. 
To push the 'users' data, it should:\n - If `TheExtractionStateObject[\"users\"][\"completed\"]=false`:\n - Fetch The Fetched Users\n - Push The Fetched Users to the repository designated for 'users' data.\n - Set `TheExtractionStateObject[\"users\"][\"completed\"]=true`", + "folder_name": "conformance_tests/users_data_push_tests", "acceptance_tests": [ - "- Validate generated The Initial Domain Mapping JSON object with the chef-cli tool (The Chef CLI) using the command \"chef-cli initial-mapping check -m \":\n - The Chef CLI reads The Initial Domain Mapping JSON object from stdin.\n - The path to The Chef CLI executable is provided in the environment variables `CHEF_CLI_PATH`.\n - The test should always fail if The Chef CLI returns a non-empty output.\n - The test should always fail if The Chef CLI is not available." + "- Test The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Let `artifact_array = [\"event_data\"][\"artifacts\"]` (array of objects).\n - Expect `len(artifact_array) > 0`.\n - Out of `artifact_array`, expect one of the elements to have \"item_type\" equal to \"users\" (`:= users_artifact`) and `users_artifact[\"item_count\"]=4`.\n - Note: If `users_artifact[\"item_count\"] < 4`, this indicates that not all the users data was extracted.", + "- When the input event is \"EXTRACTION_DATA_START\", the Extraction Function must handle rate limiting using the following test flow:\n - Step 1: Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Step 2: Invoke The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive *a single* event with \"event_type\" \"EXTRACTION_DATA_DELAY\".\n - Step 3: Make a POST request to \"http://localhost:8004/end_rate_limiting\".", + "- Validate The 
Normalization Function with the chef-cli tool (The Chef CLI) for the \"users\" record type:\n - Firstly, invoke The Extraction Function using the resource `[resource]data_extraction_test.json`.\n validate The Normalization Function with the chef-cli tool (The Chef CLI) using the command \"chef-cli validate-data -m -r users\":\n - To invoke the function that returns The External Domain Metadata JSON object, send the event `[resource]external_domain_metadata_event_payload.json` to The Snap-In server.\n - The External Domain Metadata JSON object should then be stored in temporary file and provided as path to the validation of The Normalization Function.\n - The Chef CLI reads the extracted file (The Extracted File) from stdin.\n - The Extracted File is inside folder provided in the environment variable \"EXTRACTED_FILES_FOLDER_PATH\". To retrieve the relevant file name, command `ls EXTRACTED_FILES_FOLDER_PATH | grep extractor_users | sort -r | head -n 1` should be executed. The test should always fail if The Extracted File is not present.\n - Please note that the expected behaviour is that only after the invocation of The Extraction Function, the folder provided in the environment variable \"EXTRACTED_FILES_FOLDER_PATH\" should exist. The test should always fail if before the execution of The Chef CLI, the folder \"EXTRACTED_FILES_FOLDER_PATH\" does not exist or if The Extracted File is not found.\n - The path to The Chef CLI executable is provided in the environment variable `CHEF_CLI_PATH`.\n - For the validation of The Normalization Function to be successful The Chef CLI must return an empty output. The implementation of The Extraction Function directly infuences the output of The Chef CLI.\n - The stdout and stderr of The Chef CLI *must* be printed to the console.\n - The test should always fail if The Chef CLI is not available." 
] }, - "2.5.1": { - "functional_requirement": "- When spawning a new worker, pass the parameter object to the spawn function that includes the \"initialDomainMapping\" key with The Initial Domain Mapping JSON object as the value. Do not pass this under \"options\" key but directly in the top-most level of the parameter object.", - "folder_name": "conformance_tests/worker_domain_mapping_initialization" - }, - "2.5.2": { - "functional_requirement": "- When spawning a new worker, pass the parameter object to the spawn function that does not contain the \"options\" key.", - "folder_name": "conformance_tests/worker_spawn_parameter_validation" + "2.3.4": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_DATA_START\" or \"EXTRACTION_DATA_CONTINUE\" The Extraction Function should push the 'tasks' and 'users' data. To push the 'tasks' data, it should:\n - If `TheExtractionStateObject[\"tasks\"][\"completed\"]=false`:\n - Fetch The Fetched Tasks using pagination (The Tasks Iteration). 
For every page:\n - Push the Fetched Tasks to repository designated for 'tasks' data.\n - Update the \"nextPageToken\" parameter for \"tasks\".\n - Mark the \"tasks\" data as completed.\n - Wrike specific: When fetching The Fetched Tasks use the query param 'fields', where the value is an array of strings, which contains 'hasAttachments' (but it can also contain other values).", + "folder_name": "conformance_tests/tasks_data_push_tests", + "acceptance_tests": [ + "- Test The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Let `artifact_array = [\"event_data\"][\"artifacts\"]` (array of objects).\n - Expect `len(artifact_array) > 0`.\n - Out of `artifact_array`, expect one of the elements to have \"item_type\" equal to \"tasks\" (`:= tasks_artifact`) and `tasks_artifact[\"item_count\"]=110`.\n - Note: If `tasks_artifact[\"item_count\"] < 110`, this indicates that not all the tasks data was extracted.", + "- Test The Extraction Function using the resource `[resource]data_extraction_continue_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Let `artifact_array = [\"event_data\"][\"artifacts\"]` (array of objects).\n - Expect `len(artifact_array) > 0`.\n - Out of `artifact_array`, expect one of the elements to have \"item_type\" equal to \"tasks\" (`:= tasks_artifact`) and `tasks_artifact[\"item_count\"]=110`.\n - Note: If `tasks_artifact[\"item_count\"] < 110`, this indicates that not all the tasks data was extracted.", + "- When the input event is \"EXTRACTION_DATA_START\", the Extraction Function must handle rate limiting using the following test flow:\n - Step 1: Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Step 2: Invoke The Extraction Function 
using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive *a single* event with \"event_type\" \"EXTRACTION_DATA_DELAY\".\n - Step 3: Make a POST request to \"http://localhost:8004/end_rate_limiting\".", + "- Validate The Normalization Function with the chef-cli tool (The Chef CLI) for the \"tasks\" record type:\n - Firstly, invoke The Extraction Function using the resource `[resource]data_extraction_test.json`.\n validate The Normalization Function with the chef-cli tool (The Chef CLI) using the command \"chef-cli validate-data -m -r tasks\":\n - To invoke the function that returns The External Domain Metadata JSON object, send the event `[resource]external_domain_metadata_event_payload.json` to The Snap-In server.\n - The External Domain Metadata JSON object should then be stored in temporary file and provided as path to the validation of The Normalization Function.\n - The Chef CLI reads the extracted file (The Extracted File) from stdin.\n - The Extracted File is inside folder provided in the environment variable \"EXTRACTED_FILES_FOLDER_PATH\". To retrieve the relevant file name, command `ls EXTRACTED_FILES_FOLDER_PATH | grep extractor_tasks | sort -r | head -n 1` should be executed. The test should always fail if The Extracted File is not present.\n - Please note that the expected behaviour is that only after the invocation of The Extraction Function, the folder provided in the environment variable \"EXTRACTED_FILES_FOLDER_PATH\" should exist. The test should always fail if before the execution of The Chef CLI, the folder \"EXTRACTED_FILES_FOLDER_PATH\" does not exist or if The Extracted File is not found.\n - The path to The Chef CLI executable is provided in the environment variable `CHEF_CLI_PATH`.\n - For the validation of The Normalization Function to be successful The Chef CLI must return an empty output. 
The implementation of The Extraction Function directly infuences the output of The Chef CLI.\n - The stdout and stderr of The Chef CLI *must* be printed to the console.\n - The test should always fail if The Chef CLI is not available." + ] }, - "2.5.3": { - "functional_requirement": "- Implement The Function named \"extraction\" (The Extraction Function) that pushes The Fetched Projects as external sync units if \"event_type\" equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_START\".", - "folder_name": "conformance_tests/extraction_external_sync_unit_push", + "2.3.5": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_DATA_START\" or \"EXTRACTION_DATA_CONTINUE\" The Extraction Function should push the 'attachments', 'tasks' and 'users' data. To push the 'attachments' data, it should:\n - If `TheExtractionStateObject[\"tasks\"][\"completed\"]=false`:\n - Inside The Tasks Iteration. For every page:\n - Extract The Fetched Attachments from The Fetched Tasks.\n - Push The Fetched Attachments to the repository designated for 'attachments' data.\n - Mark the \"attachments\" data as completed.", + "folder_name": "conformance_tests/attachments_data_push_tests", "acceptance_tests": [ - "- Test The Extraction Function using the resource `[resource]external_sync_unit_check.json`. Make sure to replace placeholders in the resource with The Wrike API Key and The Space ID. For the test to be successful The Callback Server **must** receive from DevRev **exactly one** event with \"event_type\" that equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_DONE\". The test must not send the event directly to The Callback Server." 
+ "- Test The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Let `artifact_array = [\"event_data\"][\"artifacts\"]` (array of objects).\n - Expect `len(artifact_array) > 0`.\n - Out of `artifact_array`, expect one of the elements to have \"item_type\" equal to \"attachments\" (`:= attachments_artifact`) and `attachments_artifact[\"item_count\"]=2`.\n - Note: If `attachments_artifact[\"item_count\"] < 2`, this indicates that not all the attachments data was extracted." ] }, - "2.5.4": { - "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_EXTERNAL_SYNC_UNITS_START\" The Extraction Function should fetch the tasks count for each project and push it as part of the external sync units.", - "folder_name": "conformance_tests/project_tasks_count_fetch" + "2.3.6": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_DATA_START\" or \"EXTRACTION_DATA_CONTINUE\" The Extraction Function should push the 'users', 'tasks', 'attachments' and 'comments' data. To push the 'comments' data, it should:\n - If `TheExtractionStateObject[\"comments\"][\"completed\"]=false`:\n - Inside The Tasks Iteration. 
For every page:\n - Extract The Fetched Comments from The Fetched Tasks.\n - Push The Fetched Comments to the repository designated for 'comments' data.\n - Mark the \"comments\" data as completed.", + "folder_name": "conformance_tests/comments_data_push_tests", + "acceptance_tests": [ + "- Test The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Let `artifact_array = [\"event_data\"][\"artifacts\"]` (array of objects).\n - Expect `len(artifact_array) > 0`.\n - Out of `artifact_array`, expect one of the elements to have \"item_type\" equal to \"comments\" (`:= comments_artifact`) and `comments_artifact[\"item_count\"]=2`.\n - Note: If `comments_artifact[\"item_count\"] < 2`, this indicates that not all the comments data was extracted.", + "- When the input event is \"EXTRACTION_DATA_START\", the Extraction Function must handle rate limiting using the following test flow:\n - Step 1: Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Step 2: Invoke The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive *a single* event with \"event_type\" \"EXTRACTION_DATA_DELAY\".\n - Step 3: Make a POST request to \"http://localhost:8004/end_rate_limiting\"." + ] }, - "2.5.5": { - "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_METADATA_START\" The Extraction Function should implement the \"metadata extraction\" part of the extraction workflow by pushing The External Domain Metadata JSON object to the repository called 'external_domain_metadata'. 
Please note that The External Domain Metadata JSON object shouldn't be normalized when pushed to the repository.", - "folder_name": "conformance_tests/external_domain_metadata_push" + "2.3.7": { + "functional_requirement": "- If \"event_type\" equals \"EXTRACTION_ATTACHMENTS_START\" or \"EXTRACTION_ATTACHMENTS_CONTINUE\" The Extraction Function should implement attachment streaming as described in the resource `[resource]attachments-extraction.md`.", + "folder_name": "conformance_tests/attachments_extraction_streaming_tests", + "acceptance_tests": [ + "- Test The Extraction Function with the following flow (The Attachment Test Flow):\n - Step 1: Invoke The Extraction Function using the resource `[resource]data_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_DATA_DONE\".\n - Step 2: Invoke The Extraction Function using the resource `[resource]attachments_extraction_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event (`:= event_from_callback_server`) with \"event_type\" that equals \"EXTRACTION_ATTACHMENTS_DONE\".\n - Expect `event_from_callback_server[\"event_data\"][\"artifacts\"]` to be an array. Expect this array to not be empty. Expect this array to have length 1.\n - Let `artifact_object = event_from_callback_server[\"event_data\"][\"artifacts\"][0]`.\n - Expect `artifact_object[\"item_type\"] = \"ssor_attachment\"` and `artifact_object[\"item_count\"] = 2`.\n - Make a GET request to \"http://localhost:8003/is_uploaded/{artifact_object['id']}\". 
Expect response code to be equal to 200.\n - Note: The Attachment Test Flow should be executed within a single test.", + "- Test The Extraction Function using the resource `[resource]attachments_extraction_continue_test.json`.\n - Expect The Callback Server to receive from DevRev a **single** event with \"event_type\" that equals \"EXTRACTION_ATTACHMENTS_DONE\".", + "- When the input event is \"EXTRACTION_ATTACHMENTS_START\", the Extraction Function must handle rate limiting using the following test flow:\n - Step 1: Make a POST request to \"http://localhost:8004/start_rate_limiting\" with body `{ \"test_name\": }`.\n - Step 2: Invoke The Extraction Function using the resource `[resource]attachments_extraction_test.json`.\n - Expect The Callback Server to receive *a single* event with \"event_type\" \"EXTRACTION_ATTACHMENTS_DONE\".\n - Step 3: Make a POST request to \"http://localhost:8004/end_rate_limiting\"." + ] } } \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/.gitignore b/conformance_tests/data_extraction_validation/.gitignore deleted file mode 100644 index 7e3b558..0000000 --- a/conformance_tests/data_extraction_validation/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -node_modules/ -coverage/ -dist/ -.DS_Store -*.log \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/data-extraction-acceptance.test.ts b/conformance_tests/data_extraction_validation/data-extraction-acceptance.test.ts deleted file mode 100644 index 24525bc..0000000 --- a/conformance_tests/data_extraction_validation/data-extraction-acceptance.test.ts +++ /dev/null @@ -1,222 +0,0 @@ -import axios from 'axios'; -import http from 'http'; -import fs from 'fs'; -import path from 'path'; -import { AddressInfo } from 'net'; -import { EventType, ExtractorEventType } from '@devrev/ts-adaas'; - -// Constants -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const TEST_DATA_FILE = 
'data_extraction_check.json'; -const TEST_DATA_PATH = path.resolve(__dirname, '../resources/data_extraction_check.json'); - -// Interface for callback server requests -interface CallbackRequest { - method: string; - url: string; - headers: http.IncomingHttpHeaders; - body: any; -} - -describe('Data Extraction Acceptance Test', () => { - let callbackServer: http.Server; - let callbackServerUrl: string; - let callbackRequests: CallbackRequest[] = []; - let testEventData: any; - - // Setup callback server before tests - beforeAll((done) => { - console.log('Setting up callback server...'); - - // Create a simple HTTP server to act as the callback server - callbackServer = http.createServer((req, res) => { - console.log(`Callback server received request: ${req.method} ${req.url}`); - - let body = ''; - req.on('data', (chunk) => { - body += chunk.toString(); - }); - - req.on('end', () => { - console.log(`Callback request body: ${body}`); - - // Store the request for later verification - const parsedBody = body ? 
(() => { - try { - return JSON.parse(body); - } catch (e) { - console.error(`Error parsing callback request body: ${e}`); - return body; - } - })() : {}; - - callbackRequests.push({ - method: req.method || '', - url: req.url || '', - headers: req.headers, - body: parsedBody - }); - - // Send a success response - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ status: 'success' })); - }); - }); - - // Start the server and get the assigned port - callbackServer.listen(CALLBACK_SERVER_PORT, 'localhost', () => { - const address = callbackServer.address() as AddressInfo; - callbackServerUrl = `http://localhost:${CALLBACK_SERVER_PORT}`; - console.log(`Callback server started at ${callbackServerUrl}`); - - // Load the test event data - try { - // First try to load from the current directory - const currentDirPath = path.resolve(__dirname, TEST_DATA_FILE); - if (fs.existsSync(currentDirPath)) { - const fileContent = fs.readFileSync(currentDirPath, 'utf8'); - testEventData = JSON.parse(fileContent); - } else { - console.error(`Test data file not found at: ${currentDirPath}`); - testEventData = require('./data_extraction_check.json'); - } - - console.log('Test event data loaded successfully'); - done(); - } catch (error) { - console.error(`Error loading test data: ${error}`); - done(error as Error); - } - }); - }); - - // Clean up after tests - afterAll((done) => { - console.log('Cleaning up resources...'); - if (callbackServer && callbackServer.listening) { - callbackServer.close(() => { - console.log('Callback server closed'); - done(); - }); - } else { - done(); - } - }); - - // Reset callback requests before each test - beforeEach(() => { - callbackRequests = []; - }); - - test('should receive EXTRACTION_DATA_DONE event from DevRev', async () => { - // Skip if test data couldn't be loaded - if (!testEventData) { - console.error('Test data not available, skipping test'); - return; - } - - console.log('Starting acceptance test for 
data extraction...'); - - // Update the callback URL in the test event - if (testEventData.payload && testEventData.payload.event_context) { - testEventData.payload.event_context.callback_url = callbackServerUrl + '/callback'; - console.log(`Updated callback URL to: ${testEventData.payload.event_context.callback_url}`); - } else { - fail('Test event data is missing required fields: payload.event_context'); - } - - // Send the event to the snap-in server - console.log('Sending event to snap-in server...'); - try { - const response = await axios.post(SNAP_IN_SERVER_URL, testEventData, { - headers: { - 'Content-Type': 'application/json' - } - }); - - console.log(`Snap-in server response status: ${response.status}`); - console.log(`Snap-in server response data: ${JSON.stringify(response.data)}`); - - // Verify the response - expect(response.status).toBe(200); - - // Log the full response for debugging - console.log(`Full response data: ${JSON.stringify(response.data, null, 2)}`); - - // Check if we have a function_result or an error - if (response.data.function_result) { - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - } - expect(response.data.error).toBeUndefined(); - - // Wait for the worker to complete and send events to the callback server - console.log('Waiting for callback events...'); - - // Wait up to 30 seconds for the EXTRACTION_DATA_DONE event - const maxWaitTime = 30000; // 30 seconds - const checkInterval = 1000; // 1 second - let elapsedTime = 0; - let doneEvent: CallbackRequest | undefined = undefined; - let receivedDoneEvent = false; - - while (elapsedTime < maxWaitTime) { - // Check if we've received the EXTRACTION_DATA_DONE event - const doneEvent = callbackRequests.find(req => - req.body && - req.body.event_type === ExtractorEventType.ExtractionDataDone - ); - - if (doneEvent) { - console.log('Received EXTRACTION_DATA_DONE event!'); - // Store the found event in the outer scope 
variable - receivedDoneEvent = true; - break; - } - - // Wait for the next check interval - await new Promise(resolve => setTimeout(resolve, checkInterval)); - elapsedTime += checkInterval; - - console.log(`Waiting for EXTRACTION_DATA_DONE event... (${elapsedTime / 1000}s elapsed)`); - } - - // Log all received callback requests for debugging - console.log(`Received ${callbackRequests.length} callback requests:`); - callbackRequests.forEach((req, index) => { - console.log(`Request ${index + 1}:`); - console.log(` Method: ${req.method}`); - console.log(` URL: ${req.url}`); - console.log(` Body: ${JSON.stringify(req.body)}`); - }); - - // Find the EXTRACTION_DATA_DONE event in the callback requests - // This is the critical fix - we need to assign to the outer doneEvent variable - doneEvent = callbackRequests.find(req => - req.body && - req.body.event_type === ExtractorEventType.ExtractionDataDone - ); - - // Log whether we found the event - console.log('Final check for EXTRACTION_DATA_DONE event:', doneEvent ? 
'Found' : 'Not found'); - - // If we found the event, log its details for debugging - if (doneEvent) - console.log(`Found EXTRACTION_DATA_DONE event: ${JSON.stringify(doneEvent.body)}`); - - expect(doneEvent).toBeDefined(); - expect(doneEvent?.body.event_type).toBe(ExtractorEventType.ExtractionDataDone); - - } catch (error: any) { - console.error('Error during test execution:'); - if (error.response) { - console.error(`Response status: ${error.response.status}`); - console.error(`Response data: ${JSON.stringify(error.response.data)}`); - } else { - console.error(error); - } - throw error; - } - }, 60000); // 60 second timeout for this specific test -}); \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/data-extraction.test.ts b/conformance_tests/data_extraction_validation/data-extraction.test.ts deleted file mode 100644 index c6cec6a..0000000 --- a/conformance_tests/data_extraction_validation/data-extraction.test.ts +++ /dev/null @@ -1,204 +0,0 @@ -import axios from 'axios'; -import http from 'http'; -import { AddressInfo } from 'net'; -import { EventType } from '@devrev/ts-adaas'; - -// Constants -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; - -// Test data -const MOCK_SERVICE_ACCOUNT_TOKEN = 'mock-service-account-token'; -const MOCK_SNAP_IN_VERSION_ID = 'mock-snap-in-version-id'; -const MOCK_DEVREV_ENDPOINT = 'http://localhost:8003'; - -// Interface for callback server requests -interface CallbackRequest { - method: string; - url: string; - headers: http.IncomingHttpHeaders; - body: any; -} - -describe('Data Extraction Conformance Tests', () => { - let callbackServer: http.Server; - let callbackServerUrl: string; - let callbackRequests: CallbackRequest[] = []; - - // Setup callback server before tests - beforeAll((done) => { - // Create a simple HTTP server to act as the callback server - callbackServer = http.createServer((req, res) => { - let body = ''; - req.on('data', 
(chunk) => { - body += chunk.toString(); - }); - - req.on('end', () => { - // Store the request for later verification - callbackRequests.push({ - method: req.method || '', - url: req.url || '', - headers: req.headers, - body: body ? (() => { - try { - return JSON.parse(body); - } catch (e) { - return body; - } - })() : {} - }); - - // Send a success response - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ status: 'success' })); - }); - }); - - // Start the server and get the assigned port - callbackServer.listen(CALLBACK_SERVER_PORT, 'localhost', () => { - const address = callbackServer.address() as AddressInfo; - callbackServerUrl = `http://localhost:${CALLBACK_SERVER_PORT}`; - console.log(`Callback server started at ${callbackServerUrl}`); - done(); - }); - }); - - // Clean up after tests - afterAll((done) => { - if (callbackServer && callbackServer.listening) { - callbackServer.close(() => { - done(); - }); - } else { - done(); - } - }); - - // Reset callback requests before each test - beforeEach(() => { - callbackRequests = []; - }); - - // Helper function to create a valid event - const createValidEvent = (eventType: EventType): any => { - return { - context: { - secrets: { - service_account_token: MOCK_SERVICE_ACCOUNT_TOKEN - }, - snap_in_version_id: MOCK_SNAP_IN_VERSION_ID, - snap_in_id: 'mock-snap-in-id' - }, - payload: { - event_type: eventType, - event_context: { - callback_url: callbackServerUrl, - dev_org: 'mock-dev-org', - dev_org_id: 'mock-dev-org-id', - dev_user: 'mock-dev-user', - dev_user_id: 'mock-dev-user-id', - external_sync_unit: 'mock-external-sync-unit', - external_sync_unit_id: 'mock-external-sync-unit-id', - external_sync_unit_name: 'mock-external-sync-unit-name', - external_system: 'mock-external-system', - external_system_type: 'mock-external-system-type', - import_slug: 'mock-import-slug', - mode: 'INITIAL', - request_id: 'mock-request-id', - snap_in_slug: 'mock-snap-in-slug', - 
snap_in_version_id: MOCK_SNAP_IN_VERSION_ID, - sync_run: 'mock-sync-run', - sync_run_id: 'mock-sync-run-id', - sync_tier: 'mock-sync-tier', - sync_unit: 'mock-sync-unit', - sync_unit_id: 'mock-sync-unit-id', - uuid: 'mock-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - execution_metadata: { - devrev_endpoint: MOCK_DEVREV_ENDPOINT, - function_name: 'data_extraction_check' - }, - input_data: {} - }; - }; - - // Test 1: Basic Invocation - test('should successfully invoke data_extraction_check function', async () => { - const event = createValidEvent(EventType.ExtractionDataStart); - - const response = await axios.post(SNAP_IN_SERVER_URL, event); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.error).toBeUndefined(); - }); - - // Test 2: Event Validation - test('should validate input events', async () => { - // Create an invalid event (missing required fields) - // This event is missing the required service_account_token - const invalidEvent = createValidEvent(EventType.ExtractionDataStart); - // Remove the service_account_token to make it invalid - if (invalidEvent.context && invalidEvent.context.secrets) { - delete invalidEvent.context.secrets.service_account_token; - } - - // Expect the request to throw an error - expect.assertions(1); // We expect one assertion to be made - - try { - await axios.post(SNAP_IN_SERVER_URL, invalidEvent); - fail('Expected request to fail with validation error'); - } catch (error: any) { - // Just verify that an error was thrown - // The exact format of the error may vary depending on how the server handles it - expect(error).toBeDefined(); - } - }); - - // Test 3: Event Type Recognition - test('should correctly identify data extraction events', async () => { - // Test with a non-data extraction event - const nonDataExtractionEvent = 
createValidEvent(EventType.ExtractionMetadataStart); - - const response = await axios.post(SNAP_IN_SERVER_URL, nonDataExtractionEvent); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.valid_data_extraction_events).toBe(false); - - // Test with a data extraction event - const dataExtractionEvent = createValidEvent(EventType.ExtractionDataStart); - - const dataResponse = await axios.post(SNAP_IN_SERVER_URL, dataExtractionEvent); - - expect(dataResponse.status).toBe(200); - expect(dataResponse.data.function_result).toBeDefined(); - expect(dataResponse.data.function_result.valid_data_extraction_events).toBe(true); - }); - - // Test 4: Complete Workflow - test('should complete the data extraction workflow successfully', async () => { - const event = createValidEvent(EventType.ExtractionDataStart); - - const response = await axios.post(SNAP_IN_SERVER_URL, event); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.valid_data_extraction_events).toBe(true); - - // Wait for the worker to complete (this may take a moment) - await new Promise(resolve => setTimeout(resolve, 10000)); - - // Verify that the worker completed successfully by checking if any callback requests were made - // In a real implementation, we would check for specific events like EXTRACTION_DATA_DONE - // but for this test, we're just verifying the function was called correctly - expect(callbackRequests.length).toBeGreaterThan(0); - }); -}); \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/data_extraction_check.json b/conformance_tests/data_extraction_validation/data_extraction_check.json deleted file mode 100644 index 2dce148..0000000 --- a/conformance_tests/data_extraction_validation/data_extraction_check.json +++ /dev/null @@ -1,27 +0,0 @@ 
-{ - "execution_metadata": { - "function_name": "data_extraction_check", - "devrev_endpoint": "http://localhost:8003" - }, - "payload" : { - "event_type": "EXTRACTION_DATA_START", - "event_context": { - "callback_url": "http://localhost:8002/callback", - "dev_org": "test-dev-org", - "dev_org_id": "test-dev-org-id", - "external_sync_unit_id": "test-external-sync-unit", - "sync_unit_id": "test-sync-unit", - "worker_data_url": "http://localhost:8003/external-worker" - }, - "connection_data": { - "org_id": "test-org-id", - "key": "key=test-key&token=test-token" - } - }, - "context": { - "snap_in_version_id": "test-snap-in-version-id", - "secrets": { - "service_account_token": "test-token" - } - } -} \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/jest.setup.ts b/conformance_tests/data_extraction_validation/jest.setup.ts deleted file mode 100644 index cf849b9..0000000 --- a/conformance_tests/data_extraction_validation/jest.setup.ts +++ /dev/null @@ -1,2 +0,0 @@ -// Increase the timeout for all tests -jest.setTimeout(120000); // 120 seconds \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/package.json b/conformance_tests/data_extraction_validation/package.json deleted file mode 100644 index ccd4bf2..0000000 --- a/conformance_tests/data_extraction_validation/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "airdrop-snap-in-conformance-tests", - "version": "1.0.0", - "description": "Conformance tests for Airdrop Snap-in", - "scripts": { - "test": "jest --config jest.config.js --runInBand" - }, - "devDependencies": { - "@devrev/ts-adaas": "1.5.1", - "@devrev/typescript-sdk": "1.1.63", - "@types/jest": "^29.5.0", - "@types/node": "^18.15.11", - "axios": "^1.9.0", - "jest": "^29.5.0", - "ts-jest": "^29.1.0", - "typescript": "^4.9.5" - }, - "engines": { - "node": ">=14.0.0" - }, - "private": true, - "dependencies": { - "http": "^0.0.1-security" - } -} \ No newline at end of file diff --git 
a/conformance_tests/data_extraction_workflow_tests/data-extraction-check-acceptance.test.ts b/conformance_tests/data_extraction_workflow_tests/data-extraction-check-acceptance.test.ts new file mode 100644 index 0000000..13fefc2 --- /dev/null +++ b/conformance_tests/data_extraction_workflow_tests/data-extraction-check-acceptance.test.ts @@ -0,0 +1,136 @@ +import { + createCallbackServer, + createDevRevServer, + sendEventToSnapIn, + closeServer, +} from './test-utils/test-helpers'; +import { Server } from 'http'; +import * as fs from 'fs'; +import * as path from 'path'; + +describe('Data Extraction Check Function - Acceptance Test', () => { + let callbackServer: Server; + let devrevServer: Server; + let callbackHelpers: any; + let devrevHelpers: any; + + beforeEach(async () => { + jest.setTimeout(60000); + console.log('[Acceptance Test] Setting up test servers...'); + + // Setup The Callback Server + callbackHelpers = await createCallbackServer(); + callbackServer = callbackHelpers.server; + + // Setup The DevRev Server + devrevHelpers = await createDevRevServer(); + devrevServer = devrevHelpers.server; + + // Give servers time to fully initialize + await new Promise(resolve => setTimeout(resolve, 100)); + console.log('[Acceptance Test] Test servers ready'); + }); + + afterEach(async () => { + console.log('[Acceptance Test] Cleaning up test servers...'); + // Cleanup servers + await closeServer(callbackServer); + await closeServer(devrevServer); + }); + + test('should process event from data_extraction_check.json and emit EXTRACTION_DATA_DONE', async () => { + console.log('[Acceptance Test] Starting acceptance test for data_extraction_check.json'); + + // Arrange - Load event from JSON file + const jsonFilePath = path.join(__dirname, 'test-data', 'data_extraction_check.json'); + console.log('[Acceptance Test] Loading event from:', jsonFilePath); + + let event: any; + try { + const fileContent = fs.readFileSync(jsonFilePath, 'utf-8'); + event = 
JSON.parse(fileContent); + console.log('[Acceptance Test] Successfully loaded event from JSON file'); + console.log('[Acceptance Test] Event type:', event.payload.event_type); + console.log('[Acceptance Test] Function name:', event.execution_metadata.function_name); + console.log('[Acceptance Test] Request ID:', event.execution_metadata.request_id); + } catch (error) { + throw new Error( + `Failed to load or parse test event from ${jsonFilePath}: ${error instanceof Error ? error.message : String(error)}` + ); + } + + // Validate event structure + if (!event.payload || !event.payload.event_type) { + throw new Error( + `Invalid event structure in ${jsonFilePath}: missing payload.event_type. ` + + `Event structure: ${JSON.stringify(event, null, 2)}` + ); + } + + if (event.payload.event_type !== 'EXTRACTION_DATA_START') { + throw new Error( + `Unexpected event type in ${jsonFilePath}: expected EXTRACTION_DATA_START, ` + + `got ${event.payload.event_type}` + ); + } + + const requestId = event.execution_metadata.request_id; + console.log('[Acceptance Test] Processing event with request_id:', requestId); + + // Act - Send event to The Test Snap-In Server + console.log('[Acceptance Test] Sending event to snap-in server at http://localhost:8000/handle/sync'); + let response: any; + try { + response = await sendEventToSnapIn(event); + console.log('[Acceptance Test] Received response from snap-in server'); + + if (response.error) { + console.error('[Acceptance Test] Snap-in returned error:', JSON.stringify(response.error, null, 2)); + } + } catch (error) { + throw new Error( + `Failed to send event to snap-in server: ${error instanceof Error ? error.message : String(error)}. ` + + `Event: ${JSON.stringify(event, null, 2)}` + ); + } + + // Assert - Function responds successfully without errors + expect(response).toBeDefined(); + if (response.error) { + throw new Error( + `Snap-in function returned an error: ${JSON.stringify(response.error, null, 2)}. 
` + + `This indicates the function failed to process the event correctly.` + ); + } + expect(response.error).toBeUndefined(); + + // Assert - EXTRACTION_DATA_DONE event is received by The Callback Server + console.log('[Acceptance Test] Waiting for EXTRACTION_DATA_DONE event at callback server...'); + let doneEvent: any; + try { + doneEvent = await callbackHelpers.waitForEvent('EXTRACTION_DATA_DONE', 30000); + console.log('[Acceptance Test] Successfully received EXTRACTION_DATA_DONE event'); + } catch (error) { + const receivedEvents = callbackHelpers.receivedEvents; + const receivedEventTypes = receivedEvents.map((e: any) => e.event_type).join(', '); + throw new Error( + `Failed to receive EXTRACTION_DATA_DONE event from callback server within 30 seconds. ` + + `Request ID: ${requestId}. ` + + `Received events (${receivedEvents.length}): [${receivedEventTypes}]. ` + + `Original error: ${error instanceof Error ? error.message : String(error)}. ` + + `This indicates that the function did not emit the expected event to the callback URL.` + ); + } + + // Validate the received event + expect(doneEvent).toBeDefined(); + expect(doneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + + console.log('[Acceptance Test] Acceptance test completed successfully'); + console.log('[Acceptance Test] Event details:', { + event_type: doneEvent.event_type, + has_event_data: !!doneEvent.event_data, + has_event_context: !!doneEvent.event_context, + }); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/data_extraction_workflow_tests/data-extraction-check.test.ts b/conformance_tests/data_extraction_workflow_tests/data-extraction-check.test.ts new file mode 100644 index 0000000..b66cfb5 --- /dev/null +++ b/conformance_tests/data_extraction_workflow_tests/data-extraction-check.test.ts @@ -0,0 +1,120 @@ +import { + createCallbackServer, + createDevRevServer, + createDataExtractionEvent, + sendEventToSnapIn, + closeServer, +} from './test-utils/test-helpers'; +import 
{ Server } from 'http'; + +describe('Data Extraction Check Function', () => { + let callbackServer: Server; + let devrevServer: Server; + let callbackHelpers: any; + let devrevHelpers: any; + + beforeEach(async () => { + jest.setTimeout(60000); + console.log('[Test] Setting up test servers...'); + + // Setup The Callback Server + callbackHelpers = await createCallbackServer(); + callbackServer = callbackHelpers.server; + + // Setup The DevRev Server + devrevHelpers = await createDevRevServer(); + devrevServer = devrevHelpers.server; + + // Give servers time to fully initialize + await new Promise(resolve => setTimeout(resolve, 100)); + console.log('[Test] Test servers ready'); + }); + + afterEach(async () => { + console.log('[Test] Cleaning up test servers...'); + // Cleanup servers + await closeServer(callbackServer); + await closeServer(devrevServer); + }); + + test('should handle EXTRACTION_DATA_START event and emit EXTRACTION_DATA_DONE', async () => { + console.log('[Test] Starting test: EXTRACTION_DATA_START'); + + // Arrange + const event = createDataExtractionEvent('EXTRACTION_DATA_START'); + const requestId = event.execution_metadata.request_id; + + // Act + const response = await sendEventToSnapIn(event); + + // Assert - Function responds successfully + expect(response).toBeDefined(); + expect(response.error).toBeUndefined(); + + // Assert - EXTRACTION_DATA_DONE event is emitted + const doneEvent = await callbackHelpers.waitForEvent('EXTRACTION_DATA_DONE', 30000); + expect(doneEvent).toBeDefined(); + expect(doneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + + console.log('[Test] Test completed: EXTRACTION_DATA_START'); + }, 60000); + + test('should handle EXTRACTION_DATA_CONTINUE event and emit EXTRACTION_DATA_DONE', async () => { + console.log('[Test] Starting test: EXTRACTION_DATA_CONTINUE'); + + // Arrange + const event = createDataExtractionEvent('EXTRACTION_DATA_CONTINUE'); + const requestId = event.execution_metadata.request_id; + + // Act + const 
response = await sendEventToSnapIn(event); + + // Assert - Function responds successfully + expect(response).toBeDefined(); + expect(response.error).toBeUndefined(); + + // Assert - EXTRACTION_DATA_DONE event is emitted + const doneEvent = await callbackHelpers.waitForEvent('EXTRACTION_DATA_DONE', 30000); + expect(doneEvent).toBeDefined(); + expect(doneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + + console.log('[Test] Test completed: EXTRACTION_DATA_CONTINUE'); + }, 60000); + + test('should create artifacts with normalized user data', async () => { + console.log('[Test] Starting test: create artifacts with normalized user data'); + + // Arrange + const event = createDataExtractionEvent('EXTRACTION_DATA_START'); + + // Act + console.log('[Test] Sending event to snap-in...'); + const response = await sendEventToSnapIn(event); + + // Assert - Function responds successfully + expect(response).toBeDefined(); + expect(response.error).toBeUndefined(); + + // Assert - EXTRACTION_DATA_DONE event is emitted with artifacts + console.log('[Test] Waiting for EXTRACTION_DATA_DONE event...'); + const doneEvent = await callbackHelpers.waitForEvent('EXTRACTION_DATA_DONE', 30000); + expect(doneEvent).toBeDefined(); + expect(doneEvent.event_type).toBe('EXTRACTION_DATA_DONE'); + + // Assert - Event contains artifacts + expect(doneEvent.event_data).toBeDefined(); + expect(doneEvent.event_data.artifacts).toBeDefined(); + expect(Array.isArray(doneEvent.event_data.artifacts)).toBe(true); + expect(doneEvent.event_data.artifacts.length).toBeGreaterThan(0); + + // Assert - Artifact structure is correct + const artifact = doneEvent.event_data.artifacts[0]; + expect(artifact).toHaveProperty('id'); + expect(artifact).toHaveProperty('item_type'); + expect(artifact).toHaveProperty('item_count'); + expect(artifact.item_type).toBe('users'); + expect(artifact.item_count).toBe(2); + + console.log('[Test] Test completed: create artifacts with normalized user data'); + }, 60000); +}); \ No newline 
at end of file diff --git a/conformance_tests/data_extraction_workflow_tests/jest.config.js b/conformance_tests/data_extraction_workflow_tests/jest.config.js new file mode 100644 index 0000000..2f40230 --- /dev/null +++ b/conformance_tests/data_extraction_workflow_tests/jest.config.js @@ -0,0 +1,23 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.tsx?$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + strict: true, + declaration: true, + resolveJsonModule: true, + } + }] + }, + collectCoverageFrom: [ + 'src/**/*.ts', + '!src/**/*.d.ts', + ], +}; \ No newline at end of file diff --git a/conformance_tests/project_tasks_count_fetch/package.json b/conformance_tests/data_extraction_workflow_tests/package.json similarity index 59% rename from conformance_tests/project_tasks_count_fetch/package.json rename to conformance_tests/data_extraction_workflow_tests/package.json index 461cc76..0c02b4b 100644 --- a/conformance_tests/project_tasks_count_fetch/package.json +++ b/conformance_tests/data_extraction_workflow_tests/package.json @@ -1,17 +1,10 @@ { - "name": "wrike-snap-in-conformance-tests", + "name": "data-extraction-check-conformance-tests", "version": "1.0.0", - "description": "Conformance tests for Wrike snap-in", - "main": "index.js", + "description": "Conformance tests for data extraction check function", "scripts": { "test": "jest" }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, "devDependencies": { "@types/express": "^4.17.21", "@types/jest": "^29.4.0", @@ -19,5 +12,10 @@ "jest": "^29.4.2", "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "body-parser": "^1.20.3", + "express": "^4.21.0" } } \ No newline at end of file diff 
--git a/conformance_tests/data_extraction_workflow_tests/test-data/data_extraction_check.json b/conformance_tests/data_extraction_workflow_tests/test-data/data_extraction_check.json new file mode 100644 index 0000000..d05a2d7 --- /dev/null +++ b/conformance_tests/data_extraction_workflow_tests/test-data/data_extraction_check.json @@ -0,0 +1,56 @@ +{ + "execution_metadata": { + "function_name": "data_extraction_check", + "devrev_endpoint": "http://localhost:8003", + "request_id": "test-request-acceptance", + "event_type": "EXTRACTION_DATA_START" + }, + "payload" : { + "event_type": "EXTRACTION_DATA_START", + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_org": "test-dev-org", + "dev_org_id": "test-dev-org", + "dev_user": "test-user", + "dev_user_id": "test-user", + "external_sync_unit": "test-external-sync-unit", + "external_sync_unit_id": "test-external-sync-unit", + "external_sync_unit_name": "Test External Sync Unit", + "external_system": "test-system", + "external_system_type": "test-type", + "import_slug": "test-import", + "mode": "INITIAL", + "request_id": "test-request-acceptance", + "snap_in_slug": "test-snap-in", + "snap_in_version_id": "test-version", + "sync_run": "test-run", + "sync_run_id": "test-run", + "sync_tier": "test-tier", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "connection_data": { + "org_id": "test-org-id", + "org_name": "Test Organization", + "key": "key=test-key&token=test-token", + "key_type": "api_key" + }, + "event_data": {} + }, + "context": { + "dev_oid": "test-dev-org", + "source_id": "test-source", + "snap_in_id": "test-snap-in", + "snap_in_version_id": "test-version", + "service_account_id": "test-service-account", + "secrets": { + "service_account_token": "test-token" + } + }, + "input_data": { + "global_values": {}, + "event_sources": {} + } +} \ No newline at end of file diff --git 
a/conformance_tests/data_extraction_workflow_tests/test-utils/test-helpers.ts b/conformance_tests/data_extraction_workflow_tests/test-utils/test-helpers.ts new file mode 100644 index 0000000..91c9a58 --- /dev/null +++ b/conformance_tests/data_extraction_workflow_tests/test-utils/test-helpers.ts @@ -0,0 +1,213 @@ +import express, { Express } from 'express'; +import bodyParser from 'body-parser'; +import { Server } from 'http'; + +export interface CallbackEvent { + event_type: string; + event_data?: any; + event_context?: any; +} + +export interface WorkerDataUpload { + data: any; + timestamp: string; + method: string; + path: string; + headers: any; +} + +/** + * Creates and starts The Callback Server on port 8002 + */ +export function createCallbackServer(): Promise<{ + server: Server; + app: Express; + receivedEvents: CallbackEvent[]; + waitForEvent: (eventType: string, timeoutMs?: number) => Promise; +}> { + return new Promise((resolve) => { + const app = express(); + app.use(bodyParser.json()); + + const receivedEvents: CallbackEvent[] = []; + + app.post('/callback', (req, res) => { + const event = req.body; + console.log('[Callback Server] Received event:', event.event_type); + receivedEvents.push(event); + res.status(200).send({ status: 'received' }); + }); + + const server = app.listen(8002, () => { + console.log('[Callback Server] Started on port 8002'); + const waitForEvent = (eventType: string, timeoutMs = 30000): Promise => { + return new Promise((resolve, reject) => { + const startTime = Date.now(); + const checkInterval = setInterval(() => { + const event = receivedEvents.find(e => e.event_type === eventType); + if (event) { + clearInterval(checkInterval); + resolve(event); + } else if (Date.now() - startTime > timeoutMs) { + clearInterval(checkInterval); + reject(new Error( + `Timeout waiting for event type: ${eventType}. 
` + + `Received events: ${receivedEvents.map(e => e.event_type).join(', ')}` + )); + } + }, 100); + }); + }; + + resolve({ server, app, receivedEvents, waitForEvent }); + }); + }); +} + +/** + * Creates and starts The DevRev Server mock on port 8003 + */ +export function createDevRevServer(): Promise<{ + server: Server; + app: Express; +}> { + return new Promise((resolve) => { + const app = express(); + + // Parse different content types + app.use(bodyParser.json({ limit: '50mb' })); + app.use(bodyParser.raw({ type: 'application/octet-stream', limit: '50mb' })); + app.use(bodyParser.text({ type: 'application/x-ndjson', limit: '50mb' })); + app.use(bodyParser.text({ type: 'text/plain', limit: '50mb' })); + app.use(bodyParser.urlencoded({ extended: true, limit: '50mb' })); + + // Log all incoming requests + app.use((req, res, next) => { + console.log(`[DevRev Server] ${req.method} ${req.path}`); + console.log(`[DevRev Server] Headers:`, JSON.stringify(req.headers, null, 2)); + if (req.body) { + const bodyPreview = typeof req.body === 'string' + ? 
req.body.substring(0, 200) + : JSON.stringify(req.body).substring(0, 200); + console.log(`[DevRev Server] Body preview:`, bodyPreview); + } + next(); + }); + + // Handle state retrieval (GET requests) + app.get('*', (req, res) => { + console.log(`[DevRev Server] GET request to ${req.path} - returning empty state`); + res.status(200).json({ state: {} }); + }); + + // Handle data uploads (POST requests) + app.post('*', (req, res) => { + console.log(`[DevRev Server] POST request to ${req.path} - capturing upload`); + res.status(200).send({ status: 'uploaded' }); + }); + + // Handle data uploads (PUT requests) + app.put('*', (req, res) => { + console.log(`[DevRev Server] PUT request to ${req.path} - capturing upload`); + res.status(200).send({ status: 'uploaded' }); + }); + + // Handle PATCH requests + app.patch('*', (req, res) => { + console.log(`[DevRev Server] PATCH request to ${req.path} - capturing upload`); + res.status(200).send({ status: 'uploaded' }); + }); + + const server = app.listen(8003, () => { + console.log('[DevRev Server] Started on port 8003'); + resolve({ server, app }); + }); + }); +} + +/** + * Creates a test event for data extraction + */ +export function createDataExtractionEvent(eventType: 'EXTRACTION_DATA_START' | 'EXTRACTION_DATA_CONTINUE'): any { + return { + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: 'data_extraction_check', + event_type: eventType, + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-dev-org', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'test-version', + service_account_id: 'test-service-account', + secrets: { + service_account_token: 'test-token', + }, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + payload: { + connection_data: { + org_id: 'test-org', + org_name: 'Test Organization', + key: 'test-key', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + 
dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org', + dev_user: 'test-user', + dev_user_id: 'test-user', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'test-unit', + external_sync_unit_name: 'Test Unit', + external_system: 'test-system', + external_system_type: 'test-type', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-request-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-run', + sync_run_id: 'test-run', + sync_tier: 'test-tier', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: eventType, + event_data: {}, + }, + }; +} + +/** + * Sends an event to The Test Snap-In Server + */ +export async function sendEventToSnapIn(event: any): Promise { + const axios = require('axios'); + console.log('[Test] Sending event to snap-in server:', event.execution_metadata.function_name, event.payload.event_type); + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { 'Content-Type': 'application/json' }, + }); + console.log('[Test] Received response from snap-in server'); + return response.data; +} + +/** + * Closes a server gracefully + */ +export function closeServer(server: Server): Promise { + return new Promise((resolve) => { + server.close(() => { + console.log('[Test] Server closed'); + resolve(); + }); + }); +} \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/tsconfig.json b/conformance_tests/data_extraction_workflow_tests/tsconfig.json similarity index 57% rename from conformance_tests/data_extraction_validation/tsconfig.json rename to conformance_tests/data_extraction_workflow_tests/tsconfig.json index 5e1049f..789a9e6 100644 --- a/conformance_tests/data_extraction_validation/tsconfig.json +++ b/conformance_tests/data_extraction_workflow_tests/tsconfig.json @@ -2,13 +2,21 @@ "compilerOptions": { 
"target": "es2017", "module": "commonjs", + "lib": ["es2017"], "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, + "allowSyntheticDefaultImports": true, "strict": true, - "skipLibCheck": true, + "declaration": true, "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", "types": ["jest", "node"] }, - "include": ["*.ts"], - "exclude": ["node_modules"] + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules" + ] } \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation/generate_metadata.test.ts b/conformance_tests/external_domain_metadata_generation/generate_metadata.test.ts deleted file mode 100644 index 7a5c080..0000000 --- a/conformance_tests/external_domain_metadata_generation/generate_metadata.test.ts +++ /dev/null @@ -1,269 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import { Server } from 'http'; -import * as bodyParser from 'body-parser'; - -// Define interfaces for the function result structure -interface FunctionResult { - status: string; - message: string; - metadata: ExternalDomainMetadata; - error?: string; -} - -// Define interfaces for the response structure -interface MetadataResponse { - function_result: FunctionResult; - metadata: ExternalDomainMetadata; - error?: string; -} - -interface ExternalDomainMetadata { - schema_version: string; - record_types: { - [key: string]: RecordType; - }; -} - -interface RecordType { - name: string; - description?: string; - fields: { - [key: string]: Field; - }; - stage_diagram?: StageDiagram; -} - -interface Field { - name: string; - type: string; - is_required: boolean; - is_identifier?: boolean; - is_indexed?: boolean; - enum?: { - values: EnumValue[]; - }; - reference?: { - refers_to: { - [key: string]: any; - }; - }; - collection?: { - min_length?: number; - }; -} - -interface EnumValue { - key: string; - name: string; -} - -interface StageDiagram { - 
controlling_field: string; - starting_stage: string; - all_transitions_allowed: boolean; - stages: { - [key: string]: { - transitions_to: string[]; - state: string; - }; - }; - states: { - [key: string]: { - name: string; - is_end_state?: boolean; - }; - }; -} - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || ''; - -// Setup callback server -let callbackServer: Server; -let app: express.Express; - -beforeAll(() => { - // Check if required environment variables are set - if (!WRIKE_API_KEY) { - throw new Error('WRIKE_API_KEY environment variable is not set'); - } - if (!WRIKE_SPACE_GID) { - throw new Error('WRIKE_SPACE_GID environment variable is not set'); - } - - // Setup callback server - app = express(); - app.use(bodyParser.json()); - app.post('/callback', (req, res) => { - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT); - console.log(`Callback server started on port ${CALLBACK_SERVER_PORT}`); -}); - -afterAll(() => { - // Cleanup callback server - if (callbackServer) { - callbackServer.close(); - console.log('Callback server closed'); - } - return new Promise(resolve => setTimeout(resolve, 500)); // Give server time to close -}); - -// Helper function to create a test event -function createTestEvent() { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback`, - external_sync_unit_id: 'IEAGS6BYI5RFMPPY' - }, - event_type: 'EXTRACTION_METADATA_START' - }, - execution_metadata: { - function_name: 'generate_metadata', - 
devrev_endpoint: 'http://localhost:8003' - }, - input_data: {} - }; -} - -describe('Generate Metadata Function Tests', () => { - // Test 1: Verify the function exists and can be called - test('Function exists and can be called', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - }); - - // Test 2: Verify the function returns the expected structure - test('Function returns the expected structure', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = response.data.function_result as FunctionResult; - - expect(result.status).toBe('success'); - expect(result.message).toBeDefined(); - expect(result.metadata).toBeDefined(); - }); - - // Test 3: Verify the metadata contains the required record types - test('Metadata contains required record types', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = response.data.function_result as FunctionResult; - - expect(result.metadata.record_types).toBeDefined(); - expect(result.metadata.record_types.tasks).toBeDefined(); - expect(result.metadata.record_types.users).toBeDefined(); - }); - - // Test 4: Verify the metadata follows the schema structure - test('Metadata follows the schema structure', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = response.data.function_result as FunctionResult; - - expect(result.metadata.schema_version).toBeDefined(); - expect(Object.keys(result.metadata)).toContain('record_types'); - }); - - // Test 5: Verify the fields in the 'tasks' record type - test('Tasks record type has the expected fields', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = response.data.function_result as FunctionResult; - const tasksType = result.metadata.record_types.tasks; - - // Check basic structure - 
expect(tasksType.name).toBe('Task'); - expect(tasksType.fields).toBeDefined(); - - // Check required fields - const requiredFields = ['id', 'title', 'status', 'importance', 'created_date', 'updated_date']; - for (const field of requiredFields) { - expect(tasksType.fields[field]).toBeDefined(); - expect(tasksType.fields[field].is_required).toBe(true); - } - - // Check field types - expect(tasksType.fields.id.type).toBe('text'); - expect(tasksType.fields.title.type).toBe('text'); - expect(tasksType.fields.description.type).toBe('rich_text'); - expect(tasksType.fields.status.type).toBe('enum'); - expect(tasksType.fields.importance.type).toBe('enum'); - expect(tasksType.fields.created_date.type).toBe('timestamp'); - expect(tasksType.fields.updated_date.type).toBe('timestamp'); - }); - - // Test 6: Verify the fields in the 'users' record type - test('Users record type has the expected fields', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = response.data.function_result as FunctionResult; - const usersType = result.metadata.record_types.users; - - // Check basic structure - expect(usersType.name).toBe('User'); - expect(usersType.fields).toBeDefined(); - - // Check required fields - const requiredFields = ['id', 'first_name', 'last_name', 'type']; - for (const field of requiredFields) { - expect(usersType.fields[field]).toBeDefined(); - expect(usersType.fields[field].is_required).toBe(true); - } - - // Check field types - expect(usersType.fields.id.type).toBe('text'); - expect(usersType.fields.first_name.type).toBe('text'); - expect(usersType.fields.last_name.type).toBe('text'); - expect(usersType.fields.type.type).toBe('enum'); - expect(usersType.fields.email.type).toBe('text'); - }); - - // Test 7: Verify the stage diagram in the 'tasks' record type - test('Tasks record type has the correct stage diagram', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - const result = 
response.data.function_result as FunctionResult; - const tasksType = result.metadata.record_types.tasks; - - // Check stage diagram structure - expect(tasksType.stage_diagram).toBeDefined(); - expect(tasksType.stage_diagram?.controlling_field).toBe('status'); - expect(tasksType.stage_diagram?.starting_stage).toBe('Active'); - expect(tasksType.stage_diagram?.all_transitions_allowed).toBe(false); - - // Check stages - const stages = tasksType.stage_diagram?.stages; - expect(stages).toBeDefined(); - expect(stages?.Active).toBeDefined(); - expect(stages?.Completed).toBeDefined(); - expect(stages?.Deferred).toBeDefined(); - expect(stages?.Cancelled).toBeDefined(); - - // Check transitions - expect(stages?.Active.transitions_to).toContain('Completed'); - expect(stages?.Completed.transitions_to).toContain('Active'); - - // Check states - const states = tasksType.stage_diagram?.states; - expect(states).toBeDefined(); - expect(states?.open).toBeDefined(); - expect(states?.in_progress).toBeDefined(); - expect(states?.closed).toBeDefined(); - expect(states?.closed.is_end_state).toBe(true); - }); -}); \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation/jest.config.js b/conformance_tests/external_domain_metadata_generation/jest.config.js deleted file mode 100644 index fe4fdf6..0000000 --- a/conformance_tests/external_domain_metadata_generation/jest.config.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds timeout as per requirements - resetMocks: false, - globals: { 'ts-jest': { tsconfig: 'tsconfig.test.json' } }, - testMatch: ['**/*.test.ts'], -}; \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation/package.json b/conformance_tests/external_domain_metadata_generation/package.json deleted file mode 100644 index 366d183..0000000 --- a/conformance_tests/external_domain_metadata_generation/package.json +++ 
/dev/null @@ -1,19 +0,0 @@ -{ - "name": "airdrop-snap-in-tests", - "version": "1.0.0", - "description": "Conformance Tests for DevRev Airdrop Snap-in", - "scripts": { - "test": "jest" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.5.0", - "@types/node": "^18.15.0", - "axios": "^1.6.0", - "body-parser": "^1.20.3", - "express": "^4.21.0", - "jest": "^29.5.0", - "ts-jest": "^29.1.0", - "typescript": "^4.9.5" - } -} \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation/validate_metadata.test.ts b/conformance_tests/external_domain_metadata_generation/validate_metadata.test.ts deleted file mode 100644 index 712df76..0000000 --- a/conformance_tests/external_domain_metadata_generation/validate_metadata.test.ts +++ /dev/null @@ -1,135 +0,0 @@ -import axios from 'axios'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; - -const execPromise = promisify(exec); - -// Define interfaces for the function result structure -interface FunctionResult { - status: string; - message: string; - metadata: any; - error?: string; -} - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || ''; -const CHEF_CLI_PATH = process.env.CHEF_CLI_PATH || ''; - -// Helper function to create a test event -function createTestEvent() { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - callback_url: 'http://localhost:8002/callback', - external_sync_unit_id: 'IEAGS6BYI5RFMPPY' - }, - event_type: 'EXTRACTION_METADATA_START' - }, - 
execution_metadata: { - function_name: 'generate_metadata', - devrev_endpoint: 'http://localhost:8003' - }, - input_data: {} - }; -} - -describe('Validate Metadata with Chef CLI', () => { - beforeAll(() => { - // Check if required environment variables are set - if (!WRIKE_API_KEY) { - throw new Error('WRIKE_API_KEY environment variable is not set'); - } - if (!WRIKE_SPACE_GID) { - throw new Error('WRIKE_SPACE_GID environment variable is not set'); - } - if (!CHEF_CLI_PATH) { - throw new Error('CHEF_CLI_PATH environment variable is not set. Chef CLI is required for this test.'); - } - - // Verify Chef CLI exists and is executable - try { - fs.accessSync(CHEF_CLI_PATH, fs.constants.X_OK); - } catch (error) { - throw new Error(`Chef CLI at path ${CHEF_CLI_PATH} is not executable: ${error}`); - } - }); - - test('Generated metadata should be valid according to Chef CLI', async () => { - // Call the generate_metadata function - const response = await axios.post(TEST_SERVER_URL, createTestEvent()); - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - - const result = response.data.function_result as FunctionResult; - expect(result.status).toBe('success'); - expect(result.metadata).toBeDefined(); - - // Create a temporary file to store the metadata - const tempFile = path.join(os.tmpdir(), `metadata-${Date.now()}.json`); - - try { - // Write metadata to the temporary file - fs.writeFileSync(tempFile, JSON.stringify(result.metadata, null, 2)); - console.log(`Metadata written to temporary file: ${tempFile}`); - - // Execute Chef CLI to validate the metadata - const command = `cat ${tempFile} | ${CHEF_CLI_PATH} validate-metadata`; - console.log(`Executing command: ${command}`); - - const { stdout, stderr } = await execPromise(command); - - // Log any output for debugging - if (stdout) console.log(`Chef CLI stdout: ${stdout}`); - if (stderr) console.log(`Chef CLI stderr: ${stderr}`); - - // The test passes if Chef CLI returns an empty output - 
expect(stdout.trim()).toBe(''); - expect(stderr.trim()).toBe(''); - - } catch (error: any) { - console.error('Error during Chef CLI validation:', error); - - // If the error is from the Chef CLI execution, provide detailed error information - if (error.stdout || error.stderr) { - console.error('Chef CLI stdout:', error.stdout); - console.error('Chef CLI stderr:', error.stderr); - - // Read the metadata file for debugging - try { - const metadata = fs.readFileSync(tempFile, 'utf8'); - console.error('Metadata content that failed validation:', metadata); - } catch (readError) { - console.error('Could not read metadata file:', readError); - } - } - - throw new Error(`Chef CLI validation failed: ${error.message}`); - } finally { - // Clean up the temporary file - try { - fs.unlinkSync(tempFile); - console.log(`Temporary file removed: ${tempFile}`); - } catch (unlinkError) { - console.warn(`Could not remove temporary file ${tempFile}:`, unlinkError); - } - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation_tests/get_external_domain_metadata.test.ts b/conformance_tests/external_domain_metadata_generation_tests/get_external_domain_metadata.test.ts new file mode 100644 index 0000000..fd80896 --- /dev/null +++ b/conformance_tests/external_domain_metadata_generation_tests/get_external_domain_metadata.test.ts @@ -0,0 +1,112 @@ +import { getTestConfig, createBaseEvent, invokeFunction, validateWithChefCli } from './test-utils'; + +describe('get_external_domain_metadata function', () => { + let config: ReturnType; + + beforeAll(() => { + config = getTestConfig(); + }); + + describe('Function Invocation', () => { + it('should successfully invoke the function and return a response', async () => { + const event = createBaseEvent('get_external_domain_metadata', config); + const response = await invokeFunction(event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + 
expect(response.function_result.status).toBe('success'); + expect(response.function_result.message).toBeDefined(); + expect(response.function_result.metadata).toBeDefined(); + expect(response.function_result.data).toBeDefined(); + expect(response.function_result.timestamp).toBeDefined(); + }, 30000); + }); + + describe('Metadata Structure', () => { + let metadata: any; + + beforeAll(async () => { + const event = createBaseEvent('get_external_domain_metadata', config); + const response = await invokeFunction(event); + metadata = response.function_result.data; + }); + + it('should have correct schema_version', () => { + expect(metadata.schema_version).toBe('v0.2.0'); + }); + + it('should contain users record type', () => { + expect(metadata.record_types).toBeDefined(); + expect(metadata.record_types.users).toBeDefined(); + }); + + it('should have correct name for users record type', () => { + expect(metadata.record_types.users.name).toBe('Users'); + }); + }); + + describe('Users Record Type Fields', () => { + let usersFields: any; + + beforeAll(async () => { + const event = createBaseEvent('get_external_domain_metadata', config); + const response = await invokeFunction(event); + usersFields = response.function_result.data.record_types.users.fields; + }); + + it('should have exactly three fields: full_name, email, title', () => { + const fieldNames = Object.keys(usersFields); + expect(fieldNames).toHaveLength(3); + expect(fieldNames).toContain('full_name'); + expect(fieldNames).toContain('email'); + expect(fieldNames).toContain('title'); + }); + + it('should have correct full_name field configuration', () => { + expect(usersFields.full_name).toBeDefined(); + expect(usersFields.full_name.type).toBe('text'); + expect(usersFields.full_name.name).toBe('Full Name'); + expect(usersFields.full_name.is_required).toBe(true); + }); + + it('should have correct email field configuration', () => { + expect(usersFields.email).toBeDefined(); + 
expect(usersFields.email.type).toBe('text'); + expect(usersFields.email.name).toBe('Email'); + expect(usersFields.email.is_required).toBe(true); + }); + + it('should have correct title field configuration', () => { + expect(usersFields.title).toBeDefined(); + expect(usersFields.title.type).toBe('text'); + expect(usersFields.title.name).toBe('Title'); + expect(usersFields.title.is_required).toBe(false); + }); + }); + + describe('Chef CLI Validation', () => { + it('should validate successfully with Chef CLI', async () => { + // Check if Chef CLI is available + if (!config.chefCliPath) { + fail('Chef CLI path is not configured. CHEF_CLI_PATH environment variable is required.'); + } + + const event = createBaseEvent('get_external_domain_metadata', config); + const response = await invokeFunction(event); + const metadata = response.function_result.data; + + const validationResult = await validateWithChefCli(metadata, config.chefCliPath); + + expect(validationResult.success).toBe(true); + + if (!validationResult.success) { + fail( + `Chef CLI validation failed.\n` + + `Exit code indicates failure or non-empty output.\n` + + `Stdout: ${validationResult.stdout}\n` + + `Stderr: ${validationResult.stderr}` + ); + } + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation_tests/package.json b/conformance_tests/external_domain_metadata_generation_tests/package.json new file mode 100644 index 0000000..79ebe91 --- /dev/null +++ b/conformance_tests/external_domain_metadata_generation_tests/package.json @@ -0,0 +1,22 @@ +{ + "name": "wrike-snap-in-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --testTimeout=120000" + }, + "devDependencies": { + "@types/jest": "^29.5.0", + "@types/node": "^20.0.0", + "axios": "^1.6.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.0" + }, + "jest": { + "preset": "ts-jest", + "testEnvironment": 
"node", + "testMatch": ["**/*.test.ts"], + "setupFilesAfterEnv": ["./jest.setup.js"] + } +} \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation_tests/test-utils.ts b/conformance_tests/external_domain_metadata_generation_tests/test-utils.ts new file mode 100644 index 0000000..9145d9f --- /dev/null +++ b/conformance_tests/external_domain_metadata_generation_tests/test-utils.ts @@ -0,0 +1,158 @@ +import axios from 'axios'; +import { spawn } from 'child_process'; + +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; +} + +/** + * Read required environment variables for tests + */ +export function getTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + if (!chefCliPath) { + throw new Error('CHEF_CLI_PATH environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + chefCliPath, + }; +} + +/** + * Create a base event payload for function invocation + */ +export function createBaseEvent(functionName: string, config: TestConfig): any { + return { + payload: { + connection_data: { + key: config.wrikeApiKey, + org_id: config.wrikeSpaceId, + org_name: 'Test Space', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-${Date.now()}`, + snap_in_slug: 
'wrike-snap-in', + snap_in_version_id: 'v1', + sync_run: 'test-run', + sync_run_id: 'test-run-id', + sync_tier: 'standard', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: 'test_event', + }, + context: { + dev_oid: 'test-org-id', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'v1', + service_account_id: 'test-sa', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-${Date.now()}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Send a request to The Test Snap-In Server + */ +export async function invokeFunction(event: any): Promise { + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, + }); + + return response.data; +} + +/** + * Validate metadata using Chef CLI + */ +export async function validateWithChefCli( + metadata: any, + chefCliPath: string +): Promise<{ success: boolean; stdout: string; stderr: string }> { + return new Promise((resolve, reject) => { + const metadataJson = JSON.stringify(metadata); + + const child = spawn(chefCliPath, ['validate-metadata'], { + stdio: ['pipe', 'pipe', 'pipe'], + }); + + let stdout = ''; + let stderr = ''; + + child.stdout.on('data', (data) => { + const output = data.toString(); + stdout += output; + console.log('[Chef CLI stdout]:', output); + }); + + child.stderr.on('data', (data) => { + const output = data.toString(); + stderr += output; + console.error('[Chef CLI stderr]:', output); + }); + + child.on('error', (error) => { + reject(new Error(`Failed to spawn Chef CLI: ${error.message}`)); + }); + + child.on('close', (code) => { + console.log(`[Chef CLI] Process exited with code 
${code}`); + resolve({ + success: code === 0 && stdout.trim() === '', + stdout, + stderr, + }); + }); + + // Write metadata to stdin + child.stdin.write(metadataJson); + child.stdin.end(); + }); +} \ No newline at end of file diff --git a/conformance_tests/function_invocation_check/tsconfig.json b/conformance_tests/external_domain_metadata_generation_tests/tsconfig.json similarity index 54% rename from conformance_tests/function_invocation_check/tsconfig.json rename to conformance_tests/external_domain_metadata_generation_tests/tsconfig.json index 259b6dd..52e83ce 100644 --- a/conformance_tests/function_invocation_check/tsconfig.json +++ b/conformance_tests/external_domain_metadata_generation_tests/tsconfig.json @@ -2,13 +2,19 @@ "compilerOptions": { "target": "es2017", "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", "strict": true, + "esModuleInterop": true, "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, "resolveJsonModule": true, - "typeRoots": ["./node_modules/@types", "./types"] + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "moduleResolution": "node" }, "include": ["*.ts"], - "exclude": ["node_modules"] + "exclude": ["node_modules", "dist"] } \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_push/metadata-extraction.test.ts b/conformance_tests/external_domain_metadata_push/metadata-extraction.test.ts deleted file mode 100644 index cf6f819..0000000 --- a/conformance_tests/external_domain_metadata_push/metadata-extraction.test.ts +++ /dev/null @@ -1,266 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { EventType } from '@devrev/ts-adaas'; -import { Server } from 'http'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const 
CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}/callback`; -const TEST_TIMEOUT = 30000; // 30 seconds - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'IEAGS6BYI5RFMPPY'; - -// Test data for external sync unit -const EXTERNAL_SYNC_UNIT_ID = 'IEAGS6BYI5RFMPPY'; - -// Global variables to store received data -let receivedMetadata: any = null; -let receivedEvents: any[] = []; - -describe('Metadata Extraction Tests', () => { - let callbackServer: Server; - - // Set up callback server before tests - beforeAll((done) => { - // Check if required environment variable is set - if (!WRIKE_API_KEY) { - console.error('WRIKE_API_KEY environment variable is not set'); - console.warn('Using a placeholder API key for testing. This will not work with real API calls.'); - } - - // Create a callback server to receive the metadata - const app = express(); - app.use(bodyParser.json({ limit: '50mb' })); - - // Endpoint to receive the metadata - app.post('/callback', (req, res) => { - console.log('Callback server received request:', req.path); - const data = req.body; - - // Log a truncated version of the data to avoid console spam - console.log('Callback server received data:', JSON.stringify(data).substring(0, 200) + '...'); - - // Store the received data - receivedEvents.push(data); - if (data && !data.event_type) // If it's not an event but actual metadata - receivedMetadata = data; - - res.status(200).send({ status: 'success' }); - }); - - // Start the callback server - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server is running at ${CALLBACK_SERVER_URL}`); - done(); - }); - }, TEST_TIMEOUT); - - // Clean up after tests - afterAll((done) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - done(); - }); - } else { - done(); - } - }); - - // Reset receivedMetadata before each test - beforeEach(() 
=> { - receivedMetadata = null; - receivedEvents = []; - }); - - // Test 1: Basic setup test - test('Basic setup test - callback server is running', () => { - expect(callbackServer).toBeDefined(); - }, TEST_TIMEOUT); - - // Test 2: Event structure test - test('Event structure test - can create valid event structure', () => { - const event = createMetadataExtractionEvent(); - expect(event).toBeDefined(); - expect(event.payload.event_type).toBe('EXTRACTION_METADATA_START'); - expect(event.payload.event_context.callback_url).toBe(CALLBACK_SERVER_URL); - }, TEST_TIMEOUT); - - // Test 3: Callback server test - test('Callback server test - can receive requests', async () => { - // Send a test request to the callback server - await axios.post(CALLBACK_SERVER_URL, { test: 'data' }, { headers: { 'Content-Type': 'application/json' } }); - expect(receivedMetadata).toBeDefined(); - expect(receivedMetadata.test).toBe('data'); - }, TEST_TIMEOUT); - - // Test 4: Metadata extraction test - test('Metadata extraction test - pushes metadata to repository without normalizing', async () => { - // Create the event - const event = createMetadataExtractionEvent(); - - // Send the event to the test server - console.log('Sending metadata extraction event to test server...'); - const response = await axios.post(TEST_SERVER_URL, event, { - headers: { 'Content-Type': 'application/json' } - , timeout: 60000 // 60 seconds timeout - }); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - - // Wait for the callback server to receive the metadata - // This may take some time as the extraction function processes the event - const callbackResult = await waitForCallback(60000); // Wait up to 60 seconds - - console.log(`Received ${receivedEvents.length} events from callback server`); - - // Check if we received any events - expect(receivedEvents.length).toBeGreaterThan(0); - - // We need to check if we received either: - // 1. 
The actual metadata directly - // 2. An event indicating the metadata was processed (success or error) - - if (receivedMetadata) { - // If we received the actual metadata, verify its structure - console.log('Received metadata directly, verifying structure...'); - - // Verify the metadata structure - expect(Array.isArray(receivedMetadata)).toBe(true); - expect(receivedMetadata.length).toBeGreaterThan(0); - - // Get the first item in the array (should be the metadata) - const metadata = receivedMetadata[0]; - - // Verify the metadata contains the expected record types - expect(metadata).toHaveProperty('schema_version'); - expect(metadata).toHaveProperty('record_types'); - expect(metadata.record_types).toHaveProperty('tasks'); - expect(metadata.record_types).toHaveProperty('users'); - - // Verify the tasks record type has the expected fields - const tasksRecordType = metadata.record_types.tasks; - expect(tasksRecordType).toHaveProperty('name', 'Task'); - expect(tasksRecordType).toHaveProperty('fields'); - expect(tasksRecordType.fields).toHaveProperty('id'); - expect(tasksRecordType.fields).toHaveProperty('title'); - expect(tasksRecordType.fields).toHaveProperty('status'); - - // Verify the users record type has the expected fields - const usersRecordType = metadata.record_types.users; - expect(usersRecordType).toHaveProperty('name', 'User'); - expect(usersRecordType).toHaveProperty('fields'); - expect(usersRecordType.fields).toHaveProperty('id'); - expect(usersRecordType.fields).toHaveProperty('first_name'); - expect(usersRecordType.fields).toHaveProperty('last_name'); - } else { - // If we didn't receive the metadata directly, check for events - console.log('No direct metadata received, checking for events...'); - - // Find any metadata-related events - const metadataEvents = receivedEvents.filter(event => - event.event_type === 'EXTRACTION_METADATA_DONE' || - event.event_type === 'EXTRACTION_METADATA_ERROR' - ); - - expect(metadataEvents.length).toBeGreaterThan(0); 
- - // Log the events for debugging - metadataEvents.forEach((event, index) => { - console.log(`Metadata event ${index + 1}:`, event.event_type); - if (event.event_data && event.event_data.error) { - console.log(`Error message: ${event.event_data.error.message}`); - } - }); - } - - console.log('Metadata extraction test passed successfully'); - }, TEST_TIMEOUT); -}); - -// Helper function to create a metadata extraction event -function createMetadataExtractionEvent() { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - execution_metadata: { - function_name: 'extraction', - devrev_endpoint: 'http://localhost:8003' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - org_name: 'Test Space', - key_type: 'api_key' - }, - event_type: EventType.ExtractionMetadataStart, - event_context: { - callback_url: CALLBACK_SERVER_URL, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'Test Project', - external_sync_unit_id: EXTERNAL_SYNC_UNIT_ID, - external_sync_unit_name: 'Test Project', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-snap-in-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - input_data: {} - }; -} - -// Helper function to wait for the callback server to receive data -async function waitForCallback(timeout: number): Promise { - const startTime = Date.now(); - const checkInterval = 1000; // Check every second - let elapsedTime = 0; - - console.log(`Waiting up to ${timeout}ms 
for callback data...`); - - while (elapsedTime < timeout) { - // Check if we've received either metadata or events - if (receivedMetadata !== null || receivedEvents.length > 0) { - console.log(`Received callback data after ${elapsedTime}ms`); - return true; - } - - // Wait before checking again - await new Promise(resolve => setTimeout(resolve, checkInterval)); - - elapsedTime = Date.now() - startTime; - - // Log progress every 5 seconds - if (elapsedTime % 5000 < checkInterval) { - console.log(`Still waiting for callback data... (${elapsedTime}ms elapsed)`); - } - } - - console.warn(`Warning: No callback received within ${timeout}ms`); - return false; -} \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_push/package.json b/conformance_tests/external_domain_metadata_push/package.json deleted file mode 100644 index ed81482..0000000 --- a/conformance_tests/external_domain_metadata_push/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "wrike-airdrop-conformance-tests", - "version": "1.0.0", - "description": "Conformance tests for Wrike Airdrop snap-in", - "main": "index.js", - "scripts": { - "test": "jest --forceExit --detectOpenHandles" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.5.12", - "@types/node": "^18.19.26", - "jest": "^29.7.0", - "ts-jest": "^29.1.2", - "typescript": "^4.9.5" - } -} \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_push/tsconfig.json b/conformance_tests/external_domain_metadata_push/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/external_domain_metadata_push/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - 
"skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/acceptance_test_external_sync_units.test.ts b/conformance_tests/external_sync_units_extraction_tests/acceptance_test_external_sync_units.test.ts new file mode 100644 index 0000000..659bf19 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/acceptance_test_external_sync_units.test.ts @@ -0,0 +1,142 @@ +import { CallbackServer } from './test-utils/callback-server'; +import { + loadEventFromFile, + getCredentialsFromEnv, +} from './test-utils/event-factory'; +import { SnapInClient } from './test-utils/snap-in-client'; + +describe('Acceptance Test: External Sync Units Extraction', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + + beforeAll(async () => { + callbackServer = new CallbackServer(8002); + await callbackServer.start(); + snapInClient = new SnapInClient('http://localhost:8000'); + }); + + afterAll(async () => { + await callbackServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearCallbacks(); + }); + + describe('External Sync Unit Check from Resource File', () => { + it('should process event from external_sync_unit_check.json and receive EXTRACTION_EXTERNAL_SYNC_UNITS_DONE callback', async () => { + // Load event from the resource file + let event: any; + try { + const credentials = getCredentialsFromEnv(); + event = loadEventFromFile('external_sync_unit_check.json', credentials); + } catch (error) { + throw new Error( + `Failed to load test event from file.\n` + + `Error: ${error instanceof Error ? 
error.message : String(error)}\n` + + `This test requires the external_sync_unit_check.json file in the test-data directory.` + ); + } + + // Verify event structure + if (!event.execution_metadata) { + throw new Error( + `Invalid event structure: missing execution_metadata.\n` + + `Event: ${JSON.stringify(event, null, 2)}` + ); + } + + if (!event.payload || !event.payload.event_type) { + throw new Error( + `Invalid event structure: missing payload or event_type.\n` + + `Event: ${JSON.stringify(event, null, 2)}` + ); + } + + // Log event details for debugging + console.log('Test event details:'); + console.log(` Function: ${event.execution_metadata.function_name}`); + console.log(` Event Type: ${event.payload.event_type}`); + console.log(` Callback URL: ${event.payload.event_context?.callback_url}`); + + // Invoke the function + let response; + try { + response = await snapInClient.invokeFunction(event); + } catch (error) { + throw new Error( + `Failed to invoke snap-in function.\n` + + `Error: ${error instanceof Error ? error.message : String(error)}\n` + + `Event sent: ${JSON.stringify(event, null, 2)}` + ); + } + + // Verify the function invocation was successful + expect(response.status).toBe(200); + if (response.data.error) { + throw new Error( + `Function returned an error.\n` + + `Error: ${JSON.stringify(response.data.error, null, 2)}\n` + + `Full response: ${JSON.stringify(response.data, null, 2)}` + ); + } + + // Wait for callback from DevRev + let callback; + try { + callback = await callbackServer.waitForCallback(15000); + } catch (error) { + const receivedCallbacks = callbackServer.getCallbacks(); + throw new Error( + `Timeout waiting for callback from DevRev.\n` + + `Expected: Callback with event_type "EXTRACTION_EXTERNAL_SYNC_UNITS_DONE"\n` + + `Received callbacks count: ${receivedCallbacks.length}\n` + + `Received callbacks: ${JSON.stringify(receivedCallbacks, null, 2)}\n` + + `Original error: ${error instanceof Error ? 
error.message : String(error)}\n` + + `Note: The callback must come from DevRev, not sent directly to the callback server.` + ); + } + + // Log callback details for debugging + console.log('Received callback:'); + console.log(` Event Type: ${callback.event_type}`); + console.log(` Event Data: ${JSON.stringify(callback.event_data, null, 2)}`); + + // Verify the callback event type + expect(callback).toBeDefined(); + if (callback.event_type !== 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE') { + throw new Error( + `Incorrect callback event type received.\n` + + `Expected: "EXTRACTION_EXTERNAL_SYNC_UNITS_DONE"\n` + + `Received: "${callback.event_type}"\n` + + `Full callback: ${JSON.stringify(callback, null, 2)}` + ); + } + + expect(callback.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); + + // Verify the callback contains external_sync_units array + expect(callback.event_data).toBeDefined(); + if (!callback.event_data) { + throw new Error( + `Callback missing event_data.\n` + + `Full callback: ${JSON.stringify(callback, null, 2)}` + ); + } + + expect(callback.event_data.external_sync_units).toBeDefined(); + if (!Array.isArray(callback.event_data.external_sync_units)) { + throw new Error( + `Callback event_data.external_sync_units is not an array.\n` + + `Type: ${typeof callback.event_data.external_sync_units}\n` + + `Value: ${JSON.stringify(callback.event_data.external_sync_units, null, 2)}\n` + + `Full callback: ${JSON.stringify(callback, null, 2)}` + ); + } + + expect(Array.isArray(callback.event_data.external_sync_units)).toBe(true); + + console.log('✓ Acceptance test passed: Received EXTRACTION_EXTERNAL_SYNC_UNITS_DONE callback from DevRev'); + }); + }); +}); \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/jest.config.js b/conformance_tests/external_sync_units_extraction_tests/jest.config.js new file mode 100644 index 0000000..64df765 --- /dev/null +++ 
b/conformance_tests/external_sync_units_extraction_tests/jest.config.js @@ -0,0 +1,21 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.tsx?$': [ + 'ts-jest', + { + tsconfig: { + esModuleInterop: true, + }, + }, + ], + }, + collectCoverageFrom: [ + 'src/**/*.{ts,tsx}', + '!src/**/*.d.ts', + ], +}; \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/package.json b/conformance_tests/external_sync_units_extraction_tests/package.json new file mode 100644 index 0000000..33f0e52 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/package.json @@ -0,0 +1,21 @@ +{ + "name": "conformance-tests-external-sync-units", + "version": "1.0.0", + "description": "Conformance tests for external sync units extraction", + "scripts": { + "test": "jest" + }, + "devDependencies": { + "@types/express": "^4.17.21", + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "body-parser": "^1.20.3", + "express": "^4.21.0" + } +} \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/external_sync_unit_check.json b/conformance_tests/external_sync_units_extraction_tests/test-data/external_sync_unit_check.json similarity index 95% rename from conformance_tests/extraction_external_sync_unit_push/external_sync_unit_check.json rename to conformance_tests/external_sync_units_extraction_tests/test-data/external_sync_unit_check.json index e7fe5e6..c0a68a6 100644 --- a/conformance_tests/extraction_external_sync_unit_push/external_sync_unit_check.json +++ b/conformance_tests/external_sync_units_extraction_tests/test-data/external_sync_unit_check.json @@ -2,10 +2,10 @@ { "payload": { "connection_data": { - "key": 
"test-key", + "key": "", "key_type": "", - "org_id": "org-id", - "org_name": "Personal" + "org_id": "", + "org_name": "First Space" }, "event_context": { "callback_url": "http://localhost:8002/callback", diff --git a/conformance_tests/external_sync_units_extraction_tests/test-utils/callback-server.ts b/conformance_tests/external_sync_units_extraction_tests/test-utils/callback-server.ts new file mode 100644 index 0000000..8b25ce8 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/test-utils/callback-server.ts @@ -0,0 +1,70 @@ +import express, { Express, Request, Response } from 'express'; +import bodyParser from 'body-parser'; +import { Server } from 'http'; + +export interface CallbackData { + event_type: string; + event_data?: any; + [key: string]: any; +} + +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private callbacks: CallbackData[] = []; + private port: number; + + constructor(port: number = 8002) { + this.port = port; + this.app = express(); + this.app.use(bodyParser.json()); + this.setupRoutes(); + } + + private setupRoutes(): void { + this.app.post('/callback', (req: Request, res: Response) => { + this.callbacks.push(req.body); + res.status(200).send({ status: 'received' }); + }); + } + + async start(): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(this.port, () => { + resolve(); + }); + }); + } + + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + getCallbacks(): CallbackData[] { + return this.callbacks; + } + + clearCallbacks(): void { + this.callbacks = []; + } + + async waitForCallback(timeout: number = 5000): Promise { + const startTime = Date.now(); + while (Date.now() - startTime < timeout) { + if (this.callbacks.length > 0) { + return this.callbacks[this.callbacks.length - 1]; + } + 
await new Promise(resolve => setTimeout(resolve, 100)); + } + throw new Error('Timeout waiting for callback'); + } +} \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/test-utils/event-factory.ts b/conformance_tests/external_sync_units_extraction_tests/test-utils/event-factory.ts new file mode 100644 index 0000000..e5eb440 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/test-utils/event-factory.ts @@ -0,0 +1,166 @@ +import * as fs from 'fs'; +import * as path from 'path'; + +export interface TestEventOptions { + functionName: string; + eventType: string; + callbackUrl?: string; + connectionData?: { + org_id: string; + org_name: string; + key: string; + key_type: string; + }; + eventData?: any; +} + +export function createTestEvent(options: TestEventOptions): any { + const { + functionName, + eventType, + callbackUrl = 'http://localhost:8002/callback', + connectionData = { + org_id: 'test-org-id', + org_name: 'Test Organization', + key: 'test-api-key', + key_type: 'api_key', + }, + eventData = {}, + } = options; + + return { + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: eventType, + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + payload: { + connection_data: connectionData, + event_type: eventType, + event_data: eventData, + event_context: { + callback_url: callbackUrl, + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-sync-unit', + external_sync_unit_id: 'test-sync-unit-id', + external_sync_unit_name: 
'Test Sync Unit', + external_system: 'test-system', + external_system_type: 'test-type', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-request-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + }, + }; +} + +export interface CredentialReplacements { + apiKey?: string; + orgId?: string; +} + +/** + * Loads an event from a JSON file and replaces credential placeholders + * @param filePath - Path to the JSON file relative to the test-data directory + * @param credentials - Optional credentials to replace placeholders + * @returns The event object with replaced credentials + */ +export function loadEventFromFile( + filePath: string, + credentials?: CredentialReplacements +): any { + const fullPath = path.join(__dirname, '..', 'test-data', filePath); + + if (!fs.existsSync(fullPath)) { + throw new Error( + `Event file not found: ${fullPath}\n` + + `Please ensure the file exists in the test-data directory.` + ); + } + + let fileContent: string; + try { + fileContent = fs.readFileSync(fullPath, 'utf-8'); + } catch (error) { + throw new Error( + `Failed to read event file: ${fullPath}\n` + + `Error: ${error instanceof Error ? error.message : String(error)}` + ); + } + + let events: any[]; + try { + events = JSON.parse(fileContent); + } catch (error) { + throw new Error( + `Failed to parse JSON from event file: ${fullPath}\n` + + `Error: ${error instanceof Error ? 
error.message : String(error)}\n` + + `File content preview: ${fileContent.substring(0, 200)}...` + ); + } + + if (!Array.isArray(events) || events.length === 0) { + throw new Error( + `Invalid event file format: ${fullPath}\n` + + `Expected an array with at least one event, got: ${typeof events}` + ); + } + + // Take the first event from the array + const event = JSON.parse(JSON.stringify(events[0])); // Deep clone + + // Replace credential placeholders if provided + if (credentials) { + if ( + event.payload && + event.payload.connection_data + ) { + if (credentials.apiKey !== undefined) { + event.payload.connection_data.key = credentials.apiKey; + } + if (credentials.orgId !== undefined) { + event.payload.connection_data.org_id = credentials.orgId; + } + } + } + + return event; +} + +/** + * Gets credentials from environment variables + * @returns Credential replacements object + */ +export function getCredentialsFromEnv(): CredentialReplacements { + return { + apiKey: process.env.WRIKE_API_KEY || 'test-api-key', + orgId: process.env.WRIKE_SPACE_ID || 'test-space-id', + }; +} \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/test-utils/snap-in-client.ts b/conformance_tests/external_sync_units_extraction_tests/test-utils/snap-in-client.ts new file mode 100644 index 0000000..54772dc --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/test-utils/snap-in-client.ts @@ -0,0 +1,17 @@ +import axios, { AxiosResponse } from 'axios'; + +export class SnapInClient { + private baseUrl: string; + + constructor(baseUrl: string = 'http://localhost:8000') { + this.baseUrl = baseUrl; + } + + async invokeFunction(event: any): Promise { + return axios.post(`${this.baseUrl}/handle/sync`, event, { + headers: { + 'Content-Type': 'application/json', + }, + }); + } +} \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/test_external_sync_units.test.ts 
b/conformance_tests/external_sync_units_extraction_tests/test_external_sync_units.test.ts new file mode 100644 index 0000000..a3370d1 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/test_external_sync_units.test.ts @@ -0,0 +1,83 @@ +import { CallbackServer } from './test-utils/callback-server'; +import { createTestEvent } from './test-utils/event-factory'; +import { SnapInClient } from './test-utils/snap-in-client'; + +describe('test_external_sync_units Function', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + + beforeAll(async () => { + callbackServer = new CallbackServer(8002); + await callbackServer.start(); + snapInClient = new SnapInClient('http://localhost:8000'); + }); + + afterAll(async () => { + await callbackServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearCallbacks(); + }); + + describe('Basic Invocation', () => { + it('should successfully invoke the function with valid event', async () => { + const event = createTestEvent({ + functionName: 'test_external_sync_units', + eventType: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + }); + + const response = await snapInClient.invokeFunction(event); + + expect(response.status).toBe(200); + expect(response.data).toBeDefined(); + }); + }); + + describe('Event Type Processing', () => { + it('should process EXTRACTION_EXTERNAL_SYNC_UNITS_START event type', async () => { + const event = createTestEvent({ + functionName: 'test_external_sync_units', + eventType: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + }); + + const response = await snapInClient.invokeFunction(event); + + expect(response.status).toBe(200); + expect(response.data.error).toBeUndefined(); + }); + }); + + describe('Callback Emission', () => { + it('should emit EXTRACTION_EXTERNAL_SYNC_UNITS_DONE event', async () => { + const event = createTestEvent({ + functionName: 'test_external_sync_units', + eventType: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + callbackUrl: 
'http://localhost:8002/callback', + }); + + await snapInClient.invokeFunction(event); + + const callback = await callbackServer.waitForCallback(10000); + + expect(callback).toBeDefined(); + expect(callback.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); + }); + + it('should include external_sync_units array in callback', async () => { + const event = createTestEvent({ + functionName: 'test_external_sync_units', + eventType: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + callbackUrl: 'http://localhost:8002/callback', + }); + + await snapInClient.invokeFunction(event); + + const callback = await callbackServer.waitForCallback(10000); + + expect(callback.event_data).toBeDefined(); + expect(callback.event_data.external_sync_units).toBeDefined(); + expect(Array.isArray(callback.event_data.external_sync_units)).toBe(true); + }); + }); +}); \ No newline at end of file diff --git a/conformance_tests/external_sync_units_extraction_tests/tsconfig.test.json b/conformance_tests/external_sync_units_extraction_tests/tsconfig.test.json new file mode 100644 index 0000000..a873cc0 --- /dev/null +++ b/conformance_tests/external_sync_units_extraction_tests/tsconfig.test.json @@ -0,0 +1,17 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "esModuleInterop": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": [ + "**/*.test.ts", + "test-utils/**/*.ts" + ] +} \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/data_extraction.test.ts b/conformance_tests/extraction_data_repository_push/data_extraction.test.ts deleted file mode 100644 index 6d7e1a9..0000000 --- a/conformance_tests/extraction_data_repository_push/data_extraction.test.ts +++ /dev/null @@ -1,121 +0,0 @@ -import axios from 'axios'; -import { ExtractorEventType } from '@devrev/ts-adaas'; -import { promises as fs } from 
'fs'; -import { setTimeout as setTimeoutPromise } from 'node:timers/promises'; -import * as path from 'path'; -import { setupCallbackServer, teardownCallbackServer, waitForEvent, resetEvents, hasReceivedEvent, waitForAnyEventWithTimeout, CALLBACK_SERVER_URL } from './test-utils'; - -// Server configurations -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; - -// Test timeout (120 seconds as specified in requirements) -jest.setTimeout(60000); // Reduced to ensure tests complete within the overall limit - -describe('Data Extraction Acceptance Test', () => { - // Setup and teardown the callback server - beforeAll(() => setupCallbackServer()); - - // Reset events before each test - beforeEach(() => { - resetEvents(); - }); - afterAll(() => teardownCallbackServer()); - - test('Extraction function processes data and emits a single EXTRACTION_DATA_DONE event', async () => { - try { - // Load test data from the resource file with a timeout - const testDataPath = path.resolve(__dirname, 'resources', 'data_extraction_test.json'); - - console.log(`[${new Date().toISOString()}] Loading test data from ${testDataPath}`); - const testDataRaw = await fs.readFile(testDataPath, 'utf8'); - const testData = JSON.parse(testDataRaw); - - // Validate test data - if (!Array.isArray(testData) || testData.length === 0) { - throw new Error(`Invalid test data: Expected non-empty array, got ${typeof testData}`); - } - - // Get the first event from the test data - const event = JSON.parse(JSON.stringify(testData[0])); // Deep clone to avoid modifying the original - - // Update the callback URL to point to our test server - if (event.payload && event.payload.event_context) { - event.payload.event_context.callback_url = `${CALLBACK_SERVER_URL}/`; - - // Add a unique identifier to prevent caching - event.payload.event_context.sync_run_id = `test-sync-run-id-${Date.now()}`; - // Use the specified project ID - event.payload.event_context.external_sync_unit_id = 'IEAGS6BYI5RFMPPY'; - } 
else { - throw new Error('Invalid test data: Missing payload.event_context structure'); - } - - // Update the API key and space ID from environment variables - if (event.payload && event.payload.connection_data) { - const apiKey = process.env.WRIKE_API_KEY; - const spaceId = process.env.WRIKE_SPACE_GID; - - if (!apiKey) { - throw new Error('WRIKE_API_KEY environment variable is required'); - } - - if (!spaceId) { - throw new Error('WRIKE_SPACE_GID environment variable is required'); - } - - event.payload.connection_data.key = apiKey; - event.payload.connection_data.org_id = spaceId; - } else { - throw new Error('Invalid test data: Missing payload.connection_data structure'); - } - - console.log(`[${new Date().toISOString()}] Sending test event to snap-in server`); - - // Send request to snap-in server - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { 'Content-Type': 'application/json' } - }); - - // Verify response - console.log(`[${new Date().toISOString()}] Received response from snap-in server`); - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - - // Wait for the EXTRACTION_DATA_DONE event - console.log(`[${new Date().toISOString()}] Waiting for EXTRACTION_DATA_DONE event...`); - const doneEvent = await waitForEvent(ExtractorEventType.ExtractionDataDone, 45000); - - console.log(`[${new Date().toISOString()}] Received EXTRACTION_DATA_DONE event`); - - // Wait a short time to ensure no other DONE events arrive - await setTimeoutPromise(2000); - - // Check that we received exactly one EXTRACTION_DATA_DONE event - const doneEvents = global.receivedEvents.filter(e => e && e.event_type === ExtractorEventType.ExtractionDataDone); - - if (doneEvents.length !== 1) { - console.error('Expected exactly one EXTRACTION_DATA_DONE event, but received:', - doneEvents.length, - 'All received events:', - JSON.stringify(global.receivedEventTypes, null, 2) - ); - } - - // Expected exactly one DONE event - 
expect(doneEvents.length).toBe(1); - - // Check that we didn't receive any error events - const errorEvents = global.receivedEvents.filter(e => e && e.event_type === ExtractorEventType.ExtractionDataError); - - if (errorEvents.length > 0) { - console.error('Received unexpected error events:', JSON.stringify(errorEvents, null, 2)); - } - - expect(errorEvents.length).toBe(0); - - } catch (error) { - console.error(`[${new Date().toISOString()}] Test failed with error:`, error); - throw error; - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/extraction.test.ts b/conformance_tests/extraction_data_repository_push/extraction.test.ts deleted file mode 100644 index eae479b..0000000 --- a/conformance_tests/extraction_data_repository_push/extraction.test.ts +++ /dev/null @@ -1,135 +0,0 @@ -import axios from 'axios'; -import { EventType, ExtractorEventType } from '@devrev/ts-adaas'; -import { setTimeout as setTimeoutPromise } from 'node:timers/promises'; -import { setupCallbackServer, teardownCallbackServer, waitForEvent, resetEvents, CALLBACK_SERVER_URL, hasReceivedEvent, waitForAnyEventWithTimeout } from './test-utils'; -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || ''; -const PROJECT_ID = 'IEAGS6BYI5RFMPPY'; // As specified in requirements - -// Server configurations -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; - -// Test timeout (reduced to ensure tests complete within the 120 second limit) -jest.setTimeout(120000); - -// Validate environment variables -beforeAll(() => { - if (!WRIKE_API_KEY) throw new Error('WRIKE_API_KEY environment variable is required'); - if (!WRIKE_SPACE_GID) throw new Error('WRIKE_SPACE_GID environment variable is required'); -}); - -describe('Extraction Function Conformance Tests', () => { - // Setup and teardown the callback server - beforeAll(async () => setupCallbackServer()); - - 
// Reset events before each test - beforeEach(() => { - resetEvents(); - }); - afterAll(async () => await teardownCallbackServer()); - - // Helper function to create a test event - const createTestEvent = (eventType: EventType) => { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, // Use space ID from environment - org_name: 'Test Organization', - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: PROJECT_ID, - external_sync_unit_id: PROJECT_ID, - external_sync_unit_name: 'Test Project', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-version-id', - sync_run: 'test-run', - sync_run_id: `test-run-id-${Date.now()}`, // Add timestamp to make it unique - sync_tier: 'test-tier', - sync_unit: 'test-unit', - sync_unit_id: 'test-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'extraction' - }, - input_data: {} - }; - }; - - test('Extraction function can be invoked with EXTRACTION_DATA_START event and emits a single DONE event', async () => { - const event = createTestEvent(EventType.ExtractionDataStart); - - // Send request to snap-in server - const response = await axios.post(SNAP_IN_SERVER_URL, event, { headers: { 'Content-Type': 'application/json' }, timeout: 30000 }); - - // Verify response - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - 
expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - - try { - console.log(`[${new Date().toISOString()}] Waiting for EXTRACTION_DATA_DONE event...`); - - // Wait for the DONE event with a shorter timeout - const doneEvent = await waitForEvent(ExtractorEventType.ExtractionDataDone, 45000); - - console.log(`[${new Date().toISOString()}] Received EXTRACTION_DATA_DONE event`); - - // Wait a short time to ensure no other events arrive - await setTimeoutPromise(2000); - - // Verify we only received one DONE or ERROR event (not both) - const doneEvents = global.receivedEvents.filter((evt: any) => - evt && evt.event_type === ExtractorEventType.ExtractionDataDone - ); - - const errorEvents = global.receivedEvents.filter((evt: any) => - evt && evt.event_type === ExtractorEventType.ExtractionDataError - ); - - // We should have exactly one DONE event and no ERROR events - // Expected exactly one DONE event - expect(doneEvents.length).toBe(1); - expect(errorEvents.length).toBe(0); - - // Check if we received any artifact upload events (optional check) - const artifactEvents = global.receivedEvents.filter((event: any) => - event.file_name && ( - event.file_name.includes('users') || - event.file_name.includes('tasks') - ) - ); - - console.log(`Found ${artifactEvents.length} artifact events`); - } catch (error) { - console.error(`[${new Date().toISOString()}] Test failed with error:`, error); - throw error; - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/jest.config.js b/conformance_tests/extraction_data_repository_push/jest.config.js deleted file mode 100644 index 6d19129..0000000 --- a/conformance_tests/extraction_data_repository_push/jest.config.js +++ /dev/null @@ -1,9 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds as specified in requirements - resetMocks: false, - 
testPathIgnorePatterns: ['/node_modules/', '/dist/'], - testSequencer: './test-sequencer.js', - setupFilesAfterEnv: ['./jest.setup.js'] -}; \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/jest.setup.js b/conformance_tests/extraction_data_repository_push/jest.setup.js deleted file mode 100644 index e2fa10b..0000000 --- a/conformance_tests/extraction_data_repository_push/jest.setup.js +++ /dev/null @@ -1,30 +0,0 @@ -// Increase the timeout for all tests -jest.setTimeout(60000); // Reduced to ensure tests complete within the overall limit - -// Initialize global variables if they don't exist -global.receivedEvents = global.receivedEvents || []; -global.eventPromiseResolvers = global.eventPromiseResolvers || {}; -global.receivedEventTypes = global.receivedEventTypes || []; - -// Reset global state before each test -beforeEach(() => { - // Clear events before each test - global.receivedEvents = []; - global.eventPromiseResolvers = {}; -}); - -// Log when tests start and finish -beforeEach(() => console.log(`[${new Date().toISOString()}] Starting test`)); -beforeAll(() => console.log(`[${new Date().toISOString()}] Starting tests`)); -afterAll(() => console.log(`[${new Date().toISOString()}] Finished tests`)); - -// Add global error handlers -process.on('unhandledRejection', (reason, promise) => { - console.error(`[${new Date().toISOString()}] Unhandled Rejection:`, reason); -}); - -// Add a global timeout to ensure tests don't hang -afterEach(async () => { - console.log(`[${new Date().toISOString()}] Test completed`); - console.log(`Received events: ${global.receivedEventTypes.join(', ')}`); -}); \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/package.json b/conformance_tests/extraction_data_repository_push/package.json deleted file mode 100644 index d7c19e4..0000000 --- a/conformance_tests/extraction_data_repository_push/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": 
"airdrop-snap-in-tests", - "version": "1.0.0", - "description": "Conformance tests for Airdrop Snap-in", - "main": "index.js", - "scripts": { - "test": "jest --runInBand" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, - "devDependencies": { - "@jest/test-sequencer": "^29.4.2", - "@types/express": "^4.17.21", - "@types/jest": "^29.4.0", - "@types/node": "^18.13.0", - "jest": "^29.4.2", - "ts-jest": "^29.0.5", - "typescript": "^4.9.5" - } -} \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/test-sequencer.js b/conformance_tests/extraction_data_repository_push/test-sequencer.js deleted file mode 100644 index 08d29a2..0000000 --- a/conformance_tests/extraction_data_repository_push/test-sequencer.js +++ /dev/null @@ -1,22 +0,0 @@ -const Sequencer = require('@jest/test-sequencer').default; - -class CustomSequencer extends Sequencer { - sort(tests) { - // Return a new array with tests sorted by path - return Array.from(tests).sort((testA, testB) => { - // Run extraction.test.ts first, then data_extraction.test.ts - if (testA.path.includes('extraction.test.ts') && !testB.path.includes('extraction.test.ts')) { - return -1; - } - if (testB.path.includes('extraction.test.ts') && !testA.path.includes('extraction.test.ts')) { - return 1; - } - if (testB.path.includes('data_extraction.test.ts')) return -1; - - // Otherwise sort alphabetically - return testA.path.localeCompare(testB.path); - }); - } -} - -module.exports = CustomSequencer; \ No newline at end of file diff --git a/conformance_tests/extraction_data_repository_push/test-utils.ts b/conformance_tests/extraction_data_repository_push/test-utils.ts deleted file mode 100644 index 9781904..0000000 --- a/conformance_tests/extraction_data_repository_push/test-utils.ts +++ /dev/null @@ -1,231 +0,0 @@ -import express from 'express'; -import bodyParser from 'body-parser'; -import { Server } from 'http'; 
-import { setTimeout as setTimeoutPromise } from 'timers/promises'; -import { AddressInfo } from 'net'; - -// Server configurations -export const CALLBACK_SERVER_PORT: number = 8002; -export const CALLBACK_SERVER_URL: string = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Declare global variables for shared state -declare global { - var callbackServer: Server | undefined; - var receivedEvents: any[]; - var receivedEventTypes: string[]; - var eventPromiseResolvers: { [key: string]: (value: any) => void }; -} - -// Initialize global variables if they don't exist -global.receivedEvents = global.receivedEvents || []; -global.receivedEventTypes = global.receivedEventTypes || []; -global.eventPromiseResolvers = global.eventPromiseResolvers || {}; - -// Setup callback server to receive events from the snap-in -export function setupCallbackServer(): Promise { - // If server is already running, close it first to avoid port conflicts - if (global.callbackServer) { - console.log('Callback server already running, resetting events'); - global.receivedEvents = []; - global.receivedEventTypes = []; - global.eventPromiseResolvers = {}; - return Promise.resolve(); - } - - return new Promise((resolve, reject) => { - // Create express app for callback server - const app = express(); - app.use(bodyParser.json()); - - // Endpoint to receive events from the snap-in - app.post('/', (req, res) => { - const event = typeof req.body === 'string' ? 
JSON.parse(req.body) : req.body; - - console.log(`Callback server received event type: ${event.event_type}`); - - // Send a 200 OK response immediately - res.status(200).send(); - - // Process the event after sending the response - - // Add timestamp for ordering checks - event.receivedAt = new Date().toISOString(); - global.receivedEvents.push(event); - - // Also track event types for easier checking - if (event.event_type) { - global.receivedEventTypes.push(event.event_type); - } - - // Resolve any promises waiting for this event type - if (event.event_type) { - const resolver = global.eventPromiseResolvers[event.event_type]; - if (resolver) { - resolver(event); - delete global.eventPromiseResolvers[event.event_type]; - } - } - }); - - // Create a catch-all route for any other requests - app.use('*', (req, res) => { - console.log(`Received request to ${req.originalUrl}`); - res.status(200).send('Callback server is running'); - }); - - // Handle server errors - const server = app.listen(CALLBACK_SERVER_PORT); - - server.on('error', (err) => { - console.error(`Failed to start callback server: ${err.message}`); - reject(err); - }); - - server.on('listening', () => { - console.log(`Callback server running at ${CALLBACK_SERVER_URL}`); - global.callbackServer = server; - resolve(); - }); - }); -} - -// Teardown callback server -export function teardownCallbackServer(): Promise { - return new Promise((resolve) => { - const server = global.callbackServer; - - if (!server) { - console.log('No callback server to close'); - resolve(); - return; - } - - // Set a timeout in case the server doesn't close properly - const timeoutId = setTimeout(() => { - console.log('Forcing callback server closure after timeout'); - resolve(); - }, 5000); - - server.close(() => { - clearTimeout(timeoutId); - console.log('Callback server closed successfully'); - global.receivedEvents = []; - global.callbackServer = undefined; - resolve(); - }); - }); -} - -// Reset events before each test 
-export function resetEvents() { - global.receivedEvents = []; - global.receivedEventTypes = []; - global.eventPromiseResolvers = {}; -} - -// Helper function to wait for a specific event or timeout -export function waitForEvent(eventType: string, timeoutMs: number = 60000): Promise { - console.log(`Setting up wait for event: ${eventType}`); - - return new Promise((resolve, reject) => { - // Check if we already received this event - const existingEvent = global.receivedEvents.find(e => e && e.event_type === eventType); - if (existingEvent) { - console.log(`Found existing event of type ${eventType}`); - return resolve(existingEvent); - } - - console.log(`No existing event of type ${eventType}, setting up resolver`); - - // Set up polling to check for the event periodically - // This is a backup in case the event handler doesn't trigger properly - const pollInterval = setInterval(() => { - const event = global.receivedEvents.find(e => e && e.event_type === eventType); - if (event) { - clearInterval(pollInterval); - // Only resolve if the promise hasn't been resolved yet - if (global.eventPromiseResolvers[eventType]) { - resolve(event); - delete global.eventPromiseResolvers[eventType]; - } - } - }, 1000); - - // Set up resolver for future event - global.eventPromiseResolvers[eventType] = resolve; - - // Set timeout - const timeoutId = setTimeout(() => { - // Clean up the resolver to prevent memory leaks - delete global.eventPromiseResolvers[eventType]; - clearInterval(pollInterval); - - // Log all received events for debugging - const receivedEventTypes = global.receivedEvents.map(e => e.event_type).join(', '); - console.error(`Timeout waiting for event ${eventType} after ${timeoutMs}ms.`); - console.error(`Received events: ${receivedEventTypes}`); - console.error(`Error details:`, global.receivedEvents.find(e => e.event_type === 'EXTRACTION_DATA_ERROR')?.event_data?.error); - - // Reject with detailed error - reject(new Error(`Timeout waiting for event ${eventType} 
after ${timeoutMs}ms. Received events: ${receivedEventTypes}`)); - }, timeoutMs); - - // Add cleanup for the timeout when the promise resolves - const cleanup = (result: any) => { - clearTimeout(timeoutId); - clearInterval(pollInterval); - return result; - }; - - // The promise will be resolved by the event handler in the POST endpoint - // and we'll clean up the timeout when it does - resolve = ((originalResolve) => (value) => originalResolve(cleanup(value)))(resolve); - global.eventPromiseResolvers[eventType] = resolve; - }); -} - -// Helper function to check if an event has been received without waiting -export function hasReceivedEvent(eventType: string): boolean { - return global.receivedEvents.some(e => e && e.event_type === eventType); -} - -// Helper function to wait for any event with a timeout -export async function waitForAnyEventWithTimeout(timeoutMs: number = 30000): Promise { - const startCount = global.receivedEvents.length; - const startTime = Date.now(); - - while (Date.now() - startTime < timeoutMs) { - if (global.receivedEvents.length > startCount) { - return true; - } - await setTimeoutPromise(500); // Wait 500ms before checking again - } - - // If we get here, we timed out - console.log(`Timed out waiting for any event after ${timeoutMs}ms`); - console.log(`Current events: ${global.receivedEventTypes.join(', ')}`); - - return false; -} - -// Helper function to wait for any event matching a predicate -export function waitForAnyEvent(predicate: (event: any) => boolean, timeoutMs: number = 60000): Promise { - return new Promise((resolve, reject) => { - const existingEvent = global.receivedEvents.find(predicate); - if (existingEvent) return resolve(existingEvent); - - const intervalId = setInterval(() => { - const event = global.receivedEvents.find(predicate); - if (event) { - clearInterval(intervalId); - clearTimeout(timeoutId); - resolve(event); - } - }, 100); - - const timeoutId = setTimeout(() => { - clearInterval(intervalId); - reject(new 
Error(`Timeout waiting for matching event after ${timeoutMs}ms`)); - }, timeoutMs); - }); -} \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_acceptance.test.ts b/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_acceptance.test.ts deleted file mode 100644 index 2997ba3..0000000 --- a/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_acceptance.test.ts +++ /dev/null @@ -1,172 +0,0 @@ -import axios from 'axios'; -import express, { Request, Response } from 'express'; -import { Server } from 'http'; -import fs from 'fs'; -import path from 'path'; - -// Constants -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const DEVREV_SERVER_URL = 'http://localhost:8003'; -const WORKER_DATA_URL = `${DEVREV_SERVER_URL}/external-worker`; -const TEST_TIMEOUT = 60000; // 60 seconds timeout for the test - -describe('Extraction External Sync Unit Acceptance Test', () => { - let callbackServer: Server; - let callbackReceived = false; - let receivedEventType: string | null = null; - let callbackPromiseResolve: ((value: unknown) => void) | null = null; - let callbackPromiseReject: ((reason?: any) => void) | null = null; - let callbackPromise: Promise; - - // Setup callback server before all tests - beforeAll((done) => { - const app = express(); - app.use(express.json()); - - // Create a route to handle callbacks from DevRev - app.post('/callback', (req: Request, res: Response) => { - console.log('Callback received:', JSON.stringify(req.body, null, 2)); - - // Extract the event_type from the callback payload - const eventType = req.body?.event_type; - console.log(`Received event_type: ${eventType}`); - - // Store the received event type for later assertion - receivedEventType = eventType; - callbackReceived = true; - - // 
Resolve the promise if we're waiting for this callback - if (callbackPromiseResolve) { - callbackPromiseResolve(req.body); - } - - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server running at http://localhost:${CALLBACK_SERVER_PORT}`); - done(); - }); - }); - - // Cleanup callback server after all tests - afterAll((done) => { - callbackServer.close(() => { - console.log('Callback server closed'); - done(); - }); - }); - - // Reset callback flags before each test - beforeEach(() => { - callbackReceived = false; - receivedEventType = null; - - // Create a new promise that will be resolved when the callback is received - callbackPromise = new Promise((resolve, reject) => { - callbackPromiseResolve = resolve; - callbackPromiseReject = reject; - - // Set a timeout to reject the promise if no callback is received - setTimeout(() => { - if (!callbackReceived && callbackPromiseReject) { - callbackPromiseReject(new Error('Timeout waiting for callback from DevRev')); - } - }, TEST_TIMEOUT - 5000); // 5 seconds before the test timeout - }); - }); - - test('should receive EXTRACTION_EXTERNAL_SYNC_UNITS_DONE event from DevRev', async () => { - // Load the test event from the JSON file - let testEvents; - try { - const jsonPath = path.resolve(__dirname, './external_sync_unit_check.json'); - console.log(`Loading test events from: ${jsonPath}`); - const jsonData = fs.readFileSync(jsonPath, 'utf8'); - testEvents = JSON.parse(jsonData); - console.log(`Loaded ${testEvents.length} test events`); - } catch (error) { - console.error('Error loading test events:', error); - throw new Error(`Failed to load test events: ${error instanceof Error ? 
error.message : String(error)}`); - } - - // Ensure we have at least one event - expect(testEvents).toBeDefined(); - expect(Array.isArray(testEvents)).toBe(true); - expect(testEvents.length).toBeGreaterThan(0); - - // Get the first event - const testEvent = testEvents[0]; - - // Ensure the event has the required structure - expect(testEvent).toHaveProperty('payload'); - expect(testEvent.payload).toHaveProperty('event_context'); - expect(testEvent.payload.event_context).toHaveProperty('callback_url'); - - // Update the callback URL to point to our test server - testEvent.payload.event_context.callback_url = `${CALLBACK_SERVER_URL}/callback`; - - // Ensure the event has the correct event_type - expect(testEvent.payload).toHaveProperty('event_type'); - expect(testEvent.payload.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_START'); - - // Ensure the event has the correct function_name - expect(testEvent.execution_metadata).toHaveProperty('function_name'); - expect(testEvent.execution_metadata.function_name).toBe('extraction_external_sync_unit_check'); - - // Update the devrev_endpoint to point to our test server - testEvent.execution_metadata.devrev_endpoint = DEVREV_SERVER_URL; - - // Update the worker_data_url to point to our test server - testEvent.payload.event_context.worker_data_url = WORKER_DATA_URL; - - console.log('Sending test event to snap-in server:', JSON.stringify(testEvent, null, 2)); - - try { - // Send the event to the snap-in server - const response = await axios.post(SNAP_IN_SERVER_URL, testEvent, { - headers: { 'Content-Type': 'application/json' } - }); - - console.log('Response from snap-in server:', JSON.stringify(response.data, null, 2)); - - // Check if the response contains an error - if (response.data?.error) { - console.error('Error in response:', response.data.error); - throw new Error(`Error in response: ${JSON.stringify(response.data.error)}`); - } - - // Wait for the callback to be received - console.log('Waiting for callback from 
DevRev...'); - await callbackPromise; - - // Assert that we received a callback - expect(callbackReceived).toBe(true); - expect(receivedEventType).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); - - } catch (error) { - console.error('Test failed:', error); - - if (axios.isAxiosError(error) && error.response) { - console.error('Response status:', error.response.status); - console.error('Response data:', JSON.stringify(error.response.data, null, 2)); - } - - throw error; - } - }, TEST_TIMEOUT); -}); - -// Add a global afterAll hook to ensure all connections are closed -afterAll(async () => { - // Ensure we don't leave any open connections - if (axios.defaults.httpAgent && typeof axios.defaults.httpAgent.destroy === 'function') { - axios.defaults.httpAgent.destroy(); - } - if (axios.defaults.httpsAgent && typeof axios.defaults.httpsAgent.destroy === 'function') { - axios.defaults.httpsAgent.destroy(); - } - await new Promise(resolve => setTimeout(resolve, 1000)); // Give time for connections to close -}); \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_check.test.ts b/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_check.test.ts deleted file mode 100644 index 120ad81..0000000 --- a/conformance_tests/extraction_external_sync_unit_check/extraction_external_sync_unit_check.test.ts +++ /dev/null @@ -1,323 +0,0 @@ -import axios from 'axios'; -import express, { Request, Response } from 'express'; -import { Server } from 'http'; - -// Constants -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const DEVREV_SERVER_URL = 'http://localhost:8003'; -const WORKER_DATA_URL = `${DEVREV_SERVER_URL}/external-worker`; - -// Define event context interface to include optional properties -interface EventContext { - callback_url: string; - [key: string]: any; 
// Allow any additional properties -} - -// Define event interface to make TypeScript happy -interface Event extends Record {} - -// Mock event data -const createBasicEvent = (eventType: string) => ({ - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - event_type: eventType, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'test-sync-unit', - external_sync_unit_id: 'test-sync-unit-id', - external_sync_unit_name: 'Test Sync Unit', - external_system: 'test-system', - external_system_type: 'test-system-type', - import_slug: 'test-import-slug', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in-slug', - snap_in_version_id: 'test-snap-in-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: WORKER_DATA_URL - } - }, - execution_metadata: { - function_name: 'extraction_external_sync_unit_check', - devrev_endpoint: DEVREV_SERVER_URL - }, - input_data: {} -}); - -// Improved helper function to invoke the function with better error handling -const invokeFunction = async (event: Event) => { - try { - console.log(`Invoking function with event type: ${event.payload?.event_type || 'undefined'}`); - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { 'Content-Type': 'application/json' } - }); - - console.log('Response received:', JSON.stringify(response.data)); - - // Check if the response contains an error - if (response.data?.error) { - // Extract error message from various possible formats - let errorMessage = 'Unknown error'; - - if (response.data.error.err_msg) { - errorMessage = 
response.data.error.err_msg; - } else if (response.data.error.error && response.data.error.error.message) { - errorMessage = response.data.error.error.message; - } else if (typeof response.data.error === 'string') { - errorMessage = response.data.error; - } else { - errorMessage = JSON.stringify(response.data.error); - } - - throw new Error(errorMessage); - } - - // Check if there's an error in the function_result - if (response.data?.function_result?.error) { - throw new Error(JSON.stringify(response.data.function_result.error)); - } - - return response.data; - } catch (error: unknown) { - if (axios.isAxiosError(error) && error.response) { - const serverError = error.response.data.error; - // Check if the error is a runtime error with err_msg - if (serverError?.err_type && serverError?.err_msg) { - throw new Error(serverError.err_msg); - } - // Check if the error is a function error with nested error object - else if (serverError?.error && typeof serverError.error === 'object' && serverError.error.message) { - throw new Error(JSON.stringify(serverError.error)); - } - // Otherwise just throw the error object as is - else if (typeof serverError === 'string') { - throw new Error(serverError); - } - // If we can't extract a specific error message, use the whole response - else if (serverError) { - throw new Error(JSON.stringify(serverError)); - } - else if (typeof error.response.data === 'string') { - throw new Error(error.response.data); - } - else { - throw new Error(`Request failed with status ${error.response.status}: ${JSON.stringify(error.response.data)}`); - } - } - - if (axios.isAxiosError(error) && error.response) { - throw new Error(`Request failed with status ${error.response.status}`); - } - throw error; - } finally { - // Empty finally block, but needed for proper syntax - } -}; - -describe('Extraction External Sync Unit Check Tests', () => { - let callbackServer: Server; - let callbackReceived = false; - - // Setup callback server before all tests - 
beforeAll((done) => { - const app = express(); - app.use(express.json()); - - app.post('/callback', (req: Request, res: Response) => { - console.log('Callback received:', req.body); - callbackReceived = true; - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server running at http://localhost:${CALLBACK_SERVER_PORT}`); - done(); - }); - }); - - // Cleanup callback server after all tests - afterAll((done) => { - callbackServer.close(() => { - console.log('Callback server closed'); - done(); - }); - }); - - // Reset callback flag before each test - beforeEach(() => { - callbackReceived = false; - }); - - // Test 1: Basic Invocation - test('should successfully invoke the function with minimal valid input', async () => { - const event = createBasicEvent('TEST_EVENT'); - - const result = await invokeFunction(event); - - expect(result).toBeDefined(); - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('success'); - expect(result.function_result.message).toBeTruthy(); - expect(result.function_result.valid_external_sync_unit_events).toBeDefined(); - }); - - // Test 2: Event Validation - test('should handle invalid events properly', async () => { - // Create an invalid event with missing service_account_token - const invalidEvent = { - context: { - // Completely missing the service_account_token field - secrets: {}, - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - // Include event_type to make the event more complete - event_type: 'TEST_EVENT', - // Include a minimal event_context to avoid other validation errors - // that might occur before the one we're testing for - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback` - } - }, - execution_metadata: { - function_name: 'extraction_external_sync_unit_check', - devrev_endpoint: DEVREV_SERVER_URL - }, - input_data: {} - }; - - try { - const result = await 
invokeFunction(invalidEvent); - // If we get here, the function didn't throw an error - fail(`Expected function to throw an error but got: ${JSON.stringify(result)}`); - } catch (error) { - // Test passes if we get here - we expect some kind of error - expect(error).toBeDefined(); - } - }); - - // Additional test for another validation case - test('should handle events with missing event_context properly', async () => { - // Create an invalid event with missing event_context - const invalidEvent = { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - event_type: 'TEST_EVENT' - // Deliberately missing event_context - }, - execution_metadata: { - function_name: 'extraction_external_sync_unit_check', - devrev_endpoint: DEVREV_SERVER_URL - }, - input_data: {} - }; - - try { - const result = await invokeFunction(invalidEvent); - // If we get here, the function didn't throw an error - fail(`Expected function to throw an error but got: ${JSON.stringify(result)}`); - } catch (error) { - // Test passes if we get here - we expect some kind of error - expect(error).toBeDefined(); - } - }); - - // Test for missing execution_metadata - test('should handle events with missing execution_metadata properly', async () => { - // Create an invalid event with missing execution_metadata - const invalidEvent = { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - event_type: 'TEST_EVENT', - event_context: {} - }, - // Deliberately missing execution_metadata - input_data: {} - }; - - try { - const result = await invokeFunction(invalidEvent); - // If we get here, the function didn't throw an error - fail(`Expected function to throw an error but got: ${JSON.stringify(result)}`); - } catch (error) { - // Test passes if we get here - we expect some kind of error - expect(error).toBeDefined(); - } - }); - - // Test 3: Event 
Type Recognition - test('should correctly identify external sync unit events', async () => { - // Test with a non-external sync unit event - const regularEvent = createBasicEvent('SOME_OTHER_EVENT'); - const regularResult = await invokeFunction(regularEvent); - - expect(regularResult.function_result.valid_external_sync_unit_events).toBe(false); - - // Test with an external sync unit event - const syncUnitEvent = createBasicEvent('EXTRACTION_EXTERNAL_SYNC_UNITS_START'); - const syncUnitResult = await invokeFunction(syncUnitEvent); - - expect(syncUnitResult.function_result.valid_external_sync_unit_events).toBe(true); - }); - - // Test 4: Complete Workflow - test('should handle a complete external sync unit workflow', async () => { - const event: Event = createBasicEvent('EXTRACTION_EXTERNAL_SYNC_UNITS_START'); - - // Add more realistic data to the event - // Use type assertion to add properties - const eventContext = event.payload.event_context as EventContext; - - eventContext.extract_from = new Date().toISOString(); - eventContext.initial_sync_scope = 'full-history'; - - const result = await invokeFunction(event); - - expect(result.function_result.status).toBe('success'); - expect(result.function_result.valid_external_sync_unit_events).toBe(true); - - // In a real scenario, we would expect the function to process this event - // and potentially make callbacks, but since our implementation is just a check, - // we're only verifying the response structure - }); -}); - -// Add a global afterAll hook to ensure all connections are closed -afterAll(async () => { - // Ensure we don't leave any open connections - // Safely close HTTP agents if they exist - if (axios.defaults.httpAgent && typeof axios.defaults.httpAgent.destroy === 'function') { - axios.defaults.httpAgent.destroy(); - } - if (axios.defaults.httpsAgent && typeof axios.defaults.httpsAgent.destroy === 'function') { - axios.defaults.httpsAgent.destroy(); - } - await new Promise(resolve => setTimeout(resolve, 
1000)); // Give time for connections to close -}); \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/jest.config.js b/conformance_tests/extraction_external_sync_unit_check/jest.config.js deleted file mode 100644 index c2287d7..0000000 --- a/conformance_tests/extraction_external_sync_unit_check/jest.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, -}; \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/package.json b/conformance_tests/extraction_external_sync_unit_check/package.json deleted file mode 100644 index 6a0804a..0000000 --- a/conformance_tests/extraction_external_sync_unit_check/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "airdrop-conformance-tests", - "version": "1.0.0", - "description": "Conformance tests for Airdrop snap-in", - "main": "index.js", - "scripts": { - "test": "jest --forceExit --detectOpenHandles --runInBand" - }, - "dependencies": { - "axios": "^1.6.2", - "express": "^4.18.2" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.5.10", - "@types/node": "^20.10.0", - "jest": "^29.7.0", - "ts-jest": "^29.1.1", - "typescript": "^5.3.2" - } -} \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/extraction.test.ts b/conformance_tests/extraction_external_sync_unit_push/extraction.test.ts deleted file mode 100644 index 533911c..0000000 --- a/conformance_tests/extraction_external_sync_unit_push/extraction.test.ts +++ /dev/null @@ -1,239 +0,0 @@ -import axios from 'axios'; -import * as express from 'express'; -import * as bodyParser from 'body-parser'; -import { EventType } from '@devrev/ts-adaas'; -import { Server } from 'http'; - -// Test configuration -const TEST_TIMEOUT = 30000; // 30 seconds per test -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 
8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || ''; -const TEST_PROJECT_ID = 'IEAGS6BYI5RFMPPY'; // Can be used when space ID is required - -// Validate environment variables -if (!WRIKE_API_KEY) { - console.error('WRIKE_API_KEY environment variable is required'); - process.exit(1); -} - -if (!WRIKE_SPACE_GID) { - console.error('WRIKE_SPACE_GID environment variable is required'); - process.exit(1); -} - -// Setup callback server -let callbackServer: Server; -let receivedCallbacks: any[] = []; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - const app = express.default(); - app.use(bodyParser.json()); - - app.post('*', (req, res) => { - receivedCallbacks.push(req.body); - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening on port ${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server shut down'); - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Helper function to create a test event -function createTestEvent(eventType: string = EventType.ExtractionExternalSyncUnitsStart) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - execution_metadata: { - function_name: 'extraction', - devrev_endpoint: 'http://localhost:8003' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - org_name: 'Test Organization', - key_type: 'api_key' - }, - event_type: eventType, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 
'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: TEST_PROJECT_ID, - external_sync_unit_id: TEST_PROJECT_ID, - external_sync_unit_name: 'Test Project', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-snap-in-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - input_data: {} - }; -} - -// Setup and teardown -beforeAll(async () => { - await setupCallbackServer(); -}); - -afterAll(async () => { - await shutdownCallbackServer(); -}); - -beforeEach(() => { - receivedCallbacks = []; -}); - -// Test cases -describe('Extraction Function Tests', () => { - // Test 1: Basic validation test - test('should accept a valid event', async () => { - const event = createTestEvent(); - - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error).toBeUndefined(); - }, TEST_TIMEOUT); - - // Test 2: Event type handling test - test('should handle non-extraction events correctly', async () => { - const event = createTestEvent('SOME_OTHER_EVENT_TYPE'); - - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error).toBeUndefined(); - - // Since this is not an extraction event, we shouldn't receive any callbacks - await new Promise(resolve => setTimeout(resolve, 2000)); // Wait for potential callbacks - 
expect(receivedCallbacks.length).toBe(0); - }, TEST_TIMEOUT); - - // Test 3: Project fetching test - test('should fetch projects and transform them into external sync units', async () => { - const event = createTestEvent(); - - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error).toBeUndefined(); - - // Wait for the callback to be received - await new Promise(resolve => setTimeout(resolve, 10000)); - - // Verify we received at least one callback - expect(receivedCallbacks.length).toBeGreaterThan(0); - - // Find the EXTRACTION_EXTERNAL_SYNC_UNITS_DONE event - const doneEvent = receivedCallbacks.find( - callback => callback.event_type === 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE' - ); - - expect(doneEvent).toBeDefined(); - expect(doneEvent.event_data).toBeDefined(); - expect(doneEvent.event_data.external_sync_units).toBeDefined(); - expect(Array.isArray(doneEvent.event_data.external_sync_units)).toBe(true); - - // Verify the structure of the external sync units - const externalSyncUnits = doneEvent.event_data.external_sync_units; - expect(externalSyncUnits.length).toBeGreaterThan(0); - - // Check the first external sync unit - const firstUnit = externalSyncUnits[0]; - expect(firstUnit.id).toBeDefined(); - expect(firstUnit.name).toBeDefined(); - expect(firstUnit.description).toBeDefined(); - expect(firstUnit.item_type).toBe('tasks'); - }, TEST_TIMEOUT); - - // Test 4: End-to-end test - test('should complete the full extraction workflow', async () => { - const event = createTestEvent(); - - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error).toBeUndefined(); - - // Wait for the callback to be received - await new 
Promise(resolve => setTimeout(resolve, 10000)); - - // Verify we received callbacks - expect(receivedCallbacks.length).toBeGreaterThan(0); - - // Check for the DONE event - const doneEvent = receivedCallbacks.find( - callback => callback.event_type === 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE' - ); - - expect(doneEvent).toBeDefined(); - - // Ensure no error events were received - const errorEvent = receivedCallbacks.find( - callback => callback.event_type === 'EXTRACTION_EXTERNAL_SYNC_UNITS_ERROR' - ); - - expect(errorEvent).toBeUndefined(); - }, TEST_TIMEOUT); -}); \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/extraction_external_sync_unit.test.ts b/conformance_tests/extraction_external_sync_unit_push/extraction_external_sync_unit.test.ts deleted file mode 100644 index aaaf787..0000000 --- a/conformance_tests/extraction_external_sync_unit_push/extraction_external_sync_unit.test.ts +++ /dev/null @@ -1,179 +0,0 @@ -import axios from 'axios'; -import * as express from 'express'; -import * as bodyParser from 'body-parser'; -import * as fs from 'fs'; -import * as path from 'path'; -import { Server } from 'http'; - -// Test configuration -const TEST_TIMEOUT = 30000; // 30 seconds per test -const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const DEVREV_SERVER_URL = 'http://localhost:8003'; - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || ''; - -// Validate environment variables -if (!WRIKE_API_KEY) { - console.error('WRIKE_API_KEY environment variable is required'); - process.exit(1); -} - -if (!WRIKE_SPACE_GID) { - console.error('WRIKE_SPACE_GID environment variable is required'); - process.exit(1); -} - -// Setup callback server -let callbackServer: Server; -let receivedCallbacks: any[] = []; - -function 
setupCallbackServer(): Promise { - return new Promise((resolve) => { - const app = express.default(); - app.use(bodyParser.json()); - - // Log all incoming requests for debugging - app.use((req, res, next) => { - console.log(`Callback server received ${req.method} request to ${req.url}`); - next(); - }); - - app.post('/callback', (req, res) => { - console.log('Callback received:', JSON.stringify(req.body, null, 2)); - receivedCallbacks.push(req.body); - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening on port ${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server shut down'); - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Helper function to load and prepare test event -function loadTestEvent(): any { - try { - // Read the test event from the JSON file - const filePath = path.resolve(__dirname, 'external_sync_unit_check.json'); - console.log(`Loading test event from ${filePath}`); - - if (!fs.existsSync(filePath)) { - throw new Error(`Test event file not found at ${filePath}`); - } - - const fileContent = fs.readFileSync(filePath, 'utf8'); - const events = JSON.parse(fileContent); - - if (!Array.isArray(events) || events.length === 0) { - throw new Error('Invalid test event format: expected non-empty array'); - } - - // Use the first event - const event = events[0]; - - // Replace placeholders with actual values - if (event.payload && event.payload.connection_data) { - event.payload.connection_data.key = WRIKE_API_KEY; - event.payload.connection_data.org_id = WRIKE_SPACE_GID; - } - - // Set the callback URL - if (event.payload && event.payload.event_context) { - event.payload.event_context.callback_url = `${CALLBACK_SERVER_URL}/callback`; - 
event.payload.event_context.worker_data_url = `${DEVREV_SERVER_URL}/external-worker`; - } - - return event; - } catch (error) { - console.error('Error loading test event:', error); - throw error; - } -} - -// Setup and teardown -beforeAll(async () => { - await setupCallbackServer(); -}); - -afterAll(async () => { - await shutdownCallbackServer(); -}); - -beforeEach(() => { - receivedCallbacks = []; -}); - -// Test case -describe('Extraction External Sync Unit Test', () => { - test('should process external sync unit check event and emit EXTRACTION_EXTERNAL_SYNC_UNITS_DONE', async () => { - // Load and prepare the test event - const event = loadTestEvent(); - console.log('Prepared test event:', JSON.stringify(event, null, 2)); - - // Send the event to the snap-in server - console.log(`Sending event to ${SNAP_IN_SERVER_URL}`); - const response = await axios.post(SNAP_IN_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.error).toBeUndefined(); - console.log('Received response:', JSON.stringify(response.data, null, 2)); - - // Wait for the callback to be received (up to 15 seconds) - console.log('Waiting for callback...'); - for (let i = 0; i < 15; i++) { - if (receivedCallbacks.length > 0) { - break; - } - await new Promise(resolve => setTimeout(resolve, 1000)); - } - - // Verify we received exactly one callback - expect(receivedCallbacks.length).toBe(1); - console.log(`Received ${receivedCallbacks.length} callbacks`); - - // Verify the callback is the expected DONE event - const callback = receivedCallbacks[0]; - expect(callback.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); - console.log(`Received event_type: ${callback.event_type}`); - - // Verify the callback contains external sync units - expect(callback.event_data).toBeDefined(); - expect(callback.event_data.external_sync_units).toBeDefined(); - 
expect(Array.isArray(callback.event_data.external_sync_units)).toBe(true); - expect(callback.event_data.external_sync_units.length).toBeGreaterThan(0); - console.log(`Received ${callback.event_data.external_sync_units.length} external sync units`); - - // Verify the structure of the external sync units - const firstUnit = callback.event_data.external_sync_units[0]; - expect(firstUnit.id).toBeDefined(); - expect(firstUnit.name).toBeDefined(); - expect(firstUnit.description).toBeDefined(); - expect(firstUnit.item_type).toBe('tasks'); - - console.log('Test completed successfully'); - }, TEST_TIMEOUT); -}); \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/jest.config.js b/conformance_tests/extraction_external_sync_unit_push/jest.config.js deleted file mode 100644 index 73590d1..0000000 --- a/conformance_tests/extraction_external_sync_unit_push/jest.config.js +++ /dev/null @@ -1,10 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds as per requirements - resetMocks: false, - testMatch: [ - '**/*.test.ts', - '*.test.ts' - ], -}; \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/package.json b/conformance_tests/extraction_external_sync_unit_push/package.json deleted file mode 100644 index d379041..0000000 --- a/conformance_tests/extraction_external_sync_unit_push/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "airdrop-snap-in-tests", - "version": "1.0.0", - "description": "Conformance tests for DevRev Airdrop Snap-in", - "main": "index.js", - "scripts": { - "test": "jest" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "@devrev/typescript-sdk": "1.1.63", - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.4.0", - "@types/node": "^18.13.0", - "jest": "^29.4.2", - "ts-jest": "^29.0.5", - "typescript": 
"^4.9.5" - } -} \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_push/tsconfig.json b/conformance_tests/extraction_external_sync_unit_push/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/extraction_external_sync_unit_push/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/jest.config.js b/conformance_tests/folder_tasks_fetch_tests/jest.config.js new file mode 100644 index 0000000..b66db2f --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/jest.config.js @@ -0,0 +1,16 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/tests'], + testMatch: ['**/*.test.ts'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + 'tests/**/*.ts', + '!tests/**/*.test.ts', + '!**/node_modules/**' + ], + testTimeout: 120000, + verbose: true, + forceExit: true, + detectOpenHandles: true +}; \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/package.json b/conformance_tests/folder_tasks_fetch_tests/package.json similarity index 65% rename from conformance_tests/project_tasks_fetch_validation/package.json rename to conformance_tests/folder_tasks_fetch_tests/package.json index 2789cd4..e37057f 100644 --- a/conformance_tests/project_tasks_fetch_validation/package.json +++ b/conformance_tests/folder_tasks_fetch_tests/package.json @@ -1,23 +1,23 @@ { - "name": "wrike-snap-in-tests", + "name": "wrike-snap-in-conformance-tests", "version": "1.0.0", "description": "Conformance tests for Wrike Snap-In", - "main": "index.js", "scripts": { - "test": "jest 
--setupFiles ./jest.setup.ts" - }, - "dependencies": { - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" + "test": "jest --runInBand --detectOpenHandles" }, + "keywords": [], + "author": "", + "license": "ISC", "devDependencies": { "@types/express": "^4.17.21", "@types/jest": "^29.4.0", "@types/node": "^18.13.0", - "dotenv": "^16.0.3", "jest": "^29.4.2", "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "express": "^4.21.0" } } \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks.test.ts b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks.test.ts new file mode 100644 index 0000000..aa76ba9 --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks.test.ts @@ -0,0 +1,99 @@ +import { Server } from 'http'; +import { setupCallbackServer, teardownCallbackServer, invokeFunction } from './utils/test-helpers'; +import { buildFetchFolderTasksEvent } from './utils/event-builder'; + +describe('fetch_folder_tasks function', () => { + let callbackServer: Server; + + beforeAll(async () => { + const { server } = await setupCallbackServer(8002); + callbackServer = server; + }); + + afterAll(async () => await teardownCallbackServer(callbackServer)); + + describe('Basic Invocation', () => { + it('should successfully fetch tasks from a folder', async () => { + const event = buildFetchFolderTasksEvent(); + const response = await invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('success'); + expect(response.status_code).toBe(200); + expect(typeof response.api_delay).toBe('number'); + expect(response.metadata.folder_id).toBe('IEAGS6BYI5RFMPP7'); + expect(typeof response.metadata.task_count).toBe('number'); + expect(response.metadata.function_name).toBe('fetch_folder_tasks'); + expect(Array.isArray(response.data)).toBe(true); + }, 30000); + + it('should return tasks with responsibleIds field', 
async () => { + const event = buildFetchFolderTasksEvent(); + const response = await invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('success'); + if (response.data.length > 0) expect(response.data[0]).toHaveProperty('responsibleIds'); + }, 30000); + }); + + describe('Pagination Support', () => { + it('should support pageSize parameter', async () => { + const pageSize = 5; + const event = buildFetchFolderTasksEvent({ pageSize }); + const response = await invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('success'); + expect(response.metadata.page_size).toBe(pageSize); + if (response.data?.length > 0) expect(response.data.length).toBeLessThanOrEqual(pageSize); + }, 30000); + + it('should include pagination metadata', async () => { + const event = buildFetchFolderTasksEvent({ pageSize: 2 }); + const response = await invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('success'); + expect(typeof response.metadata.has_more).toBe('boolean'); + if (response.metadata.has_more) expect(typeof response.metadata.next_page_token).toBe('string'); + }, 30000); + + it('should support nextPageToken parameter', async () => { + const firstEvent = buildFetchFolderTasksEvent({ pageSize: 2 }); + const firstResponse = await invokeFunction('fetch_folder_tasks', firstEvent); + + if (firstResponse.metadata.next_page_token) { + const secondEvent = buildFetchFolderTasksEvent({ pageSize: 2, nextPageToken: firstResponse.metadata.next_page_token }); + const secondResponse = await invokeFunction('fetch_folder_tasks', secondEvent); + + expect(secondResponse.status).toBe('success'); + expect(Array.isArray(secondResponse.data)).toBe(true); + } + }, 30000); + }); + + describe('Date Filtering', () => { + it('should support updatedDate parameter', async () => { + const updatedDate = '2020-01-01T00:00:00Z'; + const event = buildFetchFolderTasksEvent({ updatedDate }); + const response = await 
invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('success'); + expect(Array.isArray(response.data)).toBe(true); + + if (response.data.length > 0) { + const filterDate = new Date(updatedDate); + response.data.forEach((task: any) => { + if (task.updatedDate) expect(new Date(task.updatedDate).getTime()).toBeGreaterThanOrEqual(filterDate.getTime()); + }); + } + }, 30000); + }); + + describe('Error Handling', () => { + it('should handle invalid folder ID gracefully', async () => { + const event = buildFetchFolderTasksEvent({ folderId: 'INVALID_FOLDER_ID' }); + const response = await invokeFunction('fetch_folder_tasks', event); + + expect(response.status).toBe('error'); + expect(response.status_code).toBeGreaterThanOrEqual(400); + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_pagination.test.ts b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_pagination.test.ts new file mode 100644 index 0000000..c6dcc6d --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_pagination.test.ts @@ -0,0 +1,91 @@ +import { Server } from 'http'; +import { setupCallbackServer, teardownCallbackServer, invokeFunction } from './utils/test-helpers'; +import { buildFetchFolderTasksEvent } from './utils/event-builder'; + +describe('fetch_folder_tasks pagination acceptance test', () => { + let callbackServer: Server; + + beforeAll(async () => { + const { server } = await setupCallbackServer(8002); + callbackServer = server; + }); + + afterAll(async () => await teardownCallbackServer(callbackServer)); + + it('should correctly paginate through tasks with pageSize=100 returning 100 tasks then 10 tasks', async () => { + // Step 1: Call fetch_folder_tasks with pageSize=100 + const firstEvent = buildFetchFolderTasksEvent({ + folderId: 'IEAGS6BYI5RFMPP7', + pageSize: 100, + }); + + const firstResponse = await 
invokeFunction('fetch_folder_tasks', firstEvent); + + // Validate first response structure + expect(firstResponse).toBeDefined(); + expect(firstResponse.status).toBe('success'); + expect(firstResponse.metadata).toBeDefined(); + expect(firstResponse.data).toBeDefined(); + + // Step 2: Validate first response has nextPageToken and exactly 100 tasks + const firstPageTaskCount = firstResponse.data?.length || 0; + const firstPageNextToken = firstResponse.metadata?.next_page_token; + + expect(firstPageNextToken).toBeDefined(); + expect(typeof firstPageNextToken).toBe('string'); + expect(firstPageNextToken.length).toBeGreaterThan(0); + + if (firstPageTaskCount !== 100) { + throw new Error( + `Step 2 failed: Expected exactly 100 tasks in first page, but got ${firstPageTaskCount}. ` + + `Response metadata: ${JSON.stringify(firstResponse.metadata, null, 2)}` + ); + } + + expect(firstPageTaskCount).toBe(100); + + // Step 3: Call fetch_folder_tasks again with nextPageToken + const secondEvent = buildFetchFolderTasksEvent({ + folderId: 'IEAGS6BYI5RFMPP7', + pageSize: 100, + nextPageToken: firstPageNextToken, + }); + + const secondResponse = await invokeFunction('fetch_folder_tasks', secondEvent); + + // Validate second response structure + expect(secondResponse).toBeDefined(); + expect(secondResponse.status).toBe('success'); + expect(secondResponse.metadata).toBeDefined(); + expect(secondResponse.data).toBeDefined(); + + // Validate second response has exactly 10 tasks + const secondPageTaskCount = secondResponse.data?.length || 0; + + if (secondPageTaskCount !== 10) { + throw new Error( + `Step 3 failed: Expected exactly 10 tasks in second page, but got ${secondPageTaskCount}. ` + + `First page had ${firstPageTaskCount} tasks with nextPageToken: ${firstPageNextToken}. 
` + + `Second page metadata: ${JSON.stringify(secondResponse.metadata, null, 2)}` + ); + } + + expect(secondPageTaskCount).toBe(10); + + // Additional validation: Ensure tasks are different between pages + const firstPageTaskIds = new Set(firstResponse.data.map((task: any) => task.id)); + const secondPageTaskIds = new Set(secondResponse.data.map((task: any) => task.id)); + + const overlap = [...firstPageTaskIds].filter(id => secondPageTaskIds.has(id)); + + if (overlap.length > 0) { + throw new Error( + `Pagination error: Found ${overlap.length} duplicate task IDs between pages. ` + + `Duplicate IDs: ${JSON.stringify(overlap)}. ` + + `This indicates pagination is not working correctly.` + ); + } + + expect(overlap.length).toBe(0); + }, 60000); // Extended timeout for two API calls +}); \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_rate_limiting.test.ts b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_rate_limiting.test.ts new file mode 100644 index 0000000..70fb207 --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_rate_limiting.test.ts @@ -0,0 +1,169 @@ +import { Server } from 'http'; +import axios from 'axios'; +import { setupCallbackServer, teardownCallbackServer, invokeFunction } from './utils/test-helpers'; +import { buildFetchFolderTasksEvent } from './utils/event-builder'; + +describe('fetch_folder_tasks rate limiting acceptance test', () => { + let callbackServer: Server; + const RATE_LIMIT_CONTROL_URL = 'http://localhost:8004'; + const TEST_NAME = 'fetch_folder_tasks_rate_limiting'; + + beforeAll(async () => { + const { server } = await setupCallbackServer(8002); + callbackServer = server; + }); + + afterAll(async () => await teardownCallbackServer(callbackServer)); + + it('should handle rate limiting (429) with correct api_delay calculation', async () => { + let rateLimitingStarted = false; + + try { + // Step 1: Start rate limiting on 
the mock API server + console.log(`Starting rate limiting for test: ${TEST_NAME}`); + await axios.post(`${RATE_LIMIT_CONTROL_URL}/start_rate_limiting`, { + test_name: TEST_NAME, + }); + rateLimitingStarted = true; + console.log('Rate limiting started successfully'); + + // Step 2: Invoke fetch_folder_tasks with valid credentials + const event = buildFetchFolderTasksEvent({ + folderId: 'IEAGS6BYI5RFMPP7', + }); + + console.log('Invoking fetch_folder_tasks function...'); + const response = await invokeFunction('fetch_folder_tasks', event); + + // Step 3: Validate response structure + if (!response) { + throw new Error( + 'Response is undefined. Expected a response object with status_code and api_delay fields.' + ); + } + + if (typeof response.status_code === 'undefined') { + throw new Error( + `Response missing 'status_code' field. ` + + `Received response: ${JSON.stringify(response, null, 2)}` + ); + } + + if (typeof response.api_delay === 'undefined') { + throw new Error( + `Response missing 'api_delay' field. 
` + + `Received response: ${JSON.stringify(response, null, 2)}` + ); + } + + // Step 4: Validate status_code is 429 + if (response.status_code !== 429) { + throw new Error( + `Expected status_code to be 429 (Too Many Requests), but got ${response.status_code}.\n\n` + + `This indicates that rate limiting was not properly triggered or handled.\n\n` + + `Full response:\n${JSON.stringify(response, null, 2)}\n\n` + + `Troubleshooting:\n` + + `- Verify that the mock API server at ${RATE_LIMIT_CONTROL_URL} is running\n` + + `- Verify that start_rate_limiting endpoint was called successfully\n` + + `- Check if the implementation correctly propagates 429 status codes\n` + + `- Check WrikeClient.handleError() method for rate limiting logic` + ); + } + + expect(response.status_code).toBe(429); + + // Step 5: Validate api_delay is greater than 0 + if (response.api_delay <= 0) { + throw new Error( + `Expected api_delay to be greater than 0, but got ${response.api_delay}.\n\n` + + `This indicates that the retry-after header was not properly parsed or calculated.\n\n` + + `Full response:\n${JSON.stringify(response, null, 2)}\n\n` + + `Troubleshooting:\n` + + `- Check if WrikeClient.handleError() correctly extracts 'retry-after' header\n` + + `- Verify that the header value is being parsed as an integer\n` + + `- Check if the api_delay field is being set correctly in the error response` + ); + } + + expect(response.api_delay).toBeGreaterThan(0); + + // Step 6: Validate api_delay is less than or equal to 3 + if (response.api_delay > 3) { + throw new Error( + `Expected api_delay to be <= 3 seconds, but got ${response.api_delay} seconds.\n\n` + + `This indicates incorrect calculation of api_delay in the implementation code.\n\n` + + `Full response:\n${JSON.stringify(response, null, 2)}\n\n` + + `Root Cause:\n` + + `The implementation is likely not correctly calculating the delay from the retry-after header.\n` + + `The mock API server returns a retry-after value <= 3 seconds during 
rate limiting.\n\n` + + `Troubleshooting:\n` + + `- Review WrikeClient.handleError() method in src/core/wrike-client.ts\n` + + `- Check the logic that extracts and parses the 'retry-after' header\n` + + `- Verify that parseInt() is being used correctly\n` + + `- Ensure no multiplication or incorrect conversion is being applied\n\n` + + `Expected behavior:\n` + + `api_delay should equal the numeric value from the 'retry-after' header (in seconds)` + ); + } + + expect(response.api_delay).toBeLessThanOrEqual(3); + + // Step 7: Validate response message + expect(response.message).toBeDefined(); + expect(typeof response.message).toBe('string'); + expect(response.message.length).toBeGreaterThan(0); + + console.log( + `✓ Rate limiting test passed:\n` + + ` - status_code: ${response.status_code}\n` + + ` - api_delay: ${response.api_delay} seconds\n` + + ` - message: ${response.message}` + ); + + } catch (error: any) { + // Enhanced error reporting + if (error.response) { + throw new Error( + `HTTP request failed during rate limiting test:\n` + + `Status: ${error.response.status}\n` + + `URL: ${error.config?.url}\n` + + `Response: ${JSON.stringify(error.response.data, null, 2)}\n\n` + + `Original error: ${error.message}` + ); + } + throw error; + } finally { + // Step 8: Always end rate limiting, even if test fails + if (rateLimitingStarted) { + try { + console.log('Ending rate limiting...'); + await axios.post(`${RATE_LIMIT_CONTROL_URL}/end_rate_limiting`); + console.log('Rate limiting ended successfully'); + } catch (cleanupError: any) { + console.error( + `Warning: Failed to end rate limiting. ` + + `This may affect subsequent tests. 
` + + `Error: ${cleanupError.message}` + ); + } + } + } + }, 30000); + + it('should handle network errors when rate limiting control endpoints are unavailable', async () => { + // This test verifies graceful handling when the mock server is not available + const INVALID_URL = 'http://localhost:9999'; + + try { + await axios.post(`${INVALID_URL}/start_rate_limiting`, { + test_name: 'network_error_test', + }); + // If we reach here, the test should fail + throw new Error('Expected network error but request succeeded'); + } catch (error: any) { + // Verify it's a network error, not an unexpected error + expect(error.code).toBeDefined(); + expect(['ECONNREFUSED', 'ETIMEDOUT', 'ENOTFOUND']).toContain(error.code); + } + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_responsibleIds.test.ts b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_responsibleIds.test.ts new file mode 100644 index 0000000..de3d81a --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/fetch_folder_tasks_responsibleIds.test.ts @@ -0,0 +1,110 @@ +import { Server } from 'http'; +import { setupCallbackServer, teardownCallbackServer, invokeFunction } from './utils/test-helpers'; +import { buildFetchFolderTasksEvent } from './utils/event-builder'; + +describe('fetch_folder_tasks responsibleIds field acceptance test', () => { + let callbackServer: Server; + + beforeAll(async () => { + const { server } = await setupCallbackServer(8002); + callbackServer = server; + }); + + afterAll(async () => await teardownCallbackServer(callbackServer)); + + it('should return all tasks with responsibleIds field from folder IEAGS6BYI5RFMPP7', async () => { + // Build event for the specific folder + const event = buildFetchFolderTasksEvent({ + folderId: 'IEAGS6BYI5RFMPP7', + }); + + // Invoke the function + const response = await invokeFunction('fetch_folder_tasks', event); + + // Validate response structure + 
expect(response).toBeDefined(); + expect(response.status).toBe('success'); + expect(response.status_code).toBe(200); + expect(response.metadata).toBeDefined(); + expect(response.metadata.folder_id).toBe('IEAGS6BYI5RFMPP7'); + expect(response.data).toBeDefined(); + expect(Array.isArray(response.data)).toBe(true); + + // Get the tasks array + const tasks = response.data; + const taskCount = tasks.length; + + // Handle empty response case + if (taskCount === 0) { + console.warn( + `Warning: No tasks found in folder IEAGS6BYI5RFMPP7. ` + + `Cannot validate responsibleIds field presence. ` + + `Response metadata: ${JSON.stringify(response.metadata, null, 2)}` + ); + // If there are no tasks, the test passes vacuously + return; + } + + // Validate that every task has the responsibleIds field + const tasksWithoutResponsibleIds: Array<{ index: number; id: string; task: any }> = []; + + tasks.forEach((task: any, index: number) => { + if (!task.hasOwnProperty('responsibleIds')) { + tasksWithoutResponsibleIds.push({ + index, + id: task.id || 'UNKNOWN_ID', + task, + }); + } + }); + + // If any tasks are missing responsibleIds, fail with detailed error message + if (tasksWithoutResponsibleIds.length > 0) { + const errorDetails = tasksWithoutResponsibleIds.map(({ index, id, task }) => { + return ` - Task at index ${index} (ID: ${id}):\n` + + ` Structure: ${JSON.stringify(task, null, 6)}`; + }).join('\n'); + + const errorMessage = + `Acceptance Test Failed: ${tasksWithoutResponsibleIds.length} out of ${taskCount} tasks ` + + `are missing the 'responsibleIds' field.\n\n` + + `Folder ID: IEAGS6BYI5RFMPP7\n` + + `Total tasks checked: ${taskCount}\n` + + `Tasks missing 'responsibleIds':\n${errorDetails}\n\n` + + `Response metadata: ${JSON.stringify(response.metadata, null, 2)}\n\n` + + `Expected: Every task in the response should have a 'responsibleIds' field.\n` + + `Actual: ${tasksWithoutResponsibleIds.length} task(s) missing this field.`; + + throw new Error(errorMessage); + } + + 
// All tasks have responsibleIds field - test passes + expect(tasksWithoutResponsibleIds.length).toBe(0); + + // Additional validation: Verify responsibleIds is an array (as per Wrike API spec) + tasks.forEach((task: any, index: number) => { + expect(Array.isArray(task.responsibleIds)).toBe(true); + }); + + console.log( + `✓ Acceptance Test Passed: All ${taskCount} tasks from folder IEAGS6BYI5RFMPP7 ` + + `contain the 'responsibleIds' field.` + ); + }, 30000); + + it('should handle API errors gracefully when fetching tasks', async () => { + // Test with an invalid folder ID to verify error handling + const event = buildFetchFolderTasksEvent({ + folderId: 'INVALID_FOLDER_ID_FOR_ERROR_TEST', + }); + + const response = await invokeFunction('fetch_folder_tasks', event); + + // Should return error status + expect(response.status).toBe('error'); + expect(response.status_code).toBeGreaterThanOrEqual(400); + expect(response.message).toBeDefined(); + expect(typeof response.message).toBe('string'); + expect(response.message.length).toBeGreaterThan(0); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/utils/event-builder.ts b/conformance_tests/folder_tasks_fetch_tests/tests/utils/event-builder.ts new file mode 100644 index 0000000..21ddc59 --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/utils/event-builder.ts @@ -0,0 +1,41 @@ +import { loadTestConfig } from './test-helpers'; + +export function buildBaseEvent(functionName: string): any { + const config = loadTestConfig(); + return { + execution_metadata: { + request_id: `test-${Date.now()}-${Math.random().toString(36).substring(7)}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: 
{ service_account_token: 'test-token' }, + }, + payload: { + connection_data: { key: config.wrikeApiKey, org_id: config.wrikeSpaceId, key_type: 'bearer' }, + event_context: { + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + }, + event_type: 'test_event', + }, + input_data: { global_values: {}, event_sources: {} }, + }; +} + +export function buildFetchFolderTasksEvent(options?: any): any { + const event = buildBaseEvent('fetch_folder_tasks'); + if (options?.folderId) event.payload.event_context.external_sync_unit_id = options.folderId; + + const globalValues: any = {}; + if (options?.pageSize !== undefined) globalValues.pageSize = options.pageSize.toString(); + if (options?.nextPageToken) globalValues.nextPageToken = options.nextPageToken; + if (options?.updatedDate) globalValues.updatedDate = options.updatedDate; + event.input_data.global_values = globalValues; + return event; +} \ No newline at end of file diff --git a/conformance_tests/folder_tasks_fetch_tests/tests/utils/test-helpers.ts b/conformance_tests/folder_tasks_fetch_tests/tests/utils/test-helpers.ts new file mode 100644 index 0000000..8fa87be --- /dev/null +++ b/conformance_tests/folder_tasks_fetch_tests/tests/utils/test-helpers.ts @@ -0,0 +1,48 @@ +import axios from 'axios'; +import express, { Express } from 'express'; +import { Server } from 'http'; + +export function loadTestConfig() { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) throw new Error('WRIKE_API_KEY required'); + if (!wrikeSpaceId) throw new Error('WRIKE_SPACE_ID required'); + + return { wrikeApiKey, wrikeSpaceId }; +} + +export function setupCallbackServer(port: number): Promise<{ app: Express; server: Server }> { + return new Promise((resolve) => { + const app = express(); + app.use(express.json()); + const server = app.listen(port, () => resolve({ app, server })); + }); +} + +export function teardownCallbackServer(server: Server): Promise { + return new 
Promise((resolve, reject) => server.close((err) => (err ? reject(err) : resolve()))); +} + +export async function invokeFunction(functionName: string, event: any): Promise { + try { + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { 'Content-Type': 'application/json' }, + timeout: 30000, + }); + + // The server wraps the function result in an ExecutionResult object + const executionResult = response.data; + + // Check if there's an error in the execution result + if (executionResult.error) { + throw new Error(`Function execution error: ${JSON.stringify(executionResult.error)}`); + } + + // Return the actual function result + return executionResult.function_result; + } catch (error: any) { + if (error.response) throw new Error(`Invocation failed: ${error.response.status}`); + throw error; + } +} \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_generation/tsconfig.test.json b/conformance_tests/folder_tasks_fetch_tests/tsconfig.json similarity index 59% rename from conformance_tests/external_domain_metadata_generation/tsconfig.test.json rename to conformance_tests/folder_tasks_fetch_tests/tsconfig.json index b904dfc..b372925 100644 --- a/conformance_tests/external_domain_metadata_generation/tsconfig.test.json +++ b/conformance_tests/folder_tasks_fetch_tests/tsconfig.json @@ -2,14 +2,18 @@ "compilerOptions": { "target": "es2017", "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", "strict": true, + "esModuleInterop": true, "skipLibCheck": true, - "resolveJsonModule": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, "moduleResolution": "node", - "esModuleInterop": true + "types": ["jest", "node"] }, - "include": ["*.test.ts"], + "include": ["tests/**/*"], "exclude": ["node_modules", "dist"] } \ No newline at end of file diff --git 
a/conformance_tests/folders_external_sync_units_push_tests/extraction-external-sync-units.test.ts b/conformance_tests/folders_external_sync_units_push_tests/extraction-external-sync-units.test.ts new file mode 100644 index 0000000..d993bc1 --- /dev/null +++ b/conformance_tests/folders_external_sync_units_push_tests/extraction-external-sync-units.test.ts @@ -0,0 +1,128 @@ +import { setupCallbackServer, closeCallbackServer, getTestEnvironment, loadAndPrepareExtractionEvent, invokeSnapIn, CallbackServerSetup } from './test-utils'; + +describe('Extraction Function - External Sync Units Extraction (Acceptance Test)', () => { + let callbackSetup: CallbackServerSetup; + let env: ReturnType; + + beforeAll(() => { + env = getTestEnvironment(); + }); + + beforeEach(async () => { + callbackSetup = await setupCallbackServer(); + }); + + afterEach(async () => { + await closeCallbackServer(callbackSetup); + }); + + test('Acceptance Test: Verify external sync units extraction with specific test event', async () => { + const callbackUrl = `http://localhost:${callbackSetup.port}/callback`; + + // Load and prepare the test event from JSON file + let event: any; + try { + event = loadAndPrepareExtractionEvent(env, callbackUrl, 'external_sync_unit_check.json'); + } catch (error) { + throw new Error(`Failed to load test event: ${error instanceof Error ? error.message : String(error)}`); + } + + // Invoke the snap-in + let invokeError: any = null; + try { + await invokeSnapIn(event); + } catch (e) { + invokeError = e; + } + + if (invokeError) { + throw new Error(`Failed to invoke snap-in: ${invokeError instanceof Error ? 
invokeError.message : String(invokeError)}`); + } + + // Wait for callback to be received + await new Promise(resolve => setTimeout(resolve, 3000)); + + // Step 1: Verify exactly one callback event was received + if (callbackSetup.receivedEvents.length === 0) { + throw new Error('Expected to receive exactly 1 callback event, but received 0 events'); + } + + if (callbackSetup.receivedEvents.length > 1) { + throw new Error( + `Expected to receive exactly 1 callback event, but received ${callbackSetup.receivedEvents.length} events. ` + + `Events: ${JSON.stringify(callbackSetup.receivedEvents, null, 2)}` + ); + } + + const callbackEvent = callbackSetup.receivedEvents[0]; + + // Step 2: Verify event_type is EXTRACTION_EXTERNAL_SYNC_UNITS_DONE + if (!callbackEvent.event_type) { + throw new Error( + `Callback event is missing 'event_type' field. ` + + `Received event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + if (callbackEvent.event_type !== 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE') { + throw new Error( + `Expected event_type to be 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE', ` + + `but received '${callbackEvent.event_type}'. ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Step 3: Verify event_data exists + if (!callbackEvent.event_data) { + throw new Error( + `Callback event is missing 'event_data' field. ` + + `Received event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Step 4: Verify external_sync_units exists and is an array + if (!callbackEvent.event_data.external_sync_units) { + throw new Error( + `Callback event_data is missing 'external_sync_units' field. ` + + `Received event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + } + + if (!Array.isArray(callbackEvent.event_data.external_sync_units)) { + throw new Error( + `Expected 'external_sync_units' to be an array, but received type '${typeof callbackEvent.event_data.external_sync_units}'. 
` + + `Value: ${JSON.stringify(callbackEvent.event_data.external_sync_units, null, 2)}` + ); + } + + const externalSyncUnits = callbackEvent.event_data.external_sync_units; + + // Step 5: Verify array length is exactly 3 + if (externalSyncUnits.length !== 3) { + throw new Error( + `Expected external_sync_units array to have exactly 3 elements, but found ${externalSyncUnits.length} elements. ` + + `Array content: ${JSON.stringify(externalSyncUnits, null, 2)}` + ); + } + + // Step 6: Verify at least one element has name "First project" + const firstProjectUnit = externalSyncUnits.find((unit: any) => unit.name === 'First project'); + + if (!firstProjectUnit) { + const allNames = externalSyncUnits.map((unit: any) => unit.name || ''); + throw new Error( + `Expected to find an external sync unit with name 'First project', but none found. ` + + `Available names: ${JSON.stringify(allNames)}. ` + + `Full array: ${JSON.stringify(externalSyncUnits, null, 2)}` + ); + } + + // All validations passed + expect(callbackSetup.receivedEvents.length).toBe(1); + expect(callbackEvent.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); + expect(Array.isArray(externalSyncUnits)).toBe(true); + expect(externalSyncUnits.length).toBe(3); + expect(firstProjectUnit).toBeDefined(); + expect(firstProjectUnit.name).toBe('First project'); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/folders_external_sync_units_push_tests/extraction.test.ts b/conformance_tests/folders_external_sync_units_push_tests/extraction.test.ts new file mode 100644 index 0000000..b8ac5c3 --- /dev/null +++ b/conformance_tests/folders_external_sync_units_push_tests/extraction.test.ts @@ -0,0 +1,86 @@ +import { setupCallbackServer, closeCallbackServer, getTestEnvironment, buildExtractionEvent, invokeSnapIn, CallbackServerSetup } from './test-utils'; + +describe('Extraction Function - External Sync Units Extraction', () => { + let callbackSetup: CallbackServerSetup; + let env: ReturnType; + + 
beforeAll(() => { + env = getTestEnvironment(); + }); + + beforeEach(async () => { + callbackSetup = await setupCallbackServer(); + }); + + afterEach(async () => { + await closeCallbackServer(callbackSetup); + }); + + test('Test 1: Basic Invocation - Function executes without errors', async () => { + const callbackUrl = `http://localhost:${callbackSetup.port}/callback`; + const event = buildExtractionEvent(env, callbackUrl); + + let error: any = null; + try { + await invokeSnapIn(event); + } catch (e) { + error = e; + } + + expect(error).toBeNull(); + }, 30000); + + test('Test 2: Callback Event Emission - Function emits EXTRACTION_EXTERNAL_SYNC_UNITS_DONE', async () => { + const callbackUrl = `http://localhost:${callbackSetup.port}/callback`; + const event = buildExtractionEvent(env, callbackUrl); + + await invokeSnapIn(event); + + await new Promise(resolve => setTimeout(resolve, 2000)); + + expect(callbackSetup.receivedEvents.length).toBeGreaterThan(0); + + const callbackEvent = callbackSetup.receivedEvents[0]; + expect(callbackEvent).toBeDefined(); + expect(callbackEvent.event_type).toBe('EXTRACTION_EXTERNAL_SYNC_UNITS_DONE'); + }, 30000); + + test('Test 3: External Sync Units Data Validation - Correct structure and field mapping', async () => { + const callbackUrl = `http://localhost:${callbackSetup.port}/callback`; + const event = buildExtractionEvent(env, callbackUrl); + + await invokeSnapIn(event); + + await new Promise(resolve => setTimeout(resolve, 2000)); + + expect(callbackSetup.receivedEvents.length).toBeGreaterThan(0); + + const callbackEvent = callbackSetup.receivedEvents[0]; + expect(callbackEvent.event_data).toBeDefined(); + expect(callbackEvent.event_data.external_sync_units).toBeDefined(); + expect(Array.isArray(callbackEvent.event_data.external_sync_units)).toBe(true); + + const externalSyncUnits = callbackEvent.event_data.external_sync_units; + + for (const unit of externalSyncUnits) { + expect(unit.id).toBeDefined(); + expect(typeof 
unit.id).toBe('string'); + expect(unit.id.length).toBeGreaterThan(0); + + expect(unit.name).toBeDefined(); + expect(typeof unit.name).toBe('string'); + expect(unit.name.length).toBeGreaterThan(0); + + expect(unit.description).toBeDefined(); + expect(typeof unit.description).toBe('string'); + + expect(unit.item_type).toBe('tasks'); + + const allowedKeys = ['id', 'name', 'description', 'item_type']; + const actualKeys = Object.keys(unit); + const unexpectedKeys = actualKeys.filter(key => !allowedKeys.includes(key)); + + expect(unexpectedKeys).toEqual([]); + } + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/folders_external_sync_units_push_tests/jest.config.js b/conformance_tests/folders_external_sync_units_push_tests/jest.config.js new file mode 100644 index 0000000..f9b7b0e --- /dev/null +++ b/conformance_tests/folders_external_sync_units_push_tests/jest.config.js @@ -0,0 +1,15 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.tsx?$': 'ts-jest', + }, + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + ], +}; \ No newline at end of file diff --git a/conformance_tests/api_authentication_check/package.json b/conformance_tests/folders_external_sync_units_push_tests/package.json similarity index 76% rename from conformance_tests/api_authentication_check/package.json rename to conformance_tests/folders_external_sync_units_push_tests/package.json index 8d85395..add59d3 100644 --- a/conformance_tests/api_authentication_check/package.json +++ b/conformance_tests/folders_external_sync_units_push_tests/package.json @@ -1,16 +1,10 @@ { - "name": "wrike-snap-in-tests", + "name": "conformance-tests", "version": "1.0.0", - "description": "Conformance tests for Wrike snap-in", - "main": "index.js", + "description": "Conformance tests for Wrike Airdrop 
Snap-in", "scripts": { "test": "jest" }, - "dependencies": { - "axios": "^1.9.0", - "body-parser": "^1.20.3", - "express": "^4.21.0" - }, "devDependencies": { "@types/express": "^4.17.21", "@types/jest": "^29.4.0", @@ -18,5 +12,10 @@ "jest": "^29.4.2", "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "body-parser": "^1.20.3", + "express": "^4.21.0" } } \ No newline at end of file diff --git a/conformance_tests/extraction_external_sync_unit_check/external_sync_unit_check.json b/conformance_tests/folders_external_sync_units_push_tests/test-events/external_sync_unit_check.json similarity index 93% rename from conformance_tests/extraction_external_sync_unit_check/external_sync_unit_check.json rename to conformance_tests/folders_external_sync_units_push_tests/test-events/external_sync_unit_check.json index 324d9f2..c0a68a6 100644 --- a/conformance_tests/extraction_external_sync_unit_check/external_sync_unit_check.json +++ b/conformance_tests/folders_external_sync_units_push_tests/test-events/external_sync_unit_check.json @@ -2,10 +2,10 @@ { "payload": { "connection_data": { - "key": "test-key", + "key": "", "key_type": "", - "org_id": "org-id", - "org_name": "Personal" + "org_id": "", + "org_name": "First Space" }, "event_context": { "callback_url": "http://localhost:8002/callback", @@ -58,7 +58,7 @@ }, "execution_metadata": { "request_id": "123", - "function_name": "extraction_external_sync_unit_check", + "function_name": "extraction", "event_type": "EXTRACTION_EXTERNAL_SYNC_UNITS_START", "devrev_endpoint": "http://localhost:8003" }, diff --git a/conformance_tests/folders_external_sync_units_push_tests/test-utils.ts b/conformance_tests/folders_external_sync_units_push_tests/test-utils.ts new file mode 100644 index 0000000..122ea4a --- /dev/null +++ b/conformance_tests/folders_external_sync_units_push_tests/test-utils.ts @@ -0,0 +1,157 @@ +import express, { Express } from 'express'; +import bodyParser from 'body-parser'; 
+import axios from 'axios'; +import { Server } from 'http'; +import * as fs from 'fs'; +import * as path from 'path'; + +export interface TestEnvironment { + WRIKE_API_KEY: string; + WRIKE_SPACE_ID: string; +} + +export function getTestEnvironment(): TestEnvironment { + const apiKey = process.env.WRIKE_API_KEY; + const spaceId = process.env.WRIKE_SPACE_ID; + + if (!apiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!spaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + WRIKE_API_KEY: apiKey, + WRIKE_SPACE_ID: spaceId, + }; +} + +export interface CallbackServerSetup { + server: Server; + app: Express; + receivedEvents: any[]; + port: number; +} + +export function setupCallbackServer(): Promise { + return new Promise((resolve) => { + const app = express(); + const receivedEvents: any[] = []; + const port = 8002; + + app.use(bodyParser.json()); + + app.post('/callback', (req, res) => { + receivedEvents.push(req.body); + res.status(200).send(); + }); + + const server = app.listen(port, () => { + resolve({ server, app, receivedEvents, port }); + }); + }); +} + +export function closeCallbackServer(setup: CallbackServerSetup): Promise { + return new Promise((resolve, reject) => { + setup.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + }); +} + +export function buildExtractionEvent(env: TestEnvironment, callbackUrl: string): any { + return { + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: env.WRIKE_API_KEY, + org_id: env.WRIKE_SPACE_ID, + org_name: 'Test Space', + key_type: 'api_key', + }, + event_context: { + callback_url: callbackUrl, + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 
'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'test-unit-id', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version-id', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + event_data: {}, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'extraction', + event_type: 'EXTRACTION_EXTERNAL_SYNC_UNITS_START', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +export async function invokeSnapIn(event: any): Promise { + const response = await axios.post('http://localhost:8000/handle/sync', event); + return response.data; +} + +export function loadAndPrepareExtractionEvent(env: TestEnvironment, callbackUrl: string, jsonFileName: string): any { + const filePath = path.join(__dirname, 'test-events', jsonFileName); + + if (!fs.existsSync(filePath)) { + throw new Error(`Test event file not found: ${filePath}`); + } + + const fileContent = fs.readFileSync(filePath, 'utf-8'); + let eventArray: any[]; + + try { + eventArray = JSON.parse(fileContent); + } catch (error) { + throw new Error(`Failed to parse test event JSON from ${filePath}: ${error}`); + } + + if (!Array.isArray(eventArray) || eventArray.length === 0) { + throw new Error(`Test event file ${filePath} must contain a non-empty array`); + } + + const event = eventArray[0]; + + event.payload.connection_data.key = env.WRIKE_API_KEY; + event.payload.connection_data.org_id = env.WRIKE_SPACE_ID; + 
event.payload.event_context.callback_url = callbackUrl; + + return event; +} \ No newline at end of file diff --git a/conformance_tests/folders_external_sync_units_push_tests/tsconfig.json b/conformance_tests/folders_external_sync_units_push_tests/tsconfig.json new file mode 100644 index 0000000..b473c55 --- /dev/null +++ b/conformance_tests/folders_external_sync_units_push_tests/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "resolveJsonModule": true, + "moduleResolution": "node" + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/function_invocation_check/healthcheck.test.ts b/conformance_tests/function_invocation_check/healthcheck.test.ts deleted file mode 100644 index 6f6797e..0000000 --- a/conformance_tests/function_invocation_check/healthcheck.test.ts +++ /dev/null @@ -1,223 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import * as bodyParser from 'body-parser'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -// Extend the AirdropEvent type to include function_name in execution_metadata -interface ExtendedAirdropEvent extends AirdropEvent { - execution_metadata: { - devrev_endpoint: string; - function_name: string; - }; -} - -// Constants for server URLs -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Create a simple callback server to receive responses -const app = express(); -app.use(bodyParser.json()); - -let callbackResponse: any = null; -let callbackServer: ReturnType | null = null; - -// Setup callback endpoint -app.post('/callback', (req, res) 
=> { - callbackResponse = req.body; - console.log('Received callback:', JSON.stringify(callbackResponse, null, 2)); - res.status(200).send('OK'); -}); - -// Helper function to create a minimal valid ExtendedAirdropEvent -function createMinimalAirdropEvent(): ExtendedAirdropEvent { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - connection_data: { - org_id: 'test-org-id', - org_name: 'test-org-name', - key: 'test-key', - key_type: 'test-key-type' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 'test-dev-org', - dev_org_id: 'test-dev-org-id', - dev_user: 'test-dev-user', - dev_user_id: 'test-dev-user-id', - external_sync_unit: 'test-external-sync-unit', - external_sync_unit_id: 'test-external-sync-unit-id', - external_sync_unit_name: 'test-external-sync-unit-name', - external_system: 'test-external-system', - external_system_type: 'test-external-system-type', - import_slug: 'test-import-slug', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in-slug', - snap_in_version_id: 'test-snap-in-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-sync-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - }, - event_type: EventType.ExtractionMetadataStart, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'healthcheck' // Added function_name here - }, - input_data: { - global_values: {}, - event_sources: {} - } - }; -} - -// Helper function to invoke the healthcheck function -async function invokeHealthcheckFunction(event: ExtendedAirdropEvent | any): Promise { - try { - // Format the request as expected by the server - // Send the event directly, with function_name in execution_metadata 
- const events = Array.isArray(event) ? event : [event]; - - // Ensure each event has function_name in execution_metadata - events.forEach(e => { - if (e && e.execution_metadata && !e.execution_metadata.function_name) { - e.execution_metadata.function_name = 'healthcheck'; - } - }); - - const response = await axios.post(TEST_SERVER_URL, events[0]); - return response.data; - } catch (error: any) { - console.error('Error invoking healthcheck function:', error.message); - if (error.response) { - console.error('Response data:', error.response.data); - } - throw error; - } -} - -// Setup and teardown -beforeAll(async () => { - // Start the callback server - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening at ${CALLBACK_SERVER_URL}`); - }); -}); - -afterAll(async () => { - // Close the callback server - jest.setTimeout(10000); // Give enough time for cleanup - if (callbackServer !== null) { - await new Promise((resolve) => { - callbackServer?.close(() => { - console.log('Callback server closed'); - resolve(); - }); - }); - } -}); - -beforeEach(() => { - // Reset callback response before each test - callbackResponse = null; -}); - -afterEach(() => { - // Ensure any pending axios requests are completed - jest.useRealTimers(); -}); - -// Test cases -describe('Healthcheck Function Conformance Tests', () => { - // Test 1: Basic Invocation Test - test('should successfully invoke the healthcheck function with a valid event', async () => { - const event = createMinimalAirdropEvent(); - const result = await invokeHealthcheckFunction(event); - - expect(result).toBeDefined(); - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('success'); - expect(result.function_result.message).toBe('Healthcheck function successfully invoked'); - expect(result.error).toBeUndefined(); - }, 30000); - - // Test 2: Input Validation Test - test('should validate that events parameter is an array', async () => { - 
// Create an invalid event (not an array) - const invalidEvent = "not-an-array"; - - try { - // We need to modify this to directly pass to axios since our helper function - // always converts to array - await axios.post(TEST_SERVER_URL, invalidEvent); - fail('Expected function to throw an error but it did not'); - } catch (error: any) { - expect(error.response).toBeDefined(); - expect(error.response.status).toBe(400); - // The error message might vary, but we expect some kind of error response - expect(error.response.data).toBeDefined(); - } - }, 30000); - - // Test 3: Response Structure Test - test('should return a properly structured response', async () => { - const event = createMinimalAirdropEvent(); - const result = await invokeHealthcheckFunction(event); - - // Verify the structure of the response - expect(result).toHaveProperty('function_result'); - expect(result.function_result).toHaveProperty('status'); - expect(result.function_result).toHaveProperty('message'); - expect(typeof result.function_result.status).toBe('string'); - expect(typeof result.function_result.message).toBe('string'); - }, 30000); - - // Test 4: Error Handling Test - test('should properly handle and report errors for invalid events', async () => { - // Create an event missing required fields but with minimal structure - // to ensure it's processed by the server - const invalidEvent = { - context: { - // Missing required fields - }, - execution_metadata: { - function_name: 'healthcheck', - devrev_endpoint: 'http://localhost:8003' - }, - // Intentionally missing other required fields to trigger validation error - }; - - try { - await axios.post(TEST_SERVER_URL, invalidEvent); - fail('Expected function to throw an error but it did not'); - } catch (error: any) { - // The server might respond with different types of errors - // We just need to verify that an error occurred - expect(error).toBeDefined(); - - // The error could be a response error or a network error - // We'll check for either 
case - if (error.response) { - expect(error.response.status).toBeGreaterThanOrEqual(400); - } else { - // If there's no response, it's likely a network error - // which is also an acceptable error case - expect(error.message).toBeDefined(); - } - } - }, 30000); -}); \ No newline at end of file diff --git a/conformance_tests/function_invocation_check/jest.config.js b/conformance_tests/function_invocation_check/jest.config.js deleted file mode 100644 index a062d0c..0000000 --- a/conformance_tests/function_invocation_check/jest.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, - verbose: true -}; \ No newline at end of file diff --git a/conformance_tests/function_invocation_check/package.json b/conformance_tests/function_invocation_check/package.json deleted file mode 100644 index 4410a60..0000000 --- a/conformance_tests/function_invocation_check/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "name": "airdrop-snap-in-conformance-tests", - "version": "1.0.0", - "description": "Conformance tests for DevRev Airdrop Snap-in", - "main": "index.js", - "scripts": { - "test": "jest --forceExit" - }, - "keywords": [], - "author": "", - "license": "ISC", - "devDependencies": { - "@types/body-parser": "^1.19.5", - "@types/express": "^4.17.21", - "@types/jest": "^29.5.12", - "@types/node": "^18.19.26", - "jest": "^29.7.0", - "ts-jest": "^29.1.2", - "typescript": "^4.9.5" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0", - "body-parser": "^1.20.3", - "express": "^4.21.0" - } -} \ No newline at end of file diff --git a/conformance_tests/function_invocation_check/types/index.d.ts b/conformance_tests/function_invocation_check/types/index.d.ts deleted file mode 100644 index 2b09b1e..0000000 --- a/conformance_tests/function_invocation_check/types/index.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { AirdropEvent } from '@devrev/ts-adaas'; - -// Extend the AirdropEvent interface to 
include function_name in execution_metadata -declare module '@devrev/ts-adaas' { - interface AirdropEvent { - execution_metadata: { - devrev_endpoint: string; - function_name?: string; - }; - } -} \ No newline at end of file diff --git a/conformance_tests/function_invocation_tests/health_check.test.ts b/conformance_tests/function_invocation_tests/health_check.test.ts new file mode 100644 index 0000000..d055c30 --- /dev/null +++ b/conformance_tests/function_invocation_tests/health_check.test.ts @@ -0,0 +1,155 @@ +import { createTestEvent, invokeFunction, validateHealthCheckResponse } from './test-utils'; + +describe('Health Check Function - Conformance Tests', () => { + // Test 1: Basic Invocation (Trivial) + test('should successfully invoke health_check function with minimal valid event', async () => { + const event = createTestEvent(); + + const response = await invokeFunction(event); + + // Verify HTTP response + expect(response.status).toBe(200); + expect(response.data).toBeDefined(); + + const result = response.data.function_result; + expect(result).toBeDefined(); + + // Validate response structure + try { + validateHealthCheckResponse(result, { + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + event_type: event.execution_metadata.event_type, + snap_in_id: event.context.snap_in_id, + dev_oid: event.context.dev_oid, + }); + } catch (error) { + throw new Error( + `Health check response validation failed: ${error}\nRequest: ${JSON.stringify(event, null, 2)}\nResponse: ${JSON.stringify(response.data, null, 2)}` + ); + } + }, 30000); + + // Test 2: Metadata Validation (Simple) + test('should return correct metadata from input event', async () => { + const customMetadata = { + execution_metadata: { + request_id: 'custom_req_12345', + function_name: 'health_check', + event_type: 'custom_test_event', + devrev_endpoint: 'https://api.devrev.ai/', + }, + context: { + dev_oid: 'DEV-CUSTOM-999', + source_id: 
'src-custom-888', + snap_in_id: 'snap-custom-777', + snap_in_version_id: 'ver-custom-666', + service_account_id: 'sa-custom-555', + secrets: { + service_account_token: 'custom_token_abc', + }, + }, + }; + + const event = createTestEvent(customMetadata); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + expect(response.data).toBeDefined(); + + const result = response.data.function_result; + expect(result).toBeDefined(); + + // Verify metadata matches input + expect(result.metadata.function_name).toBe(customMetadata.execution_metadata.function_name); + expect(result.metadata.request_id).toBe(customMetadata.execution_metadata.request_id); + expect(result.metadata.event_type).toBe(customMetadata.execution_metadata.event_type); + expect(result.metadata.snap_in_id).toBe(customMetadata.context.snap_in_id); + expect(result.metadata.dev_oid).toBe(customMetadata.context.dev_oid); + + // Validate full response structure + try { + validateHealthCheckResponse(result, { + function_name: customMetadata.execution_metadata.function_name, + request_id: customMetadata.execution_metadata.request_id, + event_type: customMetadata.execution_metadata.event_type, + snap_in_id: customMetadata.context.snap_in_id, + dev_oid: customMetadata.context.dev_oid, + }); + } catch (error) { + throw new Error( + `Metadata validation failed: ${error}\nExpected metadata: ${JSON.stringify(customMetadata, null, 2)}\nActual response: ${JSON.stringify(result, null, 2)}` + ); + } + }, 30000); + + // Test 3: Multiple Invocations (More Complex) + test('should handle multiple sequential invocations independently', async () => { + const invocationCount = 3; + const responses: any[] = []; + + for (let i = 0; i < invocationCount; i++) { + const event = createTestEvent({ + execution_metadata: { + request_id: `multi_req_${i}_${Date.now()}`, + function_name: 'health_check', + event_type: `test_event_${i}`, + devrev_endpoint: 'https://api.devrev.ai/', + }, + context: { + 
dev_oid: `DEV-${i}`, + source_id: `src-${i}`, + snap_in_id: `snap-${i}`, + snap_in_version_id: `ver-${i}`, + service_account_id: `sa-${i}`, + secrets: { + service_account_token: `token_${i}`, + }, + }, + }); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + expect(response.data).toBeDefined(); + + const result = response.data.function_result; + expect(result).toBeDefined(); + + // Validate each response + try { + validateHealthCheckResponse(result, { + function_name: event.execution_metadata.function_name, + request_id: event.execution_metadata.request_id, + event_type: event.execution_metadata.event_type, + snap_in_id: event.context.snap_in_id, + dev_oid: event.context.dev_oid, + }); + } catch (error) { + throw new Error( + `Invocation ${i} failed validation: ${error}\nRequest: ${JSON.stringify(event, null, 2)}\nResponse: ${JSON.stringify(result, null, 2)}` + ); + } + + responses.push(result); + + // Small delay between invocations to ensure different timestamps + await new Promise((resolve) => setTimeout(resolve, 10)); + } + + // Verify all responses are independent + for (let i = 0; i < invocationCount; i++) { + expect(responses[i].metadata.dev_oid).toBe(`DEV-${i}`); + expect(responses[i].metadata.snap_in_id).toBe(`snap-${i}`); + expect(responses[i].metadata.event_type).toBe(`test_event_${i}`); + } + + // Verify timestamps are different (allowing for same millisecond in rare cases) + const timestamps = responses.map((r) => r.timestamp); + const uniqueTimestamps = new Set(timestamps); + + // At least 2 should be different given the delays + expect(uniqueTimestamps.size).toBeGreaterThanOrEqual(2); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/function_invocation_tests/jest.config.js b/conformance_tests/function_invocation_tests/jest.config.js new file mode 100644 index 0000000..cecb3a1 --- /dev/null +++ b/conformance_tests/function_invocation_tests/jest.config.js @@ -0,0 +1,23 @@ 
+module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + collectCoverage: false, + verbose: true, + forceExit: true, + detectOpenHandles: true, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + resolveJsonModule: true + } + }] + } +}; \ No newline at end of file diff --git a/conformance_tests/function_invocation_tests/package.json b/conformance_tests/function_invocation_tests/package.json new file mode 100644 index 0000000..9e96761 --- /dev/null +++ b/conformance_tests/function_invocation_tests/package.json @@ -0,0 +1,18 @@ +{ + "name": "conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for DevRev Snap-In", + "scripts": { + "test": "jest --runInBand --detectOpenHandles" + }, + "devDependencies": { + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/function_invocation_tests/test-utils.ts b/conformance_tests/function_invocation_tests/test-utils.ts new file mode 100644 index 0000000..786c599 --- /dev/null +++ b/conformance_tests/function_invocation_tests/test-utils.ts @@ -0,0 +1,131 @@ +import axios, { AxiosResponse } from 'axios'; + +export const TEST_SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; + +export interface TestEventPayload { + execution_metadata: { + request_id: string; + function_name: string; + event_type: string; + devrev_endpoint: string; + }; + context: { + dev_oid: string; + source_id: string; + snap_in_id: string; + snap_in_version_id: string; + service_account_id: string; + secrets: Record<string, string>; + }; + payload: Record<string, any>; + input_data: { + global_values: Record<string, string>; + 
event_sources: Record<string, string>; + }; +} + +export function createTestEvent(overrides?: Partial<TestEventPayload>): TestEventPayload { + const defaultEvent: TestEventPayload = { + execution_metadata: { + request_id: `req_${Date.now()}`, + function_name: 'health_check', + event_type: 'test_event', + devrev_endpoint: 'https://api.devrev.ai/', + }, + context: { + dev_oid: 'DEV-123', + source_id: 'src-456', + snap_in_id: 'snap-789', + snap_in_version_id: 'ver-001', + service_account_id: 'sa-111', + secrets: { + service_account_token: 'test_token', + }, + }, + payload: {}, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; + + return { + ...defaultEvent, + ...overrides, + execution_metadata: { + ...defaultEvent.execution_metadata, + ...(overrides?.execution_metadata || {}), + }, + context: { + ...defaultEvent.context, + ...(overrides?.context || {}), + }, + }; +} + +export async function invokeFunction(event: TestEventPayload): Promise<AxiosResponse> { + try { + return await axios.post(TEST_SNAP_IN_SERVER_URL, event, { + headers: { 'Content-Type': 'application/json' }, + validateStatus: () => true, // Don't throw on any status + }); + } catch (error) { + throw new Error(`Failed to invoke function: ${error}`); + } +} + +export function validateHealthCheckResponse(response: any, expectedMetadata: any): void { + if (!response) { + throw new Error('Response is null or undefined'); + } + + // Validate status field + if (!response.status) { + throw new Error(`Missing 'status' field in response. Full response: ${JSON.stringify(response, null, 2)}`); + } + + if (response.status !== 'success') { + throw new Error( + `Expected status 'success', got '${response.status}'. Full response: ${JSON.stringify(response, null, 2)}` + ); + } + + // Validate message field + if (!response.message) { + throw new Error(`Missing 'message' field in response. 
Full response: ${JSON.stringify(response, null, 2)}`); + } + + // Validate metadata field + if (!response.metadata) { + throw new Error(`Missing 'metadata' field in response. Full response: ${JSON.stringify(response, null, 2)}`); + } + + // Validate metadata contents + const requiredMetadataFields = ['function_name', 'request_id', 'event_type', 'snap_in_id', 'dev_oid']; + for (const field of requiredMetadataFields) { + if (!response.metadata[field]) { + throw new Error( + `Missing '${field}' in metadata. Full response: ${JSON.stringify(response, null, 2)}` + ); + } + + if (expectedMetadata[field] && response.metadata[field] !== expectedMetadata[field]) { + throw new Error( + `Metadata field '${field}' mismatch. Expected: '${expectedMetadata[field]}', Got: '${response.metadata[field]}'. Full response: ${JSON.stringify(response, null, 2)}` + ); + } + } + + // Validate timestamp field + if (!response.timestamp) { + throw new Error(`Missing 'timestamp' field in response. Full response: ${JSON.stringify(response, null, 2)}`); + } + + // Validate timestamp format (ISO 8601) + const timestampRegex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/; + if (!timestampRegex.test(response.timestamp)) { + throw new Error( + `Invalid timestamp format. Expected ISO 8601, got: '${response.timestamp}'. 
Full response: ${JSON.stringify(response, null, 2)}` + ); + } +} \ No newline at end of file diff --git a/conformance_tests/function_invocation_tests/tsconfig.json b/conformance_tests/function_invocation_tests/tsconfig.json new file mode 100644 index 0000000..57f0a28 --- /dev/null +++ b/conformance_tests/function_invocation_tests/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "resolveJsonModule": true, + "moduleResolution": "node", + "allowSyntheticDefaultImports": true, + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation/initial-mapping-validation.test.ts b/conformance_tests/initial_domain_mapping_generation/initial-mapping-validation.test.ts deleted file mode 100644 index 83e441d..0000000 --- a/conformance_tests/initial_domain_mapping_generation/initial-mapping-validation.test.ts +++ /dev/null @@ -1,178 +0,0 @@ -import axios from 'axios'; -import { exec } from 'child_process'; -import * as fsPromises from 'fs/promises'; -import * as path from 'path'; -import * as fs from 'fs'; -import { promisify } from 'util'; - -// Promisify exec for easier async/await usage -const execAsync = promisify(exec); - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const TEST_TIMEOUT = 60000; // 60 seconds per test -const TEMP_DIR = path.join(__dirname, 'temp'); - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || 'test-api-key'; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'test-space-id'; -const TEST_PROJECT_ID = 'IEAGS6BYI5RFMPPY'; // Can be used when space 
ID is required -const CHEF_CLI_PATH = process.env.CHEF_CLI_PATH; - -// Helper function to create a test event -function createTestEvent(functionName: string) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - execution_metadata: { - function_name: functionName, - devrev_endpoint: 'http://localhost:8003' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - external_sync_unit_id: TEST_PROJECT_ID - } - } - }; -} - -// Ensure temp directory exists -beforeAll(async () => { - try { - await fsPromises.mkdir(TEMP_DIR, { recursive: true }); - } catch (error) { - console.error('Error creating temp directory:', error); - } -}); - -// Clean up temp files after tests -afterAll(async () => { - try { - const files = await fsPromises.readdir(TEMP_DIR); - for (const file of files) { - try { - await fsPromises.unlink(path.join(TEMP_DIR, file)); - } catch (error) { - console.error(`Error deleting file ${file}:`, error); - } - } - await fsPromises.rmdir(TEMP_DIR); - } catch (error) { - console.error('Error cleaning up temp directory:', error); - } -}); - -describe('Initial Domain Mapping Validation Tests', () => { - test('Initial domain mapping is valid according to Chef CLI', async () => { - // Skip test if Chef CLI is not available - if (!CHEF_CLI_PATH) { - console.error('CHEF_CLI_PATH environment variable is not set. 
Skipping test.'); - // Use Jest's built-in skip functionality - return test.skip('Chef CLI is not available'); - } - - try { - // Check if Chef CLI exists and is executable - try { - await execAsync(`${CHEF_CLI_PATH} --version`); - } catch (error) { - console.error('Chef CLI is not available or not executable at path:', CHEF_CLI_PATH); - console.log('Skipping Chef CLI validation test'); - expect(true).toBe(true); // Pass the test but log the issue - return; - } - - // Step 1: Get External Domain Metadata - console.log('Fetching External Domain Metadata...'); - const metadataResponse = await axios.post(TEST_SERVER_URL, createTestEvent('generate_metadata')); - - expect(metadataResponse.status).toBe(200); - expect(metadataResponse.data).toBeDefined(); - expect(metadataResponse.data.function_result).toBeDefined(); - expect(metadataResponse.data.function_result.status).toBe('success'); - expect(metadataResponse.data.function_result.metadata).toBeDefined(); - - const metadata = metadataResponse.data.function_result.metadata; - - // Step 2: Get Initial Domain Mapping - console.log('Fetching Initial Domain Mapping...'); - const mappingResponse = await axios.post(TEST_SERVER_URL, createTestEvent('generate_initial_mapping')); - - expect(mappingResponse.status).toBe(200); - expect(mappingResponse.data).toBeDefined(); - expect(mappingResponse.data.function_result).toBeDefined(); - expect(mappingResponse.data.function_result.status).toBe('success'); - expect(mappingResponse.data.function_result.mapping).toBeDefined(); - - const mapping = mappingResponse.data.function_result.mapping; - - // Step 3: Save metadata and mapping to temporary files - const metadataFilePath = path.resolve(TEMP_DIR, 'external_domain_metadata.json'); - const mappingFilePath = path.resolve(TEMP_DIR, 'initial_domain_mapping.json'); - - await fsPromises.writeFile(metadataFilePath, JSON.stringify(metadata, null, 2)); - await fsPromises.writeFile(mappingFilePath, JSON.stringify(mapping, null, 2)); - - 
console.log(`Saved metadata to ${metadataFilePath}`); - console.log(`Saved mapping to ${mappingFilePath}`); - - // Step 4: Validate mapping using Chef CLI - const chefCliCommand = `cat ${mappingFilePath} | ${CHEF_CLI_PATH} initial-mapping check -m ${metadataFilePath}`; - console.log(`Executing Chef CLI command: ${chefCliCommand}`); - - try { - const { stdout, stderr } = await execAsync(chefCliCommand); - - // Log any output for debugging - if (stdout) console.log('Chef CLI stdout:', stdout); - if (stderr) console.log('Chef CLI stderr:', stderr); - - // Instead of expecting empty output, we'll check for specific validation issues - // that we know are acceptable for our test purposes - const output = stdout.trim(); - if (output) { - // Parse the JSON output if possible - try { - const validationResult = JSON.parse(output); - // Check for expected warnings or deficiencies - console.log('Chef CLI validation found issues, but these are expected for testing purposes'); - } catch (parseError) { - console.error('Failed to parse Chef CLI output as JSON:', parseError); - } - } - - console.log('Initial domain mapping validation successful'); - } catch (error: any) { - console.error('Chef CLI validation failed:', error); - console.error('Chef CLI stdout:', error.stdout); - console.error('Chef CLI stderr:', error.stderr); - - // For debugging purposes, log the content of the files - console.log('Metadata file path:', metadataFilePath); - console.log('Mapping file path:', mappingFilePath); - - throw new Error(`Chef CLI validation failed: ${error.message}\nStdout: ${error.stdout}\nStderr: ${error.stderr}`); - - return; - } - } catch (error: any) { - // Handle any other errors - console.error('Test failed with error:', error); - if (error.response) { - console.error('API Response data:', error.response.data); - console.error('API Response status:', error.response.status); - } - throw error; - } - }, TEST_TIMEOUT); -}); \ No newline at end of file diff --git 
a/conformance_tests/initial_domain_mapping_generation/initial-mapping.test.ts b/conformance_tests/initial_domain_mapping_generation/initial-mapping.test.ts deleted file mode 100644 index fd93b28..0000000 --- a/conformance_tests/initial_domain_mapping_generation/initial-mapping.test.ts +++ /dev/null @@ -1,253 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import { Server } from 'http'; -import { AddressInfo } from 'net'; -import bodyParser from 'body-parser'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const TEST_TIMEOUT = 30000; // 30 seconds per test - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || 'test-api-key'; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'test-space-id'; -const TEST_PROJECT_ID = 'IEAGS6BYI5RFMPPY'; // Can be used when space ID is required - -// Setup callback server -let callbackServer: Server; -let callbackData: any = null; - -beforeAll(async () => { - // Create a simple express server to handle callbacks - const app = express(); - app.use(bodyParser.json()); - - // Handle all POST requests to any path - app.post('*', (req, res) => { - callbackData = req.body; - res.status(200).json({ status: 'success' }); - }); - - // Start the callback server - return new Promise((resolve, reject) => { - callbackServer = app.listen(CALLBACK_SERVER_PORT, '127.0.0.1'); - - callbackServer.on('listening', () => { - console.log(`Callback server running at ${CALLBACK_SERVER_URL}`); - console.log(`Callback server listening on port ${(callbackServer.address() as AddressInfo).port}`); - resolve(); - }); - - callbackServer.on('error', (err) => { - console.error('Error starting callback server:', err); - reject(err); - }); - }); -}); - -afterAll(async () => { - // Close the callback server - if (callbackServer && callbackServer.listening) { - return new 
Promise((resolve, reject) => { - let closeError: Error | null = null; - const closeTimeout = setTimeout(() => { - // Force close any remaining connections - if (closeError) console.error('Error closing callback server (this is expected if tests failed):', closeError); - console.log('Callback server closed'); - callbackData = null; - resolve(); - }); - // Force timeout after 5 seconds to prevent hanging - const forceCloseTimeout = setTimeout(() => { - clearTimeout(closeTimeout); - console.log('Force closing callback server after timeout'); - resolve(); - }, 5000); - - callbackServer.close((err) => { - closeError = err || null; - clearTimeout(forceCloseTimeout); - if (err) console.error('Error closing callback server:', err); - console.log('Callback server closed normally'); - callbackData = null; - resolve(); - }); - }); - } else { - return Promise.resolve(); - } -}); - -// Helper function to create a test event -function createTestEvent() { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - execution_metadata: { - function_name: 'generate_initial_mapping', - devrev_endpoint: 'http://localhost:8003' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - external_sync_unit_id: TEST_PROJECT_ID - } - } - }; -} - -describe('Initial Domain Mapping Tests', () => { - // Test 1: Basic Structure Test - test('Initial domain mapping has the expected structure', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent(), { - timeout: 5000 - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.mapping).toBeDefined(); - 
expect(response.data.function_result.mapping.additional_mappings).toBeDefined(); - expect(response.data.function_result.mapping.additional_mappings.record_type_mappings).toBeDefined(); - }, TEST_TIMEOUT); - - // Test 2: Record Type Mappings Test - test('Initial domain mapping contains required record types', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent(), { - timeout: 5000 - }); - - const mapping = response.data.function_result.mapping; - const recordTypeMappings = mapping.additional_mappings.record_type_mappings; - - // Check for tasks record type - expect(recordTypeMappings.tasks).toBeDefined(); - expect(recordTypeMappings.tasks.default_mapping).toBeDefined(); - expect(recordTypeMappings.tasks.default_mapping.object_type).toBe('ticket'); - expect(recordTypeMappings.tasks.default_mapping.object_category).toBe('stock'); - - // Check for users record type - expect(recordTypeMappings.users).toBeDefined(); - expect(recordTypeMappings.users.default_mapping).toBeDefined(); - expect(recordTypeMappings.users.default_mapping.object_type).toBe('revu'); - expect(recordTypeMappings.users.default_mapping.object_category).toBe('stock'); - }, TEST_TIMEOUT); - - // Test 3: Field Mappings Test - test('Record types have necessary field mappings', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent(), { - timeout: 5000 - }); - - const mapping = response.data.function_result.mapping; - const recordTypeMappings = mapping.additional_mappings.record_type_mappings; - - // Check tasks field mappings - const tasksMapping = recordTypeMappings.tasks.possible_record_type_mappings[0]; - expect(tasksMapping).toBeDefined(); - expect(tasksMapping.shard).toBeDefined(); - expect(tasksMapping.shard.stock_field_mappings).toBeDefined(); - - const tasksFieldMappings = tasksMapping.shard.stock_field_mappings; - expect(tasksFieldMappings.title).toBeDefined(); - expect(tasksFieldMappings.body).toBeDefined(); - 
expect(tasksFieldMappings.stage).toBeDefined(); - expect(tasksFieldMappings.severity).toBeDefined(); - - // Check users field mappings - const usersMapping = recordTypeMappings.users.possible_record_type_mappings[0]; - expect(usersMapping).toBeDefined(); - expect(usersMapping.shard).toBeDefined(); - expect(usersMapping.shard.stock_field_mappings).toBeDefined(); - - const usersFieldMappings = usersMapping.shard.stock_field_mappings; - expect(usersFieldMappings.display_name).toBeDefined(); - expect(usersFieldMappings.email).toBeDefined(); - }, TEST_TIMEOUT); - - // Test 4: Transformation Methods Test - test('Field mappings have correct transformation methods', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent(), { - timeout: 5000 - }); - - const mapping = response.data.function_result.mapping; - const recordTypeMappings = mapping.additional_mappings.record_type_mappings; - - // Check tasks transformation methods - const tasksFieldMappings = recordTypeMappings.tasks.possible_record_type_mappings[0].shard.stock_field_mappings; - - // Title should use "use_directly" - expect(tasksFieldMappings.title.transformation_method_for_set).toBeDefined(); - expect(tasksFieldMappings.title.transformation_method_for_set.transformation_method).toBe('use_directly'); - - // Body should use "use_rich_text" - expect(tasksFieldMappings.body.transformation_method_for_set).toBeDefined(); - expect(tasksFieldMappings.body.transformation_method_for_set.transformation_method).toBe('use_rich_text'); - - // Stage should use "map_enum" - expect(tasksFieldMappings.stage.transformation_method_for_set).toBeDefined(); - expect(tasksFieldMappings.stage.transformation_method_for_set.transformation_method).toBe('map_enum'); - expect(tasksFieldMappings.stage.transformation_method_for_set.forward).toBeDefined(); - expect(tasksFieldMappings.stage.transformation_method_for_set.reverse).toBeDefined(); - - // Check users transformation methods - const usersFieldMappings = 
recordTypeMappings.users.possible_record_type_mappings[0].shard.stock_field_mappings; - - // Email should use "use_directly" - expect(usersFieldMappings.email.transformation_method_for_set).toBeDefined(); - expect(usersFieldMappings.email.transformation_method_for_set.transformation_method).toBe('use_directly'); - }, TEST_TIMEOUT); - - // Test 5: End-to-End Test - test('Complete initial domain mapping is valid and matches expected schema', async () => { - const response = await axios.post(TEST_SERVER_URL, createTestEvent(), { - timeout: 5000 // Reduced timeout to avoid hanging connections - }); - - const mapping = response.data.function_result.mapping; - - // Verify overall structure - expect(mapping.additional_mappings).toBeDefined(); - expect(mapping.additional_mappings.record_type_mappings).toBeDefined(); - - // Verify tasks mapping - const tasksMappings = mapping.additional_mappings.record_type_mappings.tasks; - expect(tasksMappings.default_mapping).toBeDefined(); - expect(tasksMappings.possible_record_type_mappings).toBeInstanceOf(Array); - expect(tasksMappings.possible_record_type_mappings.length).toBeGreaterThan(0); - expect(tasksMappings.mapping_as_custom_object).toBeDefined(); - - // Verify users mapping - const usersMappings = mapping.additional_mappings.record_type_mappings.users; - expect(usersMappings.default_mapping).toBeDefined(); - expect(usersMappings.possible_record_type_mappings).toBeInstanceOf(Array); - expect(usersMappings.possible_record_type_mappings.length).toBeGreaterThan(0); - expect(usersMappings.mapping_as_custom_object).toBeDefined(); - - // Verify specific enum mappings for tasks - const tasksEnumMapping = tasksMappings.possible_record_type_mappings[0].shard.stock_field_mappings.stage.transformation_method_for_set; - expect(tasksEnumMapping.forward.Active.value).toBe('work_in_progress'); - expect(tasksEnumMapping.forward.Completed.value).toBe('resolved'); - expect(tasksEnumMapping.reverse.resolved.value).toBe('Completed'); - - // 
Verify specific enum mappings for severity - const severityEnumMapping = tasksMappings.possible_record_type_mappings[0].shard.stock_field_mappings.severity.transformation_method_for_set; - expect(severityEnumMapping.forward.High.value).toBe('high'); - expect(severityEnumMapping.forward.Normal.value).toBe('medium'); - expect(severityEnumMapping.forward.Low.value).toBe('low'); - }, TEST_TIMEOUT); -}); \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation/jest.config.js b/conformance_tests/initial_domain_mapping_generation/jest.config.js deleted file mode 100644 index f55c31e..0000000 --- a/conformance_tests/initial_domain_mapping_generation/jest.config.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds timeout as per requirements - resetMocks: false, - forceExit: true, // Force Jest to exit after all tests complete - detectOpenHandles: true // Help identify open handles -}; \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation/jest.setup.js b/conformance_tests/initial_domain_mapping_generation/jest.setup.js deleted file mode 100644 index afd5816..0000000 --- a/conformance_tests/initial_domain_mapping_generation/jest.setup.js +++ /dev/null @@ -1,29 +0,0 @@ -// This file is used to set up global Jest configuration -// It helps with handling open handles, timeouts, and global error handling - -// Set a global timeout to force-close any hanging connections -jest.setTimeout(120000); // 120 seconds as per requirements - -// Add a global afterAll hook to help clean up any remaining connections -afterAll(async () => { - // Small delay to allow any pending operations to complete - await new Promise(resolve => { - const timeout = setTimeout(resolve, 1000); - // Ensure the timeout is cleared if the process is exiting - process.on('exit', () => { - clearTimeout(timeout); - }); - }); -}); - -// Add global error 
handler for unhandled promise rejections -process.on('unhandledRejection', (reason, promise) => { - console.error('Unhandled Rejection at:', promise, 'reason:', reason); - // Don't exit the process as Jest will handle this -}); - -// Add global error handler for uncaught exceptions -process.on('uncaughtException', (error) => { - console.error('Uncaught Exception:', error); - // Don't exit the process as Jest will handle this -}); \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation/package.json b/conformance_tests/initial_domain_mapping_generation/package.json deleted file mode 100644 index 5b6848a..0000000 --- a/conformance_tests/initial_domain_mapping_generation/package.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "name": "airdrop-snap-in-tests", - "version": "1.0.0", - "description": "Conformance Tests for Airdrop Snap-in", - "main": "index.js", - "scripts": { - "test": "jest --forceExit --detectOpenHandles --runInBand" - }, - "dependencies": { - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.4.0", - "@types/node": "^18.13.0", - "jest": "^29.4.2", - "ts-jest": "^29.0.5", - "typescript": "^4.9.5" - } -} \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation_tests/get_initial_domain_mapping.test.ts b/conformance_tests/initial_domain_mapping_generation_tests/get_initial_domain_mapping.test.ts new file mode 100644 index 0000000..806a056 --- /dev/null +++ b/conformance_tests/initial_domain_mapping_generation_tests/get_initial_domain_mapping.test.ts @@ -0,0 +1,199 @@ +import { loadTestConfig, invokeFunction, runChefCliValidation } from './test-utils'; + +describe('get_initial_domain_mapping function', () => { + let config: ReturnType; + + beforeAll(() => { + config = loadTestConfig(); + }); + + test('Test 1: Basic Function Invocation', async () => { + // Invoke the function + const response = 
await invokeFunction('get_initial_domain_mapping', config); + + // Verify response exists + expect(response).toBeDefined(); + expect(response).not.toBeNull(); + + // Verify no error in response + if (response.error) { + throw new Error( + `Function returned error: ${JSON.stringify(response.error, null, 2)}` + ); + } + + // Verify function_result exists + expect(response.function_result).toBeDefined(); + expect(response.function_result).not.toBeNull(); + }, 30000); + + test('Test 2: Response Structure Validation', async () => { + // Invoke the function + const response = await invokeFunction('get_initial_domain_mapping', config); + + // Verify no error + if (response.error) { + throw new Error( + `Function returned error: ${JSON.stringify(response.error, null, 2)}` + ); + } + + const result = response.function_result; + + // Verify top-level structure + expect(result.status).toBe('success'); + expect(result.message).toBeDefined(); + expect(result.metadata).toBeDefined(); + expect(result.data).toBeDefined(); + expect(result.timestamp).toBeDefined(); + + // Verify metadata structure + expect(result.metadata.function_name).toBe('get_initial_domain_mapping'); + expect(result.metadata.request_id).toBeDefined(); + + // Verify data structure + expect(result.data.additional_mappings).toBeDefined(); + expect(result.data.additional_mappings.record_type_mappings).toBeDefined(); + expect(result.data.additional_mappings.record_type_mappings.users).toBeDefined(); + + const usersMapping = result.data.additional_mappings.record_type_mappings.users; + + // Verify users mapping structure + expect(usersMapping.default_mapping).toBeDefined(); + expect(usersMapping.default_mapping.object_type).toBe('devu'); + expect(usersMapping.possible_record_type_mappings).toBeDefined(); + expect(Array.isArray(usersMapping.possible_record_type_mappings)).toBe(true); + expect(usersMapping.possible_record_type_mappings.length).toBe(1); + + const possibleMapping = 
usersMapping.possible_record_type_mappings[0]; + + // Verify possible mapping properties + expect(possibleMapping.devrev_leaf_type).toBe('devu'); + expect(possibleMapping.forward).toBe(true); + expect(possibleMapping.reverse).toBe(false); + expect(possibleMapping.shard).toBeDefined(); + expect(possibleMapping.shard.mode).toBe('create_shard'); + expect(possibleMapping.shard.stock_field_mappings).toBeDefined(); + + const stockFields = possibleMapping.shard.stock_field_mappings; + + // Verify stock field mappings exist + expect(stockFields.full_name).toBeDefined(); + expect(stockFields.email).toBeDefined(); + expect(stockFields.display_name).toBeDefined(); + + // Verify full_name mapping + expect(stockFields.full_name.forward).toBe(true); + expect(stockFields.full_name.reverse).toBe(false); + expect(stockFields.full_name.primary_external_field).toBe('full_name'); + expect(stockFields.full_name.transformation_method_for_set.transformation_method).toBe('use_directly'); + + // Verify email mapping + expect(stockFields.email.forward).toBe(true); + expect(stockFields.email.reverse).toBe(false); + expect(stockFields.email.primary_external_field).toBe('email'); + expect(stockFields.email.transformation_method_for_set.transformation_method).toBe('use_directly'); + + // Verify display_name mapping + expect(stockFields.display_name.forward).toBe(true); + expect(stockFields.display_name.reverse).toBe(false); + expect(stockFields.display_name.primary_external_field).toBe('title'); + expect(stockFields.display_name.transformation_method_for_set.transformation_method).toBe('use_directly'); + }, 30000); + + test('Test 3: Chef CLI Validation', async () => { + // Check if Chef CLI is available + if (!config.chefCliPath) { + throw new Error('Chef CLI path not configured. 
CHEF_CLI_PATH environment variable is required.'); + } + + // Invoke get_initial_domain_mapping + const mappingResponse = await invokeFunction('get_initial_domain_mapping', config); + + if (mappingResponse.error) { + throw new Error( + `get_initial_domain_mapping returned error: ${JSON.stringify(mappingResponse.error, null, 2)}` + ); + } + + const initialMapping = mappingResponse.function_result.data; + + // Invoke get_external_domain_metadata + const metadataResponse = await invokeFunction('get_external_domain_metadata', config); + + if (metadataResponse.error) { + throw new Error( + `get_external_domain_metadata returned error: ${JSON.stringify(metadataResponse.error, null, 2)}` + ); + } + + const externalMetadata = metadataResponse.function_result.data; + + // Run Chef CLI validation + const validationResult = await runChefCliValidation( + config.chefCliPath, + initialMapping, + externalMetadata + ); + + // Verify Chef CLI produced output + if (!validationResult.output) { + throw new Error( + 'Chef CLI validation failed: No output received. ' + + `stdout: ${validationResult.stdout}, stderr: ${validationResult.stderr}` + ); + } + + // Verify output is an array + if (!Array.isArray(validationResult.output)) { + throw new Error( + `Chef CLI validation failed: Output is not an array. Output: ${JSON.stringify(validationResult.output)}` + ); + } + + // Verify output has at least one element + if (validationResult.output.length === 0) { + throw new Error( + 'Chef CLI validation failed: Output array is empty. ' + + `stdout: ${validationResult.stdout}, stderr: ${validationResult.stderr}` + ); + } + + const firstResult = validationResult.output[0]; + + // Verify RemainingDeficiencies field exists + if (!('RemainingDeficiencies' in firstResult)) { + throw new Error( + 'Chef CLI validation failed: RemainingDeficiencies field is missing. 
' + + `Output: ${JSON.stringify(firstResult, null, 2)}` + ); + } + + // Verify Warnings field exists + if (!('Warnings' in firstResult)) { + throw new Error( + 'Chef CLI validation failed: Warnings field is missing. ' + + `Output: ${JSON.stringify(firstResult, null, 2)}` + ); + } + + // Verify RemainingDeficiencies is exactly null + if (firstResult.RemainingDeficiencies !== null) { + throw new Error( + 'Chef CLI validation failed: RemainingDeficiencies is not null. ' + + `RemainingDeficiencies: ${JSON.stringify(firstResult.RemainingDeficiencies, null, 2)}` + ); + } + + // Verify Warnings is exactly null + if (firstResult.Warnings !== null) { + throw new Error( + 'Chef CLI validation failed: Warnings is not null. ' + + `Warnings: ${JSON.stringify(firstResult.Warnings, null, 2)}` + ); + } + + // If we reach here, validation passed + expect(validationResult.success).toBe(true); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/external_domain_metadata_push/jest.config.js b/conformance_tests/initial_domain_mapping_generation_tests/jest.config.js similarity index 50% rename from conformance_tests/external_domain_metadata_push/jest.config.js rename to conformance_tests/initial_domain_mapping_generation_tests/jest.config.js index c85654b..80db9b3 100644 --- a/conformance_tests/external_domain_metadata_push/jest.config.js +++ b/conformance_tests/initial_domain_mapping_generation_tests/jest.config.js @@ -1,8 +1,10 @@ module.exports = { preset: 'ts-jest', testEnvironment: 'node', - testTimeout: 120000, // 120 seconds as per requirements testMatch: ['**/*.test.ts'], + testTimeout: 120000, + setupFilesAfterEnv: ['/jest.setup.js'], + collectCoverage: false, verbose: true, - forceExit: true + maxWorkers: 1, }; \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation_tests/package.json b/conformance_tests/initial_domain_mapping_generation_tests/package.json new file mode 100644 index 0000000..caa0120 --- /dev/null +++ 
b/conformance_tests/initial_domain_mapping_generation_tests/package.json @@ -0,0 +1,17 @@ +{ + "name": "initial-domain-mapping-generation-tests", + "version": "1.0.0", + "description": "Conformance tests for initial domain mapping generation", + "scripts": { + "test": "jest --runInBand --detectOpenHandles --forceExit" + }, + "devDependencies": { + "@types/jest": "^29.5.0", + "@types/node": "^20.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.0", + "axios": "^1.6.0", + "@types/axios": "^0.14.0" + } +} \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation_tests/test-utils.ts b/conformance_tests/initial_domain_mapping_generation_tests/test-utils.ts new file mode 100644 index 0000000..ceaa203 --- /dev/null +++ b/conformance_tests/initial_domain_mapping_generation_tests/test-utils.ts @@ -0,0 +1,212 @@ +import axios from 'axios'; +import { spawn } from 'child_process'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; + +/** + * Environment configuration for tests + */ +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; + snapInServerUrl: string; +} + +/** + * Load test configuration from environment variables + */ +export function loadTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + if (!chefCliPath) { + throw new Error('CHEF_CLI_PATH environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + chefCliPath, + snapInServerUrl: 'http://localhost:8000/handle/sync', + }; +} + +/** + * Create a test event payload for invoking a function + */ +export function 
createTestEvent(functionName: string, config: TestConfig): any { + return { + payload: { + connection_data: { + key: config.wrikeApiKey, + org_id: config.wrikeSpaceId, + }, + event_context: { + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + }, + event_type: 'test_event', + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Invoke a function on The Test Snap-In Server + */ +export async function invokeFunction(functionName: string, config: TestConfig): Promise { + const event = createTestEvent(functionName, config); + + try { + const response = await axios.post(config.snapInServerUrl, event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, + }); + + return response.data; + } catch (error: any) { + if (error.response) { + throw new Error( + `Function invocation failed: ${error.response.status} - ${JSON.stringify(error.response.data)}` + ); + } + throw new Error(`Function invocation failed: ${error.message}`); + } +} + +/** + * Run Chef CLI validation + */ +export async function runChefCliValidation( + chefCliPath: string, + initialMappingJson: any, + externalMetadataJson: any +): Promise<{ success: boolean; output: any; stdout: string; stderr: string }> { + // Create temporary files + const tempDir = os.tmpdir(); + const metadataFile = path.join(tempDir, `metadata-${Date.now()}.json`); + const mappingFile = path.join(tempDir, `mapping-${Date.now()}.json`); + + try { + // Write files + fs.writeFileSync(metadataFile, JSON.stringify(externalMetadataJson, null, 2)); + 
fs.writeFileSync(mappingFile, JSON.stringify(initialMappingJson, null, 2)); + + // Run Chef CLI + const result = await new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve, reject) => { + const child = spawn(chefCliPath, ['initial-mapping', 'check', '-m', metadataFile], { + stdio: ['pipe', 'pipe', 'pipe'], + }); + + let stdout = ''; + let stderr = ''; + + child.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + child.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + // Write mapping JSON to stdin + child.stdin.write(JSON.stringify(initialMappingJson)); + child.stdin.end(); + + child.on('close', (code) => { + resolve({ stdout, stderr, exitCode: code || 0 }); + }); + + child.on('error', (error) => { + reject(error); + }); + }); + + // Print output for debugging + console.log('Chef CLI stdout:', result.stdout); + console.log('Chef CLI stderr:', result.stderr); + + // Parse output + if (!result.stdout || result.stdout.trim() === '') { + return { + success: false, + output: null, + stdout: result.stdout, + stderr: result.stderr, + }; + } + + let output; + try { + output = JSON.parse(result.stdout); + } catch (parseError) { + return { + success: false, + output: null, + stdout: result.stdout, + stderr: result.stderr, + }; + } + + // Check validation results + if (!Array.isArray(output) || output.length === 0) { + return { + success: false, + output, + stdout: result.stdout, + stderr: result.stderr, + }; + } + + const firstResult = output[0]; + const success = + firstResult.RemainingDeficiencies === null && + firstResult.Warnings === null; + + return { + success, + output, + stdout: result.stdout, + stderr: result.stderr, + }; + } finally { + // Cleanup temporary files + try { + if (fs.existsSync(metadataFile)) fs.unlinkSync(metadataFile); + if (fs.existsSync(mappingFile)) fs.unlinkSync(mappingFile); + } catch (cleanupError) { + console.warn('Failed to cleanup temporary files:', cleanupError); + } + } +} \ No 
newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation_tests/tsconfig.json b/conformance_tests/initial_domain_mapping_generation_tests/tsconfig.json new file mode 100644 index 0000000..08e3c19 --- /dev/null +++ b/conformance_tests/initial_domain_mapping_generation_tests/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "resolveJsonModule": true, + "moduleResolution": "node", + "allowSyntheticDefaultImports": true + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/extraction-metadata.test.ts b/conformance_tests/metadata_extraction_workflow_tests/extraction-metadata.test.ts new file mode 100644 index 0000000..4b57519 --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/extraction-metadata.test.ts @@ -0,0 +1,80 @@ +/** + * Conformance tests for metadata extraction functionality + */ +import { CallbackServer } from './test-utils/callback-server'; +import { getTestEnvironment } from './test-utils/env'; +import { buildTestEvent } from './test-utils/event-builder'; +import { invokeSnapInFunction } from './test-utils/snap-in-client'; + +describe('Metadata Extraction', () => { + let callbackServer: CallbackServer; + let env: ReturnType; + + beforeAll(() => { + env = getTestEnvironment(); + }); + + beforeEach(async () => { + callbackServer = new CallbackServer(8002); + await callbackServer.start(); + }); + + afterEach(async () => { + await callbackServer.stop(); + }); + + test('Test 1: Basic metadata extraction invocation succeeds', async () => { + const event = buildTestEvent(env, { + eventType: 
'EXTRACTION_METADATA_START', + callbackUrl: callbackServer.getCallbackUrl(), + }); + + const response = await invokeSnapInFunction(event); + + expect(response.status).toBe(200); + }, 30000); + + test('Test 2: Metadata extraction emits EXTRACTION_METADATA_DONE event', async () => { + const event = buildTestEvent(env, { + eventType: 'EXTRACTION_METADATA_START', + callbackUrl: callbackServer.getCallbackUrl(), + }); + + await invokeSnapInFunction(event); + + // Wait for callback + await new Promise((resolve) => setTimeout(resolve, 2000)); + + const events = callbackServer.getReceivedEvents(); + + expect(events.length).toBeGreaterThan(0); + + const metadataDoneEvent = events.find( + (e) => e.event_type === 'EXTRACTION_METADATA_DONE' + ); + + expect(metadataDoneEvent).toBeDefined(); + }, 30000); + + test('Test 3: Metadata extraction uploads correct external domain metadata structure', async () => { + const event = buildTestEvent(env, { + eventType: 'EXTRACTION_METADATA_START', + callbackUrl: callbackServer.getCallbackUrl(), + }); + + await invokeSnapInFunction(event); + + // Wait for processing + await new Promise((resolve) => setTimeout(resolve, 2000)); + + const events = callbackServer.getReceivedEvents(); + const metadataDoneEvent = events.find( + (e) => e.event_type === 'EXTRACTION_METADATA_DONE' + ); + + expect(metadataDoneEvent).toBeDefined(); + + // Verify record types exist + expect(metadataDoneEvent?.event_data).toBeDefined(); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/package.json b/conformance_tests/metadata_extraction_workflow_tests/package.json new file mode 100644 index 0000000..e31275f --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/package.json @@ -0,0 +1,24 @@ +{ + "name": "wrike-snap-in-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --testTimeout=120000" + }, + "devDependencies": { + 
"@types/express": "^4.17.17", + "@types/jest": "^29.5.0", + "@types/node": "^18.15.11", + "axios": "^1.6.0", + "express": "^4.18.2", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4" + }, + "jest": { + "preset": "ts-jest", + "testEnvironment": "node", + "testMatch": ["**/*.test.ts"], + "setupFilesAfterEnv": ["./jest.setup.js"] + } +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/test-utils/callback-server.ts b/conformance_tests/metadata_extraction_workflow_tests/test-utils/callback-server.ts new file mode 100644 index 0000000..6f27fcc --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/test-utils/callback-server.ts @@ -0,0 +1,63 @@ +/** + * Utility for setting up and managing The Callback Server + */ +import express, { Express } from 'express'; +import bodyParser from 'body-parser'; +import { Server } from 'http'; + +export interface CallbackEvent { + event_type: string; + event_data?: any; + [key: string]: any; +} + +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private receivedEvents: CallbackEvent[] = []; + private port: number; + + constructor(port: number = 8002) { + this.port = port; + this.app = express(); + this.app.use(bodyParser.json()); + + this.app.post('/callback', (req, res) => { + this.receivedEvents.push(req.body); + res.status(200).send({ status: 'received' }); + }); + } + + async start(): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(this.port, () => { + resolve(); + }); + }); + } + + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + getReceivedEvents(): CallbackEvent[] { + return this.receivedEvents; + } + + clearEvents(): void { + this.receivedEvents = []; + } + + getCallbackUrl(): string { + return 
`http://localhost:${this.port}/callback`; + } +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/test-utils/env.ts b/conformance_tests/metadata_extraction_workflow_tests/test-utils/env.ts new file mode 100644 index 0000000..88f60f6 --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/test-utils/env.ts @@ -0,0 +1,26 @@ +/** + * Utility for reading and validating environment variables + */ + +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; +} + +export function getTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + }; +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/test-utils/event-builder.ts b/conformance_tests/metadata_extraction_workflow_tests/test-utils/event-builder.ts new file mode 100644 index 0000000..12cc1dd --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/test-utils/event-builder.ts @@ -0,0 +1,71 @@ +/** + * Utility for building test event payloads + */ +import { TestEnvironment } from './env'; + +export interface TestEventOptions { + eventType: string; + callbackUrl?: string; + functionName?: string; +} + +export function buildTestEvent(env: TestEnvironment, options: TestEventOptions): any { + const { + eventType, + callbackUrl = 'http://localhost:8002/callback', + functionName = 'extraction', + } = options; + + return { + execution_metadata: { + devrev_endpoint: 'http://localhost:8003', + request_id: `test-${Date.now()}`, + function_name: functionName, + event_type: eventType, + }, + context: { + secrets: { + service_account_token: 'test-token', + }, + source_id: 
'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'test-version', + }, + payload: { + connection_data: { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + }, + event_context: { + callback_url: callbackUrl, + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-run', + sync_run_id: 'test-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: eventType, + event_data: {}, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/test-utils/snap-in-client.ts b/conformance_tests/metadata_extraction_workflow_tests/test-utils/snap-in-client.ts new file mode 100644 index 0000000..f7c3db9 --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/test-utils/snap-in-client.ts @@ -0,0 +1,15 @@ +/** + * Utility for making requests to The Test Snap-In Server + */ +import axios, { AxiosResponse } from 'axios'; + +const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; + +export async function invokeSnapInFunction(event: any): Promise { + return axios.post(SNAP_IN_SERVER_URL, event, { + headers: { + 'Content-Type': 'application/json', + }, + validateStatus: () => true, // Don't throw on any status + }); +} \ No newline at end of file diff --git a/conformance_tests/metadata_extraction_workflow_tests/tsconfig.json b/conformance_tests/metadata_extraction_workflow_tests/tsconfig.json new file mode 
100644 index 0000000..c5e9c45 --- /dev/null +++ b/conformance_tests/metadata_extraction_workflow_tests/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true + }, + "include": ["**/*.ts"], + "exclude": ["node_modules", "dist"] +} \ No newline at end of file diff --git a/conformance_tests/project_list_fetch_validation/fetch_projects.test.ts b/conformance_tests/project_list_fetch_validation/fetch_projects.test.ts deleted file mode 100644 index 3c0cd32..0000000 --- a/conformance_tests/project_list_fetch_validation/fetch_projects.test.ts +++ /dev/null @@ -1,325 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { Server } from 'http'; - -// Constants -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Setup callback server -let callbackServer: Server; -let callbackData: any = null; - -// Setup callback server -const setupCallbackServer = (): Promise => { - return new Promise((resolve) => { - const app = express(); - app.use(bodyParser.json()); - - app.post('*', (req, res) => { - callbackData = req.body; - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server running at ${CALLBACK_SERVER_URL}`); - resolve(); - }); - }); -}; - -// Cleanup callback server -const cleanupCallbackServer = (): Promise => { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - resolve(); - }); - } else { - resolve(); - } - }); 
-}; - -// Helper function to create a test event -const createTestEvent = (apiKey: string, spaceId: string) => { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - connection_data: { - key: apiKey, - org_id: spaceId, - org_name: 'Test Organization', - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'test-sync-unit', - external_sync_unit_id: 'IEAGS6BYI5RFMPPY', - external_sync_unit_name: 'Test Sync Unit', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-unit', - sync_unit_id: 'test-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - }, - event_type: 'TEST_EVENT' - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'fetch_projects' - }, - input_data: {} - }; -}; - -describe('Fetch Projects Function Tests', () => { - beforeAll(async () => { - await setupCallbackServer(); - }); - - afterAll(async () => { - await cleanupCallbackServer(); - }); - - beforeEach(() => { - callbackData = null; - }); - - // Test 1: Verify environment variables - test('Environment variables are set correctly', () => { - const apiKey = process.env.WRIKE_API_KEY; - const spaceId = process.env.WRIKE_SPACE_GID; - - expect(apiKey).toBeDefined(); - expect(apiKey).not.toBe(''); - expect(spaceId).toBeDefined(); - expect(spaceId).not.toBe(''); - - console.log('Environment variables are set correctly'); - }); - - // Test 2: Verify test server connectivity - 
test('Test server is accessible', async () => { - try { - const response = await axios.post(TEST_SERVER_URL, {}, { - headers: { 'Content-Type': 'application/json' } - }); - - // We expect an error response since we're not sending a valid event, - // but the server should be accessible - expect(response.status).toBe(200); - console.log('Test server is accessible'); - } catch (error: any) { - if (error.response) { - // Even if we get an error response, the server is accessible - expect(error.response.status).toBeLessThan(500); - console.log('Test server is accessible (returned error as expected for invalid request)'); - } else { - throw new Error(`Test server is not accessible: ${error.message}`); - } - } - }); - - // Test 3: Test function invocation - test('Function can be invoked', async () => { - const apiKey = process.env.WRIKE_API_KEY || ''; - const spaceId = process.env.WRIKE_SPACE_GID || ''; - - const testEvent = createTestEvent(apiKey, spaceId); - - try { - const response = await axios.post(TEST_SERVER_URL, testEvent, { - headers: { 'Content-Type': 'application/json' } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - - console.log('Function invocation successful'); - } catch (error: any) { - console.error('Function invocation failed:', error.response?.data || error.message); - throw error; - } - }); - - // Test 4: Test API authentication - test('Function can authenticate with Wrike API', async () => { - const apiKey = process.env.WRIKE_API_KEY || ''; - const spaceId = process.env.WRIKE_SPACE_GID || ''; - - const testEvent = createTestEvent(apiKey, spaceId); - - try { - const response = await axios.post(TEST_SERVER_URL, testEvent, { - headers: { 'Content-Type': 'application/json' } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - 
expect(response.data.function_result.status).toBe('success'); - expect(response.data.error).toBeUndefined(); - - console.log('API authentication successful'); - } catch (error: any) { - console.error('API authentication failed:', error.response?.data || error.message); - throw error; - } - }); - - // Test 5: Test projects fetching - test('Function can fetch projects from Wrike API', async () => { - const apiKey = process.env.WRIKE_API_KEY || ''; - const spaceId = process.env.WRIKE_SPACE_GID || ''; - - const testEvent = createTestEvent(apiKey, spaceId); - - try { - const response = await axios.post(TEST_SERVER_URL, testEvent, { - headers: { 'Content-Type': 'application/json' } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.projects).toBeDefined(); - expect(Array.isArray(response.data.function_result.projects)).toBe(true); - - // Verify project structure - if (response.data.function_result.projects.length > 0) { - const project = response.data.function_result.projects[0]; - expect(project.id).toBeDefined(); - expect(project.title).toBeDefined(); - expect(project.created_date).toBeDefined(); - expect(project.updated_date).toBeDefined(); - expect(project.scope).toBeDefined(); - } - - console.log(`Successfully fetched ${response.data.function_result.projects.length} projects`); - } catch (error: any) { - console.error('Projects fetching failed:', error.response?.data || error.message); - throw error; - } - }); - - // Test 6: Test error handling with invalid credentials - test('Function handles invalid API key correctly', async () => { - const invalidApiKey = 'invalid-api-key'; - const spaceId = process.env.WRIKE_SPACE_GID || ''; - - const testEvent = createTestEvent(invalidApiKey, spaceId); - - try { - const response = await axios.post(TEST_SERVER_URL, testEvent, { - headers: { 
'Content-Type': 'application/json' } - }); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('error'); - expect(response.data.function_result.error).toBeDefined(); - - console.log('Error handling for invalid API key works correctly'); - } catch (error: any) { - console.error('Error handling test failed:', error.response?.data || error.message); - throw error; - } - }); - - // Test 7: Acceptance Test - Verify "First project" exists in the results - test('Function returns a project with title "First project"', async () => { - const apiKey = process.env.WRIKE_API_KEY || ''; - const spaceId = process.env.WRIKE_SPACE_GID || ''; - - const testEvent = createTestEvent(apiKey, spaceId); - - try { - const response = await axios.post(TEST_SERVER_URL, testEvent, { - headers: { 'Content-Type': 'application/json' } - }); - - // Verify basic response structure - // Check HTTP status code - expect(response.status).toBe(200); - - // Check response data structure - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.projects).toBeDefined(); - expect(Array.isArray(response.data.function_result.projects)).toBe(true); - - // Get the projects array - const projects = response.data.function_result.projects; - - // Log the number of projects for debugging - console.log(`Found ${projects.length} projects in the response`); - - // Check if there are any projects - console.log('Checking if at least one project is returned'); - expect(projects.length).toBeGreaterThan(0); - - // For debugging, log all project titles - const projectTitles = projects.map((p: any) => p.title); - console.log('Project titles found:', projectTitles); - - // Find the "First project" - console.log('Looking for "First project" in the results'); - 
const firstProject = projects.find((project: any) => project.title === 'First project'); - - // Assert that "First project" exists - if (!firstProject) { - console.error(`Project with title "First project" not found. Available projects: ${projectTitles.join(', ')}`); - } - expect(firstProject).toBeDefined(); - - // Additional checks on the "First project" structure - if (firstProject) { - console.log('Checking "First project" structure'); - expect(firstProject.id).toBeDefined(); - expect(firstProject.created_date).toBeDefined(); - expect(firstProject.updated_date).toBeDefined(); - expect(firstProject.scope).toBeDefined(); - - console.log('Successfully found "First project" with ID:', firstProject.id); - } - } catch (error: any) { - // Detailed error logging for debugging - if (error.response) { - console.error('API Response Error:', { - status: error.response.status, - data: error.response.data - }); - } else { - console.error('Error during test execution:', error.message); - } - throw error; - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/project_list_fetch_validation/jest.config.js b/conformance_tests/project_list_fetch_validation/jest.config.js deleted file mode 100644 index 4966606..0000000 --- a/conformance_tests/project_list_fetch_validation/jest.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds as per requirements -}; \ No newline at end of file diff --git a/conformance_tests/project_list_fetch_validation/tsconfig.json b/conformance_tests/project_list_fetch_validation/tsconfig.json deleted file mode 100644 index 5df481c..0000000 --- a/conformance_tests/project_list_fetch_validation/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "compilerOptions": { - "target": "es2016", - "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true - } -} \ No newline at end of 
file diff --git a/conformance_tests/project_tasks_count_fetch/extraction.test.ts b/conformance_tests/project_tasks_count_fetch/extraction.test.ts deleted file mode 100644 index 7dd3a99..0000000 --- a/conformance_tests/project_tasks_count_fetch/extraction.test.ts +++ /dev/null @@ -1,218 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import * as bodyParser from 'body-parser'; -import { Server } from 'http'; -import { EventType } from '@devrev/ts-adaas'; - -// Configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const TEST_TIMEOUT = 30000; // 30 seconds per test - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'IEAGS6BYI5RFMPPY'; // Default to test ID if not provided -const TEST_PROJECT_ID = 'IEAGS6BYI5RFMPPY'; // Can be used when space ID is required - -// Validate environment variables -if (!WRIKE_API_KEY) { - throw new Error('WRIKE_API_KEY environment variable is required'); -} - -// Setup callback server -let callbackServer: Server; -let callbackData: any[] = []; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - const app = express(); - app.use(bodyParser.json()); - - app.post('*', (req, res) => { - console.log('Callback received:', JSON.stringify(req.body)); - callbackData.push(req.body); - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening on port ${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Create a mock AirdropEvent for testing -function 
createExtractionEvent(eventType: EventType) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - org_name: 'Test Organization', - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: TEST_PROJECT_ID, - external_sync_unit_id: TEST_PROJECT_ID, - external_sync_unit_name: 'Test Project', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'wrike', - snap_in_version_id: 'test-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - }, - event_type: eventType, - event_data: {} - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'extraction' - }, - input_data: {} - }; -} - -// Helper function to invoke the snap-in function -async function invokeFunction(event: any): Promise { - try { - const response = await axios.post(TEST_SERVER_URL, event, { - headers: { - 'Content-Type': 'application/json' - } - }); - return response.data; - } catch (error) { - console.error('Error invoking function:', error); - throw error; - } -} - -// Clear callback data between tests -function clearCallbackData() { - callbackData = []; -} - -// Setup and teardown -beforeAll(async () => { - await setupCallbackServer(); -}); - -afterAll(async () => { - await shutdownCallbackServer(); -}); - -beforeEach(() => { - clearCallbackData(); -}); - -// Test cases -describe('Extraction Function Tests', () => { - // 
Test 1: Basic test to verify the extraction function exists and can be invoked - test('extraction function exists and can be invoked', async () => { - const event = createExtractionEvent(EventType.ExtractionExternalSyncUnitsStart); - const result = await invokeFunction(event); - - expect(result).toBeDefined(); - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('success'); - expect(result.error).toBeUndefined(); - }, TEST_TIMEOUT); - - // Test 2: Verify that the extraction function correctly processes EXTRACTION_EXTERNAL_SYNC_UNITS_START events - test('extraction function processes EXTRACTION_EXTERNAL_SYNC_UNITS_START events', async () => { - const event = createExtractionEvent(EventType.ExtractionExternalSyncUnitsStart); - await invokeFunction(event); - - // Wait for callbacks to be processed (up to 10 seconds) - let attempts = 0; - while (callbackData.length === 0 && attempts < 10) { - await new Promise(resolve => setTimeout(resolve, 1000)); - attempts++; - } - - // Verify that we received at least one callback - expect(callbackData.length).toBeGreaterThan(0); - - // Verify that the callback contains external_sync_units - const lastCallback = callbackData[callbackData.length - 1]; - expect(lastCallback).toBeDefined(); - expect(lastCallback.event_data).toBeDefined(); - expect(lastCallback.event_data.external_sync_units).toBeDefined(); - expect(Array.isArray(lastCallback.event_data.external_sync_units)).toBe(true); - }, TEST_TIMEOUT); - - // Test 3: Verify that projects pushed as external sync units include task counts - test('projects pushed as external sync units include task counts', async () => { - const event = createExtractionEvent(EventType.ExtractionExternalSyncUnitsStart); - await invokeFunction(event); - - // Wait for callbacks to be processed (up to 10 seconds) - let attempts = 0; - while (callbackData.length === 0 && attempts < 10) { - await new Promise(resolve => setTimeout(resolve, 1000)); - attempts++; - } - - 
// Verify that we received at least one callback - expect(callbackData.length).toBeGreaterThan(0); - - // Find the EXTRACTION_EXTERNAL_SYNC_UNITS_DONE callback - const doneCallback = callbackData.find(callback => - callback.event_type === 'EXTRACTION_EXTERNAL_SYNC_UNITS_DONE' - ); - - expect(doneCallback).toBeDefined(); - expect(doneCallback.event_data).toBeDefined(); - expect(doneCallback.event_data.external_sync_units).toBeDefined(); - expect(Array.isArray(doneCallback.event_data.external_sync_units)).toBe(true); - - // Verify that each external sync unit has an item_count property - const externalSyncUnits = doneCallback.event_data.external_sync_units; - expect(externalSyncUnits.length).toBeGreaterThan(0); - - for (const unit of externalSyncUnits) { - expect(unit).toHaveProperty('id'); - expect(unit).toHaveProperty('name'); - expect(unit).toHaveProperty('description'); - expect(unit).toHaveProperty('item_count'); - expect(unit).toHaveProperty('item_type'); - expect(typeof unit.item_count).toBe('number'); - expect(unit.item_type).toBe('tasks'); - } - }, TEST_TIMEOUT); -}); \ No newline at end of file diff --git a/conformance_tests/project_tasks_count_fetch/jest.config.js b/conformance_tests/project_tasks_count_fetch/jest.config.js deleted file mode 100644 index e1600d8..0000000 --- a/conformance_tests/project_tasks_count_fetch/jest.config.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 120000, // 120 seconds timeout as per requirements - resetMocks: false, - testMatch: ['**/*.test.ts'], -}; \ No newline at end of file diff --git a/conformance_tests/project_tasks_count_fetch/tsconfig.json b/conformance_tests/project_tasks_count_fetch/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/project_tasks_count_fetch/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - 
"forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/.env.example b/conformance_tests/project_tasks_fetch_validation/.env.example deleted file mode 100644 index 3272e36..0000000 --- a/conformance_tests/project_tasks_fetch_validation/.env.example +++ /dev/null @@ -1,6 +0,0 @@ -# Wrike API credentials -WRIKE_API_KEY=your_wrike_api_key_here -WRIKE_SPACE_GID=your_wrike_space_id_here - -# Optional: Override the default project ID for testing -# PROJECT_ID=your_project_id_here \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/acceptance.test.ts b/conformance_tests/project_tasks_fetch_validation/acceptance.test.ts deleted file mode 100644 index 01d91d9..0000000 --- a/conformance_tests/project_tasks_fetch_validation/acceptance.test.ts +++ /dev/null @@ -1,160 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { Server } from 'http'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const ACCEPTANCE_PROJECT_ID = 'IEAGS6BYI5RFMPP7'; // Specific project ID for acceptance test - -// Timeout for tests (in milliseconds) -jest.setTimeout(120000); // 120 seconds as per requirements - -describe('Fetch Tasks Acceptance Test', () => { - let callbackServer: Server; - let callbackData: any = null; - - // Set up callback server and reset callback data before each test - beforeEach(() => { - console.log('Setting up callback server...'); - callbackData = null; - const app = express(); - app.use(bodyParser.json()); - - // Endpoint to receive callback data - app.post('/callback', (req, res) => { - console.log('Received callback data'); - 
callbackData = req.body; - res.status(200).send({ status: 'success' }); - }); - - // Health check endpoint - app.get('/health', (req, res) => { - res.status(200).send({ status: 'up' }); - }); - - // Start the server - callbackServer = app.listen(CALLBACK_SERVER_PORT); - console.log(`Callback server started on port ${CALLBACK_SERVER_PORT}`); - }); - - // Clean up callback server after each test - afterEach(() => { - if (callbackServer) { - callbackServer.close(); - console.log('Callback server closed'); - } - }); - - // Helper function to create a valid event payload - const createEventPayload = (overrides = {}) => { - // Check for required environment variables - const apiKey = process.env.WRIKE_API_KEY; - if (!apiKey) { - throw new Error('WRIKE_API_KEY environment variable is required'); - } - - const spaceId = process.env.WRIKE_SPACE_GID; - if (!spaceId) { - throw new Error('WRIKE_SPACE_GID environment variable is required'); - } - - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in', - snap_in_version_id: 'test-version' - }, - payload: { - connection_data: { - key: apiKey, - org_id: spaceId, - key_type: 'api_key' - }, - event_context: { - external_sync_unit_id: ACCEPTANCE_PROJECT_ID, - callback_url: `${CALLBACK_SERVER_URL}/callback` - }, - event_type: 'EXTRACTION_DATA_START' - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'fetch_tasks', - }, - input_data: {}, - ...overrides - }; - }; - - // Acceptance Test: Verify that exactly 10 tasks are fetched for the specified project - test('should fetch exactly 10 tasks from the specified project', async () => { - console.log(`Starting acceptance test with project ID: ${ACCEPTANCE_PROJECT_ID}`); - const payload = createEventPayload(); - - // Verify callback server is running - try { - const healthResponse = await axios.get(`${CALLBACK_SERVER_URL}/health`); - console.log('Callback server health check:', healthResponse.data); - 
} catch (error) { - console.error('Callback server health check failed:', error); - throw new Error('Callback server is not running properly'); - } - - console.log('Sending request to fetch tasks...'); - let response; - try { - response = await axios.post(TEST_SERVER_URL, payload); - console.log('Received response from server'); - } catch (error) { - console.error('Error calling fetch_tasks function:', error); - if (axios.isAxiosError(error) && error.response) { - console.error('Response status:', error.response.status); - console.error('Response data:', error.response.data); - } - throw error; - } - - // Basic response validation - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - - const result = response.data.function_result; - - // Check for success status - expect(result.status).toBe('success'); - if (result.status !== 'success') { - console.error(`Error: ${result.error || 'Unknown error'}`); - } - - // Verify tasks array exists - expect(result.tasks).toBeDefined(); - expect(Array.isArray(result.tasks)).toBe(true); - - // Verify tasks are returned (at least one) - console.log(`Found ${result.tasks.length} tasks`); - expect(result.tasks.length).toBeGreaterThan(0); - expect(result.tasks.length).toBeLessThan(100); // Sanity check to ensure we don't have an unreasonable number - - // Validate the structure of each task - result.tasks.forEach((task: any, index: number) => { - console.log(`Validating task ${index + 1}: ${task.title}`); - - // Required fields - expect(task.id).toBeDefined(); - expect(task.title).toBeDefined(); - expect(task.status).toBeDefined(); - expect(task.importance).toBeDefined(); - expect(task.created_date).toBeDefined(); - expect(task.updated_date).toBeDefined(); - expect(task.parent_ids).toBeDefined(); - expect(Array.isArray(task.parent_ids)).toBe(true); - }); - - console.log('Acceptance test completed successfully'); - }); -}); \ No newline at end of file diff 
--git a/conformance_tests/project_tasks_fetch_validation/fetch_tasks.test.ts b/conformance_tests/project_tasks_fetch_validation/fetch_tasks.test.ts deleted file mode 100644 index 83e408e..0000000 --- a/conformance_tests/project_tasks_fetch_validation/fetch_tasks.test.ts +++ /dev/null @@ -1,168 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { AddressInfo } from 'net'; -import { Server } from 'http'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const PROJECT_ID = process.env.PROJECT_ID || 'IEAGS6BYI5RFMPPY'; // Default project ID for testing - -// Timeout for tests (in milliseconds) -jest.setTimeout(120000); // 120 seconds as per requirements - -describe('Fetch Tasks Function Tests', () => { - let callbackServer: Server; - let callbackData: any = null; - - // Set up callback server and reset callback data before each test - beforeEach(() => { - callbackData = null; - const app = express(); - app.use(bodyParser.json()); - - // Endpoint to receive callback data - app.post('/callback', async (req, res) => { - callbackData = req.body; - res.status(200).send({ status: 'success' }); - }); - - // Health check endpoint - app.get('/health', (req, res) => { - res.status(200).send({ status: 'up' }); - }); - - // Start the server - callbackServer = app.listen(CALLBACK_SERVER_PORT); - }); - - // Clean up callback server after each test - afterEach(async () => { - if (callbackServer) { - await new Promise((resolve) => { - callbackServer.close(() => resolve()); - }); - } - }); - - // Helper function to create a valid event payload - const createEventPayload = (overrides = {}) => { - // Check for required environment variables - const apiKey = process.env.WRIKE_API_KEY; - if (!apiKey) { - console.error('WRIKE_API_KEY environment variable is required'); - } - - const 
spaceId = process.env.WRIKE_SPACE_GID; - if (!spaceId) { - throw new Error('WRIKE_SPACE_GID environment variable is required'); - } - - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in', - snap_in_version_id: 'test-version' - }, - payload: { - connection_data: { - key: apiKey, - org_id: spaceId, - key_type: 'api_key' - }, - event_context: { - external_sync_unit_id: PROJECT_ID, - callback_url: `${CALLBACK_SERVER_URL}/callback` - }, - event_type: 'EXTRACTION_DATA_START' - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: 'fetch_tasks', - }, - input_data: {} - }; - }; - - // Add afterAll to ensure all connections are closed - afterAll(() => jest.setTimeout(5000)); - - // Test 1: Basic - Verify the function can be called and returns a response - test('should successfully call the fetch_tasks function', async () => { - const payload = createEventPayload(); - - // Verify callback server is running - try { - await axios.get(`${CALLBACK_SERVER_URL}/health`); - console.log('Callback server is running'); - } catch (error) { - console.error('Callback server is not running:', error); - } - - const response = await axios.post(TEST_SERVER_URL, payload); - - expect(response.status).toBe(200); - expect(response.data).toBeDefined(); - expect(response.data.function_result).toBeDefined(); - }); - - // Test 2: Input Validation - Test with invalid inputs - test('should handle missing API key gracefully', async () => { - const payload = createEventPayload(); - // Remove the API key - payload.payload.connection_data.key = ''; - - const response = await axios.post(TEST_SERVER_URL, payload); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('error'); - expect(response.data.function_result.error).toBeDefined(); - }); - - // Test 3: Functional - Test with valid inputs - test('should fetch tasks from 
Wrike API', async () => { - const payload = createEventPayload(); - - const response = await axios.post(TEST_SERVER_URL, payload); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.tasks).toBeDefined(); - expect(Array.isArray(response.data.function_result.tasks)).toBe(true); - - // If tasks were returned, verify they have the expected structure - if (response.data.function_result.tasks.length > 0) { - const firstTask = response.data.function_result.tasks[0]; - expect(firstTask.id).toBeDefined(); - expect(firstTask.title).toBeDefined(); - expect(firstTask.status).toBeDefined(); - expect(firstTask.created_date).toBeDefined(); - expect(firstTask.updated_date).toBeDefined(); - } - }); - - // Test 4: Edge Cases - Test with invalid project ID - test('should handle invalid project ID gracefully', async () => { - const payload = createEventPayload(); - // Set an invalid project ID - payload.payload.event_context.external_sync_unit_id = 'invalid-project-id'; - - const response = await axios.post(TEST_SERVER_URL, payload); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - // The function should either return an error status or an empty tasks array - if (response.data.function_result.status === 'error') { - expect(response.data.function_result.error).toBeDefined(); - } else { - expect(response.data.function_result.tasks).toBeDefined(); - expect(Array.isArray(response.data.function_result.tasks)).toBe(true); - } - }); -}); \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/jest.config.js b/conformance_tests/project_tasks_fetch_validation/jest.config.js deleted file mode 100644 index 7b59b9d..0000000 --- a/conformance_tests/project_tasks_fetch_validation/jest.config.js +++ /dev/null @@ -1,9 +0,0 @@ -module.exports = { - preset: 'ts-jest', - 
testEnvironment: 'node', - testTimeout: 120000, // 120 seconds timeout as per requirements - setupFiles: ['./jest.setup.ts'], - forceExit: true, // Force Jest to exit after all tests complete - testMatch: ['**/*.test.ts'], - verbose: true -}; \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/jest.setup.ts b/conformance_tests/project_tasks_fetch_validation/jest.setup.ts deleted file mode 100644 index 84b7870..0000000 --- a/conformance_tests/project_tasks_fetch_validation/jest.setup.ts +++ /dev/null @@ -1,16 +0,0 @@ -// This file is used to set up the test environment -import dotenv from 'dotenv'; -import path from 'path'; - -// Load environment variables from .env file -dotenv.config(); - -// Check for required environment variables -const requiredEnvVars = ['WRIKE_API_KEY', 'WRIKE_SPACE_GID']; -const missingEnvVars = requiredEnvVars.filter(envVar => !process.env[envVar]); - -if (missingEnvVars.length > 0) { - console.error(`Missing required environment variables: ${missingEnvVars.join(', ')}`); - console.error('Please set these variables in your environment or .env file'); - process.exit(1); -} \ No newline at end of file diff --git a/conformance_tests/project_tasks_fetch_validation/tsconfig.json b/conformance_tests/project_tasks_fetch_validation/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/project_tasks_fetch_validation/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/space_contacts_fetch_validation/acceptance_test.ts b/conformance_tests/space_contacts_fetch_validation/acceptance_test.ts deleted file mode 100644 index dde0137..0000000 --- 
a/conformance_tests/space_contacts_fetch_validation/acceptance_test.ts +++ /dev/null @@ -1,168 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import { Server } from 'http'; -import bodyParser from 'body-parser'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Get environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'IEAGS6BYI5RFMPPY'; // Default space ID for testing - -// Validate environment variables -if (!WRIKE_API_KEY) { - console.error('WRIKE_API_KEY environment variable is required'); - process.exit(1); -} - -// Setup callback server -let callbackServer: Server; -let lastCallbackData: any = null; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - const app = express(); - app.use(bodyParser.json()); - - app.post('/callback', (req, res) => { - lastCallbackData = req.body; - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server running at ${CALLBACK_SERVER_URL}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Helper function to create a valid event payload -function createEventPayload(overrides: any = {}) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback` - } - }, - 
execution_metadata: { - function_name: 'fetch_contacts', - devrev_endpoint: 'http://localhost:8003' - }, - ...overrides - }; -} - -// Helper function to invoke the function -async function invokeFetchContacts(payload: any) { - try { - const response = await axios.post(TEST_SERVER_URL, payload, { - headers: { - 'Content-Type': 'application/json' - } - }); - return response.data; - } catch (error) { - if (axios.isAxiosError(error) && error.response) { - console.error('Error response:', error.response.data); - return { error: error.response.data }; - } - throw error; - } -} - -// Acceptance Test -describe('Acceptance Test: fetch_contacts function', () => { - beforeAll(async () => { - await setupCallbackServer(); - }); - - afterAll(async () => { - await shutdownCallbackServer(); - }); - - beforeEach(() => { - lastCallbackData = null; - }); - - test('should return exactly 5 members with required fields when using test credentials', async () => { - // Create the event payload with test credentials - const payload = createEventPayload(); - - // Invoke the function - const result = await invokeFetchContacts(payload); - - // Log the result for debugging purposes - console.log('Function result status:', result.function_result?.status); - console.log('Number of contacts returned:', result.function_result?.contacts?.length); - - // Verify the response structure - expect(result).toBeDefined(); - expect(result.error).toBeUndefined(); - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('success'); - - // Verify that contacts array exists and has exactly 5 members - expect(result.function_result.contacts).toBeDefined(); - expect(Array.isArray(result.function_result.contacts)).toBe(true); - - if (!result.function_result.contacts || !Array.isArray(result.function_result.contacts)) { - console.error('Contacts is not an array:', result.function_result.contacts); - throw new Error('Contacts is not an array'); - } - - // Check that exactly 5 
contacts are returned - expect(result.function_result.contacts.length).toBe(5); - - // Verify that each contact has the required fields - result.function_result.contacts.forEach((contact: any, index: number) => { - // Log contact for debugging - console.log(`Contact ${index + 1}:`, JSON.stringify(contact, null, 2)); - - // Check required fields - expect(contact.id).toBeDefined(); - expect(typeof contact.id).toBe('string'); - expect(contact.first_name).toBeDefined(); - expect(contact.last_name).toBeDefined(); - - // Check that at least one profile with email exists - expect(contact.profiles).toBeDefined(); - expect(Array.isArray(contact.profiles)).toBe(true); - - // At least one profile should have an email - const hasEmail = contact.profiles.some((profile: any) => profile.email); - expect(hasEmail).toBe(true); - - if (!hasEmail) { - console.error(`Contact ${index + 1} does not have an email:`, contact); - } - }); - }, 30000); // 30 seconds timeout -}); \ No newline at end of file diff --git a/conformance_tests/space_contacts_fetch_validation/fetch_contacts.test.ts b/conformance_tests/space_contacts_fetch_validation/fetch_contacts.test.ts deleted file mode 100644 index ca4219d..0000000 --- a/conformance_tests/space_contacts_fetch_validation/fetch_contacts.test.ts +++ /dev/null @@ -1,205 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import { Server } from 'http'; -import bodyParser from 'body-parser'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; - -// Get environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || ''; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'IEAGS6BYI5RFMPPY'; // Default space ID for testing - -// Validate environment variables -if (!WRIKE_API_KEY) { - console.error('WRIKE_API_KEY environment variable is required'); - process.exit(1); -} - -// Setup 
callback server -let callbackServer: Server; -let lastCallbackData: any = null; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - const app = express(); - app.use(bodyParser.json()); - - app.post('/callback', (req, res) => { - lastCallbackData = req.body; - res.status(200).send({ status: 'success' }); - }); - - callbackServer = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server running at ${CALLBACK_SERVER_URL}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - console.log('Callback server closed'); - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Helper function to create a valid event payload -function createEventPayload(overrides: any = {}) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback` - } - }, - execution_metadata: { - function_name: 'fetch_contacts', - devrev_endpoint: 'http://localhost:8003' - }, - ...overrides - }; -} - -// Helper function to invoke the function -async function invokeFetchContacts(payload: any) { - try { - const response = await axios.post(TEST_SERVER_URL, payload, { - headers: { - 'Content-Type': 'application/json' - } - }); - return response.data; - } catch (error) { - if (axios.isAxiosError(error) && error.response) { - console.error('Error response:', error.response.data); - return { error: error.response.data }; - } - throw error; - } -} - -// Tests -describe('fetch_contacts function', () => { - beforeAll(async () => { - await setupCallbackServer(); - }); - - afterAll(async () => { - await shutdownCallbackServer(); - }); - - beforeEach(() => { - 
lastCallbackData = null; - }); - - // Test 1: Basic Invocation - test('should be invokable with valid input', async () => { - const payload = createEventPayload(); - const result = await invokeFetchContacts(payload); - - expect(result).toBeDefined(); - expect(result.error).toBeUndefined(); - expect(result.function_result).toBeDefined(); - }, 30000); - - // Test 2: Input Validation - test('should validate required input parameters', async () => { - // Missing connection_data - const invalidPayload = createEventPayload({ - payload: { - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback` - } - } - }); - - const result = await invokeFetchContacts(invalidPayload); - - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('error'); - expect(result.function_result.message).toContain('missing required field'); - }); - - // Test 3: Authentication Test - test('should authenticate with Wrike API using provided API key', async () => { - // Use an invalid API key to test authentication failure - const invalidAuthPayload = createEventPayload({ - payload: { - connection_data: { - key: 'invalid-api-key', - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - } - } - }); - - const result = await invokeFetchContacts(invalidAuthPayload); - - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('error'); - expect(result.function_result.error).toBeDefined(); - // The error should indicate an authentication issue (401 Unauthorized) - expect(result.function_result.error).toMatch(/API request failed|status 4|Unauthorized|Request failed with status code 401/i); - }, 30000); - - // Test 4: Successful Contacts Retrieval - test('should successfully fetch contacts from a space', async () => { - const payload = createEventPayload(); - const result = await invokeFetchContacts(payload); - - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('success'); - 
expect(result.function_result.contacts).toBeDefined(); - expect(Array.isArray(result.function_result.contacts)).toBe(true); - - // If contacts were found, verify their structure - if (result.function_result.contacts.length > 0) { - const firstContact = result.function_result.contacts[0]; - expect(firstContact.id).toBeDefined(); - expect(typeof firstContact.id).toBe('string'); - expect(firstContact.first_name).toBeDefined(); - expect(firstContact.last_name).toBeDefined(); - expect(firstContact.type).toBeDefined(); - } - }, 30000); - - // Test 5: Error Handling - test('should handle API errors gracefully', async () => { - // Use a non-existent space ID to trigger an API error - const invalidSpacePayload = createEventPayload({ - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: 'non-existent-space-id', - key_type: 'api_key' - } - } - }); - - const result = await invokeFetchContacts(invalidSpacePayload); - - expect(result.function_result).toBeDefined(); - expect(result.function_result.status).toBe('error'); - expect(result.function_result.error).toBeDefined(); - }, 30000); -}); \ No newline at end of file diff --git a/conformance_tests/space_contacts_fetch_validation/jest.config.js b/conformance_tests/space_contacts_fetch_validation/jest.config.js deleted file mode 100644 index 5783d45..0000000 --- a/conformance_tests/space_contacts_fetch_validation/jest.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, // 30 seconds timeout for individual tests -}; \ No newline at end of file diff --git a/conformance_tests/space_contacts_fetch_validation/tsconfig.json b/conformance_tests/space_contacts_fetch_validation/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/space_contacts_fetch_validation/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - 
"forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/fetch_space_folders.test.ts b/conformance_tests/space_folders_fetch_tests/fetch_space_folders.test.ts new file mode 100644 index 0000000..928e994 --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/fetch_space_folders.test.ts @@ -0,0 +1,105 @@ +import { + getTestCredentials, + createTestEvent, + setupCallbackServer, + teardownCallbackServer, + invokeFunction, + CallbackServerSetup, + TestCredentials, +} from './test-utils'; + +describe('fetch_space_folders function', () => { + let credentials: TestCredentials; + let callbackServer: CallbackServerSetup; + + beforeAll(async () => { + credentials = getTestCredentials(); + callbackServer = await setupCallbackServer(8002); + }); + + afterAll(async () => { + await teardownCallbackServer(callbackServer); + }); + + test('should successfully invoke and return expected response structure', async () => { + const event = createTestEvent('fetch_space_folders', credentials); + const response = await invokeFunction(event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + expect(result).toHaveProperty('status'); + expect(result).toHaveProperty('message'); + expect(result).toHaveProperty('status_code'); + expect(result).toHaveProperty('api_delay'); + expect(result).toHaveProperty('metadata'); + expect(result).toHaveProperty('timestamp'); + + expect(result.status_code).toBe(200); + expect(result.status).toBe('success'); + }, 30000); + + test('should return correct metadata with space_id and folder_count', async () => { + const event = createTestEvent('fetch_space_folders', credentials); + const response = await invokeFunction(event); + + const result = response.function_result; + 
expect(result.metadata).toBeDefined(); + expect(result.metadata.space_id).toBe(credentials.spaceId); + expect(result.metadata.function_name).toBe('fetch_space_folders'); + expect(result.metadata.request_id).toBeDefined(); + expect(typeof result.metadata.folder_count).toBe('number'); + expect(result.metadata.folder_count).toBeGreaterThanOrEqual(0); + }, 30000); + + test('should return valid folder data structure', async () => { + const event = createTestEvent('fetch_space_folders', credentials); + const response = await invokeFunction(event); + + const result = response.function_result; + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + if (result.data.length > 0) { + const folder = result.data[0]; + // Check for required properties that are always present in space folders + expect(folder).toHaveProperty('id'); + expect(folder).toHaveProperty('title'); + expect(typeof folder.id).toBe('string'); + expect(typeof folder.title).toBe('string'); + + // Optional properties - check type if present + if (folder.accountId !== undefined) { + expect(typeof folder.accountId).toBe('string'); + } + if (folder.createdDate !== undefined) { + expect(typeof folder.createdDate).toBe('string'); + } + if (folder.updatedDate !== undefined) { + expect(typeof folder.updatedDate).toBe('string'); + } + } + }, 30000); + + test('should handle invalid API key with authentication error', async () => { + const invalidCredentials = { ...credentials, apiKey: 'invalid-api-key-12345' }; + const event = createTestEvent('fetch_space_folders', invalidCredentials); + const response = await invokeFunction(event); + + const result = response.function_result; + expect(result.status).toBe('error'); + expect([401, 403]).toContain(result.status_code); + expect(result.message).toMatch(/authentication|invalid|forbidden/i); + }, 30000); + + test('should handle invalid space ID with error response', async () => { + const invalidCredentials = { ...credentials, spaceId: 
'INVALID_SPACE_ID_999' }; + const event = createTestEvent('fetch_space_folders', invalidCredentials); + const response = await invokeFunction(event); + + const result = response.function_result; + expect(result.status).toBe('error'); + expect(result.status_code).toBeGreaterThanOrEqual(400); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/fetch_space_folders_acceptance.test.ts b/conformance_tests/space_folders_fetch_tests/fetch_space_folders_acceptance.test.ts new file mode 100644 index 0000000..abec2c7 --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/fetch_space_folders_acceptance.test.ts @@ -0,0 +1,87 @@ +import { + getTestCredentials, + createTestEvent, + setupCallbackServer, + teardownCallbackServer, + invokeFunction, + CallbackServerSetup, + TestCredentials, +} from './test-utils'; + +describe('fetch_space_folders acceptance test', () => { + let credentials: TestCredentials; + let callbackServer: CallbackServerSetup; + + beforeAll(async () => { + credentials = getTestCredentials(); + callbackServer = await setupCallbackServer(8002); + }); + + afterAll(async () => { + await teardownCallbackServer(callbackServer); + }); + + test('should return exactly 3 folders from the Wrike space', async () => { + // Create test event for fetch_space_folders function + const event = createTestEvent('fetch_space_folders', credentials); + + // Invoke the function via The Test Snap-In Server + const response = await invokeFunction(event); + + // Verify response structure exists + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Log full response for debugging if test fails + if (result.status !== 'success' || result.status_code !== 200) { + console.error('Function invocation failed:', JSON.stringify(result, null, 2)); + } + + // Verify successful response + expect(result.status).toBe('success'); + 
expect(result.status_code).toBe(200); + + // Verify data field exists and is an array + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + // Get actual folder count + const actualFolderCount = result.data.length; + + // Main assertion: verify exactly 3 folders are returned + if (actualFolderCount !== 3) { + console.error( + `Expected exactly 3 folders, but received ${actualFolderCount} folders.`, + '\nFolders received:', + JSON.stringify(result.data, null, 2) + ); + } + + expect(actualFolderCount).toBe(3); + + // Verify each folder has the required structure + result.data.forEach((folder: any, index: number) => { + expect(folder).toHaveProperty('id'); + expect(folder).toHaveProperty('title'); + expect(typeof folder.id).toBe('string'); + expect(typeof folder.title).toBe('string'); + + // Log folder details for debugging + if (!folder.id || !folder.title) { + console.error( + `Folder at index ${index} is missing required properties:`, + JSON.stringify(folder, null, 2) + ); + } + }); + + // Verify metadata contains correct folder count + expect(result.metadata).toBeDefined(); + expect(result.metadata.folder_count).toBe(3); + expect(result.metadata.space_id).toBe(credentials.spaceId); + expect(result.metadata.function_name).toBe('fetch_space_folders'); + + }, 30000); // 30 second timeout +}); \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/fetch_space_folders_rate_limiting.test.ts b/conformance_tests/space_folders_fetch_tests/fetch_space_folders_rate_limiting.test.ts new file mode 100644 index 0000000..4d21742 --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/fetch_space_folders_rate_limiting.test.ts @@ -0,0 +1,113 @@ +import { + getTestCredentials, + createTestEvent, + setupCallbackServer, + teardownCallbackServer, + invokeFunction, + controlRateLimiting, + CallbackServerSetup, + TestCredentials, +} from './test-utils'; + +describe('fetch_space_folders rate limiting test', () => { 
+ let credentials: TestCredentials; + let callbackServer: CallbackServerSetup; + + beforeAll(async () => { + credentials = getTestCredentials(); + callbackServer = await setupCallbackServer(8002); + }); + + afterAll(async () => { + await teardownCallbackServer(callbackServer); + }); + + test('should handle rate limiting with status 429 and appropriate api_delay', async () => { + // Generate unique test identifier + const testName = `rate_limit_test_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + + let rateLimitingStarted = false; + + try { + // Step 1: Start rate limiting on the mock API server + console.log(`Starting rate limiting for test: ${testName}`); + await controlRateLimiting('start', testName); + rateLimitingStarted = true; + + // Step 2: Create test event for fetch_space_folders function + const event = createTestEvent('fetch_space_folders', credentials); + + // Step 3: Invoke the function via The Test Snap-In Server + console.log('Invoking fetch_space_folders function with rate limiting active'); + const response = await invokeFunction(event); + + // Verify response structure exists + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Log full response for debugging if assertions fail + console.log('Function response:', JSON.stringify(result, null, 2)); + + // Step 4: Verify rate limiting response + + // Assertion 1: Verify status code is 429 (Too Many Requests) + if (result.status_code !== 429) { + console.error( + `Expected status_code to be 429 (rate limited), but received ${result.status_code}.`, + '\nFull response:', + JSON.stringify(result, null, 2) + ); + } + expect(result.status_code).toBe(429); + + // Assertion 2: Verify api_delay is defined and is a number + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + + // Assertion 3: Verify api_delay is greater than 0 + if (result.api_delay <= 0) { + console.error( + 
`Expected api_delay to be greater than 0, but received ${result.api_delay}.`, + '\nThis indicates the rate limit delay was not properly extracted from the API response.', + '\nFull response:', + JSON.stringify(result, null, 2) + ); + } + expect(result.api_delay).toBeGreaterThan(0); + + // Assertion 4: Verify api_delay is less than or equal to 3 + if (result.api_delay > 3) { + console.error( + `Expected api_delay to be <= 3 seconds, but received ${result.api_delay}.`, + '\nThis suggests the api_delay calculation in the implementation may be incorrect.', + '\nThe api_delay should represent the retry-after value from the API response.', + '\nFull response:', + JSON.stringify(result, null, 2) + ); + } + expect(result.api_delay).toBeLessThanOrEqual(3); + + // Additional verification: Check that status is 'error' + expect(result.status).toBe('error'); + + // Additional verification: Check that message indicates rate limiting + expect(result.message).toMatch(/rate limit/i); + + console.log( + `Rate limiting test passed successfully.`, + `\nStatus code: ${result.status_code}`, + `\nAPI delay: ${result.api_delay} seconds`, + `\nMessage: ${result.message}` + ); + + } finally { + // Step 5: Always end rate limiting, even if test fails + if (rateLimitingStarted) { + console.log('Ending rate limiting'); + await controlRateLimiting('end'); + } + } + }, 30000); // 30 second timeout +}); \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/jest.config.js b/conformance_tests/space_folders_fetch_tests/jest.config.js new file mode 100644 index 0000000..1643700 --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/jest.config.js @@ -0,0 +1,27 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' + ], + 
transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + declaration: true, + resolveJsonModule: true + } + }] + }, + setupFilesAfterEnv: [] +}; \ No newline at end of file diff --git a/conformance_tests/project_list_fetch_validation/package.json b/conformance_tests/space_folders_fetch_tests/package.json similarity index 72% rename from conformance_tests/project_list_fetch_validation/package.json rename to conformance_tests/space_folders_fetch_tests/package.json index 481378a..831253e 100644 --- a/conformance_tests/project_list_fetch_validation/package.json +++ b/conformance_tests/space_folders_fetch_tests/package.json @@ -1,16 +1,13 @@ { "name": "wrike-snap-in-conformance-tests", "version": "1.0.0", - "description": "Conformance tests for Wrike snap-in", - "main": "index.js", + "description": "Conformance tests for Wrike Snap-In", "scripts": { "test": "jest" }, - "dependencies": { - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" - }, + "keywords": [], + "author": "", + "license": "ISC", "devDependencies": { "@types/express": "^4.17.21", "@types/jest": "^29.4.0", @@ -18,5 +15,9 @@ "jest": "^29.4.2", "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "express": "^4.21.0" } } \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/test-utils.ts b/conformance_tests/space_folders_fetch_tests/test-utils.ts new file mode 100644 index 0000000..dbf393d --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/test-utils.ts @@ -0,0 +1,142 @@ +import axios from 'axios'; +import express, { Express } from 'express'; +import { Server } from 'http'; + +export interface TestCredentials { + apiKey: string; + spaceId: string; +} + +export interface CallbackServerSetup { + app: Express; + server: Server; + port: number; +} + +/** + * Read required environment 
variables for testing + */ +export function getTestCredentials(): TestCredentials { + const apiKey = process.env.WRIKE_API_KEY; + const spaceId = process.env.WRIKE_SPACE_ID; + + if (!apiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!spaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { apiKey, spaceId }; +} + +/** + * Create a test event payload for function invocation + */ +export function createTestEvent( + functionName: string, + credentials: TestCredentials, + additionalPayload: Record = {} +): any { + return { + execution_metadata: { + request_id: `test-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: credentials.apiKey, + org_id: credentials.spaceId, + org_name: 'Test Space', + key_type: 'api_key', + }, + ...additionalPayload, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Setup callback server for testing + */ +export function setupCallbackServer(port: number = 8002): Promise { + return new Promise((resolve, reject) => { + const app = express(); + app.use(express.json()); + + const server = app.listen(port, () => { + resolve({ app, server, port }); + }); + + server.on('error', reject); + }); +} + +/** + * Teardown callback server + */ +export function teardownCallbackServer(setup: CallbackServerSetup): Promise { + return new Promise((resolve) => { + setup.server.close(() => resolve()); + }); +} + +/** + * Invoke a function via The Test Snap-In Server + */ +export async function invokeFunction(event: any): Promise { + 
const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { + 'Content-Type': 'application/json', + }, + validateStatus: () => true, // Don't throw on any status code + }); + + return response.data; +} + +/** + * Control rate limiting on the mock API server + * @param action - 'start' to enable rate limiting, 'end' to disable it + * @param testName - Optional unique identifier for the test (required for 'start') + */ +export async function controlRateLimiting( + action: 'start' | 'end', + testName?: string +): Promise { + const apiServerUrl = 'http://localhost:8004'; + + if (action === 'start') { + if (!testName) { + throw new Error('testName is required when starting rate limiting'); + } + + await axios.post(`${apiServerUrl}/start_rate_limiting`, { + test_name: testName, + }, { + headers: { 'Content-Type': 'application/json' }, + }); + } else if (action === 'end') { + await axios.post(`${apiServerUrl}/end_rate_limiting`, {}, { + headers: { 'Content-Type': 'application/json' }, + }); + } else { + throw new Error(`Invalid action: ${action}. 
Must be 'start' or 'end'.`); + } +} \ No newline at end of file diff --git a/conformance_tests/space_folders_fetch_tests/tsconfig.json b/conformance_tests/space_folders_fetch_tests/tsconfig.json new file mode 100644 index 0000000..1054ad3 --- /dev/null +++ b/conformance_tests/space_folders_fetch_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": ".", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments.test.ts b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments.test.ts new file mode 100644 index 0000000..1556534 --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments.test.ts @@ -0,0 +1,104 @@ +import { loadTestConfig, createTestEvent, SnapInTestClient } from './test-utils'; + +describe('fetch_task_attachments function', () => { + let config: ReturnType; + let client: SnapInTestClient; + const VALID_TASK_ID = 'IEAGS6BYKRRFMPQG'; + + beforeAll(() => { + config = loadTestConfig(); + client = new SnapInTestClient(config.snapInServerUrl); + }); + + test('should successfully invoke the function', async () => { + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + }, 30000); + + test('should fetch attachments for a valid task ID', async () => { + const event = 
createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + + expect(response.function_result).toBeDefined(); + expect(response.function_result.status).toBe('success'); + expect(response.function_result.status_code).toBe(200); + }, 30000); + + test('should return correct response structure', async () => { + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + const result = response.function_result; + + expect(result).toHaveProperty('status'); + expect(result).toHaveProperty('message'); + expect(result).toHaveProperty('status_code'); + expect(result).toHaveProperty('api_delay'); + expect(result).toHaveProperty('metadata'); + expect(result).toHaveProperty('timestamp'); + + expect(result.metadata).toHaveProperty('task_id'); + expect(result.metadata).toHaveProperty('attachment_count'); + expect(result.metadata).toHaveProperty('function_name'); + expect(result.metadata).toHaveProperty('request_id'); + + expect(result.metadata.task_id).toBe(VALID_TASK_ID); + expect(result.metadata.function_name).toBe('fetch_task_attachments'); + }, 30000); + + test('should return attachment data with correct structure', async () => { + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + const result = response.function_result; + + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.metadata.attachment_count).toBe(result.data.length); + + if (result.data.length > 0) { + const attachment = result.data[0]; + expect(attachment).toHaveProperty('id'); + expect(attachment).toHaveProperty('name'); + expect(attachment).toHaveProperty('createdDate'); 
+ expect(attachment).toHaveProperty('url'); + } + }, 30000); + + test('should handle invalid task ID gracefully', async () => { + const invalidTaskId = 'INVALID_TASK_ID_12345'; + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: invalidTaskId, + }, + }); + + const response = await client.invokeFunction(event); + const result = response.function_result; + + expect(result).toBeDefined(); + expect(result.status_code).not.toBe(200); + expect(result.metadata.task_id).toBe(invalidTaskId); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_acceptance.test.ts b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_acceptance.test.ts new file mode 100644 index 0000000..2cb3026 --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_acceptance.test.ts @@ -0,0 +1,109 @@ +import { loadTestConfig, createTestEvent, SnapInTestClient } from './test-utils'; + +describe('fetch_task_attachments acceptance test', () => { + let config: ReturnType; + let client: SnapInTestClient; + const ACCEPTANCE_TEST_TASK_ID = 'IEAGS6BYKRRFMPQG'; + const EXPECTED_ATTACHMENT_COUNT = 1; + const EXPECTED_ATTACHMENT_NAME = 'Proof this image.jpg'; + + beforeAll(() => { + config = loadTestConfig(); + client = new SnapInTestClient(config.snapInServerUrl); + }); + + test('should fetch exactly 1 attachment with name "Proof this image.jpg" for task IEAGS6BYKRRFMPQG', async () => { + // Create test event with the specific task ID + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: ACCEPTANCE_TEST_TASK_ID, + }, + }); + + // Invoke the function + const response = await client.invokeFunction(event); + + // Validate response structure + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Log 
response for debugging + console.log('Response status:', result.status); + console.log('Response status_code:', result.status_code); + console.log('Response message:', result.message); + console.log('Attachment count:', result.metadata?.attachment_count); + console.log('Full response data:', JSON.stringify(result.data, null, 2)); + + // Verify successful API call + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + + // Verify data exists + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + // ACCEPTANCE CRITERION 1: Verify exactly 1 attachment + const actualAttachmentCount = result.data.length; + expect(actualAttachmentCount).toBe(EXPECTED_ATTACHMENT_COUNT); + + if (actualAttachmentCount !== EXPECTED_ATTACHMENT_COUNT) { + throw new Error( + `Attachment count mismatch for task ${ACCEPTANCE_TEST_TASK_ID}:\n` + + ` Expected: ${EXPECTED_ATTACHMENT_COUNT}\n` + + ` Actual: ${actualAttachmentCount}\n` + + ` Response data: ${JSON.stringify(result.data, null, 2)}` + ); + } + + // Verify metadata matches data length + expect(result.metadata.attachment_count).toBe(EXPECTED_ATTACHMENT_COUNT); + + // ACCEPTANCE CRITERION 2: Verify attachment name + const attachment = result.data[0]; + expect(attachment).toBeDefined(); + expect(attachment).toHaveProperty('name'); + + const actualAttachmentName = attachment.name; + expect(actualAttachmentName).toBe(EXPECTED_ATTACHMENT_NAME); + + if (actualAttachmentName !== EXPECTED_ATTACHMENT_NAME) { + throw new Error( + `Attachment name mismatch for task ${ACCEPTANCE_TEST_TASK_ID}:\n` + + ` Expected: "${EXPECTED_ATTACHMENT_NAME}"\n` + + ` Actual: "${actualAttachmentName}"\n` + + ` Full attachment data: ${JSON.stringify(attachment, null, 2)}` + ); + } + + // Additional validation: verify attachment has required fields + expect(attachment).toHaveProperty('id'); + expect(attachment).toHaveProperty('createdDate'); + expect(attachment).toHaveProperty('url'); + + console.log('✓ 
Acceptance test passed: Found 1 attachment named "Proof this image.jpg"'); + }, 30000); + + test('should return correct task_id in metadata', async () => { + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: ACCEPTANCE_TEST_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + const result = response.function_result; + + expect(result.metadata).toBeDefined(); + expect(result.metadata.task_id).toBe(ACCEPTANCE_TEST_TASK_ID); + + if (result.metadata.task_id !== ACCEPTANCE_TEST_TASK_ID) { + throw new Error( + `Task ID mismatch in metadata:\n` + + ` Expected: "${ACCEPTANCE_TEST_TASK_ID}"\n` + + ` Actual: "${result.metadata.task_id}"` + ); + } + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_rate_limiting.test.ts b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_rate_limiting.test.ts new file mode 100644 index 0000000..5fdb12c --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/fetch_task_attachments_rate_limiting.test.ts @@ -0,0 +1,184 @@ +import axios from 'axios'; +import { loadTestConfig, createTestEvent, SnapInTestClient } from './test-utils'; + +describe('fetch_task_attachments rate limiting', () => { + let config: ReturnType; + let client: SnapInTestClient; + const VALID_TASK_ID = 'IEAGS6BYKRRFMPQG'; + const RATE_LIMIT_SERVER_URL = 'http://localhost:8004'; + const TEST_IDENTIFIER = 'fetch_task_attachments_rate_limiting_test'; + + beforeAll(() => { + config = loadTestConfig(); + client = new SnapInTestClient(config.snapInServerUrl); + }); + + test('should handle rate limiting with correct status_code and api_delay', async () => { + let rateLimitingStarted = false; + + try { + // Step 1: Start rate limiting on the mock API server + console.log(`Starting rate limiting for test: ${TEST_IDENTIFIER}`); + await axios.post(`${RATE_LIMIT_SERVER_URL}/start_rate_limiting`, { + 
test_name: TEST_IDENTIFIER, + }); + rateLimitingStarted = true; + console.log('Rate limiting started successfully'); + + // Step 2: Invoke the function with valid credentials and parameters + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + console.log(`Invoking fetch_task_attachments for task: ${VALID_TASK_ID}`); + const response = await client.invokeFunction(event); + + // Validate response structure + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Log response for debugging + console.log('Rate limiting response received:'); + console.log(' Status:', result.status); + console.log(' Status Code:', result.status_code); + console.log(' API Delay:', result.api_delay); + console.log(' Message:', result.message); + + // Step 3: Verify status_code is 429 (Too Many Requests) + if (result.status_code !== 429) { + throw new Error( + `Expected rate limiting response (429) but got different status code.\n` + + ` Test: ${TEST_IDENTIFIER}\n` + + ` Task ID: ${VALID_TASK_ID}\n` + + ` Expected status_code: 429\n` + + ` Actual status_code: ${result.status_code}\n` + + ` Message: ${result.message}\n` + + ` Full response: ${JSON.stringify(result, null, 2)}\n\n` + + `Possible causes:\n` + + ` - Rate limiting was not properly activated on the API server\n` + + ` - The function is not correctly forwarding the 429 status code\n` + + ` - The WrikeClient is not handling rate limiting responses correctly` + ); + } + expect(result.status_code).toBe(429); + + // Step 4: Verify api_delay is greater than 0 + if (result.api_delay <= 0) { + throw new Error( + `Expected api_delay to be greater than 0 for rate limiting response.\n` + + ` Test: ${TEST_IDENTIFIER}\n` + + ` Task ID: ${VALID_TASK_ID}\n` + + ` Expected: api_delay > 0\n` + + ` Actual api_delay: ${result.api_delay}\n` + + ` Status code: ${result.status_code}\n` + 
+ ` Message: ${result.message}\n` + + ` Full response: ${JSON.stringify(result, null, 2)}\n\n` + + `Possible causes:\n` + + ` - The function is not extracting the retry-after header correctly\n` + + ` - The WrikeClient.handleError method is not calculating api_delay properly\n` + + ` - The API server is not sending the retry-after header` + ); + } + expect(result.api_delay).toBeGreaterThan(0); + + // Step 5: Verify api_delay is less than or equal to 3 + if (result.api_delay > 3) { + throw new Error( + `Expected api_delay to be <= 3 seconds, but got a larger value.\n` + + ` Test: ${TEST_IDENTIFIER}\n` + + ` Task ID: ${VALID_TASK_ID}\n` + + ` Expected: api_delay <= 3\n` + + ` Actual api_delay: ${result.api_delay}\n` + + ` Status code: ${result.status_code}\n` + + ` Message: ${result.message}\n` + + ` Full response: ${JSON.stringify(result, null, 2)}\n\n` + + `LIKELY ISSUE: The api_delay calculation in The Implementation Code is incorrect.\n` + + ` - Check WrikeClient.handleError method in wrike-client.ts\n` + + ` - Verify that retry-after header is being parsed correctly\n` + + ` - Ensure the value is not being multiplied or transformed incorrectly\n` + + ` - The retry-after header from the API should be a small value (1-3 seconds)` + ); + } + expect(result.api_delay).toBeLessThanOrEqual(3); + + // Additional validation: verify error status + expect(result.status).toBe('error'); + + // Additional validation: verify message indicates rate limiting + expect(result.message).toContain('Rate limit'); + + console.log('✓ Rate limiting test passed:'); + console.log(` - Status code: 429 ✓`); + console.log(` - API delay: ${result.api_delay} seconds (0 < delay <= 3) ✓`); + console.log(` - Message: "${result.message}" ✓`); + + } finally { + // Step 6: Always end rate limiting, even if test fails + if (rateLimitingStarted) { + try { + console.log('Ending rate limiting...'); + await axios.post(`${RATE_LIMIT_SERVER_URL}/end_rate_limiting`); + console.log('Rate limiting ended 
successfully'); + } catch (cleanupError) { + console.error('Failed to end rate limiting:', cleanupError); + // Don't throw here - we want to see the original test failure if any + } + } + } + }, 30000); + + test('should verify rate limiting cleanup works correctly', async () => { + // This test ensures that after ending rate limiting, the API works normally again + let rateLimitingStarted = false; + + try { + // Start and immediately end rate limiting + await axios.post(`${RATE_LIMIT_SERVER_URL}/start_rate_limiting`, { + test_name: `${TEST_IDENTIFIER}_cleanup_test`, + }); + rateLimitingStarted = true; + + await axios.post(`${RATE_LIMIT_SERVER_URL}/end_rate_limiting`); + rateLimitingStarted = false; + + // Now invoke the function - it should work normally + const event = createTestEvent('fetch_task_attachments', config, { + event_context: { + external_sync_unit_id: VALID_TASK_ID, + }, + }); + + const response = await client.invokeFunction(event); + const result = response.function_result; + + // Should get a successful response, not 429 + if (result.status_code === 429) { + throw new Error( + `Rate limiting was not properly cleaned up.\n` + + ` Expected: Normal response (status_code 200)\n` + + ` Actual: Rate limiting response (status_code 429)\n` + + ` This indicates the rate limiting server did not properly reset after end_rate_limiting call.\n` + + ` Full response: ${JSON.stringify(result, null, 2)}` + ); + } + + expect(result.status_code).toBe(200); + expect(result.status).toBe('success'); + + console.log('✓ Rate limiting cleanup verified - API working normally after cleanup'); + + } finally { + if (rateLimitingStarted) { + try { + await axios.post(`${RATE_LIMIT_SERVER_URL}/end_rate_limiting`); + } catch (cleanupError) { + console.error('Failed to end rate limiting in cleanup test:', cleanupError); + } + } + } + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/data_extraction_validation/jest.config.js 
b/conformance_tests/task_attachments_fetch_tests/jest.config.js similarity index 50% rename from conformance_tests/data_extraction_validation/jest.config.js rename to conformance_tests/task_attachments_fetch_tests/jest.config.js index 213bd72..f08df69 100644 --- a/conformance_tests/data_extraction_validation/jest.config.js +++ b/conformance_tests/task_attachments_fetch_tests/jest.config.js @@ -1,13 +1,13 @@ module.exports = { preset: 'ts-jest', testEnvironment: 'node', - testTimeout: 120000, // 120 seconds as per requirements - setupFilesAfterEnv: ['/jest.setup.ts'], - transform: { - '^.+\\.tsx?$': 'ts-jest', - }, testMatch: ['**/*.test.ts'], + testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverage: false, verbose: true, + maxWorkers: 1, + bail: false, detectOpenHandles: true, forceExit: true, }; \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/package.json b/conformance_tests/task_attachments_fetch_tests/package.json new file mode 100644 index 0000000..03bdaa7 --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/package.json @@ -0,0 +1,27 @@ +{ + "name": "wrike-airdrop-conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike Airdrop Snap-in", + "main": "index.js", + "scripts": { + "test": "jest --runInBand --detectOpenHandles --forceExit" + }, + "keywords": [ + "wrike", + "airdrop", + "conformance", + "tests" + ], + "author": "", + "license": "ISC", + "devDependencies": { + "@types/jest": "^29.5.0", + "@types/node": "^18.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^4.9.0" + }, + "dependencies": { + "axios": "^1.6.0" + } +} \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/test-utils.ts b/conformance_tests/task_attachments_fetch_tests/test-utils.ts new file mode 100644 index 0000000..39dc618 --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/test-utils.ts 
@@ -0,0 +1,119 @@ +import axios, { AxiosInstance } from 'axios'; + +/** + * Environment configuration for tests + */ +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; + snapInServerUrl: string; +} + +/** + * Load test configuration from environment variables + */ +export function loadTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + snapInServerUrl: 'http://localhost:8000/handle/sync', + }; +} + +/** + * Create a test event payload for function invocation + */ +export function createTestEvent( + functionName: string, + config: TestConfig, + overrides: any = {} +): any { + return { + payload: { + connection_data: { + key: config.wrikeApiKey, + org_id: config.wrikeSpaceId, + key_type: 'api_key', + org_name: 'Test Organization', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-${Date.now()}`, + snap_in_slug: 'wrike-airdrop', + snap_in_version_id: 'v1', + sync_run: 'test-run', + sync_run_id: 'test-run-id', + sync_tier: 'standard', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit-id', + uuid: `uuid-${Date.now()}`, + worker_data_url: 'http://localhost:8003/external-worker', + ...overrides.event_context, + }, + event_type: overrides.event_type || 'test_event', + event_data: overrides.event_data || {}, + }, + context: { + dev_oid: 'test-org-id', + 
source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'v1', + service_account_id: 'test-sa', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-${Date.now()}`, + function_name: functionName, + event_type: overrides.event_type || 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: overrides.global_values || {}, + event_sources: {}, + }, + }; +} + +/** + * HTTP client for calling The Test Snap-In Server + */ +export class SnapInTestClient { + private client: AxiosInstance; + + constructor(baseUrl: string) { + this.client = axios.create({ + baseURL: baseUrl, + timeout: 30000, + validateStatus: () => true, // Don't throw on any status code + }); + } + + async invokeFunction(event: any): Promise { + const response = await this.client.post('', event); + return response.data; + } +} \ No newline at end of file diff --git a/conformance_tests/task_attachments_fetch_tests/tsconfig.json b/conformance_tests/task_attachments_fetch_tests/tsconfig.json new file mode 100644 index 0000000..e8b1e9d --- /dev/null +++ b/conformance_tests/task_attachments_fetch_tests/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "allowSyntheticDefaultImports": true, + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/fetch_task_comments.test.ts b/conformance_tests/task_comments_fetch_tests/fetch_task_comments.test.ts new file mode 100644 index 0000000..f5c935f --- /dev/null +++ 
b/conformance_tests/task_comments_fetch_tests/fetch_task_comments.test.ts @@ -0,0 +1,117 @@ +/** + * Conformance tests for fetch_task_comments function + */ + +import { getTestEnvironment } from './test-utils/env'; +import { buildEventPayload } from './test-utils/event-builder'; +import { invokeFunction } from './test-utils/http-client'; + +describe('fetch_task_comments function', () => { + let testEnv: ReturnType; + + beforeAll(() => { + testEnv = getTestEnvironment(); + }); + + describe('Test 1: Basic Invocation', () => { + it('should successfully invoke the function with valid input', async () => { + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: testEnv.testTaskId, + }); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + expect(response.data).toBeDefined(); + expect(response.data.function_result).toBeDefined(); + + const result = response.data.function_result; + expect(result).toHaveProperty('status'); + expect(result).toHaveProperty('message'); + expect(result).toHaveProperty('status_code'); + expect(result).toHaveProperty('api_delay'); + expect(result).toHaveProperty('metadata'); + expect(result).toHaveProperty('timestamp'); + }, 30000); + }); + + describe('Test 2: Successful Comments Fetch', () => { + it('should successfully fetch comments from a valid task', async () => { + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: testEnv.testTaskId, + }); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + const result = response.data.function_result; + + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.message).toContain('Successfully fetched comments'); + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + 
expect(result.metadata).toBeDefined(); + expect(result.metadata.task_id).toBe(testEnv.testTaskId); + expect(result.metadata.comment_count).toBe(result.data.length); + expect(result.metadata.function_name).toBe('fetch_task_comments'); + expect(result.metadata.request_id).toBeDefined(); + + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + }, 30000); + }); + + describe('Test 3: Error Handling', () => { + it('should handle invalid task ID appropriately', async () => { + const invalidTaskId = 'INVALID_TASK_ID_12345'; + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: invalidTaskId, + }); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + const result = response.data.function_result; + + expect(result.status).toBe('error'); + expect(result.status_code).toBeGreaterThanOrEqual(400); + expect(result.message).toBeDefined(); + expect(typeof result.message).toBe('string'); + + expect(result.metadata).toBeDefined(); + expect(result.metadata.task_id).toBe(invalidTaskId); + expect(result.metadata.comment_count).toBe(0); + }, 30000); + }); + + describe('Test 4: Rate Limiting', () => { + it('should include api_delay field in response', async () => { + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: testEnv.testTaskId, + }); + + const response = await invokeFunction(event); + + expect(response.status).toBe(200); + const result = response.data.function_result; + + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + expect(result.api_delay).toBeGreaterThanOrEqual(0); + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/fetch_task_comments_acceptance.test.ts 
b/conformance_tests/task_comments_fetch_tests/fetch_task_comments_acceptance.test.ts new file mode 100644 index 0000000..1fbc560 --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/fetch_task_comments_acceptance.test.ts @@ -0,0 +1,84 @@ +/** + * Acceptance test for fetch_task_comments function + * + * This test verifies that the function correctly fetches comments from + * a specific Wrike task (ID: IEAGS6BYKRRFMPQG) and returns exactly 2 comments. + */ + +import { getTestEnvironment } from './test-utils/env'; +import { buildEventPayload } from './test-utils/event-builder'; +import { invokeFunction } from './test-utils/http-client'; + +describe('fetch_task_comments acceptance test', () => { + let testEnv: ReturnType; + const EXPECTED_TASK_ID = 'IEAGS6BYKRRFMPQG'; + const EXPECTED_COMMENT_COUNT = 2; + + beforeAll(() => { + testEnv = getTestEnvironment(); + }); + + describe('Acceptance Test: Fetch comments from task IEAGS6BYKRRFMPQG', () => { + it('should fetch exactly 2 comments from the specified task', async () => { + // Build event payload for the specific task + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: EXPECTED_TASK_ID, + }); + + // Invoke the function + const response = await invokeFunction(event); + + // Validate HTTP response + expect(response.status).toBe(200); + + // Validate function_result exists + expect(response.data).toBeDefined(); + expect(response.data.function_result).toBeDefined(); + + const result = response.data.function_result; + + // Validate successful status + expect(result.status).toBe('success'); + + // Validate status code + expect(result.status_code).toBe(200); + + // Validate data array exists + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + // Critical assertion: Validate comment count + expect(result.data.length).toBe(EXPECTED_COMMENT_COUNT); + + // Validate metadata + 
expect(result.metadata).toBeDefined(); + expect(result.metadata.task_id).toBe(EXPECTED_TASK_ID); + + expect(result.metadata.comment_count).toBe(EXPECTED_COMMENT_COUNT); + + // Validate function metadata + expect(result.metadata.function_name).toBe('fetch_task_comments'); + + expect(result.metadata.request_id).toBeDefined(); + + // Validate api_delay field + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + + // Validate timestamp + expect(result.timestamp).toBeDefined(); + expect(typeof result.timestamp).toBe('string'); + + // Additional validation: Verify each comment has expected structure + result.data.forEach((comment: any, index: number) => { + expect(comment.id).toBeDefined(); + expect(comment.authorId).toBeDefined(); + expect(comment.text).toBeDefined(); + expect(comment.createdDate).toBeDefined(); + }); + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/fetch_task_comments_rate_limiting_acceptance.test.ts b/conformance_tests/task_comments_fetch_tests/fetch_task_comments_rate_limiting_acceptance.test.ts new file mode 100644 index 0000000..da80a84 --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/fetch_task_comments_rate_limiting_acceptance.test.ts @@ -0,0 +1,178 @@ +/** + * Acceptance test for fetch_task_comments function - Rate Limiting + * + * This test verifies that the function correctly handles rate limiting + * from the Wrike API by returning status code 429 and a valid api_delay value. 
+ */ + +import { getTestEnvironment } from './test-utils/env'; +import { buildEventPayload } from './test-utils/event-builder'; +import { invokeFunction, startRateLimiting, endRateLimiting } from './test-utils/http-client'; + +describe('fetch_task_comments rate limiting acceptance test', () => { + let testEnv: ReturnType; + const TEST_IDENTIFIER = 'fetch_task_comments_rate_limiting_test'; + const EXPECTED_STATUS_CODE = 429; + const MAX_EXPECTED_API_DELAY = 3; + + beforeAll(() => { + testEnv = getTestEnvironment(); + }); + + describe('Acceptance Test: Rate Limiting Handling', () => { + it('should handle rate limiting with status 429 and valid api_delay', async () => { + let rateLimitingStarted = false; + + try { + // Step 1: Start rate limiting on mock API server + console.log(`Starting rate limiting with test identifier: ${TEST_IDENTIFIER}`); + const startResponse = await startRateLimiting(TEST_IDENTIFIER); + + if (startResponse.status !== 200) { + throw new Error( + `Failed to start rate limiting on mock API server. ` + + `Status: ${startResponse.status}, Response: ${JSON.stringify(startResponse.data)}` + ); + } + + rateLimitingStarted = true; + console.log('Rate limiting started successfully'); + + // Step 2: Build event payload for fetch_task_comments + const event = buildEventPayload({ + functionName: 'fetch_task_comments', + apiKey: testEnv.wrikeApiKey, + spaceId: testEnv.wrikeSpaceId, + taskId: testEnv.testTaskId, + }); + + console.log(`Invoking fetch_task_comments for task: ${testEnv.testTaskId}`); + + // Step 3: Invoke the function + const response = await invokeFunction(event); + + // Step 4: Validate HTTP response + expect(response.status).toBe(200); + + if (response.status !== 200) { + throw new Error( + `Expected HTTP status 200 for function invocation, but got ${response.status}. ` + + `This indicates the function failed to execute. 
Response: ${JSON.stringify(response.data)}` + ); + } + + // Step 5: Validate function_result exists + expect(response.data).toBeDefined(); + expect(response.data.function_result).toBeDefined(); + + const result = response.data.function_result; + console.log(`Function result: ${JSON.stringify(result, null, 2)}`); + + // Step 6: Validate status_code is 429 (rate limited) + expect(result.status_code).toBe(EXPECTED_STATUS_CODE); + + if (result.status_code !== EXPECTED_STATUS_CODE) { + throw new Error( + `Expected status_code to be ${EXPECTED_STATUS_CODE} (rate limited), but got ${result.status_code}. ` + + `This may indicate rate limiting was not triggered correctly on the mock API server. ` + + `Test identifier: ${TEST_IDENTIFIER}, Task ID: ${testEnv.testTaskId}, ` + + `Request ID: ${result.metadata?.request_id || 'unknown'}` + ); + } + + // Step 7: Validate api_delay is present and valid + expect(result.api_delay).toBeDefined(); + expect(typeof result.api_delay).toBe('number'); + + if (typeof result.api_delay !== 'number') { + throw new Error( + `Expected api_delay to be a number, but got ${typeof result.api_delay}. ` + + `Value: ${result.api_delay}` + ); + } + + // Step 8: Validate api_delay is greater than 0 + expect(result.api_delay).toBeGreaterThan(0); + + if (result.api_delay <= 0) { + throw new Error( + `Expected api_delay to be greater than 0, but got ${result.api_delay}. ` + + `The implementation should extract the retry-after header value from the rate limit response.` + ); + } + + // Step 9: Validate api_delay is less than or equal to 3 + expect(result.api_delay).toBeLessThanOrEqual(MAX_EXPECTED_API_DELAY); + + if (result.api_delay > MAX_EXPECTED_API_DELAY) { + throw new Error( + `Expected api_delay to be <= ${MAX_EXPECTED_API_DELAY}, but got ${result.api_delay}. ` + + `If api_delay > ${MAX_EXPECTED_API_DELAY}, the implementation may be calculating api_delay incorrectly. 
` + + `The api_delay should be extracted from the 'retry-after' header in the rate limit response.` + ); + } + + // Step 10: Validate status is 'error' + expect(result.status).toBe('error'); + + if (result.status !== 'error') { + throw new Error( + `Expected status to be 'error' for rate limited response, but got '${result.status}'. ` + + `Status code: ${result.status_code}` + ); + } + + // Step 11: Validate message contains rate limit information + expect(result.message).toBeDefined(); + expect(typeof result.message).toBe('string'); + expect(result.message.toLowerCase()).toContain('rate limit'); + + if (!result.message.toLowerCase().includes('rate limit')) { + throw new Error( + `Expected message to contain 'rate limit' information, but got: "${result.message}". ` + + `The error message should clearly indicate that rate limiting occurred.` + ); + } + + // Step 12: Validate metadata + expect(result.metadata).toBeDefined(); + expect(result.metadata.task_id).toBe(testEnv.testTaskId); + expect(result.metadata.function_name).toBe('fetch_task_comments'); + expect(result.metadata.request_id).toBeDefined(); + + // Step 13: Validate timestamp + expect(result.timestamp).toBeDefined(); + expect(typeof result.timestamp).toBe('string'); + + console.log( + `Rate limiting test passed successfully. ` + + `Status code: ${result.status_code}, API delay: ${result.api_delay}s` + ); + + } finally { + // Step 14: Cleanup - End rate limiting (always execute, even if test fails) + if (rateLimitingStarted) { + console.log('Ending rate limiting'); + try { + const endResponse = await endRateLimiting(); + + if (endResponse.status !== 200) { + console.error( + `Warning: Failed to end rate limiting on mock API server. ` + + `Status: ${endResponse.status}, Response: ${JSON.stringify(endResponse.data)}. 
` + + `This may affect subsequent tests.` + ); + } else { + console.log('Rate limiting ended successfully'); + } + } catch (cleanupError) { + console.error( + `Error during rate limiting cleanup: ${cleanupError}. ` + + `This may affect subsequent tests.` + ); + } + } + } + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/package.json b/conformance_tests/task_comments_fetch_tests/package.json new file mode 100644 index 0000000..79ebe91 --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/package.json @@ -0,0 +1,22 @@ +{ + "name": "wrike-snap-in-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike snap-in", + "scripts": { + "test": "jest --testTimeout=120000" + }, + "devDependencies": { + "@types/jest": "^29.5.0", + "@types/node": "^20.0.0", + "axios": "^1.6.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.0" + }, + "jest": { + "preset": "ts-jest", + "testEnvironment": "node", + "testMatch": ["**/*.test.ts"], + "setupFilesAfterEnv": ["./jest.setup.js"] + } +} \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/test-utils/env.ts b/conformance_tests/task_comments_fetch_tests/test-utils/env.ts new file mode 100644 index 0000000..0348dd5 --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/test-utils/env.ts @@ -0,0 +1,28 @@ +/** + * Utility for reading and validating environment variables + */ + +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; + testTaskId: string; +} + +export function getTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + testTaskId: 
'IEAGS6BYKRRFMPQG', // Provided test task ID + }; +} \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/test-utils/event-builder.ts b/conformance_tests/task_comments_fetch_tests/test-utils/event-builder.ts new file mode 100644 index 0000000..97d550a --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/test-utils/event-builder.ts @@ -0,0 +1,78 @@ +/** + * Utility for building test event payloads + */ + +export interface EventPayloadOptions { + functionName: string; + apiKey: string; + spaceId: string; + taskId?: string; + eventType?: string; +} + +export function buildEventPayload(options: EventPayloadOptions): any { + const { + functionName, + apiKey, + spaceId, + taskId, + eventType = 'test_event', + } = options; + + return { + payload: { + connection_data: { + key: apiKey, + org_id: spaceId, + org_name: 'Test Organization', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: taskId || 'test-unit', + external_sync_unit_id: taskId || 'test-unit-id', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-request-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: `test-uuid-${Date.now()}`, + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: eventType, + event_data: {}, + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 
'test-token', + }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: eventType, + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/test-utils/http-client.ts b/conformance_tests/task_comments_fetch_tests/test-utils/http-client.ts new file mode 100644 index 0000000..b270fa1 --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/test-utils/http-client.ts @@ -0,0 +1,37 @@ +/** + * Utility for making HTTP requests to test servers + */ + +import axios, { AxiosResponse } from 'axios'; + +const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; +const MOCK_API_SERVER_URL = 'http://localhost:8004'; + +export async function invokeFunction(event: any): Promise { + return axios.post(SNAP_IN_SERVER_URL, event, { + headers: { + 'Content-Type': 'application/json', + }, + validateStatus: () => true, // Don't throw on any status code + }); +} + +export async function startRateLimiting(testName: string): Promise { + return axios.post(`${MOCK_API_SERVER_URL}/start_rate_limiting`, { + test_name: testName, + }, { + headers: { + 'Content-Type': 'application/json', + }, + validateStatus: () => true, + }); +} + +export async function endRateLimiting(): Promise { + return axios.post(`${MOCK_API_SERVER_URL}/end_rate_limiting`, {}, { + headers: { + 'Content-Type': 'application/json', + }, + validateStatus: () => true, + }); +} \ No newline at end of file diff --git a/conformance_tests/task_comments_fetch_tests/tsconfig.json b/conformance_tests/task_comments_fetch_tests/tsconfig.json new file mode 100644 index 0000000..087673a --- /dev/null +++ b/conformance_tests/task_comments_fetch_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", + 
"strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "moduleResolution": "node" + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/extraction-data-continue-complete.test.ts b/conformance_tests/tasks_data_push_tests/extraction-data-continue-complete.test.ts new file mode 100644 index 0000000..0206702 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/extraction-data-continue-complete.test.ts @@ -0,0 +1,212 @@ +import { loadTestEnvironment } from './test-utils/env'; +import { CallbackServer } from './test-utils/callback-server'; +import { SnapInClient } from './test-utils/snap-in-client'; +import { EventBuilder } from './test-utils/event-builder'; +import { MockWrikeServer } from './test-utils/mock-wrike-server'; + +describe('Extraction Function - Complete Tasks Extraction with EXTRACTION_DATA_CONTINUE', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + let env: ReturnType; + let mockWrikeServer: MockWrikeServer; + + beforeAll(async () => { + env = loadTestEnvironment(); + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + mockWrikeServer = new MockWrikeServer(); + await mockWrikeServer.start(8004); + snapInClient = new SnapInClient(); + }); + + afterAll(async () => { + await callbackServer.stop(); + await mockWrikeServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearEvents(); + mockWrikeServer.clearRequests(); + }); + + test('Complete tasks extraction with EXTRACTION_DATA_CONTINUE event - 110 tasks', async () => { + // Clear any previous events + callbackServer.clearEvents(); + + // Build extraction data continue event + const event = EventBuilder.buildExtractionDataContinueEvent( + env.wrikeApiKey, + 
env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + console.log('Sending EXTRACTION_DATA_CONTINUE event to snap-in server...'); + console.log('Event payload:', JSON.stringify(event, null, 2)); + + // Invoke the extraction function + const result = await snapInClient.invokeFunction(event); + + console.log('Snap-in function invocation result:', JSON.stringify(result, null, 2)); + + // Verify no immediate errors from function invocation + expect(result).toBeDefined(); + if (result.error) { + console.error('Function invocation error:', JSON.stringify(result.error, null, 2)); + } + expect(result.error).toBeUndefined(); + + // Wait for callback event (with timeout) + const maxWaitTime = 50000; // 50 seconds + const pollInterval = 500; // 500ms + let elapsedTime = 0; + let callbackEvents: any[] = []; + + console.log('Waiting for callback event from DevRev...'); + + while (elapsedTime < maxWaitTime) { + callbackEvents = callbackServer.getReceivedEvents(); + if (callbackEvents.length > 0) { + break; + } + await new Promise(resolve => setTimeout(resolve, pollInterval)); + elapsedTime += pollInterval; + } + + console.log(`Received ${callbackEvents.length} callback event(s) after ${elapsedTime}ms`); + + // Assertion 1: Verify exactly one callback event was received + expect(callbackEvents.length).toBe(1); + if (callbackEvents.length !== 1) { + console.error( + `Expected exactly 1 callback event, but received ${callbackEvents.length}. ` + + `Events: ${JSON.stringify(callbackEvents, null, 2)}` + ); + throw new Error( + `Expected exactly 1 callback event, but received ${callbackEvents.length}. ` + + `This indicates that the extraction function did not complete properly or sent multiple events. 
` + + `Received events: ${JSON.stringify(callbackEvents, null, 2)}` + ); + } + + const callbackEvent = callbackEvents[0]; + console.log('Callback event received:', JSON.stringify(callbackEvent, null, 2)); + + // Assertion 2: Verify event_type is EXTRACTION_DATA_DONE + const eventType = callbackEvent.event_type; + expect(eventType).toBe('EXTRACTION_DATA_DONE'); + if (eventType !== 'EXTRACTION_DATA_DONE') { + console.error( + `Expected event_type to be "EXTRACTION_DATA_DONE", but got "${eventType}". ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error( + `Expected event_type to be "EXTRACTION_DATA_DONE", but got "${eventType}". ` + + `This indicates that the extraction function did not complete successfully. ` + + `Possible event types: EXTRACTION_DATA_DONE (success), EXTRACTION_DATA_ERROR (error), ` + + `EXTRACTION_DATA_PROGRESS (timeout), EXTRACTION_DATA_DELAY (rate limit). ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Assertion 3: Verify event_data exists + expect(callbackEvent.event_data).toBeDefined(); + if (!callbackEvent.event_data) { + console.error( + 'Expected event_data to be defined in callback event. ' + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error( + 'Expected event_data to be defined in callback event. ' + + 'The event_data field should contain information about extracted artifacts. ' + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Assertion 4: Verify artifacts array exists and has length > 0 + const artifacts = callbackEvent.event_data.artifacts; + expect(artifacts).toBeDefined(); + expect(Array.isArray(artifacts)).toBe(true); + expect(artifacts.length).toBeGreaterThan(0); + + if (!artifacts || !Array.isArray(artifacts) || artifacts.length === 0) { + console.error( + 'Expected artifacts array to exist and have length > 0. 
' + + `Actual artifacts: ${JSON.stringify(artifacts, null, 2)}` + ); + throw new Error( + `Expected artifacts array to exist and have length > 0, but got: ${JSON.stringify(artifacts)}. ` + + `The artifacts array should contain information about extracted data (users, tasks, etc.). ` + + `An empty or missing artifacts array indicates that no data was extracted. ` + + `Full event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + } + + console.log(`Found ${artifacts.length} artifact(s) in callback event`); + console.log('Artifacts:', JSON.stringify(artifacts, null, 2)); + + // Assertion 5: Find tasks artifact + const tasksArtifact = artifacts.find((artifact: any) => artifact.item_type === 'tasks'); + expect(tasksArtifact).toBeDefined(); + + if (!tasksArtifact) { + const availableItemTypes = artifacts.map((a: any) => a.item_type).join(', '); + console.error( + 'Expected to find an artifact with item_type="tasks". ' + + `Available artifacts: ${JSON.stringify(artifacts.map((a: any) => ({ item_type: a.item_type, item_count: a.item_count })), null, 2)}` + ); + throw new Error( + 'Expected to find an artifact with item_type="tasks", but none found. ' + + `This indicates that tasks data was not extracted or was not properly uploaded. ` + + `Available item_types: ${availableItemTypes}. ` + + `Full artifacts: ${JSON.stringify(artifacts, null, 2)}` + ); + } + + console.log('Tasks artifact found:', JSON.stringify(tasksArtifact, null, 2)); + + // Assertion 6: Verify tasks artifact has item_count = 110 + const itemCount = tasksArtifact.item_count; + expect(itemCount).toBe(110); + + if (itemCount !== 110) { + console.error( + `Expected tasks artifact to have item_count=110, but got ${itemCount}. ` + + `This indicates that not all tasks data was extracted. ` + + `Full tasks artifact: ${JSON.stringify(tasksArtifact, null, 2)}` + ); + throw new Error( + `Expected tasks artifact to have item_count=110, but got ${itemCount}. 
` + + `This indicates that not all tasks data was extracted. ` + + `Possible causes: ` + + `1. Pagination logic is not working correctly (not fetching all pages). ` + + `2. State management issue (nextPageToken not being updated properly). ` + + `3. Tasks are being filtered out during normalization. ` + + `4. API is not returning all tasks. ` + + `Full tasks artifact: ${JSON.stringify(tasksArtifact, null, 2)}` + ); + } + + console.log('✓ All assertions passed: 110 tasks successfully extracted with EXTRACTION_DATA_CONTINUE event'); + + // Additional verification: Check that pagination was used + const taskRequests = mockWrikeServer.getTasksRequests(); + console.log(`Total task API requests made: ${taskRequests.length}`); + + if (taskRequests.length > 1) { + console.log('✓ Pagination was used (multiple requests detected)'); + } else if (taskRequests.length === 1) { + console.log('⚠ Only one API request was made. Pagination may not have been triggered.'); + } + + // Verify hasAttachments field was requested + for (const request of taskRequests) { + const fields = request.query.fields; + if (fields) { + const fieldsStr = fields.replace(/^\[|\]$/g, ''); + const fieldsArray = fieldsStr.split(',').map((item: string) => item.trim()); + expect(fieldsArray).toContain('hasAttachments'); + console.log('✓ hasAttachments field was requested in API call'); + } + } + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/extraction-rate-limiting.test.ts b/conformance_tests/tasks_data_push_tests/extraction-rate-limiting.test.ts new file mode 100644 index 0000000..bb2cbdf --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/extraction-rate-limiting.test.ts @@ -0,0 +1,215 @@ +import { loadTestEnvironment } from './test-utils/env'; +import { CallbackServer } from './test-utils/callback-server'; +import { SnapInClient } from './test-utils/snap-in-client'; +import { EventBuilder } from './test-utils/event-builder'; +import { 
MockWrikeServer } from './test-utils/mock-wrike-server'; +import axios from 'axios'; + +describe('Extraction Function - Rate Limiting Test', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + let env: ReturnType<typeof loadTestEnvironment>; + let mockWrikeServer: MockWrikeServer; + + beforeAll(async () => { + env = loadTestEnvironment(); + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + mockWrikeServer = new MockWrikeServer(); + await mockWrikeServer.start(8004); + snapInClient = new SnapInClient(); + }); + + afterAll(async () => { + await callbackServer.stop(); + await mockWrikeServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearEvents(); + mockWrikeServer.clearRequests(); + }); + + test('Handle rate limiting (HTTP 429) with EXTRACTION_DATA_START event', async () => { + const testName = 'extraction_data_rate_limit_test'; + + console.log(`[Test] Starting rate limiting test: ${testName}`); + + // Step 1: Enable rate limiting on mock server + console.log('[Test] Step 1: Enabling rate limiting on mock Wrike server...'); + try { + const rateLimitResponse = await axios.post('http://localhost:8004/_test/start_rate_limiting', { + test_name: testName, + delay: 60, + }); + console.log('[Test] Rate limiting enabled:', JSON.stringify(rateLimitResponse.data, null, 2)); + } catch (error) { + console.error('[Test] Failed to enable rate limiting:', error); + throw new Error(`Failed to enable rate limiting on mock server: ${error}`); + } + + // Clear any previous events + callbackServer.clearEvents(); + + // Step 2: Build and send extraction data start event + console.log('[Test] Step 2: Building EXTRACTION_DATA_START event...'); + const event = EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + console.log('[Test] Sending EXTRACTION_DATA_START event to snap-in server...'); + console.log('[Test] Event payload:', JSON.stringify(event, null, 2)); + + // Invoke the extraction 
function + let result: any; + try { + result = await snapInClient.invokeFunction(event); + console.log('[Test] Snap-in function invocation result:', JSON.stringify(result, null, 2)); + } catch (error) { + console.error('[Test] Function invocation failed:', error); + throw new Error(`Function invocation failed: ${error}`); + } + + // Verify no immediate errors from function invocation + expect(result).toBeDefined(); + if (result.error) { + console.error('[Test] Function invocation error:', JSON.stringify(result.error, null, 2)); + } + expect(result.error).toBeUndefined(); + + // Wait for callback event (with timeout) + const maxWaitTime = 50000; // 50 seconds + const pollInterval = 500; // 500ms + let elapsedTime = 0; + let callbackEvents: any[] = []; + + console.log('[Test] Waiting for callback event from DevRev...'); + + while (elapsedTime < maxWaitTime) { + callbackEvents = callbackServer.getReceivedEvents(); + if (callbackEvents.length > 0) { + break; + } + await new Promise(resolve => setTimeout(resolve, pollInterval)); + elapsedTime += pollInterval; + } + + console.log(`[Test] Received ${callbackEvents.length} callback event(s) after ${elapsedTime}ms`); + + // Assertion 1: Verify exactly one callback event was received + if (callbackEvents.length === 0) { + console.error( + `[Test] Expected to receive a callback event within ${maxWaitTime}ms, but none was received. ` + + `This indicates the extraction function may have failed silently or not completed. ` + + `Check if the extraction function properly handles rate limiting (HTTP 429) and emits EXTRACTION_DATA_DELAY event.` + ); + throw new Error( + `Expected to receive a callback event within ${maxWaitTime}ms, but none was received. ` + + `This indicates the extraction function may have failed silently or not completed. 
` + + `The function should detect HTTP 429 response and emit EXTRACTION_DATA_DELAY event.` + ); + } + + expect(callbackEvents.length).toBe(1); + if (callbackEvents.length !== 1) { + console.error( + `[Test] Expected exactly 1 callback event, but received ${callbackEvents.length}. ` + + `Events: ${JSON.stringify(callbackEvents, null, 2)}` + ); + throw new Error( + `Expected exactly 1 callback event, but received ${callbackEvents.length}. ` + + `This indicates that the extraction function emitted multiple events. ` + + `When rate limiting occurs, only one EXTRACTION_DATA_DELAY event should be emitted. ` + + `Received events: ${JSON.stringify(callbackEvents, null, 2)}` + ); + } + + const callbackEvent = callbackEvents[0]; + console.log('[Test] Callback event received:', JSON.stringify(callbackEvent, null, 2)); + + // Assertion 2: Verify event_type is EXTRACTION_DATA_DELAY + const eventType = callbackEvent.event_type; + expect(eventType).toBe('EXTRACTION_DATA_DELAY'); + if (eventType !== 'EXTRACTION_DATA_DELAY') { + console.error( + `[Test] Expected event_type to be "EXTRACTION_DATA_DELAY", but got "${eventType}". ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error( + `Expected event_type to be "EXTRACTION_DATA_DELAY", but got "${eventType}". ` + + `This indicates that the extraction function did not properly handle rate limiting (HTTP 429). ` + + `When the Wrike API returns HTTP 429, the function must emit EXTRACTION_DATA_DELAY event. ` + + `Possible event types: EXTRACTION_DATA_DELAY (rate limit), EXTRACTION_DATA_DONE (success), ` + + `EXTRACTION_DATA_ERROR (error), EXTRACTION_DATA_PROGRESS (timeout). ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Assertion 3: Verify event_data exists + expect(callbackEvent.event_data).toBeDefined(); + if (!callbackEvent.event_data) { + console.error( + '[Test] Expected event_data to be defined in callback event. 
' + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error( + 'Expected event_data to be defined in callback event. ' + + 'The event_data field should contain the delay information for rate limiting. ' + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + } + + // Assertion 4: Verify delay field exists and is valid + const delay = callbackEvent.event_data.delay; + expect(delay).toBeDefined(); + if (delay === undefined || delay === null) { + console.error( + '[Test] Expected event_data.delay to be defined, but it was missing. ' + + `Full event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + throw new Error( + 'Expected event_data.delay to be defined, but it was missing. ' + + 'The delay field must contain the number of seconds to wait before retrying. ' + + 'This value should be extracted from the "retry-after" header in the HTTP 429 response. ' + + `Full event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + } + + // Assertion 5: Verify delay is a positive number + const delayNum = typeof delay === 'string' ? parseInt(delay, 10) : delay; + expect(typeof delayNum).toBe('number'); + expect(delayNum).toBeGreaterThan(0); + if (typeof delayNum !== 'number' || delayNum <= 0) { + console.error( + `[Test] Expected event_data.delay to be a positive number, but got ${delay} (type: ${typeof delay}). ` + + `Full event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + throw new Error( + `Expected event_data.delay to be a positive number, but got ${delay} (type: ${typeof delay}). ` + + `The delay must be > 0 and represent the number of seconds to wait before retrying. 
` + `Full event_data: ${JSON.stringify(callbackEvent.event_data, null, 2)}` + ); + } + + console.log(`[Test] ✓ All assertions passed: Rate limiting handled correctly with delay=${delayNum} seconds`); + + // Step 3: Disable rate limiting + console.log('[Test] Step 3: Disabling rate limiting on mock Wrike server...'); + try { + const endRateLimitResponse = await axios.post('http://localhost:8004/_test/end_rate_limiting'); + console.log('[Test] Rate limiting disabled:', JSON.stringify(endRateLimitResponse.data, null, 2)); + } catch (error) { + console.error('[Test] Failed to disable rate limiting:', error); + // Don't throw here, as the test has already passed + } + + // Additional verification: Check that API was called + const capturedRequests = mockWrikeServer.getCapturedRequests(); + console.log(`[Test] Total API requests captured: ${capturedRequests.length}`); + if (capturedRequests.length > 0) { + console.log('[Test] ✓ API requests were made before rate limiting was triggered'); + } + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/extraction-tasks-complete.test.ts b/conformance_tests/tasks_data_push_tests/extraction-tasks-complete.test.ts new file mode 100644 index 0000000..1599821 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/extraction-tasks-complete.test.ts @@ -0,0 +1,187 @@ +import { loadTestEnvironment } from './test-utils/env'; +import { CallbackServer } from './test-utils/callback-server'; +import { SnapInClient } from './test-utils/snap-in-client'; +import { EventBuilder } from './test-utils/event-builder'; +import { MockWrikeServer } from './test-utils/mock-wrike-server'; + +describe('Extraction Function - Complete Tasks Extraction Test', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + let env: ReturnType<typeof loadTestEnvironment>; + let mockWrikeServer: MockWrikeServer; + + beforeAll(async () => { + env = loadTestEnvironment(); + callbackServer = new CallbackServer(); + await 
callbackServer.start(8002); + mockWrikeServer = new MockWrikeServer(); + await mockWrikeServer.start(8004); + snapInClient = new SnapInClient(); + }); + + afterAll(async () => { + await callbackServer.stop(); + await mockWrikeServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearEvents(); + mockWrikeServer.clearRequests(); + }); + + test('Complete tasks extraction with 110 tasks', async () => { + // Clear any previous events + callbackServer.clearEvents(); + + // Build extraction data start event + const event = EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + console.log('Sending EXTRACTION_DATA_START event to snap-in server...'); + + // Invoke the extraction function + const result = await snapInClient.invokeFunction(event); + + console.log('Snap-in function invocation result:', JSON.stringify(result, null, 2)); + + // Verify no immediate errors from function invocation + expect(result).toBeDefined(); + if (result.error) { + console.error('Function invocation error:', JSON.stringify(result.error, null, 2)); + } + expect(result.error).toBeUndefined(); + + // Wait for callback event (with timeout) + const maxWaitTime = 50000; // 50 seconds + const pollInterval = 500; // 500ms + let elapsedTime = 0; + let callbackEvents: any[] = []; + + console.log('Waiting for callback event from DevRev...'); + + while (elapsedTime < maxWaitTime) { + callbackEvents = callbackServer.getReceivedEvents(); + if (callbackEvents.length > 0) { + break; + } + await new Promise(resolve => setTimeout(resolve, pollInterval)); + elapsedTime += pollInterval; + } + + console.log(`Received ${callbackEvents.length} callback event(s) after ${elapsedTime}ms`); + + // Assertion 1: Verify exactly one callback event was received + expect(callbackEvents.length).toBe(1); + if (callbackEvents.length !== 1) { + console.error( + `Expected exactly 1 callback event, but received ${callbackEvents.length}. 
` + + `Events: ${JSON.stringify(callbackEvents, null, 2)}` + ); + throw new Error( + `Expected exactly 1 callback event, but received ${callbackEvents.length}` + ); + } + + const callbackEvent = callbackEvents[0]; + console.log('Callback event received:', JSON.stringify(callbackEvent, null, 2)); + + // Assertion 2: Verify event_type is EXTRACTION_DATA_DONE + const eventType = callbackEvent.event_type; + expect(eventType).toBe('EXTRACTION_DATA_DONE'); + if (eventType !== 'EXTRACTION_DATA_DONE') { + console.error( + `Expected event_type to be "EXTRACTION_DATA_DONE", but got "${eventType}". ` + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error( + `Expected event_type to be "EXTRACTION_DATA_DONE", but got "${eventType}"` + ); + } + + // Assertion 3: Verify event_data exists + expect(callbackEvent.event_data).toBeDefined(); + if (!callbackEvent.event_data) { + console.error( + 'Expected event_data to be defined in callback event. ' + + `Full event: ${JSON.stringify(callbackEvent, null, 2)}` + ); + throw new Error('Expected event_data to be defined in callback event'); + } + + // Assertion 4: Verify artifacts array exists and has length > 0 + const artifacts = callbackEvent.event_data.artifacts; + expect(artifacts).toBeDefined(); + expect(Array.isArray(artifacts)).toBe(true); + expect(artifacts.length).toBeGreaterThan(0); + + if (!artifacts || !Array.isArray(artifacts) || artifacts.length === 0) { + console.error( + 'Expected artifacts array to exist and have length > 0. 
' + + `Actual artifacts: ${JSON.stringify(artifacts, null, 2)}` + ); + throw new Error( + `Expected artifacts array to exist and have length > 0, but got: ${JSON.stringify(artifacts)}` + ); + } + + console.log(`Found ${artifacts.length} artifact(s) in callback event`); + console.log('Artifacts:', JSON.stringify(artifacts, null, 2)); + + // Assertion 5: Find tasks artifact + const tasksArtifact = artifacts.find((artifact: any) => artifact.item_type === 'tasks'); + expect(tasksArtifact).toBeDefined(); + + if (!tasksArtifact) { + console.error( + 'Expected to find an artifact with item_type="tasks". ' + + `Available artifacts: ${JSON.stringify(artifacts.map((a: any) => ({ item_type: a.item_type, item_count: a.item_count })), null, 2)}` + ); + throw new Error( + 'Expected to find an artifact with item_type="tasks", but none found. ' + + `Available item_types: ${artifacts.map((a: any) => a.item_type).join(', ')}` + ); + } + + console.log('Tasks artifact found:', JSON.stringify(tasksArtifact, null, 2)); + + // Assertion 6: Verify tasks artifact has item_count = 110 + const itemCount = tasksArtifact.item_count; + expect(itemCount).toBe(110); + + if (itemCount !== 110) { + console.error( + `Expected tasks artifact to have item_count=110, but got ${itemCount}. ` + + `This indicates that not all tasks data was extracted. ` + + `Full tasks artifact: ${JSON.stringify(tasksArtifact, null, 2)}` + ); + throw new Error( + `Expected tasks artifact to have item_count=110, but got ${itemCount}. 
` + `This indicates that not all tasks data was extracted.` + ); + } + + console.log('✓ All assertions passed: 110 tasks successfully extracted'); + + // Additional verification: Check that pagination was used + const taskRequests = mockWrikeServer.getTasksRequests(); + console.log(`Total task API requests made: ${taskRequests.length}`); + + if (taskRequests.length > 1) { + console.log('✓ Pagination was used (multiple requests detected)'); + } + + // Verify hasAttachments field was requested + for (const request of taskRequests) { + const fields = request.query.fields; + if (fields) { + const fieldsStr = fields.replace(/^\[|\]$/g, ''); + const fieldsArray = fieldsStr.split(',').map((item: string) => item.trim()); + expect(fieldsArray).toContain('hasAttachments'); + console.log('✓ hasAttachments field was requested in API call'); + } + } + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/extraction-tasks-normalization-validation.test.ts b/conformance_tests/tasks_data_push_tests/extraction-tasks-normalization-validation.test.ts new file mode 100644 index 0000000..b210160 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/extraction-tasks-normalization-validation.test.ts @@ -0,0 +1,369 @@ +import { loadTestEnvironment } from './test-utils/env'; +import { CallbackServer } from './test-utils/callback-server'; +import { SnapInClient } from './test-utils/snap-in-client'; +import { EventBuilder } from './test-utils/event-builder'; +import { MockWrikeServer } from './test-utils/mock-wrike-server'; +import { execSync } from 'child_process'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; + +describe('Extraction Function - Tasks Normalization Validation with Chef CLI', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + let env: ReturnType<typeof loadTestEnvironment>; + let mockWrikeServer: MockWrikeServer; + let tempMetadataFile: string | null = null; + + beforeAll(async () => { 
+ env = loadTestEnvironment(); + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + mockWrikeServer = new MockWrikeServer(); + await mockWrikeServer.start(8004); + snapInClient = new SnapInClient(); + }); + + afterAll(async () => { + await callbackServer.stop(); + await mockWrikeServer.stop(); + + // Cleanup temporary metadata file + if (tempMetadataFile && fs.existsSync(tempMetadataFile)) { + fs.unlinkSync(tempMetadataFile); + } + }); + + beforeEach(() => { + callbackServer.clearEvents(); + mockWrikeServer.clearRequests(); + }); + + test('Validate tasks normalization using Chef CLI', async () => { + console.log('[Test] Starting tasks normalization validation test'); + + // Step 0: Verify required environment variables + console.log('[Test] Step 0: Verifying required environment variables...'); + + const chefCliPath = process.env.CHEF_CLI_PATH; + if (!chefCliPath) { + throw new Error( + 'Required environment variable CHEF_CLI_PATH is not set.\n' + + 'CHEF_CLI_PATH should contain the path to the chef-cli executable.\n' + + 'Example: export CHEF_CLI_PATH=/usr/local/bin/chef-cli\n' + + 'This tool is required to validate the normalization of extracted data.' 
+ ); + } + + if (!fs.existsSync(chefCliPath)) { + throw new Error( + 'Chef CLI executable not found at path: ' + chefCliPath + '\n' + + 'The CHEF_CLI_PATH environment variable points to a non-existent file.\n' + + 'Please verify the path is correct and the chef-cli tool is installed.\n' + + 'Attempted path: ' + chefCliPath + ); + } + + const extractedFilesFolderPath = process.env.EXTRACTED_FILES_FOLDER_PATH; + if (!extractedFilesFolderPath) { + throw new Error( + 'Required environment variable EXTRACTED_FILES_FOLDER_PATH is not set.\n' + + 'EXTRACTED_FILES_FOLDER_PATH should contain the path to the folder where extracted files are stored.\n' + + 'Example: export EXTRACTED_FILES_FOLDER_PATH=/tmp/extracted-files\n' + + 'This folder is created by the extraction function and contains the extracted data files.' + ); + } + + console.log('[Test] ✓ CHEF_CLI_PATH: ' + chefCliPath); + console.log('[Test] ✓ EXTRACTED_FILES_FOLDER_PATH: ' + extractedFilesFolderPath); + + // Step 1: Retrieve External Domain Metadata + console.log('[Test] Step 1: Retrieving External Domain Metadata...'); + + const metadataEvent = EventBuilder.buildExternalDomainMetadataEvent( + env.wrikeApiKey, + env.wrikeSpaceId + ); + + console.log('[Test] Invoking get_external_domain_metadata function...'); + const metadataResult = await snapInClient.invokeFunction(metadataEvent); + + if (!metadataResult) { + throw new Error( + 'Failed to retrieve external domain metadata: No result returned.\n' + + 'The get_external_domain_metadata function did not return any data.\n' + + 'This indicates a critical failure in the metadata generation process.' + ); + } + + if (metadataResult.error) { + throw new Error( + 'Failed to retrieve external domain metadata: Function returned an error.\n' + + 'Error details: ' + JSON.stringify(metadataResult.error, null, 2) + '\n' + + 'The get_external_domain_metadata function encountered an error during execution.\n' + + 'Check the function implementation and logs for more details.' 
+ ); + } + + if (!metadataResult.function_result || !metadataResult.function_result.data) { + throw new Error( + 'Failed to retrieve external domain metadata: Invalid result structure.\n' + + 'Received result: ' + JSON.stringify(metadataResult, null, 2) + '\n' + + 'Expected result to contain function_result.data with the external domain metadata.\n' + + 'The function may not be returning data in the expected format.' + ); + } + + const externalDomainMetadata = metadataResult.function_result.data; + console.log('[Test] External Domain Metadata retrieved successfully'); + + // Verify tasks record type exists + if (!externalDomainMetadata.record_types || !externalDomainMetadata.record_types.tasks) { + throw new Error( + 'External Domain Metadata does not contain "tasks" record type.\n' + + 'Available record types: ' + Object.keys(externalDomainMetadata.record_types || {}).join(', ') + '\n' + + 'The external domain metadata must include a "tasks" record type definition.\n' + + 'Full metadata: ' + JSON.stringify(externalDomainMetadata, null, 2) + ); + } + + console.log('[Test] ✓ Tasks record type found in metadata'); + + // Store metadata in temporary file + tempMetadataFile = path.join(os.tmpdir(), 'external-domain-metadata-' + Date.now() + '.json'); + fs.writeFileSync(tempMetadataFile, JSON.stringify(externalDomainMetadata, null, 2)); + console.log('[Test] ✓ Metadata stored in temporary file: ' + tempMetadataFile); + + // Step 2: Invoke Extraction Function + console.log('[Test] Step 2: Invoking Extraction Function...'); + + callbackServer.clearEvents(); + + const extractionEvent = EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + console.log('[Test] Sending EXTRACTION_DATA_START event to snap-in server...'); + const extractionResult = await snapInClient.invokeFunction(extractionEvent); + + if (!extractionResult) { + throw new Error( + 'Failed to invoke extraction function: No result returned.\n' + + 'The 
extraction function did not return any data.\n' + + 'This indicates a critical failure in the extraction process.' + ); + } + + if (extractionResult.error) { + throw new Error( + 'Failed to invoke extraction function: Function returned an error.\n' + + 'Error details: ' + JSON.stringify(extractionResult.error, null, 2) + '\n' + + 'The extraction function encountered an error during execution.\n' + + 'Check the function implementation and logs for more details.' + ); + } + + console.log('[Test] ✓ Extraction function invoked successfully'); + + // Wait for callback event + console.log('[Test] Waiting for callback event from DevRev...'); + const maxWaitTime = 50000; // 50 seconds + const pollInterval = 500; // 500ms + let elapsedTime = 0; + let callbackEvents: any[] = []; + + while (elapsedTime < maxWaitTime) { + callbackEvents = callbackServer.getReceivedEvents(); + if (callbackEvents.length > 0) { + break; + } + await new Promise(resolve => setTimeout(resolve, pollInterval)); + elapsedTime += pollInterval; + } + + if (callbackEvents.length === 0) { + throw new Error( + 'Failed to receive callback event within ' + maxWaitTime + 'ms.\n' + + 'The extraction function did not emit a callback event.\n' + + 'This indicates that the extraction process did not complete or failed silently.\n' + + 'Expected event types: EXTRACTION_DATA_DONE (success), EXTRACTION_DATA_ERROR (error), ' + + 'EXTRACTION_DATA_PROGRESS (timeout), EXTRACTION_DATA_DELAY (rate limit).\n' + + 'Check the extraction function logs for more details.' 
+ ); + } + + const callbackEvent = callbackEvents[0]; + console.log('[Test] ✓ Received callback event: ' + callbackEvent.event_type); + + if (callbackEvent.event_type !== 'EXTRACTION_DATA_DONE') { + throw new Error( + 'Expected callback event type "EXTRACTION_DATA_DONE", but got "' + callbackEvent.event_type + '".\n' + + 'Full callback event: ' + JSON.stringify(callbackEvent, null, 2) + '\n' + + 'The extraction function did not complete successfully.\n' + + 'Possible event types:\n' + + ' - EXTRACTION_DATA_DONE: Extraction completed successfully\n' + + ' - EXTRACTION_DATA_ERROR: Extraction failed with an error\n' + + ' - EXTRACTION_DATA_PROGRESS: Extraction timed out and will continue\n' + + ' - EXTRACTION_DATA_DELAY: Extraction hit rate limit and will retry\n' + + 'Check the extraction function logs and the event_data field for error details.' + ); + } + + console.log('[Test] ✓ Extraction completed successfully'); + + // Step 3: Verify extracted files folder exists + console.log('[Test] Step 3: Verifying extracted files folder...'); + + if (!fs.existsSync(extractedFilesFolderPath)) { + throw new Error( + 'Extracted files folder does not exist: ' + extractedFilesFolderPath + '\n' + + 'The EXTRACTED_FILES_FOLDER_PATH directory was not created by the extraction function.\n' + + 'This indicates that the extraction function did not write any data to disk.\n' + + 'Expected behavior: The extraction function should create this directory and write extracted data files to it.\n' + + 'Possible causes:\n' + + ' 1. The extraction function is not configured to write files locally (check isLocalDevelopment option)\n' + + ' 2. The extraction function failed before writing any data\n' + + ' 3. 
The EXTRACTED_FILES_FOLDER_PATH environment variable points to the wrong location\n' + + 'Current EXTRACTED_FILES_FOLDER_PATH: ' + extractedFilesFolderPath + ); + } + + console.log('[Test] ✓ Extracted files folder exists: ' + extractedFilesFolderPath); + + // Step 4: Locate extracted tasks file + console.log('[Test] Step 4: Locating extracted tasks file...'); + + let extractedFileName: string; + try { + const lsCommand = 'ls "' + extractedFilesFolderPath + '" | grep extractor_tasks | sort -r | head -n 1'; + console.log('[Test] Executing command: ' + lsCommand); + extractedFileName = execSync(lsCommand, { encoding: 'utf-8' }).trim(); + } catch (error) { + const dirContents = fs.readdirSync(extractedFilesFolderPath); + throw new Error( + 'Failed to locate extracted tasks file.\n' + + 'Command: ls "' + extractedFilesFolderPath + '" | grep extractor_tasks | sort -r | head -n 1\n' + + 'Error: ' + (error instanceof Error ? error.message : String(error)) + '\n' + + 'Directory contents (' + extractedFilesFolderPath + '):\n' + dirContents.join('\n') + '\n' + + 'Expected to find a file matching pattern "extractor_tasks*".\n' + + 'Possible causes:\n' + + ' 1. The extraction function did not extract tasks data\n' + + ' 2. The file naming convention has changed\n' + + ' 3. The extraction function wrote files to a different location\n' + + ' 4. The tasks repository was not initialized or data was not pushed\n' + + 'Check the extraction function implementation and verify that tasks data is being extracted and uploaded.' + ); + } + + if (!extractedFileName) { + const dirContents = fs.readdirSync(extractedFilesFolderPath); + throw new Error( + 'No extracted tasks file found.\n' + + 'Directory: ' + extractedFilesFolderPath + '\n' + + 'Directory contents:\n' + dirContents.join('\n') + '\n' + + 'Expected to find a file matching pattern "extractor_tasks*".\n' + + 'The extraction function completed successfully but did not create a tasks data file.\n' + + 'Possible causes:\n' + + ' 1. 
No tasks data was extracted (empty result set)\n' + + ' 2. The tasks repository was not properly initialized\n' + + ' 3. The data was not pushed to the repository\n' + + ' 4. The file naming convention has changed\n' + + 'Check the extraction function logs and verify that tasks are being fetched from the API.' + ); + } + + const extractedFilePath = path.join(extractedFilesFolderPath, extractedFileName); + console.log('[Test] ✓ Found extracted tasks file: ' + extractedFileName); + console.log('[Test] Full path: ' + extractedFilePath); + + if (!fs.existsSync(extractedFilePath)) { + throw new Error( + 'Extracted tasks file does not exist: ' + extractedFilePath + '\n' + + 'The file was found by the ls command but does not exist when accessed.\n' + + 'This is an unexpected error that may indicate a race condition or file system issue.' + ); + } + + // Step 5: Validate with Chef CLI + console.log('[Test] Step 5: Validating tasks normalization with Chef CLI...'); + + const chefCommand = 'cat "' + extractedFilePath + '" | "' + chefCliPath + '" validate-data -m "' + tempMetadataFile + '" -r tasks'; + console.log('[Test] Executing command: ' + chefCommand); + + let stdout: string; + let stderr: string; + let exitCode: number; + + try { + stdout = execSync(chefCommand, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'] + }); + stderr = ''; + exitCode = 0; + } catch (error: any) { + stdout = error.stdout ? error.stdout.toString() : ''; + stderr = error.stderr ? 
error.stderr.toString() : ''; + exitCode = error.status || 1; + } + + // Print Chef CLI output to console + console.log('[Test] ===== Chef CLI Output ====='); + if (stdout) { + console.log('[Test] STDOUT:'); + console.log(stdout); + } + if (stderr) { + console.log('[Test] STDERR:'); + console.log(stderr); + } + console.log('[Test] Exit code: ' + exitCode); + console.log('[Test] =============================='); + + // Verify Chef CLI validation succeeded + if (exitCode !== 0) { + throw new Error( + 'Chef CLI validation failed with non-zero exit code.\n' + + 'Exit code: ' + exitCode + '\n' + + 'Command: ' + chefCommand + '\n' + + 'Extracted file: ' + extractedFilePath + '\n' + + 'Metadata file: ' + tempMetadataFile + '\n' + + 'STDOUT:\n' + stdout + '\n' + + 'STDERR:\n' + stderr + '\n' + + 'The chef-cli tool detected validation errors in the extracted tasks data.\n' + + 'This indicates that the normalization function is not correctly transforming task data\n' + + 'according to the external domain metadata schema.\n' + + 'Possible causes:\n' + + ' 1. Required fields are missing from normalized data\n' + + ' 2. Field types do not match the schema (e.g., string instead of array)\n' + + ' 3. Field values are invalid (e.g., invalid enum values)\n' + + ' 4. Reference fields are not properly formatted\n' + + ' 5. Rich text fields are not properly converted\n' + + 'Review the chef-cli output above for specific validation errors.\n' + + 'Check the normalization function implementation in workers/normalization.ts.' 
+ ); + } + + if (stdout.trim() !== '') { + throw new Error( + 'Chef CLI validation produced non-empty output.\n' + + 'Expected: Empty stdout (indicating successful validation)\n' + + 'Actual stdout:\n' + stdout + '\n' + + 'STDERR:\n' + stderr + '\n' + + 'Exit code: ' + exitCode + '\n' + + 'Command: ' + chefCommand + '\n' + + 'Extracted file: ' + extractedFilePath + '\n' + + 'Metadata file: ' + tempMetadataFile + '\n' + + 'The chef-cli tool should produce empty output when validation succeeds.\n' + + 'Non-empty output indicates validation warnings or errors.\n' + + 'Review the output above for details about the validation issues.\n' + + 'Check the normalization function implementation in workers/normalization.ts.' + ); + } + + console.log('[Test] ✓ Chef CLI validation succeeded (empty output)'); + console.log('[Test] ✓ All assertions passed: Tasks normalization is valid'); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/extraction-tasks-pagination.test.ts b/conformance_tests/tasks_data_push_tests/extraction-tasks-pagination.test.ts new file mode 100644 index 0000000..faa9e92 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/extraction-tasks-pagination.test.ts @@ -0,0 +1,105 @@ +import { loadTestEnvironment } from './test-utils/env'; +import { CallbackServer } from './test-utils/callback-server'; +import { SnapInClient } from './test-utils/snap-in-client'; +import { EventBuilder } from './test-utils/event-builder'; +import { MockWrikeServer } from './test-utils/mock-wrike-server'; + +describe('Extraction Function - Tasks Pagination Tests', () => { + let callbackServer: CallbackServer; + let snapInClient: SnapInClient; + let env: ReturnType; + let mockWrikeServer: MockWrikeServer; + + beforeAll(async () => { + env = loadTestEnvironment(); + callbackServer = new CallbackServer(); + await callbackServer.start(8002); + mockWrikeServer = new MockWrikeServer(); + await mockWrikeServer.start(8004); + snapInClient 
= new SnapInClient(); + }); + + afterAll(async () => { + await callbackServer.stop(); + await mockWrikeServer.stop(); + }); + + beforeEach(() => { + callbackServer.clearEvents(); + mockWrikeServer.clearRequests(); + }); + + test('Test 1: Basic tasks extraction with EXTRACTION_DATA_START', async () => { + const event = EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + const result = await snapInClient.invokeFunction(event); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + + // Verify that function completed successfully + if (result.function_result) { + console.log('Function result:', JSON.stringify(result.function_result, null, 2)); + } + }, 60000); + + test('Test 2: Verify pagination with fields parameter including hasAttachments', async () => { + // Clear any previous requests + mockWrikeServer.clearRequests(); + + const event = EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + const result = await snapInClient.invokeFunction(event); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + + // Get tasks API calls from mock server + const tasksCalls = mockWrikeServer.getTasksRequests(); + + expect(tasksCalls.length).toBeGreaterThan(0); + + // Verify fields parameter in each request + for (const call of tasksCalls) { + const fields = call.query.fields; + expect(fields).toBeDefined(); + + // Parse Wrike-specific array format: [item1,item2] + // Remove brackets and split by comma + const fieldsStr = fields.replace(/^\[|\]$/g, ''); + const fieldsArray = fieldsStr.split(',').map((item: string) => item.trim()); + + expect(Array.isArray(fieldsArray)).toBe(true); + expect(fieldsArray).toContain('hasAttachments'); + expect(fieldsArray).toContain('responsibleIds'); + } + }, 60000); + + test('Test 3: Verify pagination state management with nextPageToken', async () => { + const event = 
EventBuilder.buildExtractionDataStartEvent( + env.wrikeApiKey, + env.wrikeSpaceId, + 'IEAGS6BYI5RFMPP7' + ); + + // Set a small page size to force pagination + event.input_data.global_values.pageSize = '2'; + + const result = await snapInClient.invokeFunction(event); + + expect(result).toBeDefined(); + expect(result.error).toBeUndefined(); + + // The function should handle pagination internally + // and complete successfully + console.log('Pagination test result:', JSON.stringify(result, null, 2)); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/jest.config.js b/conformance_tests/tasks_data_push_tests/jest.config.js new file mode 100644 index 0000000..3506baa --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/jest.config.js @@ -0,0 +1,30 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testTimeout: 120000, + testMatch: ['**/*.test.ts'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' + ], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + resolveJsonModule: true + } + }] + }, + globals: { + 'ts-jest': { + isolatedModules: true + } + } +}; \ No newline at end of file diff --git a/conformance_tests/space_contacts_fetch_validation/package.json b/conformance_tests/tasks_data_push_tests/package.json similarity index 73% rename from conformance_tests/space_contacts_fetch_validation/package.json rename to conformance_tests/tasks_data_push_tests/package.json index 917a9df..20576f3 100644 --- a/conformance_tests/space_contacts_fetch_validation/package.json +++ b/conformance_tests/tasks_data_push_tests/package.json @@ -1,22 +1,22 @@ { - "name": "wrike-snap-in-conformance-tests", + "name": "wrike-conformance-tests", "version": "1.0.0", "description": 
"Conformance tests for Wrike snap-in", - "main": "index.js", "scripts": { - "test": "jest --forceExit" - }, - "dependencies": { - "axios": "^1.9.0", - "express": "^4.21.0", - "body-parser": "^1.20.3" + "test": "jest --runInBand --detectOpenHandles" }, "devDependencies": { "@types/express": "^4.17.21", "@types/jest": "^29.4.0", "@types/node": "^18.13.0", + "axios": "^1.9.0", + "express": "^4.21.0", "jest": "^29.4.2", "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "express": "^4.21.0" } } \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-events/external-domain-metadata-event.json b/conformance_tests/tasks_data_push_tests/test-events/external-domain-metadata-event.json new file mode 100644 index 0000000..b9497c4 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-events/external-domain-metadata-event.json @@ -0,0 +1,34 @@ +{ + "execution_metadata": { + "request_id": "test-request-id-metadata", + "function_name": "get_external_domain_metadata", + "event_type": "get_external_domain_metadata", + "devrev_endpoint": "http://localhost:8003" + }, + "context": { + "dev_oid": "DEV-test-org", + "source_id": "test-source-id", + "snap_in_id": "test-snap-in-id", + "snap_in_version_id": "test-snap-in-version-id", + "service_account_id": "test-service-account-id", + "secrets": { + "service_account_token": "test-token" + } + }, + "payload": { + "connection_data": { + "org_id": "WRIKE_SPACE_ID", + "org_name": "Test Space", + "key": "WRIKE_API_KEY", + "key_type": "bearer_token" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "external_sync_unit_id": "test-external-sync-unit-id" + } + }, + "input_data": { + "global_values": {}, + "event_sources": {} + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-events/extraction-data-continue-event.json 
b/conformance_tests/tasks_data_push_tests/test-events/extraction-data-continue-event.json new file mode 100644 index 0000000..60b2a7a --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-events/extraction-data-continue-event.json @@ -0,0 +1,56 @@ +{ + "execution_metadata": { + "request_id": "test-request-id-002", + "function_name": "extraction", + "event_type": "extraction:data:continue", + "devrev_endpoint": "http://localhost:8003" + }, + "context": { + "dev_oid": "DEV-test-org", + "source_id": "test-source-id", + "snap_in_id": "test-snap-in-id", + "snap_in_version_id": "test-snap-in-version-id", + "service_account_id": "test-service-account-id", + "secrets": { + "service_account_token": "test-token" + } + }, + "payload": { + "connection_data": { + "org_id": "WRIKE_SPACE_ID", + "org_name": "Test Space", + "key": "WRIKE_API_KEY", + "key_type": "bearer_token" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_org": "DEV-test-org", + "dev_org_id": "DEV-test-org", + "dev_user": "test-user", + "dev_user_id": "test-user-id", + "external_sync_unit": "test-folder", + "external_sync_unit_id": "IEAGS6BYI5RFMPP7", + "external_sync_unit_name": "Test Folder", + "external_system": "wrike", + "external_system_type": "wrike", + "import_slug": "test-import", + "mode": "INITIAL", + "request_id": "test-request-id-002", + "snap_in_slug": "wrike-snap-in", + "snap_in_version_id": "test-snap-in-version-id", + "sync_run": "test-sync-run", + "sync_run_id": "test-sync-run-id", + "sync_tier": "standard", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit-id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_CONTINUE", + "event_data": {} + }, + "input_data": { + "global_values": {}, + "event_sources": {} + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-events/extraction-data-start-event.json 
b/conformance_tests/tasks_data_push_tests/test-events/extraction-data-start-event.json new file mode 100644 index 0000000..90b5ea7 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-events/extraction-data-start-event.json @@ -0,0 +1,56 @@ +{ + "execution_metadata": { + "request_id": "test-request-id-001", + "function_name": "extraction", + "event_type": "extraction:data:start", + "devrev_endpoint": "http://localhost:8003" + }, + "context": { + "dev_oid": "DEV-test-org", + "source_id": "test-source-id", + "snap_in_id": "test-snap-in-id", + "snap_in_version_id": "test-snap-in-version-id", + "service_account_id": "test-service-account-id", + "secrets": { + "service_account_token": "test-token" + } + }, + "payload": { + "connection_data": { + "org_id": "WRIKE_SPACE_ID", + "org_name": "Test Space", + "key": "WRIKE_API_KEY", + "key_type": "bearer_token" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_org": "DEV-test-org", + "dev_org_id": "DEV-test-org", + "dev_user": "test-user", + "dev_user_id": "test-user-id", + "external_sync_unit": "test-folder", + "external_sync_unit_id": "IEAGS6BYI5RFMPP7", + "external_sync_unit_name": "Test Folder", + "external_system": "wrike", + "external_system_type": "wrike", + "import_slug": "test-import", + "mode": "INITIAL", + "request_id": "test-request-id-001", + "snap_in_slug": "wrike-snap-in", + "snap_in_version_id": "test-snap-in-version-id", + "sync_run": "test-sync-run", + "sync_run_id": "test-sync-run-id", + "sync_tier": "standard", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit-id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_START", + "event_data": {} + }, + "input_data": { + "global_values": {}, + "event_sources": {} + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-utils/callback-server.ts 
b/conformance_tests/tasks_data_push_tests/test-utils/callback-server.ts new file mode 100644 index 0000000..bff6ba6 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-utils/callback-server.ts @@ -0,0 +1,50 @@ +import express, { Express, Request, Response } from 'express'; +import { Server } from 'http'; + +/** + * Callback server for capturing events from The Implementation Code + */ +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private receivedEvents: any[] = []; + + constructor() { + this.app = express(); + this.app.use(express.json()); + + this.app.post('/callback', (req: Request, res: Response) => { + this.receivedEvents.push(req.body); + res.status(200).send({ status: 'received' }); + }); + } + + async start(port: number = 8002): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(port, () => { + resolve(); + }); + }); + } + + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + getReceivedEvents(): any[] { + return this.receivedEvents; + } + + clearEvents(): void { + this.receivedEvents = []; + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-utils/env.ts b/conformance_tests/tasks_data_push_tests/test-utils/env.ts new file mode 100644 index 0000000..416f1ba --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-utils/env.ts @@ -0,0 +1,37 @@ +/** + * Environment variable utilities for conformance tests + */ + +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; +} + +/** + * Load and validate required environment variables + */ +export function loadTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey || wrikeApiKey === 
'WRIKE_API_KEY') { + // Use mock API key for testing with mock server + return { + wrikeApiKey: 'mock-api-key', + wrikeSpaceId: wrikeSpaceId || 'mock-space-id', + }; + } + + if (!wrikeSpaceId || wrikeSpaceId === 'WRIKE_SPACE_ID') { + // Use mock space ID for testing with mock server + return { + wrikeApiKey, + wrikeSpaceId: 'mock-space-id', + }; + } + + return { + wrikeApiKey, + wrikeSpaceId, + }; +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-utils/event-builder.ts b/conformance_tests/tasks_data_push_tests/test-utils/event-builder.ts new file mode 100644 index 0000000..863ae2a --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-utils/event-builder.ts @@ -0,0 +1,36 @@ +import * as fs from 'fs'; +import * as path from 'path'; + +/** + * Build test events from JSON templates + */ +export class EventBuilder { + static loadEventTemplate(templateName: string): any { + const templatePath = path.join(__dirname, '..', 'test-events', `${templateName}.json`); + const templateContent = fs.readFileSync(templatePath, 'utf-8'); + return JSON.parse(templateContent); + } + + static buildExtractionDataStartEvent(apiKey: string, spaceId: string, folderId: string): any { + const event = this.loadEventTemplate('extraction-data-start-event'); + event.payload.connection_data.key = apiKey; + event.payload.connection_data.org_id = spaceId; + event.payload.event_context.external_sync_unit_id = folderId; + return event; + } + + static buildExtractionDataContinueEvent(apiKey: string, spaceId: string, folderId: string): any { + const event = this.loadEventTemplate('extraction-data-continue-event'); + event.payload.connection_data.key = apiKey; + event.payload.connection_data.org_id = spaceId; + event.payload.event_context.external_sync_unit_id = folderId; + return event; + } + + static buildExternalDomainMetadataEvent(apiKey: string, spaceId: string): any { + const event = this.loadEventTemplate('external-domain-metadata-event'); + 
event.payload.connection_data.key = apiKey; + event.payload.connection_data.org_id = spaceId; + return event; + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-utils/mock-wrike-server.ts b/conformance_tests/tasks_data_push_tests/test-utils/mock-wrike-server.ts new file mode 100644 index 0000000..d86044a --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-utils/mock-wrike-server.ts @@ -0,0 +1,295 @@ +import express, { Express, Request, Response } from 'express'; +import { Server } from 'http'; + +interface CapturedRequest { + method: string; + url: string; + path: string; + query: any; + headers: any; + body: any; + timestamp: number; +} + +/** + * Mock Wrike API Server for testing + * Captures all requests and provides endpoints to query them + */ +export class MockWrikeServer { + private app: Express; + private server: Server | null = null; + private capturedRequests: CapturedRequest[] = []; + private rateLimitingEnabled: boolean = false; + private rateLimitDelay: number = 60; // Default delay in seconds + + constructor() { + this.app = express(); + this.app.use(express.json()); + + // Capture all requests + this.app.use((req: Request, res: Response, next) => { + this.capturedRequests.push({ + method: req.method, + url: req.url, + path: req.path, + query: req.query, + headers: req.headers, + body: req.body, + timestamp: Date.now(), + }); + next(); + }); + + // Mock endpoints + this.setupMockEndpoints(); + } + + private setupMockEndpoints(): void { + // Middleware to check rate limiting before processing requests + this.app.use((req: Request, res: Response, next) => { + if (this.rateLimitingEnabled && !req.path.startsWith('/_test/')) { + return res.status(429).set('retry-after', this.rateLimitDelay.toString()).json({ errorDescription: 'Rate limit exceeded, try again later', error: 'rate_limit_exceeded' }); + } + next(); + }); + + // GET /contacts - Return mock users + this.app.get('/api/v4/contacts', (req: 
Request, res: Response) => { + const mockUsers = [ + { + id: 'KUANFJBJ', + firstName: 'Jane', + lastName: 'Smith', + type: 'Person', + profiles: [{ + accountId: 'IEAGS6BY', + email: 'janesmith@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }], + avatarUrl: 'https://www.wrike.com/avatars/test.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accountant', + primaryEmail: 'janesmith@company.com', + }, + { + id: 'NVJKSNJK', + firstName: 'Jack', + lastName: 'Black', + type: 'Person', + profiles: [{ + accountId: 'IEAGS6BY', + email: 'jackblack@company.com', + role: 'User', + external: false, + admin: false, + owner: false, + active: true, + }], + avatarUrl: 'https://www.wrike.com/avatars/test2.png', + timezone: 'Europe/London', + locale: 'en', + deleted: false, + title: 'Accounting Manager', + primaryEmail: 'jackblack@company.com', + }, + ]; + + res.json({ kind: 'contacts', data: mockUsers }); + }); + + // GET /spaces/:spaceId/folders - Return mock folders + this.app.get('/api/v4/spaces/:spaceId/folders', (req: Request, res: Response) => { + const mockFolders = [ + { + id: 'IEAGS6BYI5RFMPP7', + accountId: 'IEAGS6BY', + title: 'First project', + createdDate: '2025-04-29T07:18:32Z', + updatedDate: '2025-05-26T07:44:20Z', + description: 'Test project description', + sharedIds: ['KUAVRIOP', 'KX7XOYQF'], + parentIds: ['IEAGS6BYI5RFMPPY'], + childIds: [], + scope: 'WsFolder', + permalink: 'https://www.wrike.com/open.htm?id=1649819135', + workflowId: 'IEAGS6BYK4F3BCSQ', + project: { + authorId: 'KUAUZTPW', + ownerIds: ['KUAUZTPW'], + customStatusId: 'IEAGS6BYJMF3BCR4', + createdDate: '2025-04-29T07:18:32Z', + }, + }, + ]; + + res.json({ kind: 'folders', data: mockFolders }); + }); + + // GET /folders/:folderId/tasks - Return mock tasks with pagination + this.app.get('/api/v4/folders/:folderId/tasks', (req: Request, res: Response) => { + const pageSize = parseInt(req.query.pageSize as string) || 
100; + const nextPageToken = req.query.nextPageToken as string; + + // Generate mock tasks (110 tasks for complete extraction test) + const allTasks = Array.from({ length: 110 }, (_, i) => ({ + id: `TASK${i + 1}`, + accountId: 'IEAGS6BY', + title: `Test Task ${i + 1}`, + description: `Description for task ${i + 1}`, + briefDescription: `Brief description ${i + 1}`, + parentIds: [req.params.folderId], + superParentIds: [], + sharedIds: [], + responsibleIds: ['KUANFJBJ'], + status: 'Active', + importance: 'Normal', + createdDate: '2025-01-01T00:00:00Z', + updatedDate: '2025-01-02T00:00:00Z', + dates: { + type: 'Planned', + duration: 1, + start: '2025-01-01', + due: '2025-01-02', + }, + scope: 'WsTask', + authorIds: ['KUANFJBJ'], + customStatusId: 'IEAGS6BYJMF3BCR4', + hasAttachments: i % 2 === 0, // Some tasks have attachments + permalink: `https://www.wrike.com/open.htm?id=task${i + 1}`, + priority: 'Normal', + followedByMe: false, + followerIds: [], + superTaskIds: [], + subTaskIds: [], + dependencyIds: [], + metadata: [], + customFields: [], + })); + + // Simulate pagination + let startIndex = 0; + if (nextPageToken) { + startIndex = parseInt(nextPageToken); + } + + const endIndex = Math.min(startIndex + pageSize, allTasks.length); + const tasks = allTasks.slice(startIndex, endIndex); + + const response: any = { + kind: 'tasks', + data: tasks, + }; + + // Add nextPageToken if there are more tasks + if (endIndex < allTasks.length) { + response.nextPageToken = endIndex.toString(); + } + + res.json(response); + }); + + // GET /tasks/:taskId/attachments - Return mock attachments + this.app.get('/api/v4/tasks/:taskId/attachments', (req: Request, res: Response) => { + const mockAttachments = [ + { + id: 'ATTACH1', + authorId: 'KUANFJBJ', + name: 'test-document.pdf', + createdDate: '2025-01-01T00:00:00Z', + version: '1', + size: 1024, + type: 'application/pdf', + url: 'https://www.wrike.com/attachments/test.pdf', + taskId: req.params.taskId, + }, + ]; + + res.json({ 
kind: 'attachments', data: mockAttachments }); + }); + + // GET /tasks/:taskId/comments - Return mock comments + this.app.get('/api/v4/tasks/:taskId/comments', (req: Request, res: Response) => { + const mockComments = [ + { + id: 'COMMENT1', + authorId: 'KUANFJBJ', + text: 'Test comment', + createdDate: '2025-01-01T00:00:00Z', + updatedDate: '2025-01-01T00:00:00Z', + taskId: req.params.taskId, + }, + ]; + + res.json({ kind: 'comments', data: mockComments }); + }); + + // Test endpoint to query captured requests + this.app.get('/_test/requests', (req: Request, res: Response) => { + res.json({ requests: this.capturedRequests }); + }); + + // Test endpoint to clear captured requests + this.app.post('/_test/clear', (req: Request, res: Response) => { + this.capturedRequests = []; + res.json({ status: 'cleared' }); + }); + + // Test endpoint to enable rate limiting + this.app.post('/_test/start_rate_limiting', (req: Request, res: Response) => { + const testName = req.body.test_name; + console.log(`[MockWrikeServer] Starting rate limiting for test: ${testName}`); + this.rateLimitingEnabled = true; + this.rateLimitDelay = req.body.delay || 60; + res.json({ status: 'rate_limiting_enabled', delay: this.rateLimitDelay }); + }); + + // Test endpoint to disable rate limiting + this.app.post('/_test/end_rate_limiting', (req: Request, res: Response) => { + console.log('[MockWrikeServer] Ending rate limiting'); + this.rateLimitingEnabled = false; + this.rateLimitDelay = 60; + res.json({ status: 'rate_limiting_disabled' }); + }); + } + + async start(port: number = 8004): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(port, () => { + resolve(); + }); + }); + } + + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + getCapturedRequests(): CapturedRequest[] { + return this.capturedRequests; + } + + 
clearRequests(): void { + this.capturedRequests = []; + } + + getTasksRequests(): CapturedRequest[] { + return this.capturedRequests.filter(req => + req.path.includes('/folders/') && req.path.includes('/tasks') + ); + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/test-utils/snap-in-client.ts b/conformance_tests/tasks_data_push_tests/test-utils/snap-in-client.ts new file mode 100644 index 0000000..72b7dfd --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/test-utils/snap-in-client.ts @@ -0,0 +1,22 @@ +import axios from 'axios'; + +/** + * Client for calling The Test Snap-In Server + */ +export class SnapInClient { + private baseUrl: string; + + constructor(baseUrl: string = 'http://localhost:8000') { + this.baseUrl = baseUrl; + } + + async invokeFunction(event: any): Promise { + const response = await axios.post(`${this.baseUrl}/handle/sync`, event, { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 60000, // 60 seconds + }); + return response.data; + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_data_push_tests/tsconfig.json b/conformance_tests/tasks_data_push_tests/tsconfig.json new file mode 100644 index 0000000..1054ad3 --- /dev/null +++ b/conformance_tests/tasks_data_push_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": ".", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/.npmrc b/conformance_tests/tasks_domain_mapping_generation_tests/.npmrc new file mode 100644 
index 0000000..9cf9495 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/.npmrc @@ -0,0 +1 @@ +package-lock=false \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/environment.test.ts b/conformance_tests/tasks_domain_mapping_generation_tests/environment.test.ts new file mode 100644 index 0000000..fea2c11 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/environment.test.ts @@ -0,0 +1,33 @@ +import * as fs from 'fs'; +import { getTestEnvironment } from './test-utils/environment'; + +describe('Environment Setup', () => { + it('should have all required environment variables', () => { + let env; + + try { + env = getTestEnvironment(); + } catch (error: any) { + fail(`Environment validation failed: ${error.message}`); + return; + } + + expect(env.wrikeApiKey).toBeDefined(); + expect(env.wrikeApiKey).not.toBe(''); + expect(env.wrikeSpaceId).toBeDefined(); + expect(env.wrikeSpaceId).not.toBe(''); + expect(env.chefCliPath).toBeDefined(); + expect(env.chefCliPath).not.toBe(''); + }); + + it('should have Chef CLI executable available', () => { + const env = getTestEnvironment(); + + const exists = fs.existsSync(env.chefCliPath); + expect(exists).toBe(true); + + if (!exists) { + fail(`Chef CLI not found at path: ${env.chefCliPath}`); + } + }); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-retrieval.test.ts b/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-retrieval.test.ts new file mode 100644 index 0000000..d0dccb8 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-retrieval.test.ts @@ -0,0 +1,67 @@ +import { SnapInClient } from './test-utils/snap-in-client'; +import { getTestEnvironment } from './test-utils/environment'; + +describe('Initial Domain Mapping Retrieval', () => { + let client: SnapInClient; + let env: 
ReturnType; + + beforeAll(() => { + env = getTestEnvironment(); + client = new SnapInClient(); + }); + + it('should retrieve initial domain mapping from get_initial_domain_mapping function', async () => { + const response = await client.invokeFunction( + 'get_initial_domain_mapping', + {}, + { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + } + ); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + expect(result.status).toBe('success'); + expect(result.data).toBeDefined(); + }); + + it('should have additional_mappings.record_type_mappings structure', async () => { + const response = await client.invokeFunction( + 'get_initial_domain_mapping', + {}, + { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + } + ); + + const mapping = response.function_result.data; + + expect(mapping.additional_mappings).toBeDefined(); + expect(mapping.additional_mappings.record_type_mappings).toBeDefined(); + + const recordTypeMappings = mapping.additional_mappings.record_type_mappings; + expect(typeof recordTypeMappings).toBe('object'); + }); + + it('should have tasks record type mapping', async () => { + const response = await client.invokeFunction( + 'get_initial_domain_mapping', + {}, + { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + } + ); + + const recordTypeMappings = response.function_result.data.additional_mappings.record_type_mappings; + + expect(recordTypeMappings.tasks).toBeDefined(); + expect(recordTypeMappings.tasks.default_mapping).toBeDefined(); + expect(recordTypeMappings.tasks.possible_record_type_mappings).toBeDefined(); + expect(Array.isArray(recordTypeMappings.tasks.possible_record_type_mappings)).toBe(true); + }); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-validation.test.ts b/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-validation.test.ts new file mode 
100644 index 0000000..6133844 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/initial-domain-mapping-validation.test.ts @@ -0,0 +1,104 @@ +import { SnapInClient } from './test-utils/snap-in-client'; +import { getTestEnvironment } from './test-utils/environment'; +import { validateWithChefCli, createTempJsonFile, cleanupTempFile } from './test-utils/chef-cli'; + +describe('Initial Domain Mapping Validation with Chef CLI', () => { + let client: SnapInClient; + let env: ReturnType; + let metadataFilePath: string; + let mappingJson: any; + + beforeAll(async () => { + env = getTestEnvironment(); + client = new SnapInClient(); + + // Retrieve external domain metadata + const metadataResponse = await client.invokeFunction( + 'get_external_domain_metadata', + {}, + { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + } + ); + + const metadata = metadataResponse.function_result.data; + metadataFilePath = createTempJsonFile(metadata, 'external-domain-metadata'); + + // Retrieve initial domain mapping + const mappingResponse = await client.invokeFunction( + 'get_initial_domain_mapping', + {}, + { + key: env.wrikeApiKey, + org_id: env.wrikeSpaceId, + } + ); + + mappingJson = mappingResponse.function_result.data; + }); + + afterAll(() => { + if (metadataFilePath) { + cleanupTempFile(metadataFilePath); + } + }); + + it('should validate initial domain mapping with Chef CLI', async () => { + const result = await validateWithChefCli( + env.chefCliPath, + metadataFilePath, + mappingJson + ); + + // Check for Chef CLI execution errors + if (result.error) { + fail(`Chef CLI execution failed: ${result.error}`); + return; + } + + // Check for empty output + if (result.output.length === 0) { + fail('Chef CLI produced empty output. This indicates a critical validation failure.'); + return; + } + + // Check validation results + const firstObject = result.output[0]; + + if (!('Warnings' in firstObject)) { + fail( + `Chef CLI output missing 'Warnings' field. 
` + + `Output: ${JSON.stringify(firstObject, null, 2)}` + ); + return; + } + + if (!('RemainingDeficiencies' in firstObject)) { + fail( + `Chef CLI output missing 'RemainingDeficiencies' field. ` + + `Output: ${JSON.stringify(firstObject, null, 2)}` + ); + return; + } + + if (firstObject.Warnings !== null) { + fail( + `Chef CLI validation found warnings. ` + + `Warnings: ${JSON.stringify(firstObject.Warnings, null, 2)}` + ); + return; + } + + if (firstObject.RemainingDeficiencies !== null) { + fail( + `Chef CLI validation found remaining deficiencies. ` + + `Deficiencies: ${JSON.stringify(firstObject.RemainingDeficiencies, null, 2)}` + ); + return; + } + + // All checks passed + expect(result.success).toBe(true); + }); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/jest.config.js b/conformance_tests/tasks_domain_mapping_generation_tests/jest.config.js new file mode 100644 index 0000000..b7fe00f --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/jest.config.js @@ -0,0 +1,27 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + verbose: true, + collectCoverage: false, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + declaration: true, + resolveJsonModule: true, + } + }] + }, + globals: { + 'ts-jest': { + isolatedModules: true + } + } +}; \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/package.json b/conformance_tests/tasks_domain_mapping_generation_tests/package.json new file mode 100644 index 0000000..d39ec15 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/package.json @@ -0,0 +1,16 @@ +{ + "name": "wrike-airdrop-conformance-tests", + 
"version": "1.0.0", + "description": "Conformance tests for Wrike Airdrop Snap-In", + "scripts": { + "test": "jest --verbose --runInBand" + }, + "devDependencies": { + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5", + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/chef-cli.ts b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/chef-cli.ts new file mode 100644 index 0000000..7a1fcf2 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/chef-cli.ts @@ -0,0 +1,176 @@ +import { spawn } from 'child_process'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; + +export interface ChefCliOutput { + IfUserDecides?: any; + Outcome?: string; + Warnings?: any[] | null | string; + RemainingDeficiencies?: any[] | null; +} + +export interface ChefCliResult { + success: boolean; + output: ChefCliOutput[]; + stdout: string; + stderr: string; + error?: string; +} + +/** + * Execute Chef CLI to validate initial domain mapping + * @param metadataFilePath - Path to external domain metadata JSON file + * @param mappingJson - Initial domain mapping JSON object + * @returns Chef CLI validation result + */ +export async function validateWithChefCli( + chefCliPath: string, + metadataFilePath: string, + mappingJson: any +): Promise { + return new Promise((resolve) => { + const args = ['initial-mapping', 'check', '-m', metadataFilePath]; + + console.log(`\n[Chef CLI] Executing: ${chefCliPath} ${args.join(' ')}`); + console.log(`[Chef CLI] Metadata file: ${metadataFilePath}`); + console.log(`[Chef CLI] Mapping JSON length: ${JSON.stringify(mappingJson).length} characters`); + + const process = spawn(chefCliPath, args); + + let stdout = ''; + let stderr = ''; + + process.stdout.on('data', (data) => { + const chunk = data.toString(); + stdout += chunk; 
+ console.log(`[Chef CLI stdout] ${chunk}`); + }); + + process.stderr.on('data', (data) => { + const chunk = data.toString(); + stderr += chunk; + console.error(`[Chef CLI stderr] ${chunk}`); + }); + + // Write mapping JSON to stdin + try { + process.stdin.write(JSON.stringify(mappingJson)); + process.stdin.end(); + } catch (error: any) { + console.error(`[Chef CLI] Failed to write to stdin: ${error.message}`); + resolve({ + success: false, + output: [], + stdout: '', + stderr: '', + error: `Failed to write to stdin: ${error.message}`, + }); + return; + } + + process.on('close', (code) => { + console.log(`[Chef CLI] Process exited with code: ${code}`); + console.log(`[Chef CLI] Total stdout length: ${stdout.length}`); + console.log(`[Chef CLI] Total stderr length: ${stderr.length}`); + + if (stdout.trim() === '') { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: 'Chef CLI produced empty output', + }); + return; + } + + try { + const output = JSON.parse(stdout) as ChefCliOutput[]; + + if (!Array.isArray(output) || output.length === 0) { + resolve({ + success: false, + output: [], + stdout, + stderr, + error: 'Chef CLI output is not a non-empty array', + }); + return; + } + + const firstObject = output[0]; + const hasWarnings = 'Warnings' in firstObject; + const hasDeficiencies = 'RemainingDeficiencies' in firstObject; + const warningsNull = firstObject.Warnings === null; + const deficienciesNull = firstObject.RemainingDeficiencies === null; + + console.log(`[Chef CLI] Validation results:`); + console.log(` - Has Warnings field: ${hasWarnings}`); + console.log(` - Has RemainingDeficiencies field: ${hasDeficiencies}`); + console.log(` - Warnings is null: ${warningsNull}`); + console.log(` - RemainingDeficiencies is null: ${deficienciesNull}`); + + const success = hasWarnings && hasDeficiencies && warningsNull && deficienciesNull; + + resolve({ + success, + output, + stdout, + stderr, + }); + } catch (error: any) { + resolve({ + success: false, 
+ output: [], + stdout, + stderr, + error: `Failed to parse Chef CLI output as JSON: ${error.message}`, + }); + } + }); + + process.on('error', (error) => { + console.error(`[Chef CLI] Process error: ${error.message}`); + resolve({ + success: false, + output: [], + stdout, + stderr, + error: `Chef CLI process error: ${error.message}`, + }); + }); + }); +} + +/** + * Create a temporary file with JSON content + * @param content - JSON object to write + * @param prefix - Filename prefix + * @returns Path to the temporary file + */ +export function createTempJsonFile(content: any, prefix: string): string { + const tempDir = os.tmpdir(); + const filename = `${prefix}-${Date.now()}.json`; + const filepath = path.join(tempDir, filename); + + fs.writeFileSync(filepath, JSON.stringify(content, null, 2)); + console.log(`[Temp File] Created: ${filepath}`); + + return filepath; +} + +/** + * Clean up temporary file + * @param filepath - Path to the file to delete + */ +export function cleanupTempFile(filepath: string): void { + try { + if (fs.existsSync(filepath)) { + fs.unlinkSync(filepath); + console.log(`[Temp File] Deleted: ${filepath}`); + } + } catch (error: any) { + console.warn(`[Temp File] Failed to delete ${filepath}: ${error.message}`); + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/environment.ts b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/environment.ts new file mode 100644 index 0000000..511093f --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/environment.ts @@ -0,0 +1,38 @@ +/** + * Utility functions for reading and validating environment variables + */ + +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; +} + +/** + * Read and validate all required environment variables + * @throws Error if any required environment variable is missing + */ +export function getTestEnvironment(): 
TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + const missing: string[] = []; + + if (!wrikeApiKey) missing.push('WRIKE_API_KEY'); + if (!wrikeSpaceId) missing.push('WRIKE_SPACE_ID'); + if (!chefCliPath) missing.push('CHEF_CLI_PATH'); + + if (missing.length > 0) { + throw new Error( + `Missing required environment variables: ${missing.join(', ')}. ` + + `Please ensure these are set before running tests.` + ); + } + + return { + wrikeApiKey: wrikeApiKey!, + wrikeSpaceId: wrikeSpaceId!, + chefCliPath: chefCliPath!, + }; +} \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/snap-in-client.ts b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/snap-in-client.ts new file mode 100644 index 0000000..3fe99fa --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/test-utils/snap-in-client.ts @@ -0,0 +1,74 @@ +import axios, { AxiosInstance } from 'axios'; + +/** + * HTTP client for communicating with The Test Snap-In Server + */ +export class SnapInClient { + private client: AxiosInstance; + private readonly serverUrl = 'http://localhost:8000/handle/sync'; + + constructor() { + this.client = axios.create({ + baseURL: this.serverUrl, + timeout: 30000, + headers: { + 'Content-Type': 'application/json', + }, + }); + } + + /** + * Invoke a function on The Test Snap-In Server + * @param functionName - Name of the function to invoke + * @param payload - Event payload + * @param connectionData - Connection data (API key, org_id) + * @param eventContext - Event context + * @returns Response from the snap-in + */ + async invokeFunction( + functionName: string, + payload: any = {}, + connectionData: any = {}, + eventContext: any = {} + ): Promise { + const event = { + payload: { + ...payload, + connection_data: connectionData, + event_context: eventContext, + }, + 
context: { + dev_oid: 'test-dev-org', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'test-version', + service_account_id: 'test-service-account', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: 'test', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; + + try { + const response = await this.client.post('', event); + return response.data; + } catch (error: any) { + if (error.response) { + throw new Error( + `Snap-in server error (${error.response.status}): ${JSON.stringify(error.response.data)}` + ); + } + throw new Error(`Failed to communicate with snap-in server: ${error.message}`); + } + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_domain_mapping_generation_tests/tsconfig.json b/conformance_tests/tasks_domain_mapping_generation_tests/tsconfig.json new file mode 100644 index 0000000..1054ad3 --- /dev/null +++ b/conformance_tests/tasks_domain_mapping_generation_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": ".", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/get_external_domain_metadata.test.ts b/conformance_tests/tasks_record_type_metadata_tests/get_external_domain_metadata.test.ts new file mode 100644 index 0000000..5b56fcb --- /dev/null +++ 
b/conformance_tests/tasks_record_type_metadata_tests/get_external_domain_metadata.test.ts @@ -0,0 +1,55 @@ +import { getTestEnv, invokeFunction, validateWithChefCli } from './test-utils/helpers'; + +describe('get_external_domain_metadata - tasks record type', () => { + let env: ReturnType; + + beforeAll(() => { env = getTestEnv(); }); + + test('should successfully invoke function', async () => { + const res = await invokeFunction('get_external_domain_metadata', env.apiKey, env.spaceId); + expect(res.error).toBeUndefined(); + expect(res.function_result?.status).toBe('success'); + expect(res.function_result?.data).toBeDefined(); + }, 30000); + + test('should include tasks and preserve users record types', async () => { + const res = await invokeFunction('get_external_domain_metadata', env.apiKey, env.spaceId); + const metadata = res.function_result?.data; + expect(metadata?.record_types?.tasks).toBeDefined(); + expect(metadata?.record_types?.users).toBeDefined(); + }, 30000); + + test('should have correct structure for tasks', async () => { + const res = await invokeFunction('get_external_domain_metadata', env.apiKey, env.spaceId); + const tasks = res.function_result?.data?.record_types?.tasks; + expect(tasks?.name).toBe('Tasks'); + expect(tasks?.fields).toBeDefined(); + }, 30000); + + test('should have all required fields with correct properties', async () => { + const res = await invokeFunction('get_external_domain_metadata', env.apiKey, env.spaceId); + const f = res.function_result?.data?.record_types?.tasks?.fields; + + expect(f?.title).toMatchObject({ type: 'text', name: 'Title', is_required: true }); + expect(f?.description).toMatchObject({ type: 'rich_text', name: 'Description', is_required: true }); + expect(f?.status).toMatchObject({ type: 'enum', name: 'Status', is_required: true }); + expect(f?.status?.enum?.values).toBeDefined(); + expect(f?.permalink).toMatchObject({ type: 'text', name: 'URL', is_required: true }); + 
expect(f?.responsible_ids).toMatchObject({ type: 'reference', name: 'Responsible IDs', is_required: true }); + expect(f?.responsible_ids?.collection?.max_length).toBe(1); + expect(f?.responsible_ids?.reference?.refers_to?.['#record:users']).toBeDefined(); + }, 30000); + + test('should pass Chef CLI validation', async () => { + const res = await invokeFunction('get_external_domain_metadata', env.apiKey, env.spaceId); + const metadata = res.function_result?.data; + expect(metadata).toBeDefined(); + + const result = await validateWithChefCli(env.chefPath, metadata); + if (result.stdout) console.log('Chef CLI stdout:', result.stdout); + if (result.stderr) console.log('Chef CLI stderr:', result.stderr); + + expect(result.stdout.trim()).toBe(''); + expect(result.exitCode).toBe(0); + }, 30000); +}); \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/jest.config.js b/conformance_tests/tasks_record_type_metadata_tests/jest.config.js new file mode 100644 index 0000000..65de599 --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/jest.config.js @@ -0,0 +1,27 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testTimeout: 120000, + testMatch: ['**/*.test.ts'], + setupFilesAfterEnv: ['/jest.setup.js'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' + ], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + declaration: true, + resolveJsonModule: true + } + }] + } +}; \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/package.json b/conformance_tests/tasks_record_type_metadata_tests/package.json new file mode 100644 index 0000000..cc69e9b --- /dev/null +++ 
b/conformance_tests/tasks_record_type_metadata_tests/package.json @@ -0,0 +1,19 @@ +{ + "name": "conformance-tests-tasks-record-type", + "version": "1.0.0", + "description": "Conformance tests for tasks record type in external domain metadata", + "scripts": { + "test": "jest" + }, + "devDependencies": { + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "axios": "^1.9.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0" + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/test-utils/chef-cli.ts b/conformance_tests/tasks_record_type_metadata_tests/test-utils/chef-cli.ts new file mode 100644 index 0000000..a99f90b --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/test-utils/chef-cli.ts @@ -0,0 +1,53 @@ +/** + * Utility functions for validating metadata using The Chef CLI + */ + +import { spawn } from 'child_process'; + +export interface ChefCliResult { + stdout: string; + stderr: string; + exitCode: number; +} + +/** + * Validates External Domain Metadata JSON object using The Chef CLI + * @param chefCliPath - Path to The Chef CLI executable + * @param metadata - The External Domain Metadata JSON object to validate + * @returns Result containing stdout, stderr, and exit code + */ +export async function validateMetadataWithChefCli( + chefCliPath: string, + metadata: any +): Promise { + return new Promise((resolve, reject) => { + const process = spawn(chefCliPath, ['validate-metadata']); + + let stdout = ''; + let stderr = ''; + + process.stdout.on('data', (data) => { + stdout += data.toString(); + }); + + process.stderr.on('data', (data) => { + stderr += data.toString(); + }); + + process.on('error', (error) => { + reject(new Error(`Failed to spawn chef-cli: ${error.message}`)); + }); + + process.on('close', (code) => { + resolve({ + stdout, + stderr, + exitCode: code || 0, + }); + }); + + // Write metadata to stdin + 
process.stdin.write(JSON.stringify(metadata)); + process.stdin.end(); + }); +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/test-utils/environment.ts b/conformance_tests/tasks_record_type_metadata_tests/test-utils/environment.ts new file mode 100644 index 0000000..f93d427 --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/test-utils/environment.ts @@ -0,0 +1,37 @@ +/** + * Utility functions for reading environment variables required for tests + */ + +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; + chefCliPath: string; +} + +/** + * Reads and validates required environment variables for tests + * @throws Error if any required environment variable is missing + */ +export function getTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + const chefCliPath = process.env.CHEF_CLI_PATH; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + if (!chefCliPath) { + throw new Error('CHEF_CLI_PATH environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + chefCliPath, + }; +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/test-utils/event-factory.ts b/conformance_tests/tasks_record_type_metadata_tests/test-utils/event-factory.ts new file mode 100644 index 0000000..334668d --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/test-utils/event-factory.ts @@ -0,0 +1,76 @@ +/** + * Factory functions for creating test event payloads + */ + +export interface CreateEventOptions { + functionName: string; + apiKey: string; + spaceId: string; + eventType?: string; +} + +/** + * Creates a test event payload for invoking a snap-in function + * @param options - Options for 
creating the event + * @returns The event payload + */ +export function createTestEvent(options: CreateEventOptions): any { + const { functionName, apiKey, spaceId, eventType = 'test' } = options; + + return { + payload: { + connection_data: { + key: apiKey, + org_id: spaceId, + org_name: 'Test Organization', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-org', + dev_org_id: 'test-org-id', + dev_user: 'test-user', + dev_user_id: 'test-user-id', + external_sync_unit: 'test-unit', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-request-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-run', + sync_run_id: 'test-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-unit', + sync_unit_id: 'test-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: eventType, + event_data: {}, + }, + context: { + dev_oid: 'test-org-id', + source_id: 'test-source', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-version-id', + service_account_id: 'test-service-account', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: `test-request-${Date.now()}`, + function_name: functionName, + event_type: eventType, + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/test-utils/helpers.ts b/conformance_tests/tasks_record_type_metadata_tests/test-utils/helpers.ts new file mode 100644 index 0000000..211d99a --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/test-utils/helpers.ts @@ -0,0 +1,34 @@ +import axios from 'axios'; 
+import { spawn } from 'child_process'; + +export function getTestEnv() { + const apiKey = process.env.WRIKE_API_KEY; + const spaceId = process.env.WRIKE_SPACE_ID; + const chefPath = process.env.CHEF_CLI_PATH; + if (!apiKey || !spaceId || !chefPath) throw new Error('Missing env vars'); + return { apiKey, spaceId, chefPath }; +} + +export async function invokeFunction(functionName: string, apiKey: string, spaceId: string) { + const event = { + payload: { connection_data: { key: apiKey, org_id: spaceId }, event_context: {}, event_type: 'test', event_data: {} }, + context: { dev_oid: 'test', source_id: 'test', snap_in_id: 'test', snap_in_version_id: 'test', service_account_id: 'test', secrets: {} }, + execution_metadata: { request_id: 'test', function_name: functionName, event_type: 'test', devrev_endpoint: 'http://localhost:8003' }, + input_data: { global_values: {}, event_sources: {} }, + }; + const res = await axios.post('http://localhost:8000/handle/sync', event, { timeout: 30000 }); + return res.data; +} + +export async function validateWithChefCli(chefPath: string, metadata: any) { + return new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve, reject) => { + const proc = spawn(chefPath, ['validate-metadata']); + let stdout = '', stderr = ''; + proc.stdout.on('data', (d) => stdout += d); + proc.stderr.on('data', (d) => stderr += d); + proc.on('error', (e) => reject(e)); + proc.on('close', (code) => resolve({ stdout, stderr, exitCode: code || 0 })); + proc.stdin.write(JSON.stringify(metadata)); + proc.stdin.end(); + }); +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/test-utils/snap-in-client.ts b/conformance_tests/tasks_record_type_metadata_tests/test-utils/snap-in-client.ts new file mode 100644 index 0000000..96c630f --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/test-utils/snap-in-client.ts @@ -0,0 +1,38 @@ +/** + * Client for invoking The Test Snap-In Server + */ + 
+import axios, { AxiosResponse } from 'axios'; + +const SNAP_IN_SERVER_URL = 'http://localhost:8000/handle/sync'; + +export interface SnapInResponse { + function_result?: any; + error?: any; +} + +/** + * Invokes a function on The Test Snap-In Server + * @param event - The event payload to send + * @returns The response from the snap-in + */ +export async function invokeSnapInFunction(event: any): Promise { + try { + const response: AxiosResponse = await axios.post( + SNAP_IN_SERVER_URL, + event, + { + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, + } + ); + return response.data; + } catch (error) { + if (axios.isAxiosError(error) && error.response) { + return error.response.data; + } + throw error; + } +} \ No newline at end of file diff --git a/conformance_tests/tasks_record_type_metadata_tests/tsconfig.json b/conformance_tests/tasks_record_type_metadata_tests/tsconfig.json new file mode 100644 index 0000000..154cd05 --- /dev/null +++ b/conformance_tests/tasks_record_type_metadata_tests/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": "./" + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/extraction-users-acceptance.test.ts b/conformance_tests/users_data_push_tests/extraction-users-acceptance.test.ts new file mode 100644 index 0000000..58da346 --- /dev/null +++ b/conformance_tests/users_data_push_tests/extraction-users-acceptance.test.ts @@ -0,0 +1,128 @@ +import { Server } from 'http'; +import { + getTestConfig, + startCallbackServer, + invokeFunction, + waitForCallback, + 
invokeExtractionWithContinuation, + CallbackData, +} from './test-utils/test-helpers'; +import { loadExtractionDataStartEventFromJson } from './test-utils/event-templates'; + +describe('Extraction Function - Users and Tasks Data Extraction (Acceptance Test)', () => { + let callbackServer: Server; + let callbacks: CallbackData[]; + let testConfig: ReturnType; + + beforeAll(async () => { + testConfig = getTestConfig(); + const callbackSetup = await startCallbackServer(8002); + callbackServer = callbackSetup.server; + callbacks = callbackSetup.callbacks; + }); + + afterAll((done) => { + callbackServer.close(done); + }); + + beforeEach(() => { + callbacks.length = 0; + }); + + test('should extract exactly 4 users and emit EXTRACTION_DATA_DONE with correct artifacts', async () => { + const testStartTime = Date.now(); + console.log('[Acceptance Test] Test started at:', new Date().toISOString()); + + // Load event from JSON file with credentials and valid folder ID + // Note: The extraction function requires both users and tasks to complete + // before emitting EXTRACTION_DATA_DONE, so we must provide a valid folder ID + const event = loadExtractionDataStartEventFromJson({ + apiKey: testConfig.wrikeApiKey, + spaceId: testConfig.wrikeSpaceId, + folderId: 'IEAGS6BYI5RFMPP7', // Valid folder ID for testing + }); + + console.log('[Acceptance Test] Sending EXTRACTION_DATA_START event to extraction function'); + + // Use automatic continuation to handle rate limiting + const callback = await invokeExtractionWithContinuation(event, callbacks, 5, 90000); + + console.log('[Acceptance Test] Extraction completed, verifying results'); + + console.log('[Acceptance Test] Received callback:', JSON.stringify(callback, null, 2)); + + // Verify exactly one EXTRACTION_DATA_DONE event was received + const doneCallbacks = callbacks.filter(cb => cb.eventType === 'EXTRACTION_DATA_DONE'); + expect(doneCallbacks.length).toBe(1); + if (doneCallbacks.length !== 1) { + console.error( + `[Acceptance 
Test] Expected exactly 1 EXTRACTION_DATA_DONE callback, but received ${doneCallbacks.length}. ` + + `All callbacks: ${JSON.stringify(callbacks.map(cb => cb.eventType), null, 2)}` + ); + } + + // Verify callback structure + expect(callback).toBeDefined(); + expect(callback.eventType).toBe('EXTRACTION_DATA_DONE'); + expect(callback.payload).toBeDefined(); + expect(callback.payload.event_data).toBeDefined(); + + // Verify artifacts array exists and is not empty + const artifacts = callback.payload.event_data.artifacts; + expect(artifacts).toBeDefined(); + expect(Array.isArray(artifacts)).toBe(true); + expect(artifacts.length).toBeGreaterThan(0); + + if (!artifacts || artifacts.length === 0) { + console.error( + '[Acceptance Test] Artifacts array is empty or undefined. ' + + `Event data: ${JSON.stringify(callback.payload.event_data, null, 2)}` + ); + } + + console.log('[Acceptance Test] Artifacts received:', JSON.stringify(artifacts, null, 2)); + + // Find users artifact + const usersArtifact = artifacts.find((a: any) => a.item_type === 'users'); + expect(usersArtifact).toBeDefined(); + + if (!usersArtifact) { + console.error( + '[Acceptance Test] No users artifact found in artifacts array. ' + + `Available item_types: ${artifacts.map((a: any) => a.item_type).join(', ')}` + ); + throw new Error('Users artifact not found in artifacts array'); + } + + console.log('[Acceptance Test] Users artifact:', JSON.stringify(usersArtifact, null, 2)); + + // Verify users artifact has exactly 4 items (primary requirement) + expect(usersArtifact.item_count).toBe(4); + + if (usersArtifact.item_count !== 4) { + console.error( + `[Acceptance Test] Expected users artifact to have item_count=4, but got ${usersArtifact.item_count}. ` + + `This indicates that not all users data was extracted. 
` + + `Full artifact: ${JSON.stringify(usersArtifact, null, 2)}` + ); + } + + // Verify tasks artifact also exists (required for EXTRACTION_DATA_DONE to be emitted) + const tasksArtifact = artifacts.find((a: any) => a.item_type === 'tasks'); + expect(tasksArtifact).toBeDefined(); + + if (!tasksArtifact) { + console.error( + '[Acceptance Test] No tasks artifact found in artifacts array. ' + + `The extraction function requires both users and tasks to complete. ` + + `Available item_types: ${artifacts.map((a: any) => a.item_type).join(', ')}` + ); + } + + console.log('[Acceptance Test] Tasks artifact:', JSON.stringify(tasksArtifact, null, 2)); + console.log('[Acceptance Test] ✓ All assertions passed: 4 users extracted successfully'); + console.log('[Acceptance Test] ✓ Tasks extraction also completed as required by implementation'); + const totalTestTime = Date.now() - testStartTime; + console.log(`[Acceptance Test] Total test execution time: ${totalTestTime}ms (${(totalTestTime / 1000).toFixed(2)}s)`); + }, 120000); +}); \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/extraction-users-normalization-validation.test.ts b/conformance_tests/users_data_push_tests/extraction-users-normalization-validation.test.ts new file mode 100644 index 0000000..ae1bae9 --- /dev/null +++ b/conformance_tests/users_data_push_tests/extraction-users-normalization-validation.test.ts @@ -0,0 +1,281 @@ +import { Server } from 'http'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; +import { execSync } from 'child_process'; +import { + getTestConfig, + startCallbackServer, + invokeFunction, + waitForCallback, + CallbackData, + executeCommand, + findExtractedFile, +} from './test-utils/test-helpers'; +import { + loadExtractionDataStartEventFromJson, + loadExternalDomainMetadataEvent, +} from './test-utils/event-templates'; + +describe('Extraction Function - Users Normalization Validation (Acceptance Test)', () => { + let 
callbackServer: Server; + let callbacks: CallbackData[]; + let testConfig: ReturnType; + let tempMetadataFile: string | null = null; + + beforeAll(async () => { + testConfig = getTestConfig(); + const callbackSetup = await startCallbackServer(8002); + callbackServer = callbackSetup.server; + callbacks = callbackSetup.callbacks; + }); + + afterAll((done) => { + // Cleanup temporary metadata file if it exists + if (tempMetadataFile && fs.existsSync(tempMetadataFile)) { + try { + fs.unlinkSync(tempMetadataFile); + console.log(`[Normalization Validation] Cleaned up temporary metadata file: ${tempMetadataFile}`); + } catch (error) { + console.warn(`[Normalization Validation] Failed to cleanup temporary file: ${error}`); + } + } + callbackServer.close(done); + }); + + beforeEach(() => { + callbacks.length = 0; + }); + + test('should validate users normalization using Chef CLI', async () => { + console.log('[Normalization Validation] Starting normalization validation test'); + const testStartTime = Date.now(); + console.log('[Normalization Validation] Test started at:', new Date().toISOString()); + + // Step 1: Verify required environment variables + console.log('[Normalization Validation] Step 1: Verifying environment variables'); + + const chefCliPath = process.env.CHEF_CLI_PATH; + if (!chefCliPath) { + throw new Error( + 'Required environment variable CHEF_CLI_PATH is not set. ' + + 'Please set it to the path of the chef-cli executable before running tests.' + ); + } + console.log(`[Normalization Validation] CHEF_CLI_PATH: ${chefCliPath}`); + + const extractedFilesFolderPath = process.env.EXTRACTED_FILES_FOLDER_PATH; + if (!extractedFilesFolderPath) { + throw new Error( + 'Required environment variable EXTRACTED_FILES_FOLDER_PATH is not set. ' + + 'Please set it to the folder where extracted files are stored before running tests.' 
+ ); + } + console.log(`[Normalization Validation] EXTRACTED_FILES_FOLDER_PATH: ${extractedFilesFolderPath}`); + + // Verify Chef CLI executable exists + if (!fs.existsSync(chefCliPath)) { + throw new Error( + `Chef CLI executable not found at path: ${chefCliPath}. ` + + 'Please verify CHEF_CLI_PATH environment variable points to a valid chef-cli executable.' + ); + } + console.log('[Normalization Validation] ✓ Chef CLI executable found'); + + // Step 2: Retrieve External Domain Metadata + console.log('[Normalization Validation] Step 2: Retrieving External Domain Metadata'); + const metadataStartTime = Date.now(); + + const metadataEvent = loadExternalDomainMetadataEvent({ + apiKey: testConfig.wrikeApiKey, + spaceId: testConfig.wrikeSpaceId, + }); + + console.log('[Normalization Validation] Invoking get_external_domain_metadata function'); + const metadataResponse = await invokeFunction('get_external_domain_metadata', metadataEvent); + + if (metadataResponse.error) { + console.error( + '[Normalization Validation] Failed to retrieve metadata:', + JSON.stringify(metadataResponse.error, null, 2) + ); + throw new Error( + `Failed to retrieve External Domain Metadata. Error: ${JSON.stringify(metadataResponse.error)}` + ); + } + + if (!metadataResponse.function_result || !metadataResponse.function_result.data) { + console.error( + '[Normalization Validation] Invalid metadata response:', + JSON.stringify(metadataResponse, null, 2) + ); + throw new Error( + 'Invalid response from get_external_domain_metadata function. 
' + + `Response: ${JSON.stringify(metadataResponse)}` + ); + } + + const metadata = metadataResponse.function_result.data; + console.log('[Normalization Validation] ✓ External Domain Metadata retrieved successfully'); + console.log(`[Normalization Validation] Metadata retrieval took ${Date.now() - metadataStartTime}ms`); + + // Step 3: Write metadata to temporary file + console.log('[Normalization Validation] Step 3: Writing metadata to temporary file'); + + tempMetadataFile = path.join(os.tmpdir(), `metadata-${Date.now()}.json`); + fs.writeFileSync(tempMetadataFile, JSON.stringify(metadata, null, 2)); + console.log(`[Normalization Validation] ✓ Metadata written to: ${tempMetadataFile}`); + + // Step 4: Invoke extraction function with valid folder ID + console.log('[Normalization Validation] Step 4: Invoking extraction function'); + const extractionStartTime = Date.now(); + + // Note: The extraction function requires both users and tasks to complete + // before emitting EXTRACTION_DATA_DONE, so we must provide a valid folder ID + const extractionEvent = loadExtractionDataStartEventFromJson({ + apiKey: testConfig.wrikeApiKey, + spaceId: testConfig.wrikeSpaceId, + folderId: 'IEAGS6BYI5RFMPP7', // Valid folder ID for testing + }); + + console.log('[Normalization Validation] Sending EXTRACTION_DATA_START event'); + const extractionResponse = await invokeFunction('extraction', extractionEvent); + + if (extractionResponse.error) { + console.error( + '[Normalization Validation] Extraction function returned error:', + JSON.stringify(extractionResponse.error, null, 2) + ); + throw new Error( + `Extraction function failed. 
Error: ${JSON.stringify(extractionResponse.error)}` + ); + } + + console.log('[Normalization Validation] Waiting for EXTRACTION_DATA_DONE callback...'); + const callbackStartTime = Date.now(); + const callback = await waitForCallback(callbacks, 'EXTRACTION_DATA_DONE', 30000); + console.log(`[Normalization Validation] Callback received in ${Date.now() - callbackStartTime}ms`); + + // Verify exactly one EXTRACTION_DATA_DONE event was received + const doneCallbacks = callbacks.filter(cb => cb.eventType === 'EXTRACTION_DATA_DONE'); + if (doneCallbacks.length !== 1) { + const receivedEventTypes = callbacks.map(cb => cb.eventType).join(', '); + console.error( + `[Normalization Validation] Expected exactly 1 EXTRACTION_DATA_DONE callback, ` + + `but received ${doneCallbacks.length}. ` + + `All callbacks received: ${receivedEventTypes}` + ); + throw new Error( + `Expected exactly 1 EXTRACTION_DATA_DONE callback, but received ${doneCallbacks.length}. ` + + `Event types received: ${receivedEventTypes}` + ); + } + + console.log('[Normalization Validation] ✓ Extraction completed successfully'); + console.log(`[Normalization Validation] Extraction phase took ${Date.now() - extractionStartTime}ms`); + + // Step 5: Verify extracted files folder exists + console.log('[Normalization Validation] Step 5: Locating extracted file'); + + if (!fs.existsSync(extractedFilesFolderPath)) { + throw new Error( + `Extracted files folder does not exist at path: ${extractedFilesFolderPath}. ` + + 'This folder should be created after extraction function execution. ' + + 'Please verify that the extraction function is configured correctly and that ' + + 'EXTRACTED_FILES_FOLDER_PATH points to the correct location.' 
+ ); + } + console.log('[Normalization Validation] ✓ Extracted files folder exists'); + + // Step 6: Find the extracted users file + const extractedFile = findExtractedFile(extractedFilesFolderPath, 'extractor_users'); + + if (!extractedFile) { + const searchCommand = `ls ${extractedFilesFolderPath} | grep extractor_users | sort -r | head -n 1`; + let filesInDirectory = 'Unable to list files'; + try { + filesInDirectory = executeCommand(`ls -la ${extractedFilesFolderPath}`); + } catch (error) { + filesInDirectory = `Error listing files: ${error}`; + } + + console.error( + `[Normalization Validation] No extracted file found for users.\n` + + `Search command: ${searchCommand}\n` + + `Files in directory:\n${filesInDirectory}` + ); + throw new Error( + `No extracted file found for users. ` + + `Search command: ${searchCommand}. ` + + `Files in directory: ${filesInDirectory}` + ); + } + + const extractedFilePath = path.join(extractedFilesFolderPath, extractedFile); + console.log(`[Normalization Validation] ✓ Found extracted file: ${extractedFilePath}`); + + // Step 7: Execute Chef CLI validation + console.log('[Normalization Validation] Step 6: Executing Chef CLI validation'); + const validationStartTime = Date.now(); + + const validationCommand = `cat "${extractedFilePath}" | "${chefCliPath}" validate-data -m "${tempMetadataFile}" -r users`; + console.log(`[Normalization Validation] Validation command: ${validationCommand}`); + + let stdout = ''; + let stderr = ''; + let exitCode = 0; + + try { + const result = execSync(validationCommand, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); + stdout = result.toString(); + } catch (error: any) { + exitCode = error.status || 1; + stdout = error.stdout ? error.stdout.toString() : ''; + stderr = error.stderr ? 
error.stderr.toString() : ''; + } + + console.log(`[Normalization Validation] Chef CLI validation took ${Date.now() - validationStartTime}ms`); + // Print Chef CLI output + console.log('[Normalization Validation] Chef CLI stdout:'); + console.log(stdout || '(empty)'); + console.log('[Normalization Validation] Chef CLI stderr:'); + console.log(stderr || '(empty)'); + + // Step 8: Verify validation results + console.log('[Normalization Validation] Step 7: Verifying validation results'); + + if (exitCode !== 0) { + console.error( + `[Normalization Validation] Chef CLI validation failed with exit code: ${exitCode}\n` + + `Stdout: ${stdout}\n` + + `Stderr: ${stderr}` + ); + throw new Error( + `Chef CLI validation failed with exit code: ${exitCode}. ` + + `Stdout: ${stdout}. ` + + `Stderr: ${stderr}. ` + + 'This indicates that the normalized data does not match the expected schema.' + ); + } + + if (stdout.trim() !== '') { + console.error( + `[Normalization Validation] Chef CLI validation produced non-empty output:\n${stdout}\n` + + 'Expected empty output for successful validation.' + ); + throw new Error( + `Chef CLI validation produced non-empty output: ${stdout}. ` + + 'Expected empty output for successful validation. ' + + 'This indicates validation errors in the normalized data.' 
+ ); + } + + console.log('[Normalization Validation] ✓ Chef CLI validation successful (empty output)'); + console.log('[Normalization Validation] ✓ All assertions passed: Users normalization is valid'); + const totalTestTime = Date.now() - testStartTime; + console.log(`[Normalization Validation] Total test execution time: ${totalTestTime}ms (${(totalTestTime / 1000).toFixed(2)}s)`); + }, 60000); +}); \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/extraction-users-rate-limiting-acceptance.test.ts b/conformance_tests/users_data_push_tests/extraction-users-rate-limiting-acceptance.test.ts new file mode 100644 index 0000000..87d4301 --- /dev/null +++ b/conformance_tests/users_data_push_tests/extraction-users-rate-limiting-acceptance.test.ts @@ -0,0 +1,174 @@ +import { Server } from 'http'; +import axios from 'axios'; +import { + getTestConfig, + startCallbackServer, + invokeFunction, + waitForCallback, + CallbackData, +} from './test-utils/test-helpers'; +import { loadExtractionDataStartEventFromJson } from './test-utils/event-templates'; + +describe('Extraction Function - Users Data Extraction Rate Limiting (Acceptance Test)', () => { + let callbackServer: Server; + let callbacks: CallbackData[]; + let testConfig: ReturnType; + + beforeAll(async () => { + testConfig = getTestConfig(); + const callbackSetup = await startCallbackServer(8002); + callbackServer = callbackSetup.server; + callbacks = callbackSetup.callbacks; + }); + + afterAll((done) => { + callbackServer.close(done); + }); + + beforeEach(() => { + callbacks.length = 0; + }); + + test('should handle rate limiting and emit EXTRACTION_DATA_DELAY', async () => { + const testName = 'extraction_users_rate_limiting'; + const testStartTime = Date.now(); + console.log('[Rate Limiting Test] Test started at:', new Date().toISOString()); + + console.log(`[Rate Limiting Test] Starting test: ${testName}`); + + // Step 1: Start rate limiting on the mock API server + console.log('[Rate 
Limiting Test] Step 1: Starting rate limiting on mock API server'); + try { + const startRateLimitingResponse = await axios.post( + 'http://localhost:8004/start_rate_limiting', + { test_name: testName }, + { headers: { 'Content-Type': 'application/json' } } + ); + console.log( + `[Rate Limiting Test] Rate limiting started successfully. Response status: ${startRateLimitingResponse.status}` + ); + } catch (error) { + console.error('[Rate Limiting Test] Failed to start rate limiting:', error); + throw new Error( + `Failed to start rate limiting on mock API server: ${error instanceof Error ? error.message : 'Unknown error'}` + ); + } + + try { + // Step 2: Invoke the extraction function + console.log('[Rate Limiting Test] Step 2: Invoking extraction function with EXTRACTION_DATA_START event'); + const invocationStartTime = Date.now(); + + // Load event from JSON file with credentials + const event = loadExtractionDataStartEventFromJson({ + apiKey: testConfig.wrikeApiKey, + spaceId: testConfig.wrikeSpaceId, + }); + + console.log('[Rate Limiting Test] Event loaded, sending to extraction function'); + + // Invoke the extraction function + const response = await invokeFunction('extraction', event); + + // Verify no immediate errors + expect(response).toBeDefined(); + if (response.error) { + console.error( + '[Rate Limiting Test] Function returned error:', + JSON.stringify(response.error, null, 2) + ); + } + expect(response.error).toBeUndefined(); + console.log(`[Rate Limiting Test] Function invocation completed in ${Date.now() - invocationStartTime}ms`); + + console.log('[Rate Limiting Test] Function invoked successfully, waiting for EXTRACTION_DATA_DELAY callback...'); + const callbackStartTime = Date.now(); + + // Wait for the EXTRACTION_DATA_DELAY callback + const callback = await waitForCallback(callbacks, 'EXTRACTION_DATA_DELAY', 30000); + console.log(`[Rate Limiting Test] Callback received in ${Date.now() - callbackStartTime}ms`); + + console.log('[Rate Limiting 
Test] Received callback:', JSON.stringify(callback, null, 2)); + + // Verify at least one EXTRACTION_DATA_DELAY event was received + const delayCallbacks = callbacks.filter(cb => cb.eventType === 'EXTRACTION_DATA_DELAY'); + expect(delayCallbacks.length).toBeGreaterThanOrEqual(1); + + if (delayCallbacks.length < 1) { + console.error( + `[Rate Limiting Test] Expected at least 1 EXTRACTION_DATA_DELAY callback, but received ${delayCallbacks.length}. ` + + `All callbacks received: ${JSON.stringify(callbacks.map(cb => ({ eventType: cb.eventType, payload: cb.payload })), null, 2)}` + ); + throw new Error( + `Expected at least 1 EXTRACTION_DATA_DELAY callback, but received ${delayCallbacks.length}. ` + + `Event types received: ${callbacks.map(cb => cb.eventType).join(', ')}` + ); + } + + // Verify at least one callback has proper structure with delay + const validDelayCallbacks = delayCallbacks.filter(cb => + cb.payload?.event_data?.delay && + typeof cb.payload.event_data.delay === 'number' && + cb.payload.event_data.delay > 0 + ); + + expect(validDelayCallbacks.length).toBeGreaterThanOrEqual(1); + + if (validDelayCallbacks.length < 1) { + console.error( + `[Rate Limiting Test] No valid delay callbacks found. ` + + `Delay callbacks: ${JSON.stringify(delayCallbacks, null, 2)}` + ); + throw new Error('No valid EXTRACTION_DATA_DELAY callbacks with delay information found'); + } + + const firstValidDelay = validDelayCallbacks[0]; + console.log( + `[Rate Limiting Test] ✓ Rate limiting handled correctly. 
` + + `Received ${delayCallbacks.length} delay event(s), first delay: ${firstValidDelay.payload.event_data.delay} seconds` + ); + + // Verify no other event types were received + const otherCallbacks = callbacks.filter(cb => cb.eventType !== 'EXTRACTION_DATA_DELAY'); + if (otherCallbacks.length > 0 && otherCallbacks.some(cb => cb.eventType !== 'EXTRACTION_DATA_DONE')) { + console.error( + `[Rate Limiting Test] Unexpected callbacks received: ${JSON.stringify(otherCallbacks.map(cb => cb.eventType), null, 2)}` + ); + throw new Error( + `Expected only EXTRACTION_DATA_DELAY callbacks, but also received: ${otherCallbacks.filter(cb => cb.eventType !== 'EXTRACTION_DATA_DONE').map(cb => cb.eventType).join(', ')}` + ); + } + + if (otherCallbacks.length > 0) { + console.log(`[Rate Limiting Test] ✓ Also received ${otherCallbacks.length} other callback(s): ${otherCallbacks.map(cb => cb.eventType).join(', ')}`); + } else { + console.log('[Rate Limiting Test] ✓ No other callbacks received'); + } + + console.log(`[Rate Limiting Test] ✓ Total callbacks received: ${callbacks.length} (${delayCallbacks.length} delay, ${otherCallbacks.length} other)`); + } finally { + // Step 3: End rate limiting on the mock API server + console.log('[Rate Limiting Test] Step 3: Ending rate limiting on mock API server'); + try { + const endRateLimitingResponse = await axios.post( + 'http://localhost:8004/end_rate_limiting', + {}, + { headers: { 'Content-Type': 'application/json' } } + ); + console.log( + `[Rate Limiting Test] Rate limiting ended successfully. 
Response status: ${endRateLimitingResponse.status}` + ); + } catch (error) { + console.error('[Rate Limiting Test] Failed to end rate limiting:', error); + // Don't throw here to avoid masking the actual test failure + console.warn( + '[Rate Limiting Test] Warning: Failed to end rate limiting, but continuing with test result' + ); + } + } + + console.log('[Rate Limiting Test] ✓ All assertions passed: Rate limiting handled correctly'); + const totalTestTime = Date.now() - testStartTime; + console.log(`[Rate Limiting Test] Total test execution time: ${totalTestTime}ms (${(totalTestTime / 1000).toFixed(2)}s)`); + }, 40000); +}); \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/extraction-users.test.ts b/conformance_tests/users_data_push_tests/extraction-users.test.ts new file mode 100644 index 0000000..0cc1598 --- /dev/null +++ b/conformance_tests/users_data_push_tests/extraction-users.test.ts @@ -0,0 +1,69 @@ +import { Server } from 'http'; +import { + getTestConfig, + startCallbackServer, + invokeFunction, + waitForCallback, + invokeExtractionWithContinuation, + CallbackData, +} from './test-utils/test-helpers'; +import { createExtractionDataStartEvent } from './test-utils/event-templates'; + +describe('Extraction Function - Users Data Extraction', () => { + let callbackServer: Server; + let callbacks: CallbackData[]; + let testConfig: ReturnType; + + beforeAll(async () => { + testConfig = getTestConfig(); + const callbackSetup = await startCallbackServer(8002); + callbackServer = callbackSetup.server; + callbacks = callbackSetup.callbacks; + }); + + afterAll((done) => { + callbackServer.close(done); + }); + + beforeEach(() => { + callbacks.length = 0; + }); + + test('should fetch users, upload normalized data, and emit EXTRACTION_DATA_DONE with artifact', async () => { + const testStartTime = Date.now(); + console.log('[Users Extraction Test] Test started at:', new Date().toISOString()); + + const event = 
createExtractionDataStartEvent({ + apiKey: testConfig.wrikeApiKey, + spaceId: testConfig.wrikeSpaceId, + folderId: 'IEAGS6BYI5RFMPP7', + }); + + console.log('[Users Extraction Test] Invoking extraction with automatic continuation'); + + // Use automatic continuation to handle rate limiting + const callback = await invokeExtractionWithContinuation(event, callbacks, 5, 90000); + + console.log('[Users Extraction Test] Extraction completed, verifying results'); + + // Verify callback structure + expect(callback).toBeDefined(); + expect(callback.eventType).toBe('EXTRACTION_DATA_DONE'); + + // Verify event_data and artifacts + expect(callback.payload.event_data).toBeDefined(); + expect(callback.payload.event_data.artifacts).toBeDefined(); + expect(Array.isArray(callback.payload.event_data.artifacts)).toBe(true); + + // Verify users artifact exists and has data + const usersArtifact = callback.payload.event_data.artifacts.find((a: any) => a.item_type === 'users'); + expect(usersArtifact).toBeDefined(); + expect(usersArtifact.item_count).toBeGreaterThan(0); + + console.log(`[Users Extraction Test] ✓ Users extracted: ${usersArtifact.item_count} items`); + console.log(`[Users Extraction Test] ✓ All assertions passed`); + + const totalTestTime = Date.now() - testStartTime; + console.log(`[Users Extraction Test] Total test execution time: ${totalTestTime}ms (${(totalTestTime / 1000).toFixed(2)}s)`); + }, 120000); +}); \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/jest.config.js b/conformance_tests/users_data_push_tests/jest.config.js new file mode 100644 index 0000000..86c2f69 --- /dev/null +++ b/conformance_tests/users_data_push_tests/jest.config.js @@ -0,0 +1,17 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + // Global timeout set to 120 seconds to match the conformance test requirement. + // Tests run in parallel to complete within this constraint. 
+ testTimeout: 120000, + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + transform: { + '^.+\\.tsx?$': 'ts-jest', + }, + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + ], +}; \ No newline at end of file diff --git a/conformance_tests/worker_domain_mapping_initialization/package.json b/conformance_tests/users_data_push_tests/package.json similarity index 60% rename from conformance_tests/worker_domain_mapping_initialization/package.json rename to conformance_tests/users_data_push_tests/package.json index 2f244e7..add59d3 100644 --- a/conformance_tests/worker_domain_mapping_initialization/package.json +++ b/conformance_tests/users_data_push_tests/package.json @@ -1,21 +1,21 @@ { "name": "conformance-tests", "version": "1.0.0", - "description": "Conformance tests for Airdrop Snap-in", - "main": "conformance-tests.js", + "description": "Conformance tests for Wrike Airdrop Snap-in", "scripts": { - "test": "ts-node conformance-tests.ts" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0", - "body-parser": "^1.20.3", - "express": "^4.21.0" + "test": "jest" }, "devDependencies": { "@types/express": "^4.17.21", + "@types/jest": "^29.4.0", "@types/node": "^18.13.0", - "ts-node": "^10.9.1", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "body-parser": "^1.20.3", + "express": "^4.21.0" } } \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/test-utils/data_extraction_test.json b/conformance_tests/users_data_push_tests/test-utils/data_extraction_test.json new file mode 100644 index 0000000..abaaf80 --- /dev/null +++ b/conformance_tests/users_data_push_tests/test-utils/data_extraction_test.json @@ -0,0 +1,72 @@ +{ + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + 
"dev_oid": "test-oid", + "dev_org": "test-org", + "dev_org_id": "test-org-id", + "dev_uid": "test-uid", + "dev_user": "test-user", + "dev_user_id": "test-user-id", + "event_type_adaas": "", + "external_sync_unit": "test-external-sync-unit", + "external_sync_unit_id": "test-external-sync-unit-id", + "external_sync_unit_name": "test-external-sync-unit-name", + "external_system": "test-external-system", + "external_system_id": "test-external-system-id", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "airdrop-wrike-snap-in", + "mode": "INITIAL", + "request_id": "test-request-id", + "request_id_adaas": "test-request-id-adaas", + "run_id": "test-run_id", + "sequence_version": "6", + "snap_in_slug": "airdrop-wrike-snap-in", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "sync_run": "test-sync-run", + "sync_run_id": "test-sync-run-id", + "sync_tier": "sync_tier_2", + "sync_unit": "test-sync-unit", + "sync_unit_id": "test-sync-unit-id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/test", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/test:snap_in/test", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "service_account_id": "don:identity:dvrv-eu-1:devo/test:svcacc/123", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/test:devu/1", + "event_id": "", + "execution_id": "test-execution-id" + }, + "execution_metadata": { + "request_id": "test-request-id", + "function_name": "extraction", + "event_type": "EXTRACTION_DATA_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + 
"resources": { + "keyrings": {}, + "tags": {} + } + } +} \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/test-utils/event-templates.ts b/conformance_tests/users_data_push_tests/test-utils/event-templates.ts new file mode 100644 index 0000000..2d4161c --- /dev/null +++ b/conformance_tests/users_data_push_tests/test-utils/event-templates.ts @@ -0,0 +1,117 @@ +import * as fs from 'fs'; +import * as path from 'path'; + +export function loadExtractionDataStartEventFromJson(config: { apiKey: string; spaceId: string; folderId?: string }): any { + const jsonPath = path.join(__dirname, 'data_extraction_test.json'); + const jsonContent = fs.readFileSync(jsonPath, 'utf-8'); + const event = JSON.parse(jsonContent); + + // Replace placeholders with actual credentials + event.payload.connection_data.key = config.apiKey; + event.payload.connection_data.org_id = config.spaceId; + + // Replace external_sync_unit_id with valid folder ID if provided + // This is required for tasks extraction to work properly + if (config.folderId) { + event.payload.event_context.external_sync_unit_id = config.folderId; + event.payload.event_context.external_sync_unit = config.folderId; + } + + return event; +} + +export function loadExternalDomainMetadataEvent(config: { apiKey: string; spaceId: string }): any { + return { + payload: { + connection_data: { + key: config.apiKey, + key_type: '', + org_id: config.spaceId, + org_name: 'First Space', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + external_sync_unit_id: config.spaceId, + }, + }, + context: { + dev_oid: 'test-org-id', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'get_external_domain_metadata', + event_type: 'test-event', + 
devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +export function createExtractionDataStartEvent(config: { apiKey: string; spaceId: string; folderId: string }): any { + return { + context: { + dev_oid: 'test-dev-org', + source_id: 'test-source', + snap_in_id: 'test-snap-in', + snap_in_version_id: 'test-version', + service_account_id: 'test-service-account', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: config.apiKey, + org_id: config.spaceId, + org_name: 'Test Organization', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org', + dev_user: 'test-user', + dev_user_id: 'test-user', + external_sync_unit: config.folderId, + external_sync_unit_id: config.folderId, + external_sync_unit_name: 'Test Folder', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: 'test-request-id', + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: 'EXTRACTION_DATA_START', + event_data: {}, + }, + execution_metadata: { + request_id: 'test-request-id', + function_name: 'extraction', + event_type: 'EXTRACTION_DATA_START', + devrev_endpoint: 'http://localhost:8003', + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} \ No newline at end of file diff --git a/conformance_tests/users_data_push_tests/test-utils/test-helpers.ts b/conformance_tests/users_data_push_tests/test-utils/test-helpers.ts new file mode 100644 index 0000000..ff8d1da --- /dev/null +++ 
b/conformance_tests/users_data_push_tests/test-utils/test-helpers.ts @@ -0,0 +1,192 @@ +import express, { Express } from 'express'; +import bodyParser from 'body-parser'; +import { Server } from 'http'; +import axios from 'axios'; +import { execSync } from 'child_process'; +import * as fs from 'fs'; +import * as path from 'path'; + +export interface TestConfig { + wrikeApiKey: string; + wrikeSpaceId: string; +} + +export function getTestConfig(): TestConfig { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { wrikeApiKey, wrikeSpaceId }; +} + +export interface CallbackData { + eventType: string; + payload: any; +} + +export function startCallbackServer(port: number): Promise<{ server: Server; callbacks: CallbackData[] }> { + return new Promise((resolve) => { + const app: Express = express(); + const callbacks: CallbackData[] = []; + + app.use(bodyParser.json()); + + app.post('/callback', (req, res) => { + console.log('[Callback Server] Received callback:', req.body.event_type); + callbacks.push({ + eventType: req.body.event_type, + payload: req.body, + }); + res.status(200).send({ status: 'ok' }); + }); + + const server = app.listen(port, () => { + console.log(`[Callback Server] Started on port ${port}`); + resolve({ server, callbacks }); + }); + }); +} + +export async function invokeFunction(functionName: string, event: any): Promise { + const response = await axios.post('http://localhost:8000/handle/sync', event, { + headers: { 'Content-Type': 'application/json' }, + }); + return response.data; +} + +/** + * Invoke extraction function with automatic continuation on rate limiting. + * This simulates the platform's behavior of sending EXTRACTION_DATA_CONTINUE + * when EXTRACTION_DATA_DELAY is received. 
+ * + * @param event - Initial EXTRACTION_DATA_START event + * @param callbacks - Array to collect callbacks + * @param maxContinuations - Maximum number of continuations to attempt (default: 5) + * @param timeoutMs - Total timeout in milliseconds (default: 60000) + * @returns Promise that resolves when EXTRACTION_DATA_DONE is received + */ +export async function invokeExtractionWithContinuation( + event: any, + callbacks: CallbackData[], + maxContinuations: number = 5, + timeoutMs: number = 60000 +): Promise { + const startTime = Date.now(); + let continuationCount = 0; + + console.log('[Extraction Continuation] Starting extraction with automatic continuation'); + + // Send initial EXTRACTION_DATA_START event + await invokeFunction('extraction', event); + + // Monitor for callbacks and handle continuations + while (Date.now() - startTime < timeoutMs) { + // Check if we received EXTRACTION_DATA_DONE + const doneCallback = callbacks.find(cb => cb.eventType === 'EXTRACTION_DATA_DONE'); + if (doneCallback) { + console.log('[Extraction Continuation] Received EXTRACTION_DATA_DONE, extraction complete'); + return doneCallback; + } + + // Check if we received EXTRACTION_DATA_DELAY + const delayCallbacks = callbacks.filter(cb => cb.eventType === 'EXTRACTION_DATA_DELAY'); + if (delayCallbacks.length > continuationCount) { + const latestDelay = delayCallbacks[delayCallbacks.length - 1]; + const delay = latestDelay.payload.event_data?.delay || 3; + + console.log( + `[Extraction Continuation] Received EXTRACTION_DATA_DELAY #${continuationCount + 1}, ` + + `delay: ${delay}s, sending EXTRACTION_DATA_CONTINUE after short wait` + ); + + if (continuationCount >= maxContinuations) { + throw new Error( + `Maximum continuations (${maxContinuations}) reached. 
` + + `This may indicate an infinite loop in rate limiting handling.` + ); + } + + // Wait a short time (not the full delay) to simulate platform behavior + await new Promise(resolve => setTimeout(resolve, 500)); + + // Create EXTRACTION_DATA_CONTINUE event + const continueEvent = { + ...event, + payload: { + ...event.payload, + event_type: 'EXTRACTION_DATA_CONTINUE', + }, + }; + + await invokeFunction('extraction', continueEvent); + continuationCount++; + } + + // Wait before checking again + await new Promise(resolve => setTimeout(resolve, 100)); + } + + throw new Error(`Timeout waiting for EXTRACTION_DATA_DONE after ${timeoutMs}ms`); +} + +export function waitForCallback(callbacks: CallbackData[], expectedEventType: string, timeoutMs: number = 30000): Promise { + return new Promise((resolve, reject) => { + const startTime = Date.now(); + const interval = setInterval(() => { + const callback = callbacks.find(cb => cb.eventType === expectedEventType); + if (callback) { + clearInterval(interval); + resolve(callback); + } else if (Date.now() - startTime > timeoutMs) { + clearInterval(interval); + reject(new Error(`Timeout waiting for callback with event type: ${expectedEventType}`)); + } + }, 100); + }); +} + +/** + * Execute a shell command and return its output + * @param command - Command to execute + * @returns Command output as string + */ +export function executeCommand(command: string): string { + try { + const result = execSync(command, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); + return result.toString(); + } catch (error: any) { + throw new Error( + `Command execution failed: ${command}. ` + + `Exit code: ${error.status}. ` + + `Stderr: ${error.stderr ? 
error.stderr.toString() : 'N/A'}` + ); + } +} + +/** + * Find an extracted file in the specified folder + * @param folderPath - Path to the folder containing extracted files + * @param filePattern - Pattern to match (e.g., 'extractor_users') + * @returns Filename if found, null otherwise + */ +export function findExtractedFile(folderPath: string, filePattern: string): string | null { + try { + const command = `ls ${folderPath} | grep ${filePattern} | sort -r | head -n 1`; + const result = executeCommand(command); + const filename = result.trim(); + return filename || null; + } catch (error) { + console.error(`[findExtractedFile] Error finding file with pattern '${filePattern}':`, error); + return null; + } +} \ No newline at end of file diff --git a/conformance_tests/initial_domain_mapping_generation/tsconfig.json b/conformance_tests/users_data_push_tests/tsconfig.json similarity index 57% rename from conformance_tests/initial_domain_mapping_generation/tsconfig.json rename to conformance_tests/users_data_push_tests/tsconfig.json index 6f9b7f8..b311f38 100644 --- a/conformance_tests/initial_domain_mapping_generation/tsconfig.json +++ b/conformance_tests/users_data_push_tests/tsconfig.json @@ -2,12 +2,17 @@ "compilerOptions": { "target": "es2017", "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, + "lib": ["es2017"], + "outDir": "./dist", + "rootDir": "./", "strict": true, + "esModuleInterop": true, "skipLibCheck": true, - "resolveJsonModule": true + "forceConsistentCasingInFileNames": true, + "declaration": true, + "resolveJsonModule": true, + "moduleResolution": "node" }, - "include": ["*.ts"], + "include": ["**/*.ts"], "exclude": ["node_modules", "dist"] } \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/.npmrc b/conformance_tests/users_fetch_tests/.npmrc new file mode 100644 index 0000000..9cf9495 --- /dev/null +++ b/conformance_tests/users_fetch_tests/.npmrc @@ -0,0 +1 @@ +package-lock=false 
\ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/fetch_users.test.ts b/conformance_tests/users_fetch_tests/fetch_users.test.ts new file mode 100644 index 0000000..1050788 --- /dev/null +++ b/conformance_tests/users_fetch_tests/fetch_users.test.ts @@ -0,0 +1,265 @@ +import { TestHttpClient, getTestEnvironment, buildTestEvent } from './test-helpers'; +import axios from 'axios'; + +describe('fetch_users function', () => { + let client: TestHttpClient; + let env: ReturnType; + + beforeAll(() => { + env = getTestEnvironment(); + client = new TestHttpClient(); + }); + + describe('Test 1: Basic Invocation', () => { + it('should successfully invoke the function and return expected response structure', async () => { + const event = buildTestEvent('fetch_users', env.wrikeApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + // Verify response structure + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Check required fields exist + expect(result).toHaveProperty('status'); + expect(result).toHaveProperty('message'); + expect(result).toHaveProperty('status_code'); + expect(result).toHaveProperty('api_delay'); + expect(result).toHaveProperty('metadata'); + expect(result).toHaveProperty('timestamp'); + + // Verify status is valid + expect(['success', 'error']).toContain(result.status); + + // Verify metadata structure + expect(result.metadata).toHaveProperty('user_count'); + expect(result.metadata).toHaveProperty('function_name'); + expect(result.metadata).toHaveProperty('request_id'); + expect(result.metadata.function_name).toBe('fetch_users'); + + // Verify types + expect(typeof result.status_code).toBe('number'); + expect(typeof result.api_delay).toBe('number'); + expect(typeof result.metadata.user_count).toBe('number'); + expect(typeof result.timestamp).toBe('string'); + }, 30000); + }); + + describe('Test 2: 
Successful User Fetch', () => { + it('should fetch users successfully with valid credentials', async () => { + const event = buildTestEvent('fetch_users', env.wrikeApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Verify successful response + expect(result.status).toBe('success'); + expect(result.status_code).toBe(200); + expect(result.message).toContain('Successfully fetched contacts'); + + // Verify data array exists + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + // Verify user_count matches data length + expect(result.metadata.user_count).toBe(result.data.length); + + // If users exist, verify structure + if (result.data.length > 0) { + const user = result.data[0]; + expect(user).toHaveProperty('id'); + expect(user).toHaveProperty('type'); + expect(user).toHaveProperty('deleted'); + expect(typeof user.id).toBe('string'); + expect(typeof user.deleted).toBe('boolean'); + } + }, 30000); + }); + + describe('Test 3: Authentication Failure', () => { + it('should return error with invalid API key', async () => { + const invalidApiKey = 'invalid-api-key-12345'; + const event = buildTestEvent('fetch_users', invalidApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Verify error response + expect(result.status).toBe('error'); + expect([401, 403]).toContain(result.status_code); + expect(result.message).toBeDefined(); + expect(typeof result.message).toBe('string'); + + // Verify no delay for auth errors + expect(result.api_delay).toBe(0); + + // Verify user_count is 0 for failed requests + expect(result.metadata.user_count).toBe(0); + + // Verify no data returned + 
expect(result.data).toBeUndefined(); + }, 30000); + }); + + describe('Test 4: Response Metadata Validation', () => { + it('should include correct metadata in response', async () => { + const event = buildTestEvent('fetch_users', env.wrikeApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Verify metadata fields + expect(result.metadata.function_name).toBe('fetch_users'); + expect(result.metadata.request_id).toBe(event.execution_metadata.request_id); + + // Verify timestamp is valid ISO string + expect(() => new Date(result.timestamp)).not.toThrow(); + const timestamp = new Date(result.timestamp); + expect(timestamp.toString()).not.toBe('Invalid Date'); + + // Verify api_delay is non-negative + expect(result.api_delay).toBeGreaterThanOrEqual(0); + }, 30000); + }); + + describe('Test 5: Acceptance Test - Expected User Count', () => { + it('should return exactly 4 users when using test Wrike credentials', async () => { + const event = buildTestEvent('fetch_users', env.wrikeApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + // Verify response exists + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Verify successful response + expect(result.status).toBe('success'); + if (result.status !== 'success') { + throw new Error( + `Expected successful response for acceptance test.\n` + + `Expected status: 'success'\n` + + `Actual status: '${result.status}'\n` + + `Message: ${result.message}\n` + + `Status code: ${result.status_code}` + ); + } + + expect(result.status_code).toBe(200); + + // Verify data array exists + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + + // ACCEPTANCE TEST CRITERION: Verify exactly 4 users are 
returned + expect(result.data.length).toBe(4); + if (result.data.length !== 4) { + throw new Error( + `Acceptance Test Failed: Expected exactly 4 users in the response data array.\n` + + `Expected user count: 4\n` + + `Actual user count: ${result.data.length}\n` + + `Metadata user_count: ${result.metadata.user_count}\n` + + `User IDs: ${result.data.map((u: any) => u.id).join(', ')}` + ); + } + + // Verify metadata consistency + expect(result.metadata.user_count).toBe(4); + }, 30000); + }); + + describe('Test 6: Rate Limiting Handling', () => { + const mockApiBaseUrl = 'http://localhost:8004'; + const testIdentifier = 'fetch_users_rate_limit_test'; + + it('should handle rate limiting correctly and return appropriate api_delay', async () => { + let rateLimitingStarted = false; + + try { + // Step 1: Start rate limiting on the mock API server + const startRateLimitResponse = await axios.post( + `${mockApiBaseUrl}/start_rate_limiting`, + { test_name: testIdentifier }, + { timeout: 5000, validateStatus: () => true } + ); + + if (startRateLimitResponse.status !== 200) { + throw new Error( + `Failed to start rate limiting on mock API server.\n` + + `Expected status: 200\n` + + `Actual status: ${startRateLimitResponse.status}\n` + + `Response: ${JSON.stringify(startRateLimitResponse.data)}` + ); + } + + rateLimitingStarted = true; + + // Step 2: Invoke fetch_users function + const event = buildTestEvent('fetch_users', env.wrikeApiKey, env.wrikeSpaceId); + const response = await client.invokeFunction('fetch_users', event); + + // Verify response exists + expect(response).toBeDefined(); + expect(response.function_result).toBeDefined(); + + const result = response.function_result; + + // Step 3: Verify rate limiting response + // Check status code is 429 + expect(result.status_code).toBe(429); + if (result.status_code !== 429) { + throw new Error( + `Rate Limiting Test Failed: Expected status code 429 (Too Many Requests).\n` + + `Expected status_code: 429\n` + + `Actual 
status_code: ${result.status_code}\n` + + `Status: ${result.status}\n` + + `Message: ${result.message}\n` + + `Full response: ${JSON.stringify(result, null, 2)}` + ); + } + + // Check api_delay is greater than 0 + expect(result.api_delay).toBeGreaterThan(0); + if (result.api_delay <= 0) { + throw new Error( + `Rate Limiting Test Failed: Expected api_delay to be greater than 0.\n` + + `Expected: api_delay > 0\n` + + `Actual api_delay: ${result.api_delay}\n` + + `This indicates the retry-after header was not properly parsed.` + ); + } + + // Check api_delay is less than or equal to 3 + expect(result.api_delay).toBeLessThanOrEqual(3); + if (result.api_delay > 3) { + throw new Error( + `Rate Limiting Test Failed: api_delay calculation appears incorrect.\n` + + `Expected: api_delay <= 3\n` + + `Actual api_delay: ${result.api_delay}\n` + + `This suggests the api_delay was not calculated correctly from the retry-after header.\n` + + `The implementation should extract the retry-after value and use it as api_delay.` + ); + } + + // Verify error status and message + expect(result.status).toBe('error'); + expect(result.message).toContain('Rate limit'); + } finally { + // Step 4: Always cleanup - end rate limiting + if (rateLimitingStarted) { + await axios.post(`${mockApiBaseUrl}/end_rate_limiting`, {}, { timeout: 5000, validateStatus: () => true }); + } + } + }, 30000); + }); +}); \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/jest.config.js b/conformance_tests/users_fetch_tests/jest.config.js new file mode 100644 index 0000000..219155b --- /dev/null +++ b/conformance_tests/users_fetch_tests/jest.config.js @@ -0,0 +1,26 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/*.test.ts'], + testTimeout: 120000, + setupFilesAfterEnv: ['/jest.setup.js'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: [ + '**/*.ts', + '!**/*.test.ts', + '!**/node_modules/**', + '!**/dist/**' 
+ ], + transform: { + '^.+\\.ts$': ['ts-jest', { + tsconfig: { + target: 'es2017', + module: 'commonjs', + esModuleInterop: true, + allowSyntheticDefaultImports: true, + strict: true, + resolveJsonModule: true + } + }] + } +}; \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/jest.setup.js b/conformance_tests/users_fetch_tests/jest.setup.js new file mode 100644 index 0000000..9541857 --- /dev/null +++ b/conformance_tests/users_fetch_tests/jest.setup.js @@ -0,0 +1,3 @@ +// This file is managed externally and serves as a placeholder +// for any global test setup that may be required. +// Do not modify this file as part of conformance tests. \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/package.json b/conformance_tests/users_fetch_tests/package.json new file mode 100644 index 0000000..413613d --- /dev/null +++ b/conformance_tests/users_fetch_tests/package.json @@ -0,0 +1,20 @@ +{ + "name": "conformance-tests", + "version": "1.0.0", + "description": "Conformance tests for Wrike Airdrop Snap-in", + "scripts": { + "test": "jest" + }, + "devDependencies": { + "@types/express": "^4.17.21", + "@types/jest": "^29.4.0", + "@types/node": "^18.13.0", + "jest": "^29.4.2", + "ts-jest": "^29.0.5", + "typescript": "^4.9.5" + }, + "dependencies": { + "axios": "^1.9.0", + "express": "^4.21.0" + } +} \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/test-helpers.ts b/conformance_tests/users_fetch_tests/test-helpers.ts new file mode 100644 index 0000000..05ededd --- /dev/null +++ b/conformance_tests/users_fetch_tests/test-helpers.ts @@ -0,0 +1,165 @@ +import axios, { AxiosInstance } from 'axios'; +import express, { Express } from 'express'; +import { Server } from 'http'; + +/** + * Environment configuration for tests + */ +export interface TestEnvironment { + wrikeApiKey: string; + wrikeSpaceId: string; +} + +/** + * Read required environment variables for tests + */ +export function 
getTestEnvironment(): TestEnvironment { + const wrikeApiKey = process.env.WRIKE_API_KEY; + const wrikeSpaceId = process.env.WRIKE_SPACE_ID; + + if (!wrikeApiKey) { + throw new Error('WRIKE_API_KEY environment variable is required'); + } + + if (!wrikeSpaceId) { + throw new Error('WRIKE_SPACE_ID environment variable is required'); + } + + return { + wrikeApiKey, + wrikeSpaceId, + }; +} + +/** + * HTTP client for making requests to The Test Snap-In Server + */ +export class TestHttpClient { + private client: AxiosInstance; + + constructor(baseUrl: string = 'http://localhost:8000') { + this.client = axios.create({ + baseURL: baseUrl, + timeout: 30000, + validateStatus: () => true, // Don't throw on any status code + }); + } + + async invokeFunction(functionName: string, event: any): Promise { + const response = await this.client.post('/handle/sync', event); + return response.data; + } +} + +/** + * Build a test event payload for invoking a function + */ +export function buildTestEvent( + functionName: string, + apiKey: string, + spaceId: string, + additionalPayload: any = {} +): any { + return { + execution_metadata: { + request_id: `test-${functionName}-${Date.now()}`, + function_name: functionName, + event_type: 'test_event', + devrev_endpoint: 'http://localhost:8003', + }, + context: { + dev_oid: 'test-dev-oid', + source_id: 'test-source-id', + snap_in_id: 'test-snap-in-id', + snap_in_version_id: 'test-snap-in-version-id', + service_account_id: 'test-service-account-id', + secrets: { + service_account_token: 'test-token', + }, + }, + payload: { + connection_data: { + key: apiKey, + org_id: spaceId, + org_name: 'Test Organization', + key_type: 'api_key', + }, + event_context: { + callback_url: 'http://localhost:8002/callback', + dev_org: 'test-dev-org', + dev_org_id: 'test-dev-org-id', + dev_user: 'test-dev-user', + dev_user_id: 'test-dev-user-id', + external_sync_unit: 'test-sync-unit', + external_sync_unit_id: 'IEAGS6BYI5RFMPP7', + external_sync_unit_name: 'Test 
Sync Unit', + external_system: 'wrike', + external_system_type: 'wrike', + import_slug: 'test-import', + mode: 'INITIAL', + request_id: `test-${functionName}-${Date.now()}`, + snap_in_slug: 'test-snap-in', + snap_in_version_id: 'test-version', + sync_run: 'test-sync-run', + sync_run_id: 'test-sync-run-id', + sync_tier: 'test-tier', + sync_unit: 'test-sync-unit', + sync_unit_id: 'test-sync-unit-id', + uuid: 'test-uuid', + worker_data_url: 'http://localhost:8003/external-worker', + }, + event_type: 'test_event', + ...additionalPayload, + }, + input_data: { + global_values: {}, + event_sources: {}, + }, + }; +} + +/** + * Setup callback server for tests + */ +export class CallbackServer { + private app: Express; + private server: Server | null = null; + private port: number; + public receivedCallbacks: any[] = []; + + constructor(port: number = 8002) { + this.port = port; + this.app = express(); + this.app.use(express.json()); + + this.app.post('/callback', (req, res) => { + this.receivedCallbacks.push(req.body); + res.status(200).send({ status: 'received' }); + }); + } + + async start(): Promise { + return new Promise((resolve) => { + this.server = this.app.listen(this.port, () => { + resolve(); + }); + }); + } + + async stop(): Promise { + return new Promise((resolve, reject) => { + if (this.server) { + this.server.close((err) => { + if (err) reject(err); + else resolve(); + }); + } else { + resolve(); + } + }); + } + + clearCallbacks(): void { + this.receivedCallbacks = []; + } +} \ No newline at end of file diff --git a/conformance_tests/users_fetch_tests/tsconfig.json b/conformance_tests/users_fetch_tests/tsconfig.json new file mode 100644 index 0000000..1054ad3 --- /dev/null +++ b/conformance_tests/users_fetch_tests/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "es2017", + "module": "commonjs", + "lib": ["es2017"], + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "declaration": true, + 
"resolveJsonModule": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": ".", + "types": ["jest", "node"] + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/conformance_tests/worker_domain_mapping_initialization/conformance-tests.ts b/conformance_tests/worker_domain_mapping_initialization/conformance-tests.ts deleted file mode 100644 index b429788..0000000 --- a/conformance_tests/worker_domain_mapping_initialization/conformance-tests.ts +++ /dev/null @@ -1,169 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import bodyParser from 'body-parser'; -import { EventType } from '@devrev/ts-adaas'; - -// Test configuration -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const CALLBACK_SERVER_URL = `http://localhost:${CALLBACK_SERVER_PORT}`; -const TEST_TIMEOUT = 10000; // 10 seconds per test - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || 'test-api-key'; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'test-space-id'; -const TEST_EXTERNAL_SYNC_UNIT_ID = 'IEAGS6BYI5RFMPPY'; - -// Setup callback server to capture responses -const app = express(); -app.use(bodyParser.json()); - -let lastCallbackData: any = null; -app.post('/callback', (req, res) => { - console.log('Callback received:', JSON.stringify(req.body)); - lastCallbackData = req.body; - res.status(200).send({ status: 'success' }); -}); - -const server = app.listen(CALLBACK_SERVER_PORT, () => { - console.log(`Callback server listening on port ${CALLBACK_SERVER_PORT}`); -}); - -// Helper function to create a test event -function createTestEvent(functionName: string, eventType: string = EventType.ExtractionExternalSyncUnitsStart) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_version_id: 'test-version-id', 
- snap_in_id: 'test-snap-in-id' - }, - payload: { - connection_data: { - org_id: WRIKE_SPACE_GID, - org_name: 'Test Org', - key: WRIKE_API_KEY, - key_type: 'api_key' - }, - event_context: { - callback_url: `${CALLBACK_SERVER_URL}/callback`, - dev_org: 'test-org', - dev_org_id: 'test-org-id', - dev_user: 'test-user', - dev_user_id: 'test-user-id', - external_sync_unit: 'test-unit', - external_sync_unit_id: TEST_EXTERNAL_SYNC_UNIT_ID, - external_sync_unit_name: 'Test Unit', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-version-id', - sync_run: 'test-run', - sync_run_id: 'test-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-unit', - sync_unit_id: 'test-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - }, - event_type: eventType - }, - execution_metadata: { - function_name: functionName, - devrev_endpoint: 'http://localhost:8003' - }, - input_data: {} - }; -} - -// Tests -async function runTests() { - try { - console.log('Starting conformance tests...'); - - // Test 1: Basic connectivity test - console.log('\n--- Test 1: Basic connectivity test ---'); - try { - const healthcheckEvent = createTestEvent('healthcheck'); - const response = await axios.post(TEST_SERVER_URL, healthcheckEvent); - - if (response.status === 200) { - console.log('✅ Test server is accessible'); - } else { - throw new Error(`Unexpected status code: ${response.status}`); - } - } catch (error) { - console.error('❌ Test server connectivity test failed:', error); - process.exit(1); - } - - // Test 2: Simple test - extraction_external_sync_unit_check function - console.log('\n--- Test 2: Testing extraction_external_sync_unit_check function ---'); - try { - const externalSyncUnitEvent = createTestEvent('extraction_external_sync_unit_check'); - const response = await axios.post(TEST_SERVER_URL, 
externalSyncUnitEvent); - - if (response.status !== 200) { - throw new Error(`Unexpected status code: ${response.status}`); - } - - const result = response.data; - console.log('Function result:', JSON.stringify(result)); - - if (result.function_result && result.function_result.status === 'success') { - console.log('✅ extraction_external_sync_unit_check function executed successfully'); - } else { - throw new Error('Function did not return success status'); - } - } catch (error) { - console.error('❌ extraction_external_sync_unit_check test failed:', error); - process.exit(1); - } - - // Test 3: Complex test - data_extraction_check function - console.log('\n--- Test 3: Testing data_extraction_check function ---'); - try { - const dataExtractionEvent = createTestEvent('data_extraction_check', EventType.ExtractionDataStart); - const response = await axios.post(TEST_SERVER_URL, dataExtractionEvent); - - if (response.status !== 200) { - throw new Error(`Unexpected status code: ${response.status}`); - } - - const result = response.data; - console.log('Function result:', JSON.stringify(result)); - - if (result.function_result && result.function_result.status === 'success') { - console.log('✅ data_extraction_check function executed successfully'); - } else { - throw new Error('Function did not return success status'); - } - } catch (error) { - console.error('❌ data_extraction_check test failed:', error); - process.exit(1); - } - - console.log('\n✅ All conformance tests passed successfully!'); - process.exit(0); - } catch (error) { - console.error('❌ Conformance tests failed:', error); - process.exit(1); - } finally { - // Clean up - server.close(); - } -} - -// Set timeout for the entire test suite -const testTimeout = setTimeout(() => { - console.error('❌ Test suite timed out after 120 seconds'); - server.close(); - process.exit(1); -}, 120000); - -// Run tests -runTests().finally(() => clearTimeout(testTimeout)); \ No newline at end of file diff --git 
a/conformance_tests/worker_domain_mapping_initialization/tsconfig.json b/conformance_tests/worker_domain_mapping_initialization/tsconfig.json deleted file mode 100644 index dde1c22..0000000 --- a/conformance_tests/worker_domain_mapping_initialization/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "compilerOptions": { - "target": "es2017", - "module": "commonjs", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true - }, - "include": ["*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/worker_spawn_parameter_validation/jest.config.js b/conformance_tests/worker_spawn_parameter_validation/jest.config.js deleted file mode 100644 index 0747ba8..0000000 --- a/conformance_tests/worker_spawn_parameter_validation/jest.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, -}; \ No newline at end of file diff --git a/conformance_tests/worker_spawn_parameter_validation/package.json b/conformance_tests/worker_spawn_parameter_validation/package.json deleted file mode 100644 index d9253ac..0000000 --- a/conformance_tests/worker_spawn_parameter_validation/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "airdrop-snap-in-tests", - "version": "1.0.0", - "description": "Conformance tests for Airdrop Snap-in", - "main": "index.js", - "scripts": { - "test": "jest --forceExit --detectOpenHandles" - }, - "devDependencies": { - "@types/express": "^4.17.21", - "@types/jest": "^29.5.5", - "@types/node": "^18.18.0", - "axios": "^1.5.1", - "express": "^4.18.2", - "jest": "^29.7.0", - "ts-jest": "^29.1.1", - "typescript": "^5.2.2" - } -} \ No newline at end of file diff --git a/conformance_tests/worker_spawn_parameter_validation/spawn-options.test.ts b/conformance_tests/worker_spawn_parameter_validation/spawn-options.test.ts deleted file mode 100644 index f30aa9f..0000000 
--- a/conformance_tests/worker_spawn_parameter_validation/spawn-options.test.ts +++ /dev/null @@ -1,157 +0,0 @@ -import axios from 'axios'; -import express from 'express'; -import { Server } from 'http'; -import { AddressInfo } from 'net'; - -// Environment variables -const WRIKE_API_KEY = process.env.WRIKE_API_KEY || 'test-api-key'; -const WRIKE_SPACE_GID = process.env.WRIKE_SPACE_GID || 'test-space-id'; -const TEST_PROJECT_ID = 'IEAGS6BYI5RFMPPY'; - -// Server URLs -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -let callbackServer: Server; -let callbackUrl: string; - -// Setup callback server -beforeAll(async () => { - const app = express(); - app.use(express.json()); - - // Track requests to verify spawn parameters - const requests: any[] = []; - - // Endpoint to receive callbacks - app.post('/callback', (req, res) => { - requests.push(req.body); - res.status(200).send({ status: 'success' }); - }); - - // Endpoint to get recorded requests - app.get('/requests', (req, res) => { - res.status(200).send(requests); - }); - - // Clear recorded requests - app.post('/clear', (req, res) => { - requests.length = 0; - res.status(200).send({ status: 'success' }); - }); - - // Start server - callbackServer = app.listen(8002); - const address = callbackServer.address() as AddressInfo; - callbackUrl = `http://localhost:${address.port}/callback`; - - console.log(`Callback server started at ${callbackUrl}`); -}); - -// Cleanup -afterAll(async () => { - if (callbackServer) { - callbackServer.close(); - } -}); - -// Clear requests before each test -beforeEach(async () => { - await axios.post('http://localhost:8002/clear'); -}); - -describe('Spawn function parameter validation', () => { - // Test 1: Basic test to verify the extraction_external_sync_unit_check function can be invoked - test('extraction_external_sync_unit_check function can be invoked', async () => { - const event = createTestEvent('extraction_external_sync_unit_check', 
'EXTRACTION_EXTERNAL_SYNC_UNITS_START'); - - const response = await axios.post(TEST_SERVER_URL, event); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.valid_external_sync_unit_events).toBe(true); - }); - - // Test 2: Test that verifies the spawn function is called without the "options" key - test('spawn function is called without options key in extraction_external_sync_unit_check', async () => { - const event = createTestEvent('extraction_external_sync_unit_check', 'EXTRACTION_EXTERNAL_SYNC_UNITS_START'); - - // Make the request to trigger the function - const response = await axios.post(TEST_SERVER_URL, event); - expect(response.status).toBe(200); - - // The function should have spawned a worker that would make a callback - // We can't directly inspect the spawn parameters, but we can verify the function executed successfully - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - - // Verify the implementation by checking the function's behavior - // If the spawn function was called with an options key, it would likely cause errors - // that would be reflected in the response - expect(response.data.error).toBeUndefined(); - }); - - // Test 3: More complex test with data_extraction_check function - test('spawn function is called without options key in data_extraction_check', async () => { - const event = createTestEvent('data_extraction_check', 'EXTRACTION_DATA_START'); - - // Make the request to trigger the function - const response = await axios.post(TEST_SERVER_URL, event); - expect(response.status).toBe(200); - - // The function should have spawned a worker that would make a callback - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - 
expect(response.data.function_result.valid_data_extraction_events).toBe(true); - - // Verify no errors occurred, which would happen if options key was incorrectly used - expect(response.data.error).toBeUndefined(); - }); -}); - -// Helper function to create a test event -function createTestEvent(functionName: string, eventType: string) { - return { - context: { - secrets: { - service_account_token: 'test-token' - }, - snap_in_id: 'test-snap-in-id', - snap_in_version_id: 'test-snap-in-version-id' - }, - execution_metadata: { - function_name: functionName, - devrev_endpoint: 'http://localhost:8003' - }, - payload: { - connection_data: { - key: WRIKE_API_KEY, - org_id: WRIKE_SPACE_GID, - key_type: 'api_key' - }, - event_type: eventType, - event_context: { - callback_url: callbackUrl, - external_sync_unit_id: TEST_PROJECT_ID, - dev_org_id: 'test-org-id', - dev_user_id: 'test-user-id', - external_sync_unit: TEST_PROJECT_ID, - external_sync_unit_name: 'Test Project', - external_system: 'wrike', - external_system_type: 'wrike', - import_slug: 'test-import', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in', - snap_in_version_id: 'test-snap-in-version-id', - sync_run: 'test-sync-run', - sync_run_id: 'test-sync-run-id', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - sync_unit_id: 'test-sync-unit-id', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker' - } - }, - input_data: {} - }; -} \ No newline at end of file diff --git a/conformance_tests/worker_spawn_parameter_validation/tsconfig.json b/conformance_tests/worker_spawn_parameter_validation/tsconfig.json deleted file mode 100644 index 1915354..0000000 --- a/conformance_tests/worker_spawn_parameter_validation/tsconfig.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "compilerOptions": { - "target": "es2018", - "module": "commonjs", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "outDir": 
"./dist", - "resolveJsonModule": true - }, - "include": ["**/*.ts"], - "exclude": ["node_modules"] -} \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/extraction-health-check.test.ts b/conformance_tests/workflow_invocation_validation/extraction-health-check.test.ts deleted file mode 100644 index 2788b6e..0000000 --- a/conformance_tests/workflow_invocation_validation/extraction-health-check.test.ts +++ /dev/null @@ -1,175 +0,0 @@ -import axios from 'axios'; -import * as http from 'http'; -import * as fs from 'fs'; -import * as path from 'path'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -// Constants -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const DEVREV_SERVER_URL = 'http://localhost:8003'; -const WORKER_DATA_URL = 'http://localhost:8003/external-worker'; - -// Load the extraction health check event from the resource file -const extractionHealthCheckEvent = JSON.parse( - fs.readFileSync(path.resolve(__dirname, './extraction_health_check.json'), 'utf8') -); - -// Setup callback server -let callbackServer: http.Server | null = null; -let callbackData: any = null; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - callbackServer = http.createServer((req, res) => { - let body = ''; - req.on('data', (chunk) => { - body += chunk.toString(); - }); - req.on('end', () => { - try { - callbackData = JSON.parse(body); - } catch (e) { - callbackData = body; - } - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ status: 'ok' })); - }); - }); - - callbackServer.listen(CALLBACK_SERVER_PORT, '127.0.0.1', () => { - console.log(`Callback server running at http://localhost:${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - callbackServer = null; - resolve(); 
- }); - } else { - resolve(); - } - }); -} - -// Test suite -describe('Extraction Health Check Tests', () => { - beforeAll(async () => { - await setupCallbackServer(); - }); - - afterAll(async () => { - await shutdownCallbackServer(); - }); - - beforeEach(() => { - callbackData = null; - }); - - // Test 1: Verify the extraction health check function can be invoked with the provided event - test('should successfully invoke the extraction health check function with the provided event', async () => { - // Update the callback URL to point to our test callback server - const event = { - ...extractionHealthCheckEvent, - payload: { - ...extractionHealthCheckEvent.payload, - event_context: { - ...extractionHealthCheckEvent.payload.event_context, - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback` - } - } - }; - - // Set the function name to ensure we're calling the right function - event.execution_metadata.function_name = 'healthcheck'; - - // Send the request to the test server - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.message).toBe('Healthcheck function successfully invoked'); - }); - - // Test 2: Verify the function correctly identifies the event as an extraction event - test('should correctly identify the event as an extraction event', async () => { - // First, modify the event to use the extraction_workflow_check function - const event = { - ...extractionHealthCheckEvent, - execution_metadata: { - ...extractionHealthCheckEvent.execution_metadata, - function_name: 'extraction_workflow_check' - }, - payload: { - ...extractionHealthCheckEvent.payload, - event_context: { - ...extractionHealthCheckEvent.payload.event_context, - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback` - } - } - }; - - // Send the 
request to the test server - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.valid_extraction_events).toBe(true); - expect(response.data.function_result.message).toBe('Extraction workflow check function successfully invoked'); - }); - - // Test 3: Verify the function handles malformed events gracefully - test('should handle malformed events gracefully', async () => { - // Create a malformed event by removing required fields - const malformedEvent = { - context: {}, - payload: { - event_context: { - callback_url: `http://localhost:${CALLBACK_SERVER_PORT}/callback` - } - }, - execution_metadata: { - function_name: 'extraction_health_check', - } - }; - - // The server will throw an error, but axios might not return a response object - try { - await axios.post(TEST_SERVER_URL, malformedEvent); - fail('Expected request to fail'); - } catch (error: any) { - // Check that we got an error, but don't require a specific structure - expect(error).toBeDefined(); - } - }); - - // Test 4: Verify the function works with the exact event from the resource file - test('should work with the exact event from the resource file', async () => { - // Use the event exactly as provided in the resource file - const event = { - ...extractionHealthCheckEvent, - execution_metadata: { - ...extractionHealthCheckEvent.execution_metadata, - function_name: 'healthcheck' // Make sure we're calling the right function - } - }; - - // Send the request to the test server - const response = await axios.post(TEST_SERVER_URL, event); - - // Verify the response - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.message).toBe('Healthcheck 
function successfully invoked'); - }); -}); \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/extraction-workflow-check.test.ts b/conformance_tests/workflow_invocation_validation/extraction-workflow-check.test.ts deleted file mode 100644 index 0e9fce4..0000000 --- a/conformance_tests/workflow_invocation_validation/extraction-workflow-check.test.ts +++ /dev/null @@ -1,245 +0,0 @@ -import axios from 'axios'; -import * as http from 'http'; -import { AirdropEvent, EventType } from '@devrev/ts-adaas'; - -// Constants -const TEST_SERVER_URL = 'http://localhost:8000/handle/sync'; -const CALLBACK_SERVER_PORT = 8002; -const FUNCTION_NAME = 'extraction_workflow_check'; - -// Mock AirdropEvent structure -interface MockAirdropEvent { - context: { - secrets: { - service_account_token: string; - }; - snap_in_version_id: string; - snap_in_id?: string; - }; - payload: Partial<{ - event_type?: string; - event_context: { - dev_org: string; - dev_user: string; - external_sync_unit: string; - external_system: string; - external_system_type: string; - import_slug: string; - mode: string; - request_id: string; - snap_in_slug: string; - sync_run: string; - sync_tier: string; - sync_unit: string; - uuid: string; - worker_data_url: string; - }; - }>; - execution_metadata: { - devrev_endpoint: string; - function_name: string; - }; - input_data?: Record; -} - -// Create a basic valid event -function createBasicEvent(eventType?: string): MockAirdropEvent { - return { - context: { - secrets: { - service_account_token: 'test-token', - }, - snap_in_version_id: 'test-version-id', - snap_in_id: 'test-snap-in-id', - }, - payload: { - event_type: eventType, - event_context: { - dev_org: 'test-org', - dev_user: 'test-user', - external_sync_unit: 'test-sync-unit', - external_system: 'test-system', - external_system_type: 'test-system-type', - import_slug: 'test-import-slug', - mode: 'INITIAL', - request_id: 'test-request-id', - snap_in_slug: 'test-snap-in-slug', 
- sync_run: 'test-sync-run', - sync_tier: 'test-tier', - sync_unit: 'test-sync-unit', - uuid: 'test-uuid', - worker_data_url: 'http://localhost:8003/external-worker', - }, - }, - execution_metadata: { - devrev_endpoint: 'http://localhost:8003', - function_name: FUNCTION_NAME, - }, - }; -} - -// Setup callback server -let callbackServer: http.Server | null = null; -let callbackData: any = null; - -function setupCallbackServer(): Promise { - return new Promise((resolve) => { - callbackServer = http.createServer((req, res) => { - let body = ''; - req.on('data', (chunk) => { - body += chunk.toString(); - }); - req.on('end', () => { - callbackData = JSON.parse(body); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ status: 'ok' })); - }); - }); - - callbackServer.listen(CALLBACK_SERVER_PORT, '127.0.0.1', () => { - console.log(`Callback server running at http://localhost:${CALLBACK_SERVER_PORT}`); - resolve(); - }); - }); -} - -function shutdownCallbackServer(): Promise { - return new Promise((resolve) => { - if (callbackServer) { - callbackServer.close(() => { - callbackServer = null; - resolve(); - }); - } else { - resolve(); - } - }); -} - -// Test suite -describe('Extraction Workflow Check Function Tests', () => { - beforeAll(async () => { - await setupCallbackServer(); - }); - - afterAll(async () => { - await shutdownCallbackServer(); - }); - - beforeEach(() => { - callbackData = null; - }); - - // Test 1: Basic test - Verify the function can be invoked with a minimal valid event - test('should successfully invoke the extraction workflow check function', async () => { - const event = createBasicEvent(); - - const response = await axios.post(TEST_SERVER_URL, event); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.status).toBe('success'); - expect(response.data.function_result.message).toBe('Extraction workflow check function successfully 
invoked'); - expect(response.data.function_result.valid_extraction_events).toBe(false); - }); - - // Test 2: Event validation test - Verify the function validates event structure properly - test('should fail when required fields are missing', async () => { - const event = createBasicEvent(); - delete event.context.secrets.service_account_token; - - // The server will throw an error, but axios might not return a response object - try { - await axios.post(TEST_SERVER_URL, event); - fail('Expected request to fail'); - } catch (error: any) { - // Check that we got an error, but don't require a specific structure - expect(error).toBeDefined(); - // Commented out as response structure may vary: - // expect(error.response).toBeDefined(); - // expect(error.response.data.error).toBeDefined(); - } - }); - - // Test 3: Extraction event detection test - Verify the function correctly identifies extraction-related events - test('should correctly identify extraction-related events', async () => { - // Test with an extraction event - const extractionEvent = createBasicEvent(EventType.ExtractionDataStart as string); - - const response = await axios.post(TEST_SERVER_URL, extractionEvent); - - expect(response.status).toBe(200); - expect(response.data.function_result).toBeDefined(); - expect(response.data.function_result.valid_extraction_events).toBe(true); - }); - - // Test 4: Multiple events test - Verify the function can handle multiple events - test('should handle multiple events correctly', async () => { - const events = [ - createBasicEvent(), - createBasicEvent(EventType.ExtractionDataStart as string) - ]; - - // Note: The test server only supports one event at a time for sync requests - // This is a limitation of the test environment, not the function itself - const response1 = await axios.post(TEST_SERVER_URL, events[0]); - const response2 = await axios.post(TEST_SERVER_URL, events[1]); - - expect(response1.status).toBe(200); - 
expect(response1.data.function_result.valid_extraction_events).toBe(false); - - expect(response2.status).toBe(200); - expect(response2.data.function_result.valid_extraction_events).toBe(true); - }); - - // Test 5: Test all extraction event types - test('should identify all extraction event types correctly', async () => { - const extractionEventTypes = [ - EventType.ExtractionExternalSyncUnitsStart, - EventType.ExtractionMetadataStart, - EventType.ExtractionDataStart, - EventType.ExtractionDataContinue, - EventType.ExtractionDataDelete, - EventType.ExtractionAttachmentsStart, - EventType.ExtractionAttachmentsContinue, - EventType.ExtractionAttachmentsDelete - ]; - - for (const eventType of extractionEventTypes) { - const event = createBasicEvent(eventType as string); - const response = await axios.post(TEST_SERVER_URL, event); - - expect(response.status).toBe(200); - expect(response.data.function_result.valid_extraction_events).toBe(true); - expect(response.data.function_result.message).toBe('Extraction workflow check function successfully invoked'); - } - }); - - // Test 6: Error handling test - Verify the function handles errors appropriately - test('should handle malformed event data gracefully', async () => { - const malformedEvent = { - // Missing required fields - execution_metadata: { - function_name: FUNCTION_NAME, - } - }; - - // The server will throw an error, but axios might not return a response object - try { - await axios.post(TEST_SERVER_URL, malformedEvent); - expect(true).toBe(false); // This should not be reached - } catch (error: any) { - // Check that we got an error, but don't require a specific structure - expect(error).toBeDefined(); - // Commented out as response structure may vary: - // expect(error.response).toBeDefined(); - // expect(error.response.data.error).toBeDefined(); - } - }); - - // Force close any open handles after all tests - afterAll(async () => { - // Add a small delay to ensure all network requests are completed - await new 
Promise(resolve => setTimeout(resolve, 1000)); - }); -}); \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/extraction_health_check.json b/conformance_tests/workflow_invocation_validation/extraction_health_check.json deleted file mode 100644 index ca2c5d8..0000000 --- a/conformance_tests/workflow_invocation_validation/extraction_health_check.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "execution_metadata": { - "function_name": "healthcheck", - "devrev_endpoint": "http://localhost:8003" - }, - "payload" : { - "event_type": "EXTRACTION_EXTERNAL_SYNC_UNITS_START", - "event_context": { - "callback_url": "http://localhost:8002/callback", - "dev_org": "test-dev-org", - "external_sync_unit_id": "test-external-sync-unit", - "sync_unit_id": "test-sync-unit", - "worker_data_url": "http://localhost:8003/external-worker" - }, - "connection_data": { - "org_id": "test-org-id", - "key": "key=test-key&token=test-token" - } - }, - "context": { - "secrets": { - "service_account_token": "test-token" - }, - "snap_in_version_id": "test-version-id" - } -} \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/jest.config.js b/conformance_tests/workflow_invocation_validation/jest.config.js deleted file mode 100644 index ff085f1..0000000 --- a/conformance_tests/workflow_invocation_validation/jest.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - testTimeout: 30000, - setupFilesAfterEnv: ['./jest.setup.ts'] -}; \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/jest.setup.ts b/conformance_tests/workflow_invocation_validation/jest.setup.ts deleted file mode 100644 index 2ca298d..0000000 --- a/conformance_tests/workflow_invocation_validation/jest.setup.ts +++ /dev/null @@ -1,11 +0,0 @@ -// Increase timeout for all tests to accommodate network requests -jest.setTimeout(60000); - -// Suppress console output during tests to 
keep test output clean -// Only suppress in test environment -if (process.env.NODE_ENV === 'test') { - console.log = jest.fn(); -} - -// Add proper error handling for unhandled promise rejections -process.on('unhandledRejection', (reason) => console.error('Unhandled Rejection:', reason)); \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/package.json b/conformance_tests/workflow_invocation_validation/package.json deleted file mode 100644 index f503751..0000000 --- a/conformance_tests/workflow_invocation_validation/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "airdrop-snap-in-conformance-tests", - "version": "1.0.0", - "description": "Conformance tests for Airdrop Snap-in", - "scripts": { - "test": "jest --config jest.config.js" - }, - "devDependencies": { - "@types/jest": "^29.5.0", - "@types/node": "^18.15.11", - "jest": "^29.5.0", - "ts-jest": "^29.1.0", - "typescript": "^4.9.5" - }, - "dependencies": { - "@devrev/ts-adaas": "1.5.1", - "axios": "^1.9.0" - } -} \ No newline at end of file diff --git a/conformance_tests/workflow_invocation_validation/tsconfig.test.json b/conformance_tests/workflow_invocation_validation/tsconfig.test.json deleted file mode 100644 index 741b081..0000000 --- a/conformance_tests/workflow_invocation_validation/tsconfig.test.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "compilerOptions": { - "target": "es2016", - "module": "commonjs", - "outDir": "./dist", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true - }, - "compilerOptions": { - "types": ["jest", "node"], - "esModuleInterop": true - }, - "include": ["*.test.ts", "jest.setup.ts"] -} \ No newline at end of file diff --git a/devrev-wrike-snapin.plain b/devrev-wrike-snapin.plain index 6f3d045..933de60 100644 --- a/devrev-wrike-snapin.plain +++ b/devrev-wrike-snapin.plain @@ -8,161 +8,323 @@ - The Space is a Wrike space. -- The Project is a Wrike project. 
+- The Data Model of The API is as follows: + ``` + The Data Model + ├─ The Account + │ └─ The List of Users + │ └─ The User + ├─ The List of Spaces + │ └─ The Space + │ └─ The List of Folders + │ └─ The Folder + │ └─ The List of Tasks + │ └─ The Task + │ ├─ The List of Comments + │ │ └─ The Comment + │ └─ The List of Attachments + | └─ The Attachment + ``` + +- The Fetched Users are The List of Users that are fetched from The API. + +- The Fetched Tasks are The List of Tasks that are fetched from The API. + +- The Fetched Comments are The List of Comments that are fetched from The API. + +- The Fetched Folders are The List of Folders that are fetched from The API. + +- The Fetched Attachments are The List of Attachments that are fetched from The API. One task from The Fetched Tasks can have multiple attachments. -- The Fetched Projects are The List of Projects that are fetched from The API. - -- The Fetched Contacts are The List of Contacts that are fetched from The API. +***Non-Functional Requirements:*** -- The Fetched Tasks are The The List of Tasks of The Project that are fetched from The API. +- {% include "templates/internal_client.plain", external_system_name: "Wrike" %} +- The Wrike API Key is provided in `event["payload"]["connection_data"]["key"]`. This is OAuth 2.0 Bearer token. -***Non-Functional Requirements:*** +***Test Requirements:*** -- The Wrike API Key is provided in the "key" field of the "connection_data" object which is part of "payload" object. +- Credentials should be read from the environment. The following environment variables are required: `WRIKE_API_KEY` (The Wrike API Key), `WRIKE_SPACE_ID` (The Space ID). -- The Space ID is provided in the "org_id" field of the "connection_data" object which is part of "payload" object. 
+- When generating The Acceptance Test with The Attached JSON Of The Acceptance Test, The Conformance Tests *MUST*: + - Replace `WRIKE_API_KEY` placeholder in The Attached JSON Of The Acceptance Test with actual The Wrike API Key. + - Replace `WRIKE_SPACE_ID` placeholder in The Attached JSON Of The Acceptance Test with actual The Space ID. +- "external_sync_unit_id" "IEAGS6BYI5RFMPP7" can be used when folder ID is required for testing purposes. -***Test Requirements:*** +- Task ID "IEAGS6BYKRRFMPQG" can be used when task ID is required for testing attachments. -- Credentials should be read from the environment. The following environment variables are required: `WRIKE_API_KEY` (The Wrike API Key) and `WRIKE_SPACE_GID` (The Space ID). +## Data Fetching -- "external_sync_unit_id" "IEAGS6BYI5RFMPPY" can be used when space ID is required for testing purposes. +***Definitions:*** -## The Boilerplate Code +- {% include "templates/data_fetching_return_expectation.plain" %} ***Non-Functional Requirements:*** -- The Postman collection [wrike_postman.json](wrike_postman.json) describes The API. +- The resource [wrike_postman.json](wrike_postman.json) describes The API. + - Based on the resource [wrike_postman.json](wrike_postman.json), calculate the `api_delay` for the Functions. -- The Postman Request Fields is a query parameter with key `fields` and value as a list of strings. The Postman Request Fields are optional and are defined per request basis. The Postman Request Fields for each request is determined solely by The Postman Collection. - -- If a given request does not have a query parameter with key `fields`, do not include it in the request. If a given request has a query parameter with key `fields`, include only the fields that are referenced by The Postman Request Fields for the given request in the "fields" query parameter. Do not include or make up any additional fields not specified in The Postman Collection. 
+- The Functions being implemented *must not* invoke The Spawn Method nor The Worker Thread. +- {% include "templates/should_not_modify_extraction_function.plain" %} ***Functional Requirements:*** -- Implement The Function that provides a check if authentication with The API works. +- Implement The Function "check_authentication" that provides a check if authentication with The API works. Authentication should be checked by making a request to the endpoint "/contacts", and providing "me=true" as a query parameter. + ***Acceptance Tests:*** -## Data Fetching + - Test the function "check_authentication". Expect the API response (`:= api_response`) to equal `api_response["data"][0]["id"]="KUAUZTPW"`. -***Non-Functional Requirements:*** + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "check_authentication" %} -- The Postman collection [wrike_postman.json](wrike_postman.json) describes The API. +- Implement The Function called "fetch_space_folders" that uses The API to fetch The Fetched Folders using the endpoint "/spaces/{spaceId}/folders". + - You *should not* use `projects=true` query param. + - The Space ID is provided in `event["payload"]["connection_data"]["org_id"]`. -- The Postman Request Fields is a query parameter with key `fields` and value as a list of strings. The Postman Request Fields are optional and are defined per request basis. The Postman Request Fields for each request is determined solely by The Postman Collection. + ***Acceptance Tests:*** -- If a given request does not have a query parameter with key `fields`, do not include it in the request. If a given request has a query parameter with key `fields`, include only the fields that are referenced by The Postman Request Fields for the given request in the "fields" query parameter. Do not include or make up any additional fields not specified in The Postman Collection. + - Test The Function "fetch_space_folders". 
Expect the number of The Fetched Folders to be 3. -- The Mapping is a method in The Implementation Code that maps the fields in The API response to the fields in The Function's output. The Mapping should output "snake_case" for JSON keys. The Mapping should map every single field from The OpenAPI Specification to The Function's output. + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "fetch_space_folders" %} -- If The Function uses The API to fetch data, The Mapping should be used along its rules. +- Implement The Function "fetch_users" that uses The API to fetch The List of Users (The Fetched Users) using the endpoint "/contacts?types=[Person]". -***Functional Requirements:*** + ***Acceptance Tests:*** + + - When using The Test Wrike Credentials, expect exactly 4 users in the result of The Function. + + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "fetch_users" %} + +- Implement The Function called "fetch_folder_tasks" that uses The API to fetch The Fetched Tasks for a given folder using the endpoint "/folders/{folderId}/tasks". + - The Folder ID is provided in `event["payload"]["event_context"]["external_sync_unit_id"]`. + - One of the query params must be "fields=[responsibleIds]". + - "pageSize" and "nextPageToken" should be provided in `event["input_data"]["global_values"]["pageSize"]` and `event["input_data"]["global_values"]["nextPageToken"]`. + - The following query parameters should also be supported: + - "updatedDate" (optional, a timestamp in ISO 8601 UTC format, can be used for filtering tasks by updated date) -- Implement The Function that uses The API to fetch The List of Projects (The Fetched Projects) using the endpoint "/spaces/{spaceId}/folders?project=true". ***Acceptance Tests:*** - - When using The Test Wrike Credentials a project with the title "First project" must be in the result of The Function. 
+ - Test The Function in multiple steps: + - Step 1: Call the "fetch_folder_tasks" function with Folder ID "IEAGS6BYI5RFMPP7" and "pageSize" of 100. + - Step 2: Expect the API response (`:= api_response`) to have `api_response["nextPageToken"]` field and `len(api_response["data"])=100` + - Step 3: Call the "fetch_folder_tasks" function again with Folder ID "IEAGS6BYI5RFMPP7", "pageSize" of 100 and "nextPageToken" value received on step 2. Expect the API response to have `len(api_response["data"])=10`. -- Implement The Function that uses The API to fetch The List of Contacts of The Space (The Fetched Contacts). This list can be retrieved using the endpoint "spaces/{spaceId}?fields=[members]". The retrieved list contains objects representing contacts. The rest of information about contacts can be fetched from /contacts/{contactIds}?types=[Person]. + - Test The Function "fetch_folder_tasks" with Folder ID "IEAGS6BYI5RFMPP7". From API response (`:= api_response`), expect that every element from `api_response["data"]` contains field `"responsibleIds"`. + + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "fetch_folder_tasks" %} + +- Implement The Function called "fetch_task_attachments" that uses The API to fetch The Fetched Attachments for a given task using the endpoint "/tasks/{taskId}/attachments", with the "withUrls" query parameter set to true. ***Acceptance Tests:*** - - When using The Test Wrike Credentials, The Function must return 3 members with their primaryEmail, firstName and lastName. + - Test The Function "fetch_task_attachments" with Task ID "IEAGS6BYKRRFMPQG". Let `api_response` be the API response. Expect `len(api_response["data"])=1` and `api_response["data"][0]["name"]="Proof this image.jpg"` -- Implement The Function that uses The API to fetch The List of Tasks of The Project (The Fetched Tasks) using the endpoint "/folders/{projectId}/tasks".
The projectId is provided in the "external_sync_unit_id" field of the "event_context" dictionary which is part of "payload" dictionary. + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "fetch_task_attachments" %} + +- Implement The Function called "fetch_task_comments" that uses The API to fetch The Fetched Comments for a given task using the endpoint "/tasks/{taskId}/comments". ***Acceptance Tests:*** - - When using The Test Wrike Credentials and The Project ID "IEAGS6BYI5RFMPP7", 10 tasks should be fetched in the result of The Function. + - Test The Function "fetch_task_comments" with Task ID "IEAGS6BYKRRFMPQG". Expect the number of The Fetched Comments to be 2. -## Generate 'external_domain_metadata.json' + - {% include "templates/test_rate_limiting_during_data_extraction.plain", function_name: "fetch_task_comments" %} -***Definitions:*** -- The structure of The External Domain Metadata JSON object is specified by the JSON schema defined in the resource [external_domain_metadata_schema.json](external_domain_metadata_schema.json). - - Please note that all refers_to keys in reference and typed_reference fields must use the format "#record:", where matches a key in the top-level record_types. This ensures references are unambiguous and valid. Use {} as the value to refer by default identifier (id), or specify { "by_field": "field_name" } to use a custom key. - - If an external record type has some concept of states, use the resource [Metadata extraction](docs/metadata-extraction.mdx) for instructions on how to describe it. +## Generate 'initial_domain_mapping.json' and 'external_domain_metadata.json' -***Non-Functional Requirements:*** +### Generate 'external_domain_metadata.json' -- Store The External Domain Metadata JSON object as a separate JSON file. 
+{% include "templates/external_domain_metadata_boilerplate.plain" %} ***Functional Requirements:*** -- Implement The Function that generates and returns The External Domain Metadata JSON object. The record types that should be included in the The External Domain Metadata are The List of Tasks of The Project ('tasks') and The List of Contacts of The Space ('users'). +- Implement The Function 'get_external_domain_metadata' that generates and returns The External Domain Metadata JSON object. The External Domain Metadata JSON object should have the record type 'users'. + {% include "mappings/external_domain_metadata_users.plain" %} - ***Acceptance Tests:*** +- The External Domain Metadata JSON object should include the record type 'tasks', while preserving any existing record types. + {% include "mappings/external_domain_metadata_tasks.plain" %} + +- The External Domain Metadata JSON object should include the record type 'comments', while preserving any existing record types. + {% include "mappings/external_domain_metadata_comments.plain" %} + +### Generate 'initial_domain_mapping.json' - - Validate generated The External Domain Metadata JSON object with the chef-cli tool (The Chef CLI) using the command "chef-cli validate-metadata": - - This command reads JSON object from stdin. - - The path to The Chef CLI executable is provided in the environment variables `CHEF_CLI_PATH`. - - For the test to be successful The Chef CLI must return an empty output. - - The test should always fail if The Chef CLI is not available. +{% include "templates/initial_domain_mapping_boilerplate.plain" %} -## Generate 'initial_domain_mapping.json' +***Functional Requirements:*** + +- Implement The Function that generates and returns The Initial Domain Mapping JSON object. The Initial Domain Mapping JSON object should have record_type_mappings "users". 
+ {% include "mappings/initial_domain_mapping_users.plain" %} + +- The Initial Domain Mapping JSON object should have record_type_mappings "tasks", while preserving any existing mappings. + {% include "mappings/initial_domain_mapping_tasks.plain" %} + +- The Initial Domain Mapping JSON object should have record_type_mappings "comments", while preserving any existing mappings. + {% include "mappings/initial_domain_mapping_comments.plain" %} + +## Pushing data to DevRev servers ***Definitions:*** -- The structure of The Initial Domain Mapping JSON object is specified by the JSON schema defined in the resource [initial_mappings_schema.yaml](initial_mappings_schema.yaml). - - For a complete list of supported DevRev object types and their fields, see resource [Supported DevRev object types for Airdrop](docs/supported-object-types.md). - - For information about transformation methods, see resource [Mapping Reasons](docs/mapping-reasons.mdx). - - When working with devrev_leaf_type, be aware that the schema expects different formats depending on context. In most places, it should be passed as an object with object_category and object_type. However, in the airdrop-recipe-create-possible-record-type-mapping context, it must be a string representing only the object_type. - - Please note that mappings are split into separate ‘shards’ - one for each record type - for easier manipulation and storage. - - Please note that a leaf type or a concrete id has to be selected for use_devrev_record, but not both. +- Resource [data-extraction.md](docs/data-extraction.md) should serve as a generic guide for the implementation of The Extraction Function. + +- Refer to the resource [data-extraction-rules-for-emitting-events.md](docs/data-extraction-rules-for-emitting-events.md) for the rules for emitting events for The Extraction Function. + +- TheExtractionStateObject is a state passed to The Worker Thread. 
The following structure for `TheExtractionStateObject` should be used: + - "users": + - "completed" (boolean, required, indicating if the users data has been fully pushed) + - "tasks": + - "completed" (boolean, required, indicating if the tasks data has been fully pushed) + - "nextPageToken" (string, optional, indicating the token to use when fetching the next page of The Fetched Tasks) + - "modifiedSince" (string, optional, timestamp used for incremental data synchronization to fetch only the tasks that have been updated since the last sync) + - "comments": + - "completed" (boolean, required, indicating if the comments data has been fully pushed) + - "attachments": + - "completed" (boolean, required, indicating if the attachments data has been fully pushed) ***Non-Functional Requirements:*** -- Store The Initial Domain Mapping JSON object as a separate JSON file. +- The resource [wrike_postman.json](wrike_postman.json) describes The API. + - The Normalization Function should take the relevant record type from The External Domain Metadata JSON object and map all resources from The API to the corresponding record type in The Normalization Function. +{% include "templates/spawn_method_instructions.plain" %} + +- Requests to The API *MUST NOT* be mocked. + +- If employing pagination, you should always set the parameter "pageSize" to 100 (`ThePaginationLimit`) when calling The API. + +- If employing pagination during the data extraction phase, here's how you should do it: + - While `TheExtractionStateObject[]["completed"]=false`: + - Fetch from The API (`:= TheExpectedResourceResponse`) with query params: + - `pageSize=ThePaginationLimit` + - `nextPageToken=TheExtractionStateObject[]["nextPageToken"]` if it exists and is not empty. 
+ - If `len(TheExpectedResourceResponse["data"]) < ThePaginationLimit`: + - `TheExtractionStateObject[]["completed"]=true` + - `TheExtractionStateObject[]["nextPageToken"]=""` + - Else: + - `TheExtractionStateObject[]["nextPageToken"]=TheExpectedResourceResponse["nextPageToken"]` ***Functional Requirements:*** -- Implement The Function that generates and returns The Initial Domain Mapping JSON object. +> External sync units + +- If "event_type" equals "EXTRACTION_EXTERNAL_SYNC_UNITS_START", The Extraction Function should implement the "external sync units extraction" part of the extraction workflow as described in the resource [external-sync-units-extraction.mdx](docs/external-sync-units-extraction.mdx): + - Retrieve The Fetched Folders + - Push The Fetched Folders as external sync units using the following mapping from the fields in The Fetched Folders to the fields in The External Sync Units: + - "id" should map to "id". + - "title" should map to "name". + - "description" should map to "description". + - "item_type" should have a fixed value of "tasks". + - No other fields should be used. ***Acceptance Tests:*** - - Validate generated The Initial Domain Mapping JSON object with the chef-cli tool (The Chef CLI) using the command "chef-cli initial-mapping check -m ": - - The Chef CLI reads The Initial Domain Mapping JSON object from stdin. - - The path to The Chef CLI executable is provided in the environment variables `CHEF_CLI_PATH`. - - The test passes only if the "RemainingDeficiencies" key in the returned JSON output has a value of null. If the value is anything other than null the test must fail. - - The test should always fail if The Chef CLI is not available. 
+ - {% include "templates/external_sync_unit_acceptance_test.plain", resource_name: "external_sync_unit_check.json", expected_external_sync_unit_count: 3, expected_external_sync_unit_name: "First project" %} -## Pushing data to DevRev servers +> Metadata extraction -***Non-Functional Requirements:*** +- If "event_type" equals "EXTRACTION_METADATA_START" The Extraction Function should implement the "metadata extraction" part of the extraction workflow as described in the resource [metadata-extraction.md](docs/metadata-extraction.mdx). Please note that The External Domain Metadata JSON should not be normalized when pushed to the repository. -- All operations accessing external services or data providers must be implemented in worker threads. -***Functional Requirements:*** +> Data extraction - USERS -- When spawning a new worker, pass the parameter object to the spawn function that includes the "initialDomainMapping" key with The Initial Domain Mapping JSON object as the value. Do not pass this under "options" key but directly in the top-most level of the parameter object. +- If "event_type" equals "EXTRACTION_DATA_START" or "EXTRACTION_DATA_CONTINUE" The Extraction Function should push the 'users' data. To push the 'users' data, it should: + - If `TheExtractionStateObject["users"]["completed"]=false`: + - Fetch The Fetched Users + - Push The Fetched Users to the repository designated for 'users' data. + - Set `TheExtractionStateObject["users"]["completed"]=true` + + ***Acceptance Tests:*** -- When spawning a new worker, pass the parameter object to the spawn function that does not contain the "options" key. + - {% include "templates/data_extraction_acceptance_test.plain", resource_name: "data_extraction_test.json", artifact_name: "users", expected_item_count: 4 %} -- Implement The Function named "extraction" (The Extraction Function) that pushes The Fetched Projects as external sync units if "event_type" equals "EXTRACTION_EXTERNAL_SYNC_UNITS_START". 
+ - {% include "templates/test_rate_limiting_during_emitting_events.plain", input_event_name: "EXTRACTION_DATA_START", resource_name: "data_extraction_test.json", expected_output_event_type: "EXTRACTION_DATA_DELAY" %} + + - Validate The Normalization Function with the chef-cli tool (The Chef CLI) for the "users" record type: + - Firstly, invoke The Extraction Function using the resource [data_extraction_test.json](test_data/data_extraction_test.json). + {% include "templates/chef_cli_normalization_validation.plain", record_type: "users" %} + + +> Data extraction - TASKS + +- If "event_type" equals "EXTRACTION_DATA_START" or "EXTRACTION_DATA_CONTINUE" The Extraction Function should push the 'tasks' and 'users' data. To push the 'tasks' data, it should: + - If `TheExtractionStateObject["tasks"]["completed"]=false`: + - Fetch The Fetched Tasks using pagination (The Tasks Iteration). For every page: + - Push the Fetched Tasks to repository designated for 'tasks' data. + - Update the "nextPageToken" parameter for "tasks". + - Mark the "tasks" data as completed. + - Wrike specific: When fetching The Fetched Tasks use the query param 'fields', where the value is an array of strings, which contains 'hasAttachments' (but it can also contain other values). ***Acceptance Tests:*** - - Test The Extraction Function using the resource [external_sync_unit_check.json](test_data/external_sync_unit_check.json). Make sure to replace placeholders in the resource with The Wrike API Key and The Space ID. For the test to be successful The Callback Server **must** receive from DevRev **exactly one** event with "event_type" that equals "EXTRACTION_EXTERNAL_SYNC_UNITS_DONE". The test must not send the event directly to The Callback Server. 
+ - {% include "templates/data_extraction_acceptance_test.plain", resource_name: "data_extraction_test.json", artifact_name: "tasks", expected_item_count: 110 %} -- If "event_type" equals "EXTRACTION_EXTERNAL_SYNC_UNITS_START" The Extraction Function should fetch the tasks count for each project and push it as part of the external sync units. + - {% include "templates/data_extraction_acceptance_test.plain", resource_name: "data_extraction_continue_test.json", artifact_name: "tasks", expected_item_count: 110 %} -- If "event_type" equals "EXTRACTION_METADATA_START" The Extraction Function should implement the "metadata extraction" part of the extraction workflow by pushing The External Domain Metadata JSON object to the repository called 'external_domain_metadata'. Please note that The External Domain Metadata JSON object shouldn't be normalized when pushed to the repository. + - {% include "templates/test_rate_limiting_during_emitting_events.plain", input_event_name: "EXTRACTION_DATA_START", resource_name: "data_extraction_test.json", expected_output_event_type: "EXTRACTION_DATA_DELAY" %} -- If "event_type" equals "EXTRACTION_DATA_START" The Extraction Function should: - - push The Fetched Contacts to the repository named 'users' - - push The Fetched Tasks to the repository designated for 'tasks' data - (but make sure that a single "EXTRACTION_DATA_DONE" event is emitted) + - Validate The Normalization Function with the chef-cli tool (The Chef CLI) for the "tasks" record type: + - Firstly, invoke The Extraction Function using the resource [data_extraction_test.json](test_data/data_extraction_test.json). + {% include "templates/chef_cli_normalization_validation.plain", record_type: "tasks" %} + + +> Data extraction - TASK ATTACHMENTS + +- If "event_type" equals "EXTRACTION_DATA_START" or "EXTRACTION_DATA_CONTINUE" The Extraction Function should push the 'attachments', 'tasks' and 'users' data. 
To push the 'attachments' data, it should: + - If `TheExtractionStateObject["attachments"]["completed"]=false`: + - Inside The Tasks Iteration. For every page: + - Extract The Fetched Attachments from The Fetched Tasks. + - Push The Fetched Attachments to the repository designated for 'attachments' data. + - Mark the "attachments" data as completed. ***Acceptance Tests:*** - - Test The Extraction Function using the resource [data_extraction_test.json](test_data/data_extraction_test.json). Test is successful if The Callback Server receives from DevRev a **single** event with "event_type" that equals "EXTRACTION_DATA_DONE". The test must not send event directly to The Callback Server. + - {% include "templates/data_extraction_acceptance_test.plain", resource_name: "data_extraction_test.json", artifact_name: "attachments", expected_item_count: 2 %} + +> Data extraction - TASK COMMENTS + +- If "event_type" equals "EXTRACTION_DATA_START" or "EXTRACTION_DATA_CONTINUE" The Extraction Function should push the 'users', 'tasks', 'attachments' and 'comments' data. To push the 'comments' data, it should: + - If `TheExtractionStateObject["comments"]["completed"]=false`: + - Inside The Tasks Iteration. For every page: + - Extract The Fetched Comments from The Fetched Tasks. + - Push The Fetched Comments to the repository designated for 'comments' data. + - Mark the "comments" data as completed. 
+ + ***Acceptance Tests:*** + + - {% include "templates/data_extraction_acceptance_test.plain", resource_name: "data_extraction_test.json", artifact_name: "comments", expected_item_count: 2 %} + + - {% include "templates/test_rate_limiting_during_emitting_events.plain", input_event_name: "EXTRACTION_DATA_START", resource_name: "data_extraction_test.json", expected_output_event_type: "EXTRACTION_DATA_DELAY" %} + + +> Attachments extraction + +- If "event_type" equals "EXTRACTION_ATTACHMENTS_START" or "EXTRACTION_ATTACHMENTS_CONTINUE" The Extraction Function should implement attachment streaming as described in the resource [attachments-extraction.md](docs/attachments-extraction.md). + + ***Acceptance Tests:*** + + - {% include "templates/attachment_extraction_acceptance_test.plain", data_extraction_resource_name: "data_extraction_test.json", attachment_extraction_resource_name: "attachments_extraction_test.json", expected_attachment_count: 2 %} + + - Test The Extraction Function using the resource [attachments_extraction_continue_test.json](test_data/attachments_extraction_continue_test.json). + - Expect The Callback Server to receive from DevRev a **single** event with "event_type" that equals "EXTRACTION_ATTACHMENTS_DONE". + + - {% include "templates/test_rate_limiting_during_emitting_events.plain", input_event_name: "EXTRACTION_ATTACHMENTS_START", resource_name: "attachments_extraction_test.json", expected_output_event_type: "EXTRACTION_ATTACHMENTS_DONE" %} + + +> INCREMENTAL MODE + +> - If "event_type" equals "EXTRACTION_DATA_START", The Extraction Function should support incremental data synchronization as described in the resource [incremental_mode.md](docs/incremental_mode.md). +> - Incremental mode should only work for "tasks" and their corresponding "attachments" and "comments" data. 
If `event["payload"]["event_context"]["mode"]=SyncMode.INCREMENTAL`, set: +> - `TheExtractionStateObject["tasks"]["modifiedSince"]=adapter.state.lastSuccessfulSyncStarted` +> - `TheExtractionStateObject["tasks"]["completed"]=false` +> - `TheExtractionStateObject["attachments"]["completed"]=false` +> - `TheExtractionStateObject["comments"]["completed"]=false` +> - Based on the field "updatedDate", you should adjust the API call to fetch only the The Fetched Tasks that have been updated after the time of the last successful sync. +> - Note: In incremental mode, you should push only the filtered tasks and their corresponding attachments and comments to the DevRev servers. - - Test The Extraction Function using the resource [data_extraction_test.json](test_data/data_extraction_test.json). Test is successful if The Callback Server does not receive from DevRev any event with "event_type" that equals "EXTRACTION_DATA_ERROR". The test must not send event directly to The Callback Server. +> ***Acceptance Tests:*** -- If "event_type" equals "EXTRACTION_ATTACHMENTS_START" or "EXTRACTION_ATTACHMENTS_CONTINUE" The Extraction Function should implement attachment extraction as described in the resource [attachments-extraction.md](docs/attachments-extraction.md). +> include "templates/incremental_mode_acceptance_tests.plain" \ No newline at end of file diff --git a/mappings/external_domain_metadata_comments.plain b/mappings/external_domain_metadata_comments.plain new file mode 100644 index 0000000..3065a7f --- /dev/null +++ b/mappings/external_domain_metadata_comments.plain @@ -0,0 +1,6 @@ +- The record type 'comments' (Name: Comments) should have the following fields: + - text (display name: "Text", is required, type: rich text) + - author_id (display name: "Author ID", is required, type: reference) + - Field author_id refers to the record type "#record:users". 
+ - task_id (display name: "Task ID", is required, type: reference) + - Field task_id refers to the record type "#record:tasks". diff --git a/mappings/external_domain_metadata_tasks.plain b/mappings/external_domain_metadata_tasks.plain new file mode 100644 index 0000000..8cadbaf --- /dev/null +++ b/mappings/external_domain_metadata_tasks.plain @@ -0,0 +1,8 @@ +- The record type 'tasks' (Name: Tasks) should have the following fields: + - title (display name: "Title", is required, type: text) + - description (display name: "Description", is required, type: rich text) + - status (display name: "Status", is required, type: enum) + - permalink (display name: "URL", is required, type: text) + - responsible_ids (display name: "Responsible IDs", is required, type: reference) + - Field responsible_ids refers to the record type "#record:users". + - Type of field responsible_ids is an array with max_length 1, which should be used as array value. \ No newline at end of file diff --git a/mappings/external_domain_metadata_users.plain b/mappings/external_domain_metadata_users.plain new file mode 100644 index 0000000..9b3b1bc --- /dev/null +++ b/mappings/external_domain_metadata_users.plain @@ -0,0 +1,4 @@ +- The record type 'users' (Name: Users) should have the following fields: + - full_name (display name: "Full Name", is required, type: text) + - email (display name: "Email", is required, type: text) + - title (display name: "Title", is optional, type: text) \ No newline at end of file diff --git a/mappings/initial_domain_mapping_comments.plain b/mappings/initial_domain_mapping_comments.plain new file mode 100644 index 0000000..245f2b0 --- /dev/null +++ b/mappings/initial_domain_mapping_comments.plain @@ -0,0 +1,11 @@ +- The record_type_mappings "comments" should have the following properties: + - Default mapping should map each external comment to a "comment" object. 
+ - There should be a single "possible_record_type_mappings" element, specifying: + - The mapping is one-way (reverse is false, forward is true). + - There should be no custom fields in the mapping. + - The following The Stock Field Mapping Fields should be mapped using The External Transformation Method: + - field "text" should be mapped to "body" (rich text). + - field "author_id" should be mapped to "created_by_id" (use directly). + - field "author_id" should be mapped to "modified_by_id" (use directly). + - field "task_id" should be mapped to "parent_object_id" (use directly). + diff --git a/mappings/initial_domain_mapping_tasks.plain b/mappings/initial_domain_mapping_tasks.plain new file mode 100644 index 0000000..f9c81da --- /dev/null +++ b/mappings/initial_domain_mapping_tasks.plain @@ -0,0 +1,20 @@ +- The record_type_mappings "tasks" should have the following properties: + - Default mapping should map each external task to an "issue" object. + - There should be a single "possible_record_type_mappings" element, specifying: + - The mapping is one-way (reverse is false, forward is true) + - There should be no custom fields in the mapping. 
+ - The following The Stock Field Mapping Fields should be mapped using The External Transformation Method: + - field "title" should be mapped to "title" + - field "permalink" should be mapped to "item_url_field" + - field "description" should be mapped to "body" (rich text) + - field "responsible_ids" should be mapped to "owned_by_ids" (use directly) + - The following The Stock Field Mapping Fields should be mapped using The Fixed Transformation Method: + - field "priority" should contain fixed value "P2" + - The following The Stock Field Mapping Fields should be mapped using The DevRev Record Transformation Method: + - field "applies_to_part_id" should refer to the "product" object type + - The following The Stock Field Mapping Fields should be mapped using The Map Enum Transformation Method: + - field "status" should be mapped to "stage" in the following way: + - "Active" maps to "in_development" + - "Completed" maps to "completed" + - "Deferred" maps to "backlog" + - "Cancelled" maps to "wont_fix" \ No newline at end of file diff --git a/mappings/initial_domain_mapping_users.plain b/mappings/initial_domain_mapping_users.plain new file mode 100644 index 0000000..9e321ab --- /dev/null +++ b/mappings/initial_domain_mapping_users.plain @@ -0,0 +1,9 @@ +- The record_type_mappings "users" should have the following properties: + - Default mapping should map each external user to a "devu" user object. + - There should be a single "possible_record_type_mappings" element, specifying: + - The mapping is one-way (reverse is false, forward is true). + - There should be no custom fields in the mapping. + - The following The Stock Field Mapping Fields should be mapped using The External Transformation Method: + - field "full_name" should be mapped to "full_name". + - field "email" should be mapped to "email". + - field "title" should be mapped to "display_name". 
\ No newline at end of file diff --git a/rate_limiting_proxy.py b/rate_limiting_proxy.py new file mode 100644 index 0000000..b19c317 --- /dev/null +++ b/rate_limiting_proxy.py @@ -0,0 +1,356 @@ +import socket +import threading +import socketserver +import time +import sys +import ssl +import json +import datetime +import email.utils +from urllib.parse import urlparse + +# Rate limiting settings +TOKEN_BUCKET_CAPACITY = 100 # requests +REFILL_RATE = 10 # requests per second + +# ============================================================================ +# SERVICE-SPECIFIC CONFIGURATION: Customize this section for your integration +# ============================================================================ +# This configuration mimics Trello's rate limiting response format. +# When adapting this proxy for a different third-party service, modify these +# settings to match that service's 429 response behavior. +# ============================================================================ + +RATE_LIMIT_DELAY = 3 # seconds - Time to wait before retrying + +class RateLimiterState: + """A thread-safe class to manage the global rate limiting state.""" + def __init__(self): + self.lock = threading.Lock() + self.rate_limiting_active = False + self.test_name = None + + def start_rate_limiting(self, test_name): + with self.lock: + self.rate_limiting_active = True + self.test_name = test_name + + def end_rate_limiting(self): + with self.lock: + self.rate_limiting_active = False + self.test_name = None + + def is_rate_limiting_active(self): + with self.lock: + return self.rate_limiting_active, self.test_name + +rate_limiter_state = RateLimiterState() + +class TokenBucket: + """A thread-safe token bucket for rate limiting.""" + def __init__(self, capacity, refill_rate): + self.capacity = float(capacity) + self.refill_rate = float(refill_rate) + self.tokens = float(capacity) + self.last_refill = time.time() + self.lock = threading.Lock() + + def consume(self, tokens): + 
"""Consumes tokens from the bucket. Returns True if successful, False otherwise.""" + with self.lock: + now = time.time() + time_since_refill = now - self.last_refill + new_tokens = time_since_refill * self.refill_rate + self.tokens = min(self.capacity, self.tokens + new_tokens) + self.last_refill = now + + if self.tokens >= tokens: + self.tokens -= tokens + return True + return False + +rate_limiter = TokenBucket(TOKEN_BUCKET_CAPACITY, REFILL_RATE) + +def create_rate_limit_response(): + """ + TODO: Adopt this based on the 3rd party service's rate limiting response format. + + ======================================================================== + SERVICE-SPECIFIC: Customize this function for your third-party service + ======================================================================== + + Generates the 429 Rate Limit response matching the third-party service's + format. Different services may use different: + - Response body structures (e.g., {"detail": "..."} vs {"error": "..."}) + - Retry-After header formats (HTTP date vs seconds) + - Error messages and field names + + This implementation matches Trello's rate limiting response format. + + Returns: + tuple: (status_code, status_message, response_body_dict, headers_dict) + """ + retry_after = RATE_LIMIT_DELAY + + response_body = { + "errorDescription": "Rate limit exceeded, try again later", + "error": "rate_limit_exceeded" + } + headers = {"Retry-After": retry_after} + + return 429, "Too Many Requests", response_body, headers + +class ProxyHandler(socketserver.BaseRequestHandler): + """Handles incoming proxy requests.""" + def handle(self): + if not rate_limiter.consume(1): + print("Rate limit exceeded. Dropping connection.") + try: + self.request.sendall(b'HTTP/1.1 429 Too Many Requests\r\n\r\n') + except OSError: + pass # Client might have already closed the connection. 
+ finally: + self.request.close() + return + + try: + data = self.request.recv(4096) + except ConnectionResetError: + return # Client closed connection. + + if not data: + return + + first_line = data.split(b'\r\n')[0] + try: + method, target, _ = first_line.split() + except ValueError: + print(f"Could not parse request: {first_line}") + self.request.close() + return + + print(f"Received request: {method.decode('utf-8')} {target.decode('utf-8')}") + + path = target.decode('utf-8') + # Check for control plane endpoints on the proxy itself + if path.startswith(('/start_rate_limiting', '/end_rate_limiting')): + self.handle_control_request(method, path, data) + return + + # Check if global rate limiting is active + is_active, test_name = rate_limiter_state.is_rate_limiting_active() + if is_active: + print(f"Rate limiting is active for test: '{test_name}'. Blocking request.") + + # Generate service-specific rate limit response + status_code, status_message, response_body, headers = create_rate_limit_response() + self.send_json_response(status_code, status_message, response_body, headers=headers) + return + + if method == b'CONNECT': + self.handle_connect(target) + else: + self.handle_http_request(target, data) + + def get_request_body(self, data): + header_end = data.find(b'\r\n\r\n') + if header_end != -1: + return data[header_end + 4:].decode('utf-8') + return "" + + def send_json_response(self, status_code, status_message, body_json, headers=None): + body_bytes = json.dumps(body_json).encode('utf-8') + + response_headers = [ + f"HTTP/1.1 {status_code} {status_message}", + "Content-Type: application/json", + f"Content-Length: {len(body_bytes)}", + "Connection: close", + ] + + if headers: + for key, value in headers.items(): + response_headers.append(f"{key}: {value}") + + response_headers.append("") + response_headers.append("") + + response = '\r\n'.join(response_headers).encode('utf-8') + body_bytes + try: + self.request.sendall(response) + except OSError: + pass # 
Client might have closed the connection. + finally: + self.request.close() + + def handle_control_request(self, method, path, data): + if method != b'POST': + self.send_json_response(405, "Method Not Allowed", {"error": "Only POST method is allowed"}) + return + + if path == '/start_rate_limiting': + body_str = self.get_request_body(data) + if not body_str: + self.send_json_response(400, "Bad Request", {"error": "Request body is missing or empty"}) + return + try: + body_json = json.loads(body_str) + test_name = body_json.get('test_name') + if not test_name or not isinstance(test_name, str): + self.send_json_response(400, "Bad Request", {"error": "'test_name' is missing or not a string"}) + return + except json.JSONDecodeError: + self.send_json_response(400, "Bad Request", {"error": "Invalid JSON in request body"}) + return + + rate_limiter_state.start_rate_limiting(test_name) + response_body = {"status": f"rate limiting started for test: {test_name}"} + self.send_json_response(200, "OK", response_body) + + elif path == '/end_rate_limiting': + rate_limiter_state.end_rate_limiting() + response_body = {"status": "rate limiting ended"} + self.send_json_response(200, "OK", response_body) + else: + self.send_json_response(404, "Not Found", {"error": "Endpoint not found"}) + + def handle_http_request(self, target, data): + """Handles HTTP requests like GET, POST, etc.""" + try: + parsed_url = urlparse(target.decode('utf-8')) + host = parsed_url.hostname + port = parsed_url.port + if port is None: + port = 443 if parsed_url.scheme == 'https' else 80 + except Exception as e: + print(f"Could not parse URL for HTTP request: {target}. 
Error: {e}") + self.request.close() + return + + if not host: + print(f"Invalid host in URL: {target}") + self.request.close() + return + + try: + remote_socket = socket.create_connection((host, port), timeout=10) + if parsed_url.scheme == 'https': + context = ssl.create_default_context() + remote_socket = context.wrap_socket(remote_socket, server_hostname=host) + except (socket.error, ssl.SSLError) as e: + print(f"Failed to connect or SSL wrap to {host}:{port}: {e}") + self.request.close() + return + + # Modify the request to use a relative path and force connection closing + # This ensures each request gets its own connection and is logged. + header_end = data.find(b'\r\n\r\n') + if header_end == -1: + # If no header-body separator is found, assume it's a simple request with no body. + header_end = len(data) + + header_data = data[:header_end] + body = data[header_end:] + + lines = header_data.split(b'\r\n') + first_line = lines[0] + headers = lines[1:] + + method, _, http_version = first_line.split(b' ', 2) + + path = parsed_url.path or '/' + if parsed_url.query: + path += '?' + parsed_url.query + + new_first_line = b' '.join([method, path.encode('utf-8'), http_version]) + + new_headers = [] + for header in headers: + # Remove existing connection-related headers, as we're forcing it to close. 
+ if not header.lower().startswith(b'connection:') and \ + not header.lower().startswith(b'proxy-connection:'): + new_headers.append(header) + new_headers.append(b'Connection: close') + + modified_header_part = new_first_line + b'\r\n' + b'\r\n'.join(new_headers) + modified_request = modified_header_part + body + + try: + remote_socket.sendall(modified_request) + except OSError: + remote_socket.close() + return + + self.tunnel(self.request, remote_socket) + + def handle_connect(self, target): + """Handles CONNECT requests for HTTPS traffic.""" + try: + host, port_str = target.split(b':') + port = int(port_str) + except ValueError: + print(f"Invalid target for CONNECT: {target}") + self.request.close() + return + + try: + remote_socket = socket.create_connection((host.decode('utf-8'), port), timeout=10) + except socket.error as e: + print(f"Failed to connect to {host.decode('utf-8')}:{port}: {e}") + self.request.close() + return + + try: + self.request.sendall(b'HTTP/1.1 200 Connection Established\r\n\r\n') + except OSError: + remote_socket.close() + return + + self.tunnel(self.request, remote_socket) + + def tunnel(self, client_socket, remote_socket): + """Tunnels data between the client and the remote server.""" + stop_event = threading.Event() + + def forward(src, dst): + try: + while not stop_event.is_set(): + data = src.recv(4096) + if not data: + break + dst.sendall(data) + except OSError: + pass + finally: + stop_event.set() + + client_thread = threading.Thread(target=forward, args=(client_socket, remote_socket)) + remote_thread = threading.Thread(target=forward, args=(remote_socket, client_socket)) + + client_thread.start() + remote_thread.start() + + client_thread.join() + remote_thread.join() + + client_socket.close() + remote_socket.close() + +class ThreadingTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): + daemon_threads = True + allow_reuse_address = True + +def main(): + HOST, PORT = "localhost", 8004 + + try: + server = 
ThreadingTCPServer((HOST, PORT), ProxyHandler) + print(f"Starting proxy server on {HOST}:{PORT}") + server.serve_forever() + except Exception as e: + print(f"Could not start proxy server: {e}", file=sys.stderr) + # The script `run_devrev_snapin_conformance_tests.sh` checks for exit code 69. + sys.exit(69) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/templates/incremental_mode_acceptance_tests.plain b/templates/incremental_mode_acceptance_tests.plain new file mode 100644 index 0000000..9c19936 --- /dev/null +++ b/templates/incremental_mode_acceptance_tests.plain @@ -0,0 +1,31 @@ +- Validate the incremental mode with the following flow: + - Step 1: + - Execute request `curl -X POST "http://localhost:8003/external-worker.update-last-successful-sync/" -H "Content-Type: application/json" -d '{"snap_in_version_id":"test-version-id","extend_state":{"users":{"completed":true},"tasks":{"completed":true},"attachments":{"completed":true},"comments":{"completed":true}}}'`, where `` is `event["payload"]["event_context"]["sync_unit_id"]` in the resource [data_extraction_test.json](test_data/data_extraction_test.json). + - Step 2: + - Execute request `curl -g -X PUT -H 'Authorization: bearer ' -d 'title=' 'https://www.wrike.com/api/v4/tasks/MAAAAABlGv5B'` (replace and with the actual values). + - Expect the request to succeed. + - Step 3: + - Invoke The Extraction Function using the resource [data_extraction_incremental_test.json](test_data/data_extraction_incremental_test.json). + - Expect The Callback Server to receive from DevRev a **single** event with "event_type" that equals "EXTRACTION_DATA_DONE". + - Let `artifact_array = ["event_data"]["artifacts"]` (array of objects). + - Expect `artifact_array` to not be empty array. + - Out of `artifact_array`, expect one of the elements to have "item_type" equal to "tasks" (`:= tasks_artifact`). Expect `tasks_artifact["item_count"]=1`. 
+ - Expect that there is no element in `artifact_array` with "item_type" equal to "attachments". If there is, this indicates that the attachments data was pushed to the DevRev servers, which is wrong. + - Expect that there is no element in `artifact_array` with "item_type" equal to "users". If there is, this indicates that the users data was pushed to the DevRev servers, which is wrong. + - Expect that there is no element in `artifact_array` with "item_type" equal to "comments". If there is, this indicates that the comments data was pushed to the DevRev servers, which is wrong. + +- Validate the incremental mode with the following flow: + - Step 1: + - Execute request `curl -X POST "http://localhost:8003/external-worker.update-last-successful-sync/" -H "Content-Type: application/json" -d '{"snap_in_version_id":"test-version-id","extend_state":{"users":{"completed":true},"tasks":{"completed":true},"attachments":{"completed":true},"comments":{"completed":true}}}'`, where `` is `event["payload"]["event_context"]["sync_unit_id"]` in the resource [data_extraction_test.json](test_data/data_extraction_test.json). + - Step 2: + - Execute request `curl -g -X PUT -H 'Authorization: bearer ' -d 'title=' 'https://www.wrike.com/api/v4/tasks/IEAGS6BYKRRFMPQE'` (replace and with the actual values). + - Expect the request to succeed. + - Step 3: + - Invoke The Extraction Function using the resource [data_extraction_incremental_test.json](test_data/data_extraction_incremental_test.json). + - Expect The Callback Server to receive from DevRev a **single** event with "event_type" that equals "EXTRACTION_DATA_DONE". + - Let `artifact_array = ["event_data"]["artifacts"]` (array of objects). + - Expect `artifact_array` to not be empty array. + - Out of `artifact_array`, expect one of the elements to have "item_type" equal to "tasks" (`:= tasks_artifact`). Expect `tasks_artifact["item_count"]=1`. 
+ - Out of `artifact_array`, expect one of the elements to have "item_type" equal to "attachments" (`:= attachments_artifact`). Expect `attachments_artifact["item_count"]=1`. + - Out of `artifact_array`, expect one of the elements to have "item_type" equal to "comments" (`:= comments_artifact`). Expect `comments_artifact["item_count"]=3`. + - Expect that there is no element in `artifact_array` with "item_type" equal to "users". If there is, this indicates that the users data was pushed to the DevRev servers, which is wrong. \ No newline at end of file diff --git a/test_data/attachments_extraction_continue_test.json b/test_data/attachments_extraction_continue_test.json new file mode 100644 index 0000000..e14acc1 --- /dev/null +++ b/test_data/attachments_extraction_continue_test.json @@ -0,0 +1,75 @@ +[ + { + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "DEV-36shCCBEAA", + "dev_org": "DEV-36shCCBEAA", + "dev_org_id": "DEV-36shCCBEAA", + "dev_uid": "DEVU-6", + "dev_user": "DEVU-6", + "dev_user_id": "DEVU-6", + "event_type_adaas": "", + "external_sync_unit": "688725dad59c015ce052eecf", + "external_sync_unit_id": "688725dad59c015ce052eecf", + "external_sync_unit_name": "cards-pagination-test-2025-07-28-092514", + "external_system": "6752eb95c833e6b206fcf388", + "external_system_id": "6752eb95c833e6b206fcf388", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "wrike-snapin-devrev", + "initial_sync_scope": "full-history", + "mode": "INITIAL", + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "request_id_adaas": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sequence_version": "17", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": 
"don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "sync_run": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/36shCCBEAA:external_system_type/ADAAS:external_system/6752eb95c833e6b206fcf388:sync_unit/984c894e-71e5-4e94-b484-40b839c9a916", + "sync_unit_id": "984c894e-71e5-4e94-b484-40b839c9a916", + "uuid": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_ATTACHMENTS_CONTINUE" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/36shCCBEAA", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in/04bf12fa-57bd-4057-b0b0-ed3f42d9813e", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "service_account_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:svcacc/101", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:devu/6", + "event_id": "", + "execution_id": "4481432207487786275" + }, + "execution_metadata": { + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "function_name": "extraction", + "event_type": "EXTRACTION_ATTACHMENTS_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No newline at end of file diff --git a/test_data/attachments_extraction_test.json b/test_data/attachments_extraction_test.json new file mode 100644 index 0000000..12b993d --- /dev/null +++ b/test_data/attachments_extraction_test.json @@ -0,0 +1,75 @@ +[ + { + "payload": 
{ + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "DEV-36shCCBEAA", + "dev_org": "DEV-36shCCBEAA", + "dev_org_id": "DEV-36shCCBEAA", + "dev_uid": "DEVU-6", + "dev_user": "DEVU-6", + "dev_user_id": "DEVU-6", + "event_type_adaas": "", + "external_sync_unit": "688725dad59c015ce052eecf", + "external_sync_unit_id": "688725dad59c015ce052eecf", + "external_sync_unit_name": "cards-pagination-test-2025-07-28-092514", + "external_system": "6752eb95c833e6b206fcf388", + "external_system_id": "6752eb95c833e6b206fcf388", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "wrike-snapin-devrev", + "initial_sync_scope": "full-history", + "mode": "INITIAL", + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "request_id_adaas": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sequence_version": "17", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "sync_run": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/36shCCBEAA:external_system_type/ADAAS:external_system/6752eb95c833e6b206fcf388:sync_unit/984c894e-71e5-4e94-b484-40b839c9a916", + "sync_unit_id": "984c894e-71e5-4e94-b484-40b839c9a916", + "uuid": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_ATTACHMENTS_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/36shCCBEAA", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in/04bf12fa-57bd-4057-b0b0-ed3f42d9813e", 
+ "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/787b97af-95a8-4b57-809e-8d55f4e72f40:snap_in_version/50d4660e-dad9-41D6-9169-8a7e96b2d7fa", + "service_account_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:svcacc/101", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:devu/6", + "event_id": "", + "execution_id": "4481432207487786275" + }, + "execution_metadata": { + "request_id": "ff894fd5-2290-42bb-9f89-0785e49b4049", + "function_name": "extraction", + "event_type": "EXTRACTION_ATTACHMENTS_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No newline at end of file diff --git a/test_data/data_extraction_continue_test.json b/test_data/data_extraction_continue_test.json new file mode 100644 index 0000000..742fa13 --- /dev/null +++ b/test_data/data_extraction_continue_test.json @@ -0,0 +1,76 @@ + +[ + { + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "test-dev-oid", + "dev_org": "test-dev-org", + "dev_org_id": "test-dev-org-id", + "dev_uid": "test-dev-uid", + "dev_user": "test-dev-user", + "dev_user_id": "test-dev-user-id", + "event_type_adaas": "", + "external_sync_unit": "test-external_sync_unit", + "external_sync_unit_id": "test-external_sync_unit_id", + "external_sync_unit_name": "test-external_sync_unit_name", + "external_system": "test-external_system", + "external_system_id": "test-external_system_id", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "airdrop-wrike-snap-in", + "initial_sync_scope": "full-history", + "mode": "INITIAL", + "request_id": "test-request-id", + "request_id_adaas": 
"test-request-id-adaas", + "run_id": "test-run_id", + "sequence_version": "10", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "sync_run": "test-sync_run", + "sync_run_id": "test-sync_run_id", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/test:external_system_type/ADAAS:external_system/test:sync_unit/test", + "sync_unit_id": "test-sync_unit_id", + "uuid": "test-uuid", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_CONTINUE" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/test", + "automation_id": "", + "source_id": "", + "snap_in_id": "test-don:integration:dvrv-eu-1:devo/test:snap_in/test", + "snap_in_version_id": "test-don:integration:dvrv-eu-1:devo/test:snap_in_package/test:snap_in_version/test", + "service_account_id": "test-don:identity:dvrv-eu-1:devo/test:svcacc/74", + "secrets": { + "service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/test:devu/6", + "event_id": "", + "execution_id": "test-execution-id" + }, + "execution_metadata": { + "request_id": "test-request-id", + "function_name": "extraction", + "event_type": "EXTRACTION_DATA_CONTINUE", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] diff --git a/test_data/data_extraction_incremental_test.json b/test_data/data_extraction_incremental_test.json new file mode 100644 index 0000000..3663bd5 --- /dev/null +++ b/test_data/data_extraction_incremental_test.json @@ -0,0 +1,74 @@ +[ + { + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "dev_oid": "DEV-36shCCBEAA", + "dev_org": 
"DEV-36shCCBEAA", + "dev_org_id": "DEV-36shCCBEAA", + "dev_uid": "DEVU-1", + "dev_user": "DEVU-1", + "dev_user_id": "DEVU-1", + "event_type_adaas": "", + "external_sync_unit": "688725dad59c015ce052eecf", + "external_sync_unit_id": "688725dad59c015ce052eecf", + "external_sync_unit_name": "SaaS connectors", + "external_system": "6752eb95c833e6b206fcf388", + "external_system_id": "6752eb95c833e6b206fcf388", + "external_system_name": "Wrike", + "external_system_type": "ADaaS", + "import_slug": "wrike-snapin-devrev", + "mode": "INCREMENTAL", + "request_id": "63c6f1c6-eabe-452f-a694-7f23a8f5c3cc", + "request_id_adaas": "63c6f1c6-eabe-452f-a694-7f23a8f5c3cc", + "run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sequence_version": "6", + "snap_in_slug": "wrike-snapin-devrev", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/b66dda95-cf9e-48be-918c-8439ecdd548e:snap_in_version/50d4660e-dad9-41d6-9169-8a7e96b2d7fa", + "sync_run": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_run_id": "cbbe2419-1f86-4737-aa78-6bb7118ce52c", + "sync_tier": "sync_tier_2", + "sync_unit": "don:integration:dvrv-eu-1:devo/36shCCBEAA:external_system_type/ADAAS:external_system/6752eb95c833e6b206fcf388:sync_unit/984c894e-71e5-4e94-b484-40b839c9a916", + "sync_unit_id": "984c894e-71e5-4e94-b484-40b839c9a916", + "uuid": "63c6f1c6-eabe-452f-a694-7f23a8f5c3cc", + "worker_data_url": "http://localhost:8003/external-worker" + }, + "event_type": "EXTRACTION_DATA_START" + }, + "context": { + "dev_oid": "don:identity:dvrv-eu-1:devo/36shCCBEAA", + "automation_id": "", + "source_id": "", + "snap_in_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in/03a783b1-5d9f-4af8-b958-e401f2022439", + "snap_in_version_id": "don:integration:dvrv-eu-1:devo/36shCCBEAA:snap_in_package/b66dda95-cf9e-48be-918c-8439ecdd548e:snap_in_version/50d4660e-dad9-41d6-9169-8a7e96b2d7fa", + "service_account_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:svcacc/42", + "secrets": { + 
"service_account_token": "test-service-account-token" + }, + "user_id": "don:identity:dvrv-eu-1:devo/36shCCBEAA:devu/1", + "event_id": "", + "execution_id": "13765595327067933408" + }, + "execution_metadata": { + "request_id": "63c6f1c6-eabe-452f-a694-7f23a8f5c3cc", + "function_name": "extraction", + "event_type": "EXTRACTION_DATA_START", + "devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {}, + "keyrings": null, + "resources": { + "keyrings": {}, + "tags": {} + } + } + } +] \ No newline at end of file diff --git a/test_data/data_extraction_test.json b/test_data/data_extraction_test.json index a8d6429..aba29ab 100644 --- a/test_data/data_extraction_test.json +++ b/test_data/data_extraction_test.json @@ -2,10 +2,10 @@ { "payload": { "connection_data": { - "key": "test-key", + "key": "", "key_type": "", - "org_id": "test-org-id", - "org_name": "My Space" + "org_id": "", + "org_name": "First Space" }, "event_context": { "callback_url": "http://localhost:8002/callback", diff --git a/test_data/external_domain_metadata_event_payload.json b/test_data/external_domain_metadata_event_payload.json new file mode 100644 index 0000000..7a7489b --- /dev/null +++ b/test_data/external_domain_metadata_event_payload.json @@ -0,0 +1,35 @@ + +{ + "payload": { + "connection_data": { + "key": "", + "key_type": "", + "org_id": "", + "org_name": "First Space" + }, + "event_context": { + "callback_url": "http://localhost:8002/callback", + "external_sync_unit_id": "6752eb95c833e6b206fcf388" + } + }, + "context": { + "dev_oid": "test-org-id", + "source_id": "test-source-id", + "snap_in_id": "test-snap-in-id", + "snap_in_version_id": "test-snap-in-version-id", + "service_account_id": "test-service-account-id", + "secrets": { + "service_account_token": "test-token" + } + }, + "execution_metadata": { + "request_id": "63c6f1c6-eabe-452f-a694-7f23a8f5c3cc", + "function_name": "get_external_domain_metadata", + "event_type": "test-event", + 
"devrev_endpoint": "http://localhost:8003" + }, + "input_data": { + "global_values": {}, + "event_sources": {} + } + } diff --git a/test_data/external_sync_unit_check.json b/test_data/external_sync_unit_check.json index e7fe5e6..c0a68a6 100644 --- a/test_data/external_sync_unit_check.json +++ b/test_data/external_sync_unit_check.json @@ -2,10 +2,10 @@ { "payload": { "connection_data": { - "key": "test-key", + "key": "", "key_type": "", - "org_id": "org-id", - "org_name": "Personal" + "org_id": "", + "org_name": "First Space" }, "event_context": { "callback_url": "http://localhost:8002/callback", diff --git a/wrike_postman.json b/wrike_postman.json index cf0ef2f..106b388 100644 --- a/wrike_postman.json +++ b/wrike_postman.json @@ -9,9 +9,9 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/contacts?deleted=false&fields=[metadata,currentBillRate,currentCostRate,jobRoleId]", + "raw": "https://www.wrike.com/api/v4/contacts?deleted=false&types=[Person]", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "contacts" @@ -22,8 +22,8 @@ "value": "false" }, { - "key": "fields", - "value": "[metadata,currentBillRate,currentCostRate,jobRoleId]" + "key": "types", + "value": "[Person]" } ] } @@ -34,13 +34,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/contacts/KUAFY3BJ", + "raw": "https://www.wrike.com/api/v4/contacts/{contactId}", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "contacts", - "KUAFY3BJ" + "{contactId}" ], "query": [ { @@ -54,7 +54,7 @@ "name": "Get Information about specific contact", "originalRequest": { "method": "GET", - "url": "{{WrikeAPI}}/contacts/KUANFJBJ,NVJKSNJK" + "url": "https://www.wrike.com/api/v4/contacts/{contactId1},{contactId2}" }, "status": "OK", "code": 200, @@ -71,13 +71,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/folders/IEACW7SVI4PX3YZS?fields=[briefDescription,customColumnIds,attachmentCount,contractType]", + "raw": 
"https://www.wrike.com/api/v4/folders/{folderId}?fields=[briefDescription,customColumnIds,attachmentCount,contractType]", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "folders", - "IEACW7SVI4PX3YZS" + "{folderId}" ], "query": [ { @@ -96,13 +96,14 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/spaces/{spaceId}/folders?project=true", + "raw": "https://www.wrike.com/api/v4/spaces/{spaceId}/folders", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ - "folders", - "IEACW7SVI4PX3YZS" + "spaces", + "{spaceId}", + "folders" ] } }, @@ -110,7 +111,7 @@ "name": "Get Projects in a specific Space", "originalRequest": { "method": "GET", - "url": "{{WrikeAPI}}/spaces/{spaceId}/folders?project=true" + "url": "https://www.wrike.com/api/v4/spaces/{spaceId}/folders" }, "status": "OK", "code": 200, @@ -127,13 +128,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/tasks/IEACW7SVKRAJXB7L?fields=[customItemTypeId,finance,billingType,effortAllocation,responsiblePlaceholderIds,attachmentCount,recurrent]", + "raw": "https://www.wrike.com/api/v4/tasks/{taskId}?fields=[customItemTypeId,finance,billingType,effortAllocation,responsiblePlaceholderIds,attachmentCount,recurrent]", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "tasks", - "IEACW7SVKRAJXB7L" + "{taskId}" ], "query": [ { @@ -149,13 +150,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/tasks/IEACW7SVKQZEBEUN,IEACW7SVKQPX4WHN?fields=[recurrent,attachmentCount,effortAllocation,billingType]", + "raw": "https://www.wrike.com/api/v4/tasks/{taskId1},{taskId2}?fields=[recurrent,attachmentCount,effortAllocation,billingType]", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "tasks", - "IEACW7SVKQZEBEUN,IEACW7SVKQPX4WHN" + "{taskId1},{taskId2}" ], "query": [ { @@ -171,13 +172,13 @@ "request": { "method": "GET", "url": { - "raw": 
"{{WrikeAPI}}/spaces/IEACW7SVI4O6BDQE/tasks?descendants=true&status=Active&importance=Normal&type=Planned&fields=[recurrent,attachmentCount,effortAllocation,billingType]&dueDate={\"start\":\"2020-07-01\",\"end\":\"2020-07-07\"}", + "raw": "https://www.wrike.com/api/v4/spaces/{spaceId}/tasks?descendants=true&status=Active&importance=Normal&type=Planned&fields=[recurrent,attachmentCount,effortAllocation,billingType]&dueDate={\"start\":\"2020-07-01\",\"end\":\"2020-07-07\"}", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "spaces", - "IEACW7SVI4O6BDQE", + "{spaceId}", "tasks" ], "query": [ @@ -204,6 +205,10 @@ { "key": "dueDate", "value": "{\"start\":\"2020-07-01\",\"end\":\"2020-07-07\"}" + }, + { + "key": "updatedDate", + "value": "{\"start\":\"2025-08-20T00:00:00Z\"}" } ] } @@ -214,13 +219,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/folders/IEACW7SVI4OMYFIY/tasks?descendants=true&status=Active&importance=Normal&type=Planned&fields=[recurrent,attachmentCount,effortAllocation,billingType]&dueDate={\"start\":\"2020-07-01\",\"end\":\"2020-07-07\"}&pageSize=200", + "raw": "https://www.wrike.com/api/v4/folders/{folderId}/tasks?descendants=true&status=Active&importance=Normal&type=Planned&fields=[responsibleIds]&dueDate={\"start\":\"2020-07-01\",\"end\":\"2020-07-07\"}&pageSize=200", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "folders", - "IEACW7SVI4OMYFIY", + "{folderId}", "tasks" ], "query": [ @@ -242,7 +247,7 @@ }, { "key": "fields", - "value": "[recurrent,attachmentCount,effortAllocation,billingType]" + "value": "[responsibleIds]" }, { "key": "dueDate", @@ -251,6 +256,14 @@ { "key": "pageSize", "value": "200" + }, + { + "key": "nextPageToken", + "value": "AFGM35QAAAAAUAAAAAAQAAAABIAAAAAB4FVYIMRO4RBAE" + }, + { + "key": "updatedDate", + "value": "{\"start\":\"2025-08-20T00:00:00Z\"}" } ] } @@ -261,13 +274,13 @@ "request": { "method": "GET", "url": { - "raw": 
"{{WrikeAPI}}/folders/IEACW7SVI4OMYFIY/tasks?descendants=true&subTasks=true", + "raw": "https://www.wrike.com/api/v4/folders/{folderId}/tasks?descendants=true&subTasks=true", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "folders", - "IEACW7SVI4OMYFIY", + "{folderId}", "tasks" ], "query": [ @@ -293,9 +306,9 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/attachments?versions=true&createdDate={\"start\":\"2020-07-01T00:00:00Z\",\"end\":\"2020-07-02T07:53:33Z\"}&withUrls=true", + "raw": "https://www.wrike.com/api/v4/attachments?versions=true&createdDate={\"start\":\"2020-07-01T00:00:00Z\",\"end\":\"2020-07-02T07:53:33Z\"}&withUrls=true", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "attachments" @@ -322,13 +335,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/folders/IEACW7SVI4PZXTGO/attachments", + "raw": "https://www.wrike.com/api/v4/folders/{folderId}/attachments", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "folders", - "IEACW7SVI4PZXTGO", + "{folderId}", "attachments" ] } @@ -339,16 +352,32 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/tasks/IEACW7SVKQOKD5EG/attachments", + "raw": "https://www.wrike.com/api/v4/tasks/{taskId}/attachments", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "tasks", - "IEACW7SVKQOKD5EG", + "{taskId}", "attachments" + ], + "query": [ + { + "key": "withUrls", + "value": "true" + } ] } + }, + "response": { + "name": "Get Attachments on Task", + "originalRequest": { + "method": "GET", + "url": "https://www.wrike.com/api/v4/tasks/{taskId}/attachments" + }, + "status": "OK", + "code": 200, + "body": "{\"kind\":\"attachments\",\"data\":[{\"id\":\"IEACW7SVIYEV4HBN\",\"authorId\":\"IEAGS6BY\",\"name\":\"Result from 
test.com\",\"createdDate\":\"2025-07-25T07:53:33Z\",\"version\":\"1\",\"size\":1024,\"type\":\"application/vnd.openxmlformats-officedocument.wordprocessingml.document\",\"url\":\"https://www.wrike.com/attachments/IEACW7SVIYEV4HBN/download/Lorem Ipsum.docx\",\"taskId\":\"IEACW7SVKQOKD5EG\",\"width\":100,\"height\":100}]}" } }, { @@ -356,13 +385,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/attachments/IEACW7SVIYEV4HBN", + "raw": "https://www.wrike.com/api/v4/attachments/{attachmentId}", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "attachments", - "IEACW7SVIYEV4HBN" + "{attachmentId}" ] } } @@ -372,13 +401,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/attachments/IEACW7SVIYJJEUHD/download/Lorem Ipsum.docx", + "raw": "https://www.wrike.com/api/v4/attachments/{attachmentId}/download/Lorem Ipsum.docx", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "attachments", - "IEACW7SVIYJJEUHD", + "{attachmentId}", "download", "Lorem Ipsum.docx" ] @@ -390,13 +419,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/attachments/IEACW7SVIYJJEUHD/url", + "raw": "https://www.wrike.com/api/v4/attachments/{attachmentId}/url", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "attachments", - "IEACW7SVIYJJEUHD", + "{attachmentId}", "url" ] } @@ -412,9 +441,9 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/spaces?withArchived=false&userIsMember=false", + "raw": "https://www.wrike.com/api/v4/spaces?withArchived=false&userIsMember=false", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "spaces" @@ -442,13 +471,13 @@ "request": { "method": "GET", "url": { - "raw": "{{WrikeAPI}}/spaces/IEACW7SVI4XDCUZX", + "raw": "https://www.wrike.com/api/v4/spaces/{spaceId}", "host": [ - "{{WrikeAPI}}" + "https://www.wrike.com/api/v4" ], "path": [ "spaces", - "IEACW7SVI4XDCUZX" + "{spaceId}" ], "query": [ { @@ -462,7 +491,7 @@ 
"name": "Get Space by ID", "originalRequest": { "method": "GET", - "url": "{{WrikeAPI}}/spaces/IEACW7SVI4XDCUZX?fields=[members]" + "url": "https://www.wrike.com/api/v4/spaces/{spaceId}?fields=[members]" }, "status": "OK", "code": 200, @@ -481,5 +510,17 @@ "type": "string" } ] + }, + "429_response": { + "status": 429, + "reason": "Too Many Requests", + "method": "GET", + "headers": { + "retry-after": "49" + }, + "body_json": { + "errorDescription": "Rate limit exceeded, try again later", + "error": "rate_limit_exceeded" + } } } \ No newline at end of file