diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 39b701b54..a9396a7a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: 'CI' +name: "CI" on: pull_request: @@ -12,7 +12,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-node@v4 with: - node-version: '20' + node-version: "22" - name: Cache node_modules uses: actions/cache@v4 env: @@ -31,7 +31,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-node@v4 with: - node-version: '20' + node-version: "22" - name: Cache node_modules uses: actions/cache@v4 env: @@ -47,8 +47,8 @@ jobs: - name: Checkout Barge uses: actions/checkout@v3 with: - repository: 'oceanprotocol/barge' - path: 'barge' + repository: "oceanprotocol/barge" + path: "barge" - name: Set Database Environment Variables run: | @@ -60,7 +60,7 @@ jobs: - name: Run Ganache with Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --with-typesense 2>&1 > start_ocean.log & + bash -x start_ocean.sh 2>&1 > start_ocean.log & - name: Install deps & build run: npm ci && npm run build:metadata @@ -78,7 +78,11 @@ jobs: if [ $attempt -eq $max_attempts ]; then echo "Error: Node container failed to start within 300 seconds" docker ps -a - docker inspect ocean-node-1 + docker inspect ocean-node-1 || true + echo "=== Barge startup script output ===" + cat ${{ github.workspace }}/barge/start_ocean.log || true + echo "=== Ocean Node Logs ===" + docker logs ocean-node-1 2>&1 | tail -100 || true exit 1 fi sleep 10 @@ -91,13 +95,23 @@ jobs: path: coverage/ test_integration: + name: test_integration (${{ matrix.transport }}) runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - transport: http + node_endpoint: "http://127.0.0.1:8001" + - transport: p2p + node_endpoint: "16Uiu2HAmRkJeRYRghP3ETQCpdz8NsQzQE9RpSST7i5YNgWqH4dVE" + steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v4 with: - node-version: '20' + node-version: "22" - name: 
Cache node_modules uses: actions/cache@v4 env: @@ -115,8 +129,8 @@ jobs: - name: Checkout Barge uses: actions/checkout@v3 with: - repository: 'oceanprotocol/barge' - path: 'barge' + repository: "oceanprotocol/barge" + path: "barge" - name: Login to Docker Hub if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }} @@ -136,7 +150,7 @@ jobs: - name: Run Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --with-typesense 2>&1 > start-node.log & + bash -x start_ocean.sh 2>&1 > start-node.log & - name: Install deps & build run: npm ci && npm run build:metadata @@ -170,15 +184,23 @@ jobs: attempt=$((attempt + 1)) done - - name: integration - run: npm run test:integration:cover + - name: integration (${{ matrix.transport }}) + run: | + if [ "${{ matrix.transport }}" = "http" ]; then + npm run test:integration:cover + else + npm run test:integration + fi env: INDEXING_RETRY_INTERVAL: 4000 INDEXING_MAX_RETRIES: 120 - NODE_URL: 'http://127.0.0.1:8001' + NODE_ENDPOINT: ${{ matrix.node_endpoint }} - name: docker logs run: | + echo "=== Barge startup script output ===" + cat ${{ github.workspace }}/barge/start-node.log || true + echo "" echo "=== Ocean Contracts Logs ===" docker logs ocean-ocean-contracts-1 2>&1 || echo "Container not found" echo "" @@ -187,6 +209,7 @@ jobs: if: ${{ failure() }} - name: Upload coverage + if: matrix.transport == 'http' uses: actions/upload-artifact@v4 with: name: coverage-integration @@ -199,7 +222,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - node: ['20', '18'] + node: ["22", "20"] steps: - uses: actions/checkout@v3 @@ -237,13 +260,13 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-node@v4 with: - node-version: '20' + node-version: "22" - name: checkout ocean.js repo uses: actions/checkout@v3 with: - repository: 'oceanprotocol/ocean.js' - path: 'ocean.js' + repository: "oceanprotocol/ocean.js" + path: "ocean.js" ref: ${{ 
github.event.pull_request.head.sha }} - name: setup git config diff --git a/CHANGELOG.md b/CHANGELOG.md index d38f67632..fb49e4c23 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,9 +4,113 @@ All notable changes to this project will be documented in this file. Dates are d Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). +#### [v7.0.0-next.13](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.12...v7.0.0-next.13) + +- create auth token with signed messge [`664a9b3`](https://github.com/oceanprotocol/ocean.js/commit/664a9b36552a8aaad6d995f5ef615735aba0c58f) + +#### [v7.0.0-next.12](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.11...v7.0.0-next.12) + +> 9 April 2026 + +- fetchnodelogs p2p method [`218c472`](https://github.com/oceanprotocol/ocean.js/commit/218c4720a2f82d780cfc6f29f24372fd2a706649) +- Release 7.0.0-next.12 [`5abf22f`](https://github.com/oceanprotocol/ocean.js/commit/5abf22f8ac3c02d1b61eb89df77c2dee06dcaf42) + +#### [v7.0.0-next.11](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.10...v7.0.0-next.11) + +> 9 April 2026 + +- Release 7.0.0-next.11 [`b8a24d9`](https://github.com/oceanprotocol/ocean.js/commit/b8a24d92842e3ca6c91c3810d77ac24767486c87) +- export libp2p node [`cfae9c5`](https://github.com/oceanprotocol/ocean.js/commit/cfae9c5ec5749c8902717f4e231a1baa14a880c6) + +#### [v7.0.0-next.10](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.9...v7.0.0-next.10) + +> 9 April 2026 + +- p2p store discovered nodes & add dockerregistryauth [`8689233`](https://github.com/oceanprotocol/ocean.js/commit/8689233403c0a73e555844d72e04f48e7d581cab) +- Release 7.0.0-next.10 [`b74e620`](https://github.com/oceanprotocol/ocean.js/commit/b74e62047ec6c05e2b30491a643693556edbdb58) +- lint fix [`cfa8ffd`](https://github.com/oceanprotocol/ocean.js/commit/cfa8ffd0b598e48fcf3c4cf008d9b4a72e0b25a8) + +#### 
[v7.0.0-next.9](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.8...v7.0.0-next.9) + +> 9 April 2026 + +- fix review comments [`51f59c5`](https://github.com/oceanprotocol/ocean.js/commit/51f59c52ca9885087d8a00c77ae053e3495a7d65) +- Release 7.0.0-next.9 [`5c02645`](https://github.com/oceanprotocol/ocean.js/commit/5c0264573d89ae7eb805a256f446f8c98086329a) + +#### [v7.0.0-next.8](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.7...v7.0.0-next.8) + +> 8 April 2026 + +- Release 7.0.0-next.8 [`b5582b5`](https://github.com/oceanprotocol/ocean.js/commit/b5582b5934868180b59477fd7ce3eb18a0933b02) +- allow multiaddr dial [`c6d7807`](https://github.com/oceanprotocol/ocean.js/commit/c6d78078be7f855256500e82a98fc88983c08f89) + +#### [v7.0.0-next.7](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.6...v7.0.0-next.7) + +> 8 April 2026 + +- get node jobs and get node status [`a56d4c3`](https://github.com/oceanprotocol/ocean.js/commit/a56d4c35b7993a4982507617fa2e1d88f5272c2a) +- use direct command for status [`694fce7`](https://github.com/oceanprotocol/ocean.js/commit/694fce7f901f019bf7e51626701e6adf3da5b49d) +- Release 7.0.0-next.7 [`7bb6345`](https://github.com/oceanprotocol/ocean.js/commit/7bb6345536684dc8009e7e93e3c6bf6adbc3d366) + +#### [v7.0.0-next.6](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.5...v7.0.0-next.6) + +> 6 April 2026 + +- Release 7.0.0-next.6 [`7b376f2`](https://github.com/oceanprotocol/ocean.js/commit/7b376f2c1206f84ae8c2562e73f7b9bde15ecc53) +- keep only esential files [`4368065`](https://github.com/oceanprotocol/ocean.js/commit/4368065552309e723e8447be2170130942a8c01c) + +#### [v7.0.0-next.5](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.4...v7.0.0-next.5) + +> 6 April 2026 + +- fix circuit parsing [`aef0342`](https://github.com/oceanprotocol/ocean.js/commit/aef0342880eb7481076266f99054f4c71c011394) +- Release 7.0.0-next.5 
[`eca532f`](https://github.com/oceanprotocol/ocean.js/commit/eca532f2825adb733b49bc07e9999edecdb0850c) + +#### [v7.0.0-next.4](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.3...v7.0.0-next.4) + +> 6 April 2026 + +- devdep libp2p + fix exports [`2362ece`](https://github.com/oceanprotocol/ocean.js/commit/2362ece541681c5f50873aa5f3ce584be7ba139b) +- Release 7.0.0-next.4 [`a82ab55`](https://github.com/oceanprotocol/ocean.js/commit/a82ab55cfaef98fedc281c23ff758d059caff8b1) + +#### [v7.0.0-next.3](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.2...v7.0.0-next.3) + +> 6 April 2026 + +- replace uint8arrays lib with native [`9af1c01`](https://github.com/oceanprotocol/ocean.js/commit/9af1c017620f65848bf26f00d30f40e37ff30e77) +- Release 7.0.0-next.3 [`b399086`](https://github.com/oceanprotocol/ocean.js/commit/b399086553c03801acc9d3922eb2131ecec9b928) +- Revert "fix exports name in pacakge json" [`aaf33a9`](https://github.com/oceanprotocol/ocean.js/commit/aaf33a97d3b6af019cfa01ae2ba739c6da720f3a) + +#### [v7.0.0-next.2](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.1...v7.0.0-next.2) + +> 3 April 2026 + +- circuit relay [`1465468`](https://github.com/oceanprotocol/ocean.js/commit/1465468442f6f5d5eb93a476e8ba04c02d295a8f) +- libp2p circuit + evict bad connections [`d9cba46`](https://github.com/oceanprotocol/ocean.js/commit/d9cba462d02fedba6c221efbc5aa8e8a080b372e) +- Release 7.0.0-next.2 [`8f33b03`](https://github.com/oceanprotocol/ocean.js/commit/8f33b03b11bf442d7ce0c6a31d905cbde4514508) + +#### [v7.0.0-next.1](https://github.com/oceanprotocol/ocean.js/compare/v7.0.0-next.0...v7.0.0-next.1) + +> 2 April 2026 + +- bump uint8arrays to esm supported [`f1ff511`](https://github.com/oceanprotocol/ocean.js/commit/f1ff511e9c92cee94cd129698bc46c7700952461) +- getmultiaddr from peerid [`1973f27`](https://github.com/oceanprotocol/ocean.js/commit/1973f278964598b6c8abb61c85395f0608375d43) +- Release 7.0.0-next.1 
[`0a59035`](https://github.com/oceanprotocol/ocean.js/commit/0a5903556fcc2d7e38ccc52b266e15b1ca9e2273) + +#### [v7.0.0-next.0](https://github.com/oceanprotocol/ocean.js/compare/v6.1.2...v7.0.0-next.0) + +> 1 April 2026 + +- p2p impl [`68db01f`](https://github.com/oceanprotocol/ocean.js/commit/68db01fbf8f28854e6e0abe43aba85fbf6834c28) +- p2p warmup [`8c2bb5a`](https://github.com/oceanprotocol/ocean.js/commit/8c2bb5ae2b17b5f2c9530800c1f3143b1b318b0e) +- push,fetch config and no tls [`9cda783`](https://github.com/oceanprotocol/ocean.js/commit/9cda78366ddcfe97a93ecb79dc3a8aa3264ccecc) + #### [v6.1.2](https://github.com/oceanprotocol/ocean.js/compare/v6.1.1...v6.1.2) +> 24 March 2026 + - Feature/output_in_initializeCompute [`#2056`](https://github.com/oceanprotocol/ocean.js/pull/2056) +- Release 6.1.2 [`3b3cdeb`](https://github.com/oceanprotocol/ocean.js/commit/3b3cdebdb5ce7b89ebbbd2d49e44d5f6fa65e0d1) #### [v6.1.1](https://github.com/oceanprotocol/ocean.js/compare/v6.1.0...v6.1.1) @@ -209,7 +313,7 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). - export util addrs functions [`#2017`](https://github.com/oceanprotocol/ocean.js/pull/2017) - Release 4.3.6 [`b28db44`](https://github.com/oceanprotocol/ocean.js/commit/b28db446144939e9ed864cc11559f718104d094b) -#### [v4.3.5](https://github.com/oceanprotocol/ocean.js/compare/v4.3.4...v4.3.5) +#### [v4.3.5](https://github.com/oceanprotocol/ocean.js/compare/v4.3.3...v4.3.5) > 17 November 2025 @@ -218,13 +322,11 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). 
- remove typo [`f199f4a`](https://github.com/oceanprotocol/ocean.js/commit/f199f4af809eb85249fe0c59303fcc3207d8e7a7) - export EnterpriseFeeCollector [`68712a2`](https://github.com/oceanprotocol/ocean.js/commit/68712a2af17824bb8fc78824842376e35f582487) -#### [v4.3.4](https://github.com/oceanprotocol/ocean.js/compare/v4.3.2...v4.3.4) +#### [v4.3.3](https://github.com/oceanprotocol/ocean.js/compare/v4.3.2...v4.3.3) -> 15 November 2025 +> 1 August 2025 - add policy server endpoints [`#1974`](https://github.com/oceanprotocol/ocean.js/pull/1974) -- add EnterpriseFee [`b502882`](https://github.com/oceanprotocol/ocean.js/commit/b502882d05375eda326166608730a11e779c6f42) -- Release 4.3.4 [`e45a7de`](https://github.com/oceanprotocol/ocean.js/commit/e45a7defa239f6800eb68a13e1a9a3d9662f509d) - Release 4.3.3 [`6d6d4cf`](https://github.com/oceanprotocol/ocean.js/commit/6d6d4cfe57f2a3dee73b4957ff2f144c0e7ff9d7) #### [v4.3.2](https://github.com/oceanprotocol/ocean.js/compare/v4.3.1...v4.3.2) diff --git a/CodeExamples.md b/CodeExamples.md index 5bc969121..088e0091d 100644 --- a/CodeExamples.md +++ b/CodeExamples.md @@ -104,6 +104,7 @@ import { sendTx, ConfigHelper, configHelperNetworks, + getNodeEndpointConfig, amountToUnits, getEventFromTx, LoggerInstance @@ -206,9 +207,7 @@ Next, we define the metadata that will describe our data asset. 
This is what we const config = new ConfigHelper().getConfig( parseInt(String((await publisherAccount.provider.getNetwork()).chainId)) ) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } + Object.assign(config, getNodeEndpointConfig()) aquarius = new Aquarius(config?.oceanNodeUri) providerUrl = config?.oceanNodeUri addresses = JSON.parse( diff --git a/ComputeExamples.md b/ComputeExamples.md index 1aa2ef561..d0f74ea00 100644 --- a/ComputeExamples.md +++ b/ComputeExamples.md @@ -143,6 +143,7 @@ import { sendTx, configHelperNetworks, ConfigHelper, + getNodeEndpointConfig, getEventFromTx, amountToUnits, isDefined, @@ -280,8 +281,6 @@ let resolvedAlgorithmDdo: DDO let computeJobId: string let agreementId: string -let computeRoutePath: string -let hasFreeComputeSupport: boolean ``` ### 4.3 Helper methods @@ -438,9 +437,7 @@ We need to load the configuration. Add the following code into your `run(){ }` f const config = new ConfigHelper().getConfig( parseInt(String((await publisherAccount.provider.getNetwork()).chainId)) ) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } + Object.assign(config, getNodeEndpointConfig()) aquariusInstance = new Aquarius(config?.oceanNodeUri) providerUrl = config?.oceanNodeUri addresses = JSON.parse( @@ -622,126 +619,99 @@ let's check the free compute environment assert(computeEnv, 'Cannot find the free compute env') --> - - - Let's have 5 minute of compute access - ```Typescript - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - - ``` - Let's prepare the dataset and algorithm assets to be used in the compute job - ```Typescript - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id - } - ] - - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id, - meta: 
resolvedAlgorithmDdo.metadata.algorithm +Let's have 5 minute of compute access +```Typescript + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + +``` +Let's prepare the dataset and algorithm assets to be used in the compute job +```Typescript + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id } - ``` + ] - Let's start the free compute job - ```Typescript - const computeJobs = await ProviderInstance.freeComputeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets, - algo - ) - ``` - - - - Let's save the compute job it, we re going to use later - ```Typescript - computeJobId = computeJobs[0].jobId - // eslint-disable-next-line prefer-destructuring - agreementId = computeJobs[0].agreementId - ``` - + +Let's save the compute job it, we re going to use later +```Typescript + computeJobId = computeJobs[0].jobId + // eslint-disable-next-line prefer-destructuring + agreementId = computeJobs[0].agreementId +``` + ## 11. 
Check compute status and get download compute results URL ### 11.1 Check compute status - You can also add various delays so you see the various states of the compute job - ```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId, - agreementId - ) - ``` - - Now, let's see the current status of the previously started computer job - ```Typescript - console.log('Current status of the compute job: ', jobStatus) - ``` - +You can also add various delays so you see the various states of the compute job +```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + consumerAccount, + computeJobId, + agreementId + ) +``` + +Now, let's see the current status of the previously started computer job +```Typescript + console.log('Current status of the compute job: ', jobStatus) +``` + ### 11.2 Get download compute results URL - - ```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) - ``` - - Let's check the compute results url for the specified index - ```Typescript - console.log(`Compute results URL: ${downloadURL}`) - ``` - +```Typescript + await sleep(10000) + const downloadURL = await ProviderInstance.getComputeResultUrl( + providerUrl, + consumerAccount, + computeJobId, + 0 + ) +``` + +Let's check the compute results url for the specified index +```Typescript + console.log(`Compute results URL: ${downloadURL}`) +``` + @@ -764,227 +734,203 @@ let's select compute environment which have free and paid resources - - Let's have 5 minute of compute access - ```Typescript - - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - const computeValidUntil = Math.floor(mytime.getTime() / 1000) - - ``` - - Let's prepare the dataset and algorithm assets to be used in the compute job - ```Typescript - const 
resources: ComputeResourceRequest[] = [ - { - id: 'cpu', - amount: 2 - }, - { - id: 'ram', - amount: 2 - }, - { - id: 'disk', - amount: 0 - } - ] - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id - } - ] - const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id, - meta: resolvedAlgorithmDdo.metadata.algorithm + +Let's have 5 minute of compute access +```Typescript + + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + const computeValidUntil = Math.floor(mytime.getTime() / 1000) + +``` + +Let's prepare the dataset and algorithm assets to be used in the compute job +```Typescript + const resources: ComputeResourceRequest[] = [ + { + id: 'cpu', + amount: 2 + }, + { + id: 'ram', + amount: 2 + }, + { + id: 'disk', + amount: 0 } - ``` - - Triggering initialize compute to see payment options - ```Typescript - const providerInitializeComputeResults = await ProviderInstance.initializeCompute( - assets, - algo, - computeEnv.id, - paymentToken, - computeValidUntil, - providerUrl, - consumerAccount, - resources, - Number(chainId) - ) + ] + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id + } + ] + const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgorithmDdo.id, + serviceId: resolvedAlgorithmDdo.services[0].id, + meta: resolvedAlgorithmDdo.metadata.algorithm + } +``` - console.log( - 'providerInitializeComputeResults = ', - JSON.stringify(providerInitializeComputeResults) - ) +Triggering initialize compute to see payment options +```Typescript + const providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + 
computeEnv.id, + paymentToken, + computeValidUntil, + providerUrl, + await consumerAccount.getAddress(), + resources, + Number(chainId) + ) - ``` + console.log( + 'providerInitializeComputeResults = ', + JSON.stringify(providerInitializeComputeResults) + ) - +``` - Let's check funds for escrow payment - ```Typescript - const escrow = new EscrowContract( - getAddress(providerInitializeComputeResults.payment.escrowAddress), - consumerAccount - ) - const paymentTokenPublisher = new Datatoken(publisherAccount) - const balancePublisherPaymentToken = await paymentTokenPublisher.balance( - paymentToken, - await publisherAccount.getAddress() - ) - assert( - new BigNumber(parseEther(balancePublisherPaymentToken)).isGreaterThan(0), - 'Balance should be higher than 0' - ) - const tx = await publisherAccount.sendTransaction({ - to: computeEnv.consumerAddress, - value: parseEther('1.5') - }) - await tx.wait() - - await paymentTokenPublisher.transfer( - paymentToken, - getAddress(computeEnv.consumerAddress), - (Number(balancePublisherPaymentToken) / 2).toString() - ) - const amountToDeposit = ( - providerInitializeComputeResults.payment.amount * 2 - ).toString() - await escrow.verifyFundsForEscrowPayment( - paymentToken, - computeEnv.consumerAddress, - await unitsToAmount(consumerAccount, paymentToken, amountToDeposit), - providerInitializeComputeResults.payment.amount.toString(), - providerInitializeComputeResults.payment.minLockSeconds.toString(), - '10' - ) - ``` + + +Let's check funds for escrow payment +```Typescript + const escrow = new EscrowContract( + getAddress(providerInitializeComputeResults.payment.escrowAddress), + consumerAccount + ) + const paymentTokenPublisher = new Datatoken(publisherAccount) + const balancePublisherPaymentToken = await paymentTokenPublisher.balance( + paymentToken, + await publisherAccount.getAddress() + ) + assert( + new BigNumber(parseEther(balancePublisherPaymentToken)).isGreaterThan(0), + 'Balance should be higher than 0' + ) + const tx = 
await publisherAccount.sendTransaction({ + to: computeEnv.consumerAddress, + value: parseEther('1.5') + }) + await tx.wait() + + await paymentTokenPublisher.transfer( + paymentToken, + getAddress(computeEnv.consumerAddress), + (Number(balancePublisherPaymentToken) / 2).toString() + ) + const amountToDeposit = ( + providerInitializeComputeResults.payment.amount * 2 + ).toString() + await escrow.verifyFundsForEscrowPayment( + paymentToken, + computeEnv.consumerAddress, + await unitsToAmount(consumerAccount, paymentToken, amountToDeposit), + providerInitializeComputeResults.payment.amount.toString(), + providerInitializeComputeResults.payment.minLockSeconds.toString(), + '10' + ) +``` - Let's order assets - ```Typescript +Let's order assets +```Typescript - algo.transferTxId = await handleOrder( - providerInitializeComputeResults.algorithm, - resolvedAlgorithmDdo.services[0].datatokenAddress, + algo.transferTxId = await handleOrder( + providerInitializeComputeResults.algorithm, + resolvedAlgorithmDdo.services[0].datatokenAddress, + consumerAccount, + computeEnv.consumerAddress, + 0 + ) + for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { + assets[i].transferTxId = await handleOrder( + providerInitializeComputeResults.datasets[i], + dtAddressArray[i], consumerAccount, computeEnv.consumerAddress, 0 ) - for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { - assets[i].transferTxId = await handleOrder( - providerInitializeComputeResults.datasets[i], - dtAddressArray[i], - consumerAccount, - computeEnv.consumerAddress, - 0 - ) - } - ``` - - Let's start compute job - ```Typescript - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets, - algo, - computeValidUntil, - paymentToken, - resources, - Number(chainId) - ) - ``` - - - - Let's save the compute job it, we re going to use later - ```Typescript - computeJobId = computeJobs[0].jobId - ``` - + +Let's save the 
compute job it, we re going to use later +```Typescript + computeJobId = computeJobs[0].jobId +``` + ## 13. Check paid compute job status and get download compute results URL ### 13.1 Check compute status for paid compute job - You can also add various delays so you see the various states of the compute job - ```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId - ) - ``` - - Now, let's see the current status of the previously started computer job - ```Typescript - console.log('Current status of the compute job: ', jobStatus) - ``` - +You can also add various delays so you see the various states of the compute job +```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + consumerAccount, + computeJobId + ) +``` + +Now, let's see the current status of the previously started computer job +```Typescript + console.log('Current status of the compute job: ', jobStatus) +``` + ### 13.2 Get download compute results URL +--> - ```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) - ``` - - Let's check the compute results url for the specified index - ```Typescript - console.log(`Compute results URL: ${downloadURL}`) - ``` - +Let's check the compute results url for the specified index +```Typescript + console.log(`Compute results URL: ${downloadURL}`) +``` + diff --git a/package-lock.json b/package-lock.json index 35de86ff4..c38eb7839 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,20 +1,16 @@ { "name": "@oceanprotocol/lib", - "version": "6.1.2", + "version": "7.0.0-next.13", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@oceanprotocol/lib", - "version": "6.1.2", + "version": "7.0.0-next.13", "license": "Apache-2.0", "dependencies": { "@oasisprotocol/sapphire-paratime": "^1.3.2", - "@oceanprotocol/contracts": "^2.6.0", 
"@oceanprotocol/ddo-js": "^0.1.4", - "@rdfjs/dataset": "^2.0.2", - "@rdfjs/formats-common": "^3.1.0", - "@zazuko/env-node": "^2.1.4", "bignumber.js": "^9.3.1", "cross-fetch": "^4.0.0", "crypto-js": "^4.1.1", @@ -25,7 +21,17 @@ "jsonwebtoken": "^9.0.2" }, "devDependencies": { + "@chainsafe/libp2p-noise": "^17.0.0", + "@chainsafe/libp2p-yamux": "^8.0.1", "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@libp2p/bootstrap": "^12.0.14", + "@libp2p/circuit-relay-v2": "^4.1.7", + "@libp2p/identify": "^4.0.13", + "@libp2p/kad-dht": "^16.1.6", + "@libp2p/tcp": "^11.0.13", + "@libp2p/websockets": "^10.1.6", + "@multiformats/multiaddr": "^13.0.1", + "@oceanprotocol/contracts": "^2.6.0", "@truffle/hdwallet-provider": "^2.0.14", "@types/chai": "^5.2.2", "@types/chai-spies": "^1.0.3", @@ -46,6 +52,7 @@ "eslint-config-prettier": "^10.1.5", "eslint-plugin-prettier": "^4.2.1", "fs": "0.0.1-security", + "libp2p": "^3.1.6", "microbundle": "^0.15.1", "mocha": "^11.7.1", "mock-local-storage": "^1.1.24", @@ -1921,6 +1928,114 @@ "buffer": "^6.0.3" } }, + "node_modules/@chainsafe/as-chacha20poly1305": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@chainsafe/as-chacha20poly1305/-/as-chacha20poly1305-0.1.0.tgz", + "integrity": "sha512-BpNcL8/lji/GM3+vZ/bgRWqJ1q5kwvTFmGPk7pxm/QQZDbaMI98waOHjEymTjq2JmdD/INdNBFOVSyJofXg7ew==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@chainsafe/as-sha256": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@chainsafe/as-sha256/-/as-sha256-1.2.0.tgz", + "integrity": "sha512-H2BNHQ5C3RS+H0ZvOdovK6GjFAyq5T6LClad8ivwj9Oaiy28uvdsGVS7gNJKuZmg0FGHAI+n7F0Qju6U0QkKDA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@chainsafe/is-ip": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@chainsafe/is-ip/-/is-ip-2.1.0.tgz", + "integrity": "sha512-KIjt+6IfysQ4GCv66xihEitBjvhU/bixbbbFxdJ1sqCp4uJ0wuZiYBPhksZoy4lfaF0k9cwNzY5upEW/VWdw3w==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@chainsafe/libp2p-noise": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/@chainsafe/libp2p-noise/-/libp2p-noise-17.0.0.tgz", + "integrity": "sha512-vwrmY2Y+L1xYhIDiEpl61KHxwrLCZoXzTpwhyk34u+3+6zCAZPL3GxH3i2cs+u5IYNoyLptORdH17RKFXy7upA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@chainsafe/as-chacha20poly1305": "^0.1.0", + "@chainsafe/as-sha256": "^1.2.0", + "@libp2p/crypto": "^5.1.9", + "@libp2p/interface": "^3.0.0", + "@libp2p/peer-id": "^6.0.0", + "@libp2p/utils": "^7.0.0", + "@noble/ciphers": "^2.0.1", + "@noble/curves": "^2.0.1", + "@noble/hashes": "^2.0.1", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0", + "wherearewe": "^2.0.1" + } + }, + "node_modules/@chainsafe/libp2p-noise/node_modules/@noble/ciphers": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@noble/ciphers/-/ciphers-2.1.1.tgz", + "integrity": "sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@chainsafe/libp2p-noise/node_modules/@noble/curves": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-2.0.1.tgz", + "integrity": "sha512-vs1Az2OOTBiP4q0pwjW5aF0xp9n4MxVrmkFBxc6EKZc6ddYx5gaZiAsZoq0uRRXWbi3AT/sBqn05eRPtn1JCPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@noble/hashes": "2.0.1" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@chainsafe/libp2p-noise/node_modules/@noble/hashes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-2.0.1.tgz", + "integrity": "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==", + "dev": true, + "license": "MIT", + "engines": 
{ + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@chainsafe/libp2p-yamux": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@chainsafe/libp2p-yamux/-/libp2p-yamux-8.0.1.tgz", + "integrity": "sha512-pJsqmUg1cZRJZn/luAtQaq0uLcVfExo51Rg7iRtAEceNYtsKUi/exfegnvTBzTnF1CGmTzVEV3MCLsRhqiNyoA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.0.0", + "@libp2p/utils": "^7.0.0", + "race-signal": "^2.0.0", + "uint8arraylist": "^2.4.8" + } + }, + "node_modules/@chainsafe/netmask": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@chainsafe/netmask/-/netmask-2.0.0.tgz", + "integrity": "sha512-I3Z+6SWUoaljh3TBzCnCxjlUyN8tA+NAk5L6m9IxvCf1BENQTePzPMis97CoN/iMW1St3WN+AWCCRp+TTBRiDg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@chainsafe/is-ip": "^2.0.1" + } + }, "node_modules/@cspotcode/source-map-support": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", @@ -1957,6 +2072,20 @@ "node": ">=18.0" } }, + "node_modules/@dnsquery/dns-packet": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@dnsquery/dns-packet/-/dns-packet-6.1.1.tgz", + "integrity": "sha512-WXTuFvL3G+74SchFAtz3FgIYVOe196ycvGsMgvSH/8Goptb1qpIQtIuM4SOK9G9lhMWYpHxnXyy544ZhluFOew==", + "dev": true, + "license": "MIT", + "dependencies": { + "@leichtgewicht/ip-codec": "^2.0.4", + "utf8-codec": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.1", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", @@ -3716,6 +3845,472 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@leichtgewicht/ip-codec": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", + "integrity": 
"sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@libp2p/bootstrap": { + "version": "12.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/bootstrap/-/bootstrap-12.0.14.tgz", + "integrity": "sha512-kVg/t303ac6l7eSo5M1oZs72cs54FRRjYhmXwvHbPEz5v7NgglDpnNgz7ScBJGaS/z4Xn5qeyghVZFc8Qz7BaA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-id": "^6.0.4", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "main-event": "^1.0.1" + } + }, + "node_modules/@libp2p/circuit-relay-v2": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@libp2p/circuit-relay-v2/-/circuit-relay-v2-4.1.7.tgz", + "integrity": "sha512-yQJ5+CSKGz1oqisa/hbp+VfwWpYFU9XPgC2qq6Q6rFK80CxeIf3AQ0QQdC/OAS/5Nl+EFLK5DQuaQ+QYT54XLQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/interface-internal": "^3.0.14", + "@libp2p/peer-collections": "^7.0.14", + "@libp2p/peer-id": "^6.0.5", + "@libp2p/peer-record": "^9.0.6", + "@libp2p/utils": "^7.0.14", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "nanoid": "^5.1.5", + "progress-events": "^1.0.1", + "protons-runtime": "^6.0.1", + "retimeable-signal": "^1.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/circuit-relay-v2/node_modules/nanoid": { + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.js" + }, + "engines": { + "node": "^18 || >=20" + } + }, + "node_modules/@libp2p/circuit-relay-v2/node_modules/protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/crypto": { + "version": "5.1.14", + "resolved": "https://registry.npmjs.org/@libp2p/crypto/-/crypto-5.1.14.tgz", + "integrity": "sha512-0L2SEhDfvKWFhlc8GXgm268MoakrS4qbewD5LoZpoiUesXpB9e1vjed9dWEN1VsSjOmrOPyhBoSxZ2mnLTrOVA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.1", + "@noble/curves": "^2.0.1", + "@noble/hashes": "^2.0.1", + "multiformats": "^13.4.0", + "protons-runtime": "^6.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/crypto/node_modules/@noble/curves": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-2.0.1.tgz", + "integrity": "sha512-vs1Az2OOTBiP4q0pwjW5aF0xp9n4MxVrmkFBxc6EKZc6ddYx5gaZiAsZoq0uRRXWbi3AT/sBqn05eRPtn1JCPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@noble/hashes": "2.0.1" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@libp2p/crypto/node_modules/@noble/hashes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-2.0.1.tgz", + "integrity": "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": 
"https://paulmillr.com/funding/" + } + }, + "node_modules/@libp2p/crypto/node_modules/protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/identify": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/identify/-/identify-4.0.13.tgz", + "integrity": "sha512-/zAhl2yMuQeHMyghJZDBRvQ1l3fRwxbsq3zPhT5nDscu9qVDa/CB4xDwquV4jV2Y/pnvefZtngJgj5c+bBIxug==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-record": "^9.0.5", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "it-drain": "^3.0.10", + "it-parallel": "^3.0.13", + "main-event": "^1.0.1", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/interface": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@libp2p/interface/-/interface-3.1.1.tgz", + "integrity": "sha512-pQuReZeZUSqk27UXwXXdAVlxrgs08GrcPsd92Qv27IFBPICG8da3FmHg1bclUpMW/6GE6o4qDCVqR4cBMRVKyA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@multiformats/dns": "^1.0.6", + "@multiformats/multiaddr": "^13.0.1", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8" + } + }, + "node_modules/@libp2p/interface-internal": { + "version": "3.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/interface-internal/-/interface-internal-3.0.14.tgz", + "integrity": 
"sha512-X7TxzWapCKNaBCy9quPJIiXouPaAbPNT2XgWghw1MouznKPMWzCyHY+kW0l+e2JkvBqeSDHLPdBE7WnHwdbNtA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-collections": "^7.0.14", + "@multiformats/multiaddr": "^13.0.1", + "progress-events": "^1.0.1" + } + }, + "node_modules/@libp2p/kad-dht": { + "version": "16.1.6", + "resolved": "https://registry.npmjs.org/@libp2p/kad-dht/-/kad-dht-16.1.6.tgz", + "integrity": "sha512-G3RqIkA/zG8brOQfADYt+PaLbEOEcwF8DzEVPsRGLpn80xaTMj2zOG4F86wcWK7FSciijnZzDOT0AKgoDzAcdg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/ping": "^3.0.13", + "@libp2p/record": "^4.0.9", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "interface-datastore": "^9.0.1", + "it-all": "^3.0.9", + "it-drain": "^3.0.10", + "it-length": "^3.0.9", + "it-map": "^3.1.4", + "it-merge": "^3.0.12", + "it-parallel": "^3.0.13", + "it-pipe": "^3.0.1", + "it-pushable": "^3.2.3", + "it-take": "^3.0.9", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "protons-runtime": "^5.6.0", + "race-signal": "^2.0.0", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/logger": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/@libp2p/logger/-/logger-6.2.3.tgz", + "integrity": "sha512-ZlGE8a0pHDkTFoNleKHAu4Fqta1QHiqgR3CR9fw0Ek/FnjMXo++zxyBCYdwqYz/Jeqh1s1/svSonRTIfknF4zQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.1", + "@multiformats/multiaddr": "^13.0.1", + "interface-datastore": "^9.0.1", + 
"multiformats": "^13.4.0", + "weald": "^1.1.0" + } + }, + "node_modules/@libp2p/multistream-select": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/multistream-select/-/multistream-select-7.0.13.tgz", + "integrity": "sha512-nX13GinXiuBFgN+zA/CvIyXZyR/DaftT26agsw6dDfhRvH2RWsoPvf0IGqxk90DsLhpmVxZnTE31rITjmLIKww==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "it-length-prefixed": "^10.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/peer-collections": { + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/peer-collections/-/peer-collections-7.0.14.tgz", + "integrity": "sha512-PoH9m6ihhuEe5ot23o7kZ7aa10QlemTaHyn6w34oXUjhCFWsYNbl3zIlnTLdM2r1ROQABEeMH7AmxvfgipNR0A==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-id": "^6.0.5", + "@libp2p/utils": "^7.0.14", + "multiformats": "^13.4.0" + } + }, + "node_modules/@libp2p/peer-id": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/@libp2p/peer-id/-/peer-id-6.0.5.tgz", + "integrity": "sha512-0rAcAnoOrhjUPs03fRMw29hctzx9s1mdsmCdfgl1U4FnEohMRfBmLkGD8Al3/J52Z23jwzdDfz1VpyxjOANaHA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "multiformats": "^13.4.0", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/peer-record": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@libp2p/peer-record/-/peer-record-9.0.6.tgz", + "integrity": "sha512-AJNscSkH6lbia7OO+9F+eGryOnhAZwbJghj4iG2jF2IuGJ5G+hJv28AJyep5J6+BzaTJdnDhhXM5RPHFqHMmWQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-id": "^6.0.5", + "@multiformats/multiaddr": "^13.0.1", + "multiformats": 
"^13.4.0", + "protons-runtime": "^6.0.1", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/peer-record/node_modules/protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/peer-store": { + "version": "12.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/peer-store/-/peer-store-12.0.13.tgz", + "integrity": "sha512-hXiIrXEUlXNDJe7i0O32qXRGmLA3ckoRHDjGZcNKMzPnkRDPkGEUQ42v1keA+1QoysMkm95xYyyhF6S3dA6nxg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-record": "^9.0.5", + "@multiformats/multiaddr": "^13.0.1", + "interface-datastore": "^9.0.1", + "it-all": "^3.0.9", + "main-event": "^1.0.1", + "mortice": "^3.3.1", + "multiformats": "^13.4.0", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/ping": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/ping/-/ping-3.0.13.tgz", + "integrity": "sha512-ag4opUEB/eYPzNXBn8KaqBNCHVAvED6Kdr3HCMct4pQS3qRrJ6zbLrm6qb7D2WWEKO0WrLuCjE7NmBk9nprmDQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@multiformats/multiaddr": "^13.0.1", + "p-event": "^7.0.0", + "race-signal": "^2.0.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/record": { + "version": "4.0.9", 
+ "resolved": "https://registry.npmjs.org/@libp2p/record/-/record-4.0.9.tgz", + "integrity": "sha512-ITxntqQ2GDK/yA1NhzEQc2dXpxgox96xZ1cqO507choY5z5Czhz2BxfyElVO/XYjOXvylu1XN66uh3VuGHrfkQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/@libp2p/tcp": { + "version": "11.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/tcp/-/tcp-11.0.13.tgz", + "integrity": "sha512-YTV6rX1NpQVbixYDlsWtIAHALBkW4vYdk4DPmHbFyvvQJdtBofbeHIQakyjPexKrbUtDGaoIXgT5KC2osLRp0g==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "@types/sinon": "^20.0.0", + "main-event": "^1.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8" + } + }, + "node_modules/@libp2p/utils": { + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/utils/-/utils-7.0.14.tgz", + "integrity": "sha512-G8tj32VT1sRAiXV3pGLMlepRSmkydCKBRXzTp/OFqDjRmoXRlIenWMN+hxKOG5wXOyXZkRtkBbXJGq2kIB27/A==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@chainsafe/is-ip": "^2.1.0", + "@chainsafe/netmask": "^2.0.0", + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/logger": "^6.2.3", + "@multiformats/multiaddr": "^13.0.1", + "@sindresorhus/fnv1a": "^3.1.0", + "any-signal": "^4.1.1", + "cborg": "^4.2.14", + "delay": "^7.0.0", + "is-loopback-addr": "^2.0.2", + "it-length-prefixed": "^10.0.1", + "it-pipe": "^3.0.1", + "it-pushable": "^3.2.3", + "it-stream-types": "^2.0.2", + "main-event": "^1.0.1", + "netmask": "^2.0.2", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "race-signal": "^2.0.0", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + 
"node_modules/@libp2p/utils/node_modules/cborg": { + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/cborg/-/cborg-4.5.8.tgz", + "integrity": "sha512-6/viltD51JklRhq4L7jC3zgy6gryuG5xfZ3kzpE+PravtyeQLeQmCYLREhQH7pWENg5pY4Yu/XCd6a7dKScVlw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "cborg": "lib/bin.js" + } + }, + "node_modules/@libp2p/websockets": { + "version": "10.1.6", + "resolved": "https://registry.npmjs.org/@libp2p/websockets/-/websockets-10.1.6.tgz", + "integrity": "sha512-hRJtodXHbPNvrRaRZL6KwaHsbadUiBAHDkTUS2VqQXmRWZ77B02Es5LDjgSDJIaw4GBJIo8vx2oaML5R94WtBA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "@multiformats/multiaddr-to-uri": "^12.0.0", + "main-event": "^1.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0", + "ws": "^8.18.3" + } + }, + "node_modules/@libp2p/websockets/node_modules/ws": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", + "integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/@metamask/eth-sig-util": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz", @@ -3790,6 +4385,91 @@ "dev": true, "license": "ISC" }, + "node_modules/@multiformats/dns": { + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/@multiformats/dns/-/dns-1.0.13.tgz", + "integrity": 
"sha512-yr4bxtA3MbvJ+2461kYIYMsiiZj/FIqKI64hE4SdvWJUdWF9EtZLar38juf20Sf5tguXKFUruluswAO6JsjS2w==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@dnsquery/dns-packet": "^6.1.1", + "@libp2p/interface": "^3.1.0", + "hashlru": "^2.3.0", + "p-queue": "^9.0.0", + "progress-events": "^1.0.0", + "uint8arrays": "^5.0.2" + } + }, + "node_modules/@multiformats/dns/node_modules/eventemitter3": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@multiformats/dns/node_modules/p-queue": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-9.1.0.tgz", + "integrity": "sha512-O/ZPaXuQV29uSLbxWBGGZO1mCQXV2BLIwUr59JUU9SoH76mnYvtms7aafH/isNSNGwuEfP6W/4xD0/TJXxrizw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^7.0.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@multiformats/dns/node_modules/p-timeout": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-7.0.1.tgz", + "integrity": "sha512-AxTM2wDGORHGEkPCt8yqxOTMgpfbEHqF51f/5fJCmwFC3C/zNcGT63SymH2ttOAaiIws2zVg4+izQCjrakcwHg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@multiformats/multiaddr": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr/-/multiaddr-13.0.1.tgz", + "integrity": "sha512-XToN915cnfr6Lr9EdGWakGJbPT0ghpg/850HvdC+zFX8XvpLZElwa8synCiwa8TuvKNnny6m8j8NVBNCxhIO3g==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@chainsafe/is-ip": "^2.0.1", + "multiformats": "^13.0.0", + 
"uint8-varint": "^2.0.1", + "uint8arrays": "^5.0.0" + } + }, + "node_modules/@multiformats/multiaddr-matcher": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr-matcher/-/multiaddr-matcher-3.0.1.tgz", + "integrity": "sha512-jvjwzCPysVTQ53F4KqwmcqZw73BqHMk0UUZrMP9P4OtJ/YHrfs122ikTqhVA2upe0P/Qz9l8HVlhEifVYB2q9A==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@multiformats/multiaddr": "^13.0.0" + } + }, + "node_modules/@multiformats/multiaddr-to-uri": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr-to-uri/-/multiaddr-to-uri-12.0.0.tgz", + "integrity": "sha512-3uIEBCiy8tfzxYYBl81x1tISiNBQ7mHU4pGjippbJRoQYHzy/ZdZM/7JvTldr8pc/dzpkaNJxnsuxxlhsPOJsA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@multiformats/multiaddr": "^13.0.0" + } + }, "node_modules/@noble/ciphers": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/@noble/ciphers/-/ciphers-0.3.0.tgz", @@ -3996,6 +4676,7 @@ "version": "2.6.0", "resolved": "https://registry.npmjs.org/@oceanprotocol/contracts/-/contracts-2.6.0.tgz", "integrity": "sha512-4K3TTM0q4VlBs7GLXzQkGMae576iAGHMARqMFcKXUUtfGyiT8KP4sP9IBsq8UUhu11R4JU2KTqXenHS8EWxrbA==", + "dev": true, "license": "Apache-2.0" }, "node_modules/@oceanprotocol/ddo-js": { @@ -4678,6 +5359,19 @@ "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", "license": "MIT" }, + "node_modules/@sindresorhus/fnv1a": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/fnv1a/-/fnv1a-3.1.0.tgz", + "integrity": "sha512-KV321z5m/0nuAg83W1dPLy85HpHDk7Sdi4fJbwvacWsEhAh+rZUW4ZfGcXmUIvjZg4ss2bcwNlRhJ7GBEUG08w==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@sindresorhus/is": { "version": "4.6.0", "resolved": 
"https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", @@ -5731,6 +6425,23 @@ "integrity": "sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==", "dev": true }, + "node_modules/@types/sinon": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-20.0.0.tgz", + "integrity": "sha512-etYGUC6IEevDGSWvR9WrECRA01ucR2/Oi9XMBUAdV0g4bLkNf4HlZWGiGlDOq5lgwXRwcV+PSeKgFcW4QzzYOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/sinonjs__fake-timers": "*" + } + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "15.0.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-15.0.1.tgz", + "integrity": "sha512-Ko2tjWJq8oozHzHV+reuvS5KYIRAokHnGbDwGh/J64LntgpbuylF74ipEL24HCyRjf9FOlBiBHWBR1RlVKsI1w==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/underscore": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.13.0.tgz", @@ -6243,6 +6954,13 @@ "node": ">=6.5" } }, + "node_modules/abort-error": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/abort-error/-/abort-error-1.0.1.tgz", + "integrity": "sha512-fxqCblJiIPdSXIUrxI0PL+eJG49QdP9SQ70qtB65MVAoMr2rASlOyAbJFOylfB467F/f+5BCLJJq58RYi7mGfg==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/abortcontroller-polyfill": { "version": "1.7.8", "resolved": "https://registry.npmjs.org/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.8.tgz", @@ -6387,6 +7105,17 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/any-signal": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/any-signal/-/any-signal-4.2.0.tgz", + "integrity": "sha512-LndMvYuAPf4rC195lk7oSFuHOYFpOszIYrNYv0gHAvz+aEhE9qPZLhmrIz5pXP2BSsPOXvsuHDXEGaiQhIh9wA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, 
"node_modules/append-transform": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", @@ -8609,6 +9338,25 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/datastore-core": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/datastore-core/-/datastore-core-11.0.2.tgz", + "integrity": "sha512-0pN4hMcaCWcnUBo5OL/8j14Lt1l/p1v2VvzryRYeJAKRLqnFrzy2FhAQ7y0yTA63ki760ImQHfm2XlZrfIdFpQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@libp2p/logger": "^6.0.0", + "interface-datastore": "^9.0.0", + "interface-store": "^7.0.0", + "it-drain": "^3.0.9", + "it-filter": "^3.1.3", + "it-map": "^3.1.3", + "it-merge": "^3.0.11", + "it-pipe": "^3.0.1", + "it-sort": "^3.0.8", + "it-take": "^3.0.8" + } + }, "node_modules/debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", @@ -8847,6 +9595,23 @@ "node": ">= 14" } }, + "node_modules/delay": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-7.0.0.tgz", + "integrity": "sha512-C3vaGs818qzZjCvVJ98GQUMVyWeg7dr5w2Nwwb2t5K8G98jOyyVO2ti2bKYk5yoYElqH3F2yA53ykuEnwD6MCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "random-int": "^3.1.0", + "unlimited-timeout": "^0.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -12674,6 +13439,13 @@ "node": ">=8" } }, + "node_modules/hashlru": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", + "integrity": "sha512-0cMsjjIC8I+D3M44pOQdsy0OHXGLVz6Z0beRuufhKa0KfaD2wGwAev6jILzXsd3/vpnNQJmWyZtIILqM1N+n5A==", + "dev": true, + "license": "MIT" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ 
-13023,6 +13795,24 @@ } } }, + "node_modules/interface-datastore": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/interface-datastore/-/interface-datastore-9.0.2.tgz", + "integrity": "sha512-jebn+GV/5LTDDoyicNIB4D9O0QszpPqT09Z/MpEWvf3RekjVKpXJCDguM5Au2fwIFxFDAQMZe5bSla0jMamCNg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "interface-store": "^7.0.0", + "uint8arrays": "^5.1.0" + } + }, + "node_modules/interface-store": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/interface-store/-/interface-store-7.0.1.tgz", + "integrity": "sha512-OPRRUO3Cs6Jr/t98BrJLQp1jUTPgrRH0PqFfuNoPAqd+J7ABN1tjFVjQdaOBiybYJTS/AyBSZnZVWLPvp3dW3w==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/internal-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", @@ -13233,6 +14023,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-electron": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz", + "integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==", + "dev": true, + "license": "MIT" + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -13354,6 +14151,13 @@ "node": ">=8" } }, + "node_modules/is-loopback-addr": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-loopback-addr/-/is-loopback-addr-2.0.2.tgz", + "integrity": "sha512-26POf2KRCno/KTNL5Q0b/9TYnL00xEsSaLfiFRmjM7m7Lw7ZMmFybzzuX4CcsLAluZGd+niLUiMRxEooVE3aqg==", + "dev": true, + "license": "MIT" + }, "node_modules/is-map": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", @@ -13374,6 +14178,19 @@ "dev": true, "license": "MIT" }, + "node_modules/is-network-error": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.1.tgz", + "integrity": "sha512-6QCxa49rQbmUWLfk0nuGqzql9U8uaV2H6279bRErPBHe/109hCzsLUBUHfbEtvLIHBd6hyXbgedBSHevm43Edw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -13793,6 +14610,183 @@ "node": ">=8" } }, + "node_modules/it-all": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-all/-/it-all-3.0.9.tgz", + "integrity": "sha512-fz1oJJ36ciGnu2LntAlE6SA97bFZpW7Rnt0uEc1yazzR2nKokZLr8lIRtgnpex4NsmaBcvHF+Z9krljWFy/mmg==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/it-drain": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/it-drain/-/it-drain-3.0.10.tgz", + "integrity": "sha512-0w/bXzudlyKIyD1+rl0xUKTI7k4cshcS43LTlBiGFxI8K1eyLydNPxGcsVLsFVtKh1/ieS8AnVWt6KwmozxyEA==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/it-filter": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/it-filter/-/it-filter-3.1.4.tgz", + "integrity": "sha512-80kWEKgiFEa4fEYD3mwf2uygo1dTQ5Y5midKtL89iXyjinruA/sNXl6iFkTcdNedydjvIsFhWLiqRPQP4fAwWQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-peekable": "^3.0.0" + } + }, + "node_modules/it-length": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-length/-/it-length-3.0.9.tgz", + "integrity": "sha512-cPhRPzyulYqyL7x4sX4MOjG/xu3vvEIFAhJ1aCrtrnbfxloCOtejOONib5oC3Bz8tLL6b6ke6+YHu4Bm6HCG7A==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/it-length-prefixed": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/it-length-prefixed/-/it-length-prefixed-10.0.1.tgz", + "integrity": "sha512-BhyluvGps26u9a7eQIpOI1YN7mFgi8lFwmiPi07whewbBARKAG9LE09Odc8s1Wtbt2MB6rNUrl7j9vvfXTJwdQ==", + 
"dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-reader": "^6.0.1", + "it-stream-types": "^2.0.1", + "uint8-varint": "^2.0.1", + "uint8arraylist": "^2.0.0", + "uint8arrays": "^5.0.1" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/it-map": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/it-map/-/it-map-3.1.4.tgz", + "integrity": "sha512-QB9PYQdE9fUfpVFYfSxBIyvKynUCgblb143c+ktTK6ZuKSKkp7iH58uYFzagqcJ5HcqIfn1xbfaralHWam+3fg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-peekable": "^3.0.0" + } + }, + "node_modules/it-merge": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/it-merge/-/it-merge-3.0.12.tgz", + "integrity": "sha512-nnnFSUxKlkZVZD7c0jYw6rDxCcAQYcMsFj27thf7KkDhpj0EA0g9KHPxbFzHuDoc6US2EPS/MtplkNj8sbCx4Q==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-queueless-pushable": "^2.0.0" + } + }, + "node_modules/it-parallel": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/it-parallel/-/it-parallel-3.0.13.tgz", + "integrity": "sha512-85PPJ/O8q97Vj9wmDTSBBXEkattwfQGruXitIzrh0RLPso6RHfiVqkuTqBNufYYtB1x6PSkh0cwvjmMIkFEPHA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "p-defer": "^4.0.1" + } + }, + "node_modules/it-peekable": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/it-peekable/-/it-peekable-3.0.8.tgz", + "integrity": "sha512-7IDBQKSp/dtBxXV3Fj0v3qM1jftJ9y9XrWLRIuU1X6RdKqWiN60syNwP0fiDxZD97b8SYM58dD3uklIk1TTQAw==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/it-pipe": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/it-pipe/-/it-pipe-3.0.1.tgz", + "integrity": "sha512-sIoNrQl1qSRg2seYSBH/3QxWhJFn9PKYvOf/bHdtCBF0bnghey44VyASsWzn5dAx0DCDDABq1hZIuzKmtBZmKA==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-merge": "^3.0.0", + "it-pushable": "^3.1.2", + "it-stream-types": 
"^2.0.1" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/it-pushable": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/it-pushable/-/it-pushable-3.2.3.tgz", + "integrity": "sha512-gzYnXYK8Y5t5b/BnJUr7glfQLO4U5vyb05gPx/TyTw+4Bv1zM9gFk4YsOrnulWefMewlphCjKkakFvj1y99Tcg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "p-defer": "^4.0.0" + } + }, + "node_modules/it-queue": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/it-queue/-/it-queue-1.1.1.tgz", + "integrity": "sha512-yeYCV22WF1QDyb3ylw+g3TGEdkmnoHUH2mc12QoGOQuxW4XP1V7Zd3BfsEF1iq2IFBwIK7wCPUcRLTAQVeZ3SQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "abort-error": "^1.0.1", + "it-pushable": "^3.2.3", + "main-event": "^1.0.0", + "race-event": "^1.3.0", + "race-signal": "^2.0.0" + } + }, + "node_modules/it-queueless-pushable": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/it-queueless-pushable/-/it-queueless-pushable-2.0.3.tgz", + "integrity": "sha512-USa5EzTvmQswOcVE7+o6qsj2o2G+6KHCxSogPOs23sGYkDWFidhqVO7dAvv6ve/Z+Q+nvxpEa9rrRo6VEK7w4Q==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "abort-error": "^1.0.1", + "p-defer": "^4.0.1", + "race-signal": "^2.0.0" + } + }, + "node_modules/it-reader": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/it-reader/-/it-reader-6.0.4.tgz", + "integrity": "sha512-XCWifEcNFFjjBHtor4Sfaj8rcpt+FkY0L6WdhD578SCDhV4VUm7fCkF3dv5a+fTcfQqvN9BsxBTvWbYO6iCjTg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-stream-types": "^2.0.1", + "uint8arraylist": "^2.0.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/it-sort": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-sort/-/it-sort-3.0.9.tgz", + "integrity": "sha512-jsM6alGaPiQbcAJdzMsuMh00uJcI+kD9TBoScB8TR75zUFOmHvhSsPi+Dmh2zfVkcoca+14EbfeIZZXTUGH63w==", + "dev": true, 
+ "license": "Apache-2.0 OR MIT", + "dependencies": { + "it-all": "^3.0.0" + } + }, + "node_modules/it-stream-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/it-stream-types/-/it-stream-types-2.0.2.tgz", + "integrity": "sha512-Rz/DEZ6Byn/r9+/SBCuJhpPATDF9D+dz5pbgSUyBsCDtza6wtNATrz/jz1gDyNanC3XdLboriHnOC925bZRBww==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/it-take": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-take/-/it-take-3.0.9.tgz", + "integrity": "sha512-XMeUbnjOcgrhFXPUqa7H0VIjYSV/BvyxxjCp76QHVAFDJw2LmR1SHxUFiqyGeobgzJr7P2ZwSRRJQGn4D2BVlA==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/iterator.prototype": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", @@ -14440,6 +15434,42 @@ "node": ">= 0.8.0" } }, + "node_modules/libp2p": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/libp2p/-/libp2p-3.1.6.tgz", + "integrity": "sha512-p1Tg8htMjQbbyNOQd5GtSsZJXKkJQYQBvRrPGMCa3PZBjGs2pNV4Utr7z0na+WgfJJn+mIbcNvP7NzzcrSD1nw==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "@chainsafe/is-ip": "^2.1.0", + "@chainsafe/netmask": "^2.0.0", + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/logger": "^6.2.2", + "@libp2p/multistream-select": "^7.0.13", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-store": "^12.0.13", + "@libp2p/utils": "^7.0.13", + "@multiformats/dns": "^1.0.6", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "datastore-core": "^11.0.1", + "interface-datastore": "^9.0.1", + "it-merge": "^3.0.12", + "it-parallel": "^3.0.13", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "p-retry": "^7.0.0", + "progress-events": "^1.0.1", + 
"race-signal": "^2.0.0", + "uint8arrays": "^5.1.0" + } + }, "node_modules/lilconfig": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", @@ -14687,6 +15717,13 @@ "sourcemap-codec": "^1.4.8" } }, + "node_modules/main-event": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/main-event/-/main-event-1.0.1.tgz", + "integrity": "sha512-NWtdGrAca/69fm6DIVd8T9rtfDII4Q8NQbIbsKQq2VzS9eqOGYs8uaNQjcuaCq/d9H/o625aOTJX2Qoxzqw0Pw==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -15410,6 +16447,18 @@ "global": "^4.3.2" } }, + "node_modules/mortice": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/mortice/-/mortice-3.3.1.tgz", + "integrity": "sha512-t3oESfijIPGsmsdLEKjF+grHfrbnKSXflJtgb1wY14cjxZpS6GnhHRXTxxzCAoCCnq1YYfpEPwY3gjiCPhOufQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "abort-error": "^1.0.0", + "it-queue": "^1.1.0", + "main-event": "^1.0.0" + } + }, "node_modules/mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -15471,6 +16520,13 @@ "varint": "^5.0.0" } }, + "node_modules/multiformats": { + "version": "13.4.2", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-13.4.2.tgz", + "integrity": "sha512-eh6eHCrRi1+POZ3dA+Dq1C6jhP1GNtr9CRINMb67OKzqW9I5DUuZM/3jLPlzhgpGeiNUlEGEbkCYChXMCc/8DQ==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/multihashes": { "version": "0.4.21", "resolved": "https://registry.npmjs.org/multihashes/-/multihashes-0.4.21.tgz", @@ -16291,6 +17347,48 @@ "node": ">=12.20" } }, + "node_modules/p-defer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-4.0.1.tgz", + "integrity": "sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==", + "dev": true, + "license": 
"MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-event": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-7.1.0.tgz", + "integrity": "sha512-/lkPs5W1aC3cp6vqZefpdosOn65J571sWodyfOQiF0+tmDCpU+H8Atwpu0vQROCVUlZuToDN5eyTLsMLLc54mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-timeout": "^7.0.1" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-event/node_modules/p-timeout": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-7.0.1.tgz", + "integrity": "sha512-AxTM2wDGORHGEkPCt8yqxOTMgpfbEHqF51f/5fJCmwFC3C/zNcGT63SymH2ttOAaiIws2zVg4+izQCjrakcwHg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", @@ -16363,6 +17461,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-retry": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.1.tgz", + "integrity": "sha512-J5ApzjyRkkf601HpEeykoiCvzHQjWxPAHhyjFcEUP2SWq0+35NKh8TLhpLw+Dkq5TZBFvUM6UigdE9hIVYTl5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-network-error": "^1.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -17536,6 +18650,13 @@ "node": ">=8" } }, + "node_modules/progress-events": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/progress-events/-/progress-events-1.0.1.tgz", + "integrity": 
"sha512-MOzLIwhpt64KIVN64h1MwdKWiyKFNc/S6BoYKPIVUHFg0/eIEyBulhWCgn678v/4c0ri3FdGuzXymNCv02MUIw==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/promise-to-callback": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/promise-to-callback/-/promise-to-callback-1.0.0.tgz", @@ -17578,6 +18699,18 @@ "integrity": "sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==", "dev": true }, + "node_modules/protons-runtime": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-5.6.0.tgz", + "integrity": "sha512-/Kde+sB9DsMFrddJT/UZWe6XqvL7SL5dbag/DBCElFKhkwDj7XKt53S+mzLyaDP5OqS0wXjV5SA572uWDaT0Hg==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8-varint": "^2.0.2", + "uint8arraylist": "^2.4.3", + "uint8arrays": "^5.0.1" + } + }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -17727,6 +18860,36 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/race-event": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/race-event/-/race-event-1.6.1.tgz", + "integrity": "sha512-vi7WH5g5KoTFpu2mme/HqZiWH14XSOtg5rfp6raBskBHl7wnmy3F/biAIyY5MsK+BHWhoPhxtZ1Y2R7OHHaWyQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "abort-error": "^1.0.1" + } + }, + "node_modules/race-signal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/race-signal/-/race-signal-2.0.0.tgz", + "integrity": "sha512-P31bLhE4ByBX/70QDXMutxnqgwrF1WUXea1O8DXuviAgkdbQ1iQMQotNgzJIBC9yUSn08u/acZrMUhgw7w6GpA==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, + "node_modules/random-int": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/random-int/-/random-int-3.1.0.tgz", + "integrity": "sha512-h8CRz8cpvzj0hC/iH/1Gapgcl2TQ6xtnCpyOI5WvWfXf/yrDx2DOU+tD9rX23j36IF11xg1KqB9W11Z18JPMdw==", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -18556,6 +19719,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/retimeable-signal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/retimeable-signal/-/retimeable-signal-1.0.1.tgz", + "integrity": "sha512-Cy26CYfbWnYu8HMoJeDhaMpW/EYFIbne3vMf6G9RSrOyWYXbPehja/BEdzpqmM84uy2bfBD7NPZhoQ4GZEtgvg==", + "dev": true, + "license": "Apache-2.0 OR MIT" + }, "node_modules/retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", @@ -20812,6 +21982,37 @@ "integrity": "sha512-d3i8kc/4s1CFW5g3FctmF1Bu2GVXGBMTn82JY2BW0ZtTtI8pRx1YWGPCFBwRF4uYVSJ7ua4y+qYEPqS+x+3w7Q==", "license": "Do, what You want" }, + "node_modules/uint8-varint": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/uint8-varint/-/uint8-varint-2.0.4.tgz", + "integrity": "sha512-FwpTa7ZGA/f/EssWAb5/YV6pHgVF1fViKdW8cWaEarjB8t7NyofSWBdOTyFPaGuUG4gx3v1O3PQ8etsiOs3lcw==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8arraylist": "^2.0.0", + "uint8arrays": "^5.0.0" + } + }, + "node_modules/uint8arraylist": { + "version": "2.4.8", + "resolved": "https://registry.npmjs.org/uint8arraylist/-/uint8arraylist-2.4.8.tgz", + "integrity": "sha512-vc1PlGOzglLF0eae1M8mLRTBivsvrGsdmJ5RbK3e+QRvRLOZfZhQROTwH/OfyF3+ZVUg9/8hE8bmKP2CvP9quQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "uint8arrays": "^5.0.1" + } + }, + "node_modules/uint8arrays": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/uint8arrays/-/uint8arrays-5.1.0.tgz", + "integrity": "sha512-vA6nFepEmlSKkMBnLBaUMVvAC4G3CTmO58C12y4sq6WPDOR7mOFYOi7GlrQ4djeSbP6JG9Pv9tJDM97PedRSww==", + "dev": true, + "license": "Apache-2.0 OR MIT", + 
"dependencies": { + "multiformats": "^13.0.0" + } + }, "node_modules/ultron": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", @@ -20909,6 +22110,19 @@ "node": ">= 4.0.0" } }, + "node_modules/unlimited-timeout": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unlimited-timeout/-/unlimited-timeout-0.1.0.tgz", + "integrity": "sha512-D4g+mxFeQGQHzCfnvij+R35ukJ0658Zzudw7j16p4tBBbNasKkKM4SocYxqhwT5xA7a9JYWDzKkEFyMlRi5sng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -20993,6 +22207,13 @@ "integrity": "sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==", "license": "MIT" }, + "node_modules/utf8-codec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/utf8-codec/-/utf8-codec-1.0.0.tgz", + "integrity": "sha512-S/QSLezp3qvG4ld5PUfXiH7mCFxLKjSVZRFkB3DOjgwHuJPFDkInAXc/anf7BAbHt/D38ozDzL+QMZ6/7gsI6w==", + "dev": true, + "license": "MIT" + }, "node_modules/util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", @@ -21112,6 +22333,40 @@ "defaults": "^1.0.3" } }, + "node_modules/weald": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/weald/-/weald-1.1.1.tgz", + "integrity": "sha512-PaEQShzMCz8J/AD2N3dJMc1hTZWkJeLKS2NMeiVkV5KDHwgZe7qXLEzyodsT/SODxWDdXJJqocuwf3kHzcXhSQ==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "ms": "^3.0.0-canary.1", + "supports-color": "^10.0.0" + } + }, + "node_modules/weald/node_modules/ms": { + "version": "3.0.0-canary.202508261828", + "resolved": "https://registry.npmjs.org/ms/-/ms-3.0.0-canary.202508261828.tgz", + "integrity": "sha512-NotsCoUCIUkojWCzQff4ttdCfIPoA1UGZsyQbi7KmqkNRfKCrvga8JJi2PknHymHOuor0cJSn/ylj52Cbt2IrQ==", + "dev": 
true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/weald/node_modules/supports-color": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz", + "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", @@ -21842,6 +23097,20 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/wherearewe": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/wherearewe/-/wherearewe-2.0.1.tgz", + "integrity": "sha512-XUguZbDxCA2wBn2LoFtcEhXL6AXo+hVjGonwhSTTTU9SzbWG8Xu3onNIpzf9j/mYUcJQ0f+m37SzG77G851uFw==", + "dev": true, + "license": "Apache-2.0 OR MIT", + "dependencies": { + "is-electron": "^2.2.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -23465,6 +24734,89 @@ "buffer": "^6.0.3" } }, + "@chainsafe/as-chacha20poly1305": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@chainsafe/as-chacha20poly1305/-/as-chacha20poly1305-0.1.0.tgz", + "integrity": "sha512-BpNcL8/lji/GM3+vZ/bgRWqJ1q5kwvTFmGPk7pxm/QQZDbaMI98waOHjEymTjq2JmdD/INdNBFOVSyJofXg7ew==", + "dev": true + }, + "@chainsafe/as-sha256": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@chainsafe/as-sha256/-/as-sha256-1.2.0.tgz", + "integrity": "sha512-H2BNHQ5C3RS+H0ZvOdovK6GjFAyq5T6LClad8ivwj9Oaiy28uvdsGVS7gNJKuZmg0FGHAI+n7F0Qju6U0QkKDA==", + "dev": true + }, + "@chainsafe/is-ip": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@chainsafe/is-ip/-/is-ip-2.1.0.tgz", + "integrity": 
"sha512-KIjt+6IfysQ4GCv66xihEitBjvhU/bixbbbFxdJ1sqCp4uJ0wuZiYBPhksZoy4lfaF0k9cwNzY5upEW/VWdw3w==", + "dev": true + }, + "@chainsafe/libp2p-noise": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/@chainsafe/libp2p-noise/-/libp2p-noise-17.0.0.tgz", + "integrity": "sha512-vwrmY2Y+L1xYhIDiEpl61KHxwrLCZoXzTpwhyk34u+3+6zCAZPL3GxH3i2cs+u5IYNoyLptORdH17RKFXy7upA==", + "dev": true, + "requires": { + "@chainsafe/as-chacha20poly1305": "^0.1.0", + "@chainsafe/as-sha256": "^1.2.0", + "@libp2p/crypto": "^5.1.9", + "@libp2p/interface": "^3.0.0", + "@libp2p/peer-id": "^6.0.0", + "@libp2p/utils": "^7.0.0", + "@noble/ciphers": "^2.0.1", + "@noble/curves": "^2.0.1", + "@noble/hashes": "^2.0.1", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0", + "wherearewe": "^2.0.1" + }, + "dependencies": { + "@noble/ciphers": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@noble/ciphers/-/ciphers-2.1.1.tgz", + "integrity": "sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw==", + "dev": true + }, + "@noble/curves": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-2.0.1.tgz", + "integrity": "sha512-vs1Az2OOTBiP4q0pwjW5aF0xp9n4MxVrmkFBxc6EKZc6ddYx5gaZiAsZoq0uRRXWbi3AT/sBqn05eRPtn1JCPw==", + "dev": true, + "requires": { + "@noble/hashes": "2.0.1" + } + }, + "@noble/hashes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-2.0.1.tgz", + "integrity": "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==", + "dev": true + } + } + }, + "@chainsafe/libp2p-yamux": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@chainsafe/libp2p-yamux/-/libp2p-yamux-8.0.1.tgz", + "integrity": "sha512-pJsqmUg1cZRJZn/luAtQaq0uLcVfExo51Rg7iRtAEceNYtsKUi/exfegnvTBzTnF1CGmTzVEV3MCLsRhqiNyoA==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.0.0", + 
"@libp2p/utils": "^7.0.0", + "race-signal": "^2.0.0", + "uint8arraylist": "^2.4.8" + } + }, + "@chainsafe/netmask": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@chainsafe/netmask/-/netmask-2.0.0.tgz", + "integrity": "sha512-I3Z+6SWUoaljh3TBzCnCxjlUyN8tA+NAk5L6m9IxvCf1BENQTePzPMis97CoN/iMW1St3WN+AWCCRp+TTBRiDg==", + "dev": true, + "requires": { + "@chainsafe/is-ip": "^2.0.1" + } + }, "@cspotcode/source-map-support": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", @@ -23495,6 +24847,16 @@ "undici": "^6.23.0" } }, + "@dnsquery/dns-packet": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@dnsquery/dns-packet/-/dns-packet-6.1.1.tgz", + "integrity": "sha512-WXTuFvL3G+74SchFAtz3FgIYVOe196ycvGsMgvSH/8Goptb1qpIQtIuM4SOK9G9lhMWYpHxnXyy544ZhluFOew==", + "dev": true, + "requires": { + "@leichtgewicht/ip-codec": "^2.0.4", + "utf8-codec": "^1.0.0" + } + }, "@esbuild/aix-ppc64": { "version": "0.25.1", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", @@ -24469,6 +25831,414 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "@leichtgewicht/ip-codec": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", + "integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==", + "dev": true + }, + "@libp2p/bootstrap": { + "version": "12.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/bootstrap/-/bootstrap-12.0.14.tgz", + "integrity": "sha512-kVg/t303ac6l7eSo5M1oZs72cs54FRRjYhmXwvHbPEz5v7NgglDpnNgz7ScBJGaS/z4Xn5qeyghVZFc8Qz7BaA==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-id": "^6.0.4", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "main-event": "^1.0.1" + } + }, + "@libp2p/circuit-relay-v2": { + 
"version": "4.1.7", + "resolved": "https://registry.npmjs.org/@libp2p/circuit-relay-v2/-/circuit-relay-v2-4.1.7.tgz", + "integrity": "sha512-yQJ5+CSKGz1oqisa/hbp+VfwWpYFU9XPgC2qq6Q6rFK80CxeIf3AQ0QQdC/OAS/5Nl+EFLK5DQuaQ+QYT54XLQ==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/interface-internal": "^3.0.14", + "@libp2p/peer-collections": "^7.0.14", + "@libp2p/peer-id": "^6.0.5", + "@libp2p/peer-record": "^9.0.6", + "@libp2p/utils": "^7.0.14", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "nanoid": "^5.1.5", + "progress-events": "^1.0.1", + "protons-runtime": "^6.0.1", + "retimeable-signal": "^1.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + }, + "dependencies": { + "nanoid": { + "version": "5.1.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.7.tgz", + "integrity": "sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==", + "dev": true + }, + "protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "requires": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + } + } + }, + "@libp2p/crypto": { + "version": "5.1.14", + "resolved": "https://registry.npmjs.org/@libp2p/crypto/-/crypto-5.1.14.tgz", + "integrity": "sha512-0L2SEhDfvKWFhlc8GXgm268MoakrS4qbewD5LoZpoiUesXpB9e1vjed9dWEN1VsSjOmrOPyhBoSxZ2mnLTrOVA==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.1", + "@noble/curves": "^2.0.1", + "@noble/hashes": "^2.0.1", + "multiformats": "^13.4.0", + "protons-runtime": "^6.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + }, + "dependencies": { + 
"@noble/curves": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-2.0.1.tgz", + "integrity": "sha512-vs1Az2OOTBiP4q0pwjW5aF0xp9n4MxVrmkFBxc6EKZc6ddYx5gaZiAsZoq0uRRXWbi3AT/sBqn05eRPtn1JCPw==", + "dev": true, + "requires": { + "@noble/hashes": "2.0.1" + } + }, + "@noble/hashes": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-2.0.1.tgz", + "integrity": "sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==", + "dev": true + }, + "protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "requires": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + } + } + }, + "@libp2p/identify": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/identify/-/identify-4.0.13.tgz", + "integrity": "sha512-/zAhl2yMuQeHMyghJZDBRvQ1l3fRwxbsq3zPhT5nDscu9qVDa/CB4xDwquV4jV2Y/pnvefZtngJgj5c+bBIxug==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-record": "^9.0.5", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "it-drain": "^3.0.10", + "it-parallel": "^3.0.13", + "main-event": "^1.0.1", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/interface": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@libp2p/interface/-/interface-3.1.1.tgz", + "integrity": "sha512-pQuReZeZUSqk27UXwXXdAVlxrgs08GrcPsd92Qv27IFBPICG8da3FmHg1bclUpMW/6GE6o4qDCVqR4cBMRVKyA==", + "dev": true, + "requires": { + "@multiformats/dns": "^1.0.6", + 
"@multiformats/multiaddr": "^13.0.1", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8" + } + }, + "@libp2p/interface-internal": { + "version": "3.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/interface-internal/-/interface-internal-3.0.14.tgz", + "integrity": "sha512-X7TxzWapCKNaBCy9quPJIiXouPaAbPNT2XgWghw1MouznKPMWzCyHY+kW0l+e2JkvBqeSDHLPdBE7WnHwdbNtA==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-collections": "^7.0.14", + "@multiformats/multiaddr": "^13.0.1", + "progress-events": "^1.0.1" + } + }, + "@libp2p/kad-dht": { + "version": "16.1.6", + "resolved": "https://registry.npmjs.org/@libp2p/kad-dht/-/kad-dht-16.1.6.tgz", + "integrity": "sha512-G3RqIkA/zG8brOQfADYt+PaLbEOEcwF8DzEVPsRGLpn80xaTMj2zOG4F86wcWK7FSciijnZzDOT0AKgoDzAcdg==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/ping": "^3.0.13", + "@libp2p/record": "^4.0.9", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "interface-datastore": "^9.0.1", + "it-all": "^3.0.9", + "it-drain": "^3.0.10", + "it-length": "^3.0.9", + "it-map": "^3.1.4", + "it-merge": "^3.0.12", + "it-parallel": "^3.0.13", + "it-pipe": "^3.0.1", + "it-pushable": "^3.2.3", + "it-take": "^3.0.9", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "protons-runtime": "^5.6.0", + "race-signal": "^2.0.0", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/logger": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/@libp2p/logger/-/logger-6.2.3.tgz", + "integrity": 
"sha512-ZlGE8a0pHDkTFoNleKHAu4Fqta1QHiqgR3CR9fw0Ek/FnjMXo++zxyBCYdwqYz/Jeqh1s1/svSonRTIfknF4zQ==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.1", + "@multiformats/multiaddr": "^13.0.1", + "interface-datastore": "^9.0.1", + "multiformats": "^13.4.0", + "weald": "^1.1.0" + } + }, + "@libp2p/multistream-select": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/multistream-select/-/multistream-select-7.0.13.tgz", + "integrity": "sha512-nX13GinXiuBFgN+zA/CvIyXZyR/DaftT26agsw6dDfhRvH2RWsoPvf0IGqxk90DsLhpmVxZnTE31rITjmLIKww==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "it-length-prefixed": "^10.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/peer-collections": { + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/peer-collections/-/peer-collections-7.0.14.tgz", + "integrity": "sha512-PoH9m6ihhuEe5ot23o7kZ7aa10QlemTaHyn6w34oXUjhCFWsYNbl3zIlnTLdM2r1ROQABEeMH7AmxvfgipNR0A==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-id": "^6.0.5", + "@libp2p/utils": "^7.0.14", + "multiformats": "^13.4.0" + } + }, + "@libp2p/peer-id": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/@libp2p/peer-id/-/peer-id-6.0.5.tgz", + "integrity": "sha512-0rAcAnoOrhjUPs03fRMw29hctzx9s1mdsmCdfgl1U4FnEohMRfBmLkGD8Al3/J52Z23jwzdDfz1VpyxjOANaHA==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "multiformats": "^13.4.0", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/peer-record": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@libp2p/peer-record/-/peer-record-9.0.6.tgz", + "integrity": "sha512-AJNscSkH6lbia7OO+9F+eGryOnhAZwbJghj4iG2jF2IuGJ5G+hJv28AJyep5J6+BzaTJdnDhhXM5RPHFqHMmWQ==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/peer-id": "^6.0.5", + "@multiformats/multiaddr": 
"^13.0.1", + "multiformats": "^13.4.0", + "protons-runtime": "^6.0.1", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + }, + "dependencies": { + "protons-runtime": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-6.0.1.tgz", + "integrity": "sha512-ONL+jDj143WA1m+WKLuuqBIaDKxm32dx6HfJdyujrRcni/6KkhXzVnyg22nH/Wwqmbwnd1BKUVkD1hMEWZFeww==", + "dev": true, + "requires": { + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + } + } + }, + "@libp2p/peer-store": { + "version": "12.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/peer-store/-/peer-store-12.0.13.tgz", + "integrity": "sha512-hXiIrXEUlXNDJe7i0O32qXRGmLA3ckoRHDjGZcNKMzPnkRDPkGEUQ42v1keA+1QoysMkm95xYyyhF6S3dA6nxg==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-record": "^9.0.5", + "@multiformats/multiaddr": "^13.0.1", + "interface-datastore": "^9.0.1", + "it-all": "^3.0.9", + "main-event": "^1.0.1", + "mortice": "^3.3.1", + "multiformats": "^13.4.0", + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/ping": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/ping/-/ping-3.0.13.tgz", + "integrity": "sha512-ag4opUEB/eYPzNXBn8KaqBNCHVAvED6Kdr3HCMct4pQS3qRrJ6zbLrm6qb7D2WWEKO0WrLuCjE7NmBk9nprmDQ==", + "dev": true, + "requires": { + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@multiformats/multiaddr": "^13.0.1", + "p-event": "^7.0.0", + "race-signal": "^2.0.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/record": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@libp2p/record/-/record-4.0.9.tgz", + "integrity": 
"sha512-ITxntqQ2GDK/yA1NhzEQc2dXpxgox96xZ1cqO507choY5z5Czhz2BxfyElVO/XYjOXvylu1XN66uh3VuGHrfkQ==", + "dev": true, + "requires": { + "protons-runtime": "^5.6.0", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + } + }, + "@libp2p/tcp": { + "version": "11.0.13", + "resolved": "https://registry.npmjs.org/@libp2p/tcp/-/tcp-11.0.13.tgz", + "integrity": "sha512-YTV6rX1NpQVbixYDlsWtIAHALBkW4vYdk4DPmHbFyvvQJdtBofbeHIQakyjPexKrbUtDGaoIXgT5KC2osLRp0g==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "@types/sinon": "^20.0.0", + "main-event": "^1.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8" + } + }, + "@libp2p/utils": { + "version": "7.0.14", + "resolved": "https://registry.npmjs.org/@libp2p/utils/-/utils-7.0.14.tgz", + "integrity": "sha512-G8tj32VT1sRAiXV3pGLMlepRSmkydCKBRXzTp/OFqDjRmoXRlIenWMN+hxKOG5wXOyXZkRtkBbXJGq2kIB27/A==", + "dev": true, + "requires": { + "@chainsafe/is-ip": "^2.1.0", + "@chainsafe/netmask": "^2.0.0", + "@libp2p/crypto": "^5.1.14", + "@libp2p/interface": "^3.1.1", + "@libp2p/logger": "^6.2.3", + "@multiformats/multiaddr": "^13.0.1", + "@sindresorhus/fnv1a": "^3.1.0", + "any-signal": "^4.1.1", + "cborg": "^4.2.14", + "delay": "^7.0.0", + "is-loopback-addr": "^2.0.2", + "it-length-prefixed": "^10.0.1", + "it-pipe": "^3.0.1", + "it-pushable": "^3.2.3", + "it-stream-types": "^2.0.2", + "main-event": "^1.0.1", + "netmask": "^2.0.2", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "race-signal": "^2.0.0", + "uint8-varint": "^2.0.4", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0" + }, + "dependencies": { + "cborg": { + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/cborg/-/cborg-4.5.8.tgz", + "integrity": "sha512-6/viltD51JklRhq4L7jC3zgy6gryuG5xfZ3kzpE+PravtyeQLeQmCYLREhQH7pWENg5pY4Yu/XCd6a7dKScVlw==", + "dev": true + } + } + }, + 
"@libp2p/websockets": { + "version": "10.1.6", + "resolved": "https://registry.npmjs.org/@libp2p/websockets/-/websockets-10.1.6.tgz", + "integrity": "sha512-hRJtodXHbPNvrRaRZL6KwaHsbadUiBAHDkTUS2VqQXmRWZ77B02Es5LDjgSDJIaw4GBJIo8vx2oaML5R94WtBA==", + "dev": true, + "requires": { + "@libp2p/interface": "^3.1.0", + "@libp2p/utils": "^7.0.13", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "@multiformats/multiaddr-to-uri": "^12.0.0", + "main-event": "^1.0.1", + "p-event": "^7.0.0", + "progress-events": "^1.0.1", + "uint8arraylist": "^2.4.8", + "uint8arrays": "^5.1.0", + "ws": "^8.18.3" + }, + "dependencies": { + "ws": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", + "integrity": "sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==", + "dev": true, + "requires": {} + } + } + }, "@metamask/eth-sig-util": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz", @@ -24537,6 +26307,74 @@ "integrity": "sha512-/kSXhY692qiV1MXu6EeOZvg5nECLclxNXcKCxJ3cXQgYuRymRHpdx/t7JXfsK+JLjwA1e1c1/SBrlQYpusC29Q==", "dev": true }, + "@multiformats/dns": { + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/@multiformats/dns/-/dns-1.0.13.tgz", + "integrity": "sha512-yr4bxtA3MbvJ+2461kYIYMsiiZj/FIqKI64hE4SdvWJUdWF9EtZLar38juf20Sf5tguXKFUruluswAO6JsjS2w==", + "dev": true, + "requires": { + "@dnsquery/dns-packet": "^6.1.1", + "@libp2p/interface": "^3.1.0", + "hashlru": "^2.3.0", + "p-queue": "^9.0.0", + "progress-events": "^1.0.0", + "uint8arrays": "^5.0.2" + }, + "dependencies": { + "eventemitter3": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", + "dev": true + }, + "p-queue": { + "version": "9.1.0", + "resolved": 
"https://registry.npmjs.org/p-queue/-/p-queue-9.1.0.tgz", + "integrity": "sha512-O/ZPaXuQV29uSLbxWBGGZO1mCQXV2BLIwUr59JUU9SoH76mnYvtms7aafH/isNSNGwuEfP6W/4xD0/TJXxrizw==", + "dev": true, + "requires": { + "eventemitter3": "^5.0.1", + "p-timeout": "^7.0.0" + } + }, + "p-timeout": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-7.0.1.tgz", + "integrity": "sha512-AxTM2wDGORHGEkPCt8yqxOTMgpfbEHqF51f/5fJCmwFC3C/zNcGT63SymH2ttOAaiIws2zVg4+izQCjrakcwHg==", + "dev": true + } + } + }, + "@multiformats/multiaddr": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr/-/multiaddr-13.0.1.tgz", + "integrity": "sha512-XToN915cnfr6Lr9EdGWakGJbPT0ghpg/850HvdC+zFX8XvpLZElwa8synCiwa8TuvKNnny6m8j8NVBNCxhIO3g==", + "dev": true, + "requires": { + "@chainsafe/is-ip": "^2.0.1", + "multiformats": "^13.0.0", + "uint8-varint": "^2.0.1", + "uint8arrays": "^5.0.0" + } + }, + "@multiformats/multiaddr-matcher": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr-matcher/-/multiaddr-matcher-3.0.1.tgz", + "integrity": "sha512-jvjwzCPysVTQ53F4KqwmcqZw73BqHMk0UUZrMP9P4OtJ/YHrfs122ikTqhVA2upe0P/Qz9l8HVlhEifVYB2q9A==", + "dev": true, + "requires": { + "@multiformats/multiaddr": "^13.0.0" + } + }, + "@multiformats/multiaddr-to-uri": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@multiformats/multiaddr-to-uri/-/multiaddr-to-uri-12.0.0.tgz", + "integrity": "sha512-3uIEBCiy8tfzxYYBl81x1tISiNBQ7mHU4pGjippbJRoQYHzy/ZdZM/7JvTldr8pc/dzpkaNJxnsuxxlhsPOJsA==", + "dev": true, + "requires": { + "@multiformats/multiaddr": "^13.0.0" + } + }, "@noble/ciphers": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/@noble/ciphers/-/ciphers-0.3.0.tgz", @@ -24668,7 +26506,8 @@ "@oceanprotocol/contracts": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/@oceanprotocol/contracts/-/contracts-2.6.0.tgz", - "integrity": 
"sha512-4K3TTM0q4VlBs7GLXzQkGMae576iAGHMARqMFcKXUUtfGyiT8KP4sP9IBsq8UUhu11R4JU2KTqXenHS8EWxrbA==" + "integrity": "sha512-4K3TTM0q4VlBs7GLXzQkGMae576iAGHMARqMFcKXUUtfGyiT8KP4sP9IBsq8UUhu11R4JU2KTqXenHS8EWxrbA==", + "dev": true }, "@oceanprotocol/ddo-js": { "version": "0.1.4", @@ -25185,6 +27024,12 @@ "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==" }, + "@sindresorhus/fnv1a": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/fnv1a/-/fnv1a-3.1.0.tgz", + "integrity": "sha512-KV321z5m/0nuAg83W1dPLy85HpHDk7Sdi4fJbwvacWsEhAh+rZUW4ZfGcXmUIvjZg4ss2bcwNlRhJ7GBEUG08w==", + "dev": true + }, "@sindresorhus/is": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", @@ -26056,6 +27901,21 @@ "integrity": "sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==", "dev": true }, + "@types/sinon": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-20.0.0.tgz", + "integrity": "sha512-etYGUC6IEevDGSWvR9WrECRA01ucR2/Oi9XMBUAdV0g4bLkNf4HlZWGiGlDOq5lgwXRwcV+PSeKgFcW4QzzYOg==", + "dev": true, + "requires": { + "@types/sinonjs__fake-timers": "*" + } + }, + "@types/sinonjs__fake-timers": { + "version": "15.0.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-15.0.1.tgz", + "integrity": "sha512-Ko2tjWJq8oozHzHV+reuvS5KYIRAokHnGbDwGh/J64LntgpbuylF74ipEL24HCyRjf9FOlBiBHWBR1RlVKsI1w==", + "dev": true + }, "@types/underscore": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/@types/underscore/-/underscore-1.13.0.tgz", @@ -26360,6 +28220,12 @@ "event-target-shim": "^5.0.0" } }, + "abort-error": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/abort-error/-/abort-error-1.0.1.tgz", + "integrity": 
"sha512-fxqCblJiIPdSXIUrxI0PL+eJG49QdP9SQ70qtB65MVAoMr2rASlOyAbJFOylfB467F/f+5BCLJJq58RYi7mGfg==", + "dev": true + }, "abortcontroller-polyfill": { "version": "1.7.8", "resolved": "https://registry.npmjs.org/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.8.tgz", @@ -26458,6 +28324,12 @@ "color-convert": "^2.0.1" } }, + "any-signal": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/any-signal/-/any-signal-4.2.0.tgz", + "integrity": "sha512-LndMvYuAPf4rC195lk7oSFuHOYFpOszIYrNYv0gHAvz+aEhE9qPZLhmrIz5pXP2BSsPOXvsuHDXEGaiQhIh9wA==", + "dev": true + }, "append-transform": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", @@ -27946,6 +29818,24 @@ "is-data-view": "^1.0.1" } }, + "datastore-core": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/datastore-core/-/datastore-core-11.0.2.tgz", + "integrity": "sha512-0pN4hMcaCWcnUBo5OL/8j14Lt1l/p1v2VvzryRYeJAKRLqnFrzy2FhAQ7y0yTA63ki760ImQHfm2XlZrfIdFpQ==", + "dev": true, + "requires": { + "@libp2p/logger": "^6.0.0", + "interface-datastore": "^9.0.0", + "interface-store": "^7.0.0", + "it-drain": "^3.0.9", + "it-filter": "^3.1.3", + "it-map": "^3.1.3", + "it-merge": "^3.0.11", + "it-pipe": "^3.0.1", + "it-sort": "^3.0.8", + "it-take": "^3.0.8" + } + }, "debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", @@ -28094,6 +29984,16 @@ "esprima": "^4.0.1" } }, + "delay": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-7.0.0.tgz", + "integrity": "sha512-C3vaGs818qzZjCvVJ98GQUMVyWeg7dr5w2Nwwb2t5K8G98jOyyVO2ti2bKYk5yoYElqH3F2yA53ykuEnwD6MCg==", + "dev": true, + "requires": { + "random-int": "^3.1.0", + "unlimited-timeout": "^0.1.0" + } + }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -30894,6 +32794,12 @@ } } }, + "hashlru": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", + "integrity": "sha512-0cMsjjIC8I+D3M44pOQdsy0OHXGLVz6Z0beRuufhKa0KfaD2wGwAev6jILzXsd3/vpnNQJmWyZtIILqM1N+n5A==", + "dev": true + }, "hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -31127,6 +33033,22 @@ "rxjs": "^7.8.2" } }, + "interface-datastore": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/interface-datastore/-/interface-datastore-9.0.2.tgz", + "integrity": "sha512-jebn+GV/5LTDDoyicNIB4D9O0QszpPqT09Z/MpEWvf3RekjVKpXJCDguM5Au2fwIFxFDAQMZe5bSla0jMamCNg==", + "dev": true, + "requires": { + "interface-store": "^7.0.0", + "uint8arrays": "^5.1.0" + } + }, + "interface-store": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/interface-store/-/interface-store-7.0.1.tgz", + "integrity": "sha512-OPRRUO3Cs6Jr/t98BrJLQp1jUTPgrRH0PqFfuNoPAqd+J7ABN1tjFVjQdaOBiybYJTS/AyBSZnZVWLPvp3dW3w==", + "dev": true + }, "internal-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", @@ -31252,6 +33174,12 @@ "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", "dev": true }, + "is-electron": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz", + "integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==", + "dev": true + }, "is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -31324,6 +33252,12 @@ "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", "dev": true }, + "is-loopback-addr": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-loopback-addr/-/is-loopback-addr-2.0.2.tgz", + "integrity": 
"sha512-26POf2KRCno/KTNL5Q0b/9TYnL00xEsSaLfiFRmjM7m7Lw7ZMmFybzzuX4CcsLAluZGd+niLUiMRxEooVE3aqg==", + "dev": true + }, "is-map": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", @@ -31336,6 +33270,12 @@ "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", "dev": true }, + "is-network-error": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.1.tgz", + "integrity": "sha512-6QCxa49rQbmUWLfk0nuGqzql9U8uaV2H6279bRErPBHe/109hCzsLUBUHfbEtvLIHBd6hyXbgedBSHevm43Edw==", + "dev": true + }, "is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -31607,6 +33547,154 @@ "istanbul-lib-report": "^3.0.0" } }, + "it-all": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-all/-/it-all-3.0.9.tgz", + "integrity": "sha512-fz1oJJ36ciGnu2LntAlE6SA97bFZpW7Rnt0uEc1yazzR2nKokZLr8lIRtgnpex4NsmaBcvHF+Z9krljWFy/mmg==", + "dev": true + }, + "it-drain": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/it-drain/-/it-drain-3.0.10.tgz", + "integrity": "sha512-0w/bXzudlyKIyD1+rl0xUKTI7k4cshcS43LTlBiGFxI8K1eyLydNPxGcsVLsFVtKh1/ieS8AnVWt6KwmozxyEA==", + "dev": true + }, + "it-filter": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/it-filter/-/it-filter-3.1.4.tgz", + "integrity": "sha512-80kWEKgiFEa4fEYD3mwf2uygo1dTQ5Y5midKtL89iXyjinruA/sNXl6iFkTcdNedydjvIsFhWLiqRPQP4fAwWQ==", + "dev": true, + "requires": { + "it-peekable": "^3.0.0" + } + }, + "it-length": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-length/-/it-length-3.0.9.tgz", + "integrity": "sha512-cPhRPzyulYqyL7x4sX4MOjG/xu3vvEIFAhJ1aCrtrnbfxloCOtejOONib5oC3Bz8tLL6b6ke6+YHu4Bm6HCG7A==", + "dev": true + }, + "it-length-prefixed": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/it-length-prefixed/-/it-length-prefixed-10.0.1.tgz", + "integrity": 
"sha512-BhyluvGps26u9a7eQIpOI1YN7mFgi8lFwmiPi07whewbBARKAG9LE09Odc8s1Wtbt2MB6rNUrl7j9vvfXTJwdQ==", + "dev": true, + "requires": { + "it-reader": "^6.0.1", + "it-stream-types": "^2.0.1", + "uint8-varint": "^2.0.1", + "uint8arraylist": "^2.0.0", + "uint8arrays": "^5.0.1" + } + }, + "it-map": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/it-map/-/it-map-3.1.4.tgz", + "integrity": "sha512-QB9PYQdE9fUfpVFYfSxBIyvKynUCgblb143c+ktTK6ZuKSKkp7iH58uYFzagqcJ5HcqIfn1xbfaralHWam+3fg==", + "dev": true, + "requires": { + "it-peekable": "^3.0.0" + } + }, + "it-merge": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/it-merge/-/it-merge-3.0.12.tgz", + "integrity": "sha512-nnnFSUxKlkZVZD7c0jYw6rDxCcAQYcMsFj27thf7KkDhpj0EA0g9KHPxbFzHuDoc6US2EPS/MtplkNj8sbCx4Q==", + "dev": true, + "requires": { + "it-queueless-pushable": "^2.0.0" + } + }, + "it-parallel": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/it-parallel/-/it-parallel-3.0.13.tgz", + "integrity": "sha512-85PPJ/O8q97Vj9wmDTSBBXEkattwfQGruXitIzrh0RLPso6RHfiVqkuTqBNufYYtB1x6PSkh0cwvjmMIkFEPHA==", + "dev": true, + "requires": { + "p-defer": "^4.0.1" + } + }, + "it-peekable": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/it-peekable/-/it-peekable-3.0.8.tgz", + "integrity": "sha512-7IDBQKSp/dtBxXV3Fj0v3qM1jftJ9y9XrWLRIuU1X6RdKqWiN60syNwP0fiDxZD97b8SYM58dD3uklIk1TTQAw==", + "dev": true + }, + "it-pipe": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/it-pipe/-/it-pipe-3.0.1.tgz", + "integrity": "sha512-sIoNrQl1qSRg2seYSBH/3QxWhJFn9PKYvOf/bHdtCBF0bnghey44VyASsWzn5dAx0DCDDABq1hZIuzKmtBZmKA==", + "dev": true, + "requires": { + "it-merge": "^3.0.0", + "it-pushable": "^3.1.2", + "it-stream-types": "^2.0.1" + } + }, + "it-pushable": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/it-pushable/-/it-pushable-3.2.3.tgz", + "integrity": 
"sha512-gzYnXYK8Y5t5b/BnJUr7glfQLO4U5vyb05gPx/TyTw+4Bv1zM9gFk4YsOrnulWefMewlphCjKkakFvj1y99Tcg==", + "dev": true, + "requires": { + "p-defer": "^4.0.0" + } + }, + "it-queue": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/it-queue/-/it-queue-1.1.1.tgz", + "integrity": "sha512-yeYCV22WF1QDyb3ylw+g3TGEdkmnoHUH2mc12QoGOQuxW4XP1V7Zd3BfsEF1iq2IFBwIK7wCPUcRLTAQVeZ3SQ==", + "dev": true, + "requires": { + "abort-error": "^1.0.1", + "it-pushable": "^3.2.3", + "main-event": "^1.0.0", + "race-event": "^1.3.0", + "race-signal": "^2.0.0" + } + }, + "it-queueless-pushable": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/it-queueless-pushable/-/it-queueless-pushable-2.0.3.tgz", + "integrity": "sha512-USa5EzTvmQswOcVE7+o6qsj2o2G+6KHCxSogPOs23sGYkDWFidhqVO7dAvv6ve/Z+Q+nvxpEa9rrRo6VEK7w4Q==", + "dev": true, + "requires": { + "abort-error": "^1.0.1", + "p-defer": "^4.0.1", + "race-signal": "^2.0.0" + } + }, + "it-reader": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/it-reader/-/it-reader-6.0.4.tgz", + "integrity": "sha512-XCWifEcNFFjjBHtor4Sfaj8rcpt+FkY0L6WdhD578SCDhV4VUm7fCkF3dv5a+fTcfQqvN9BsxBTvWbYO6iCjTg==", + "dev": true, + "requires": { + "it-stream-types": "^2.0.1", + "uint8arraylist": "^2.0.0" + } + }, + "it-sort": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-sort/-/it-sort-3.0.9.tgz", + "integrity": "sha512-jsM6alGaPiQbcAJdzMsuMh00uJcI+kD9TBoScB8TR75zUFOmHvhSsPi+Dmh2zfVkcoca+14EbfeIZZXTUGH63w==", + "dev": true, + "requires": { + "it-all": "^3.0.0" + } + }, + "it-stream-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/it-stream-types/-/it-stream-types-2.0.2.tgz", + "integrity": "sha512-Rz/DEZ6Byn/r9+/SBCuJhpPATDF9D+dz5pbgSUyBsCDtza6wtNATrz/jz1gDyNanC3XdLboriHnOC925bZRBww==", + "dev": true + }, + "it-take": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/it-take/-/it-take-3.0.9.tgz", + "integrity": 
"sha512-XMeUbnjOcgrhFXPUqa7H0VIjYSV/BvyxxjCp76QHVAFDJw2LmR1SHxUFiqyGeobgzJr7P2ZwSRRJQGn4D2BVlA==", + "dev": true + }, "iterator.prototype": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", @@ -32116,6 +34204,41 @@ "type-check": "~0.4.0" } }, + "libp2p": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/libp2p/-/libp2p-3.1.6.tgz", + "integrity": "sha512-p1Tg8htMjQbbyNOQd5GtSsZJXKkJQYQBvRrPGMCa3PZBjGs2pNV4Utr7z0na+WgfJJn+mIbcNvP7NzzcrSD1nw==", + "dev": true, + "requires": { + "@chainsafe/is-ip": "^2.1.0", + "@chainsafe/netmask": "^2.0.0", + "@libp2p/crypto": "^5.1.13", + "@libp2p/interface": "^3.1.0", + "@libp2p/interface-internal": "^3.0.13", + "@libp2p/logger": "^6.2.2", + "@libp2p/multistream-select": "^7.0.13", + "@libp2p/peer-collections": "^7.0.13", + "@libp2p/peer-id": "^6.0.4", + "@libp2p/peer-store": "^12.0.13", + "@libp2p/utils": "^7.0.13", + "@multiformats/dns": "^1.0.6", + "@multiformats/multiaddr": "^13.0.1", + "@multiformats/multiaddr-matcher": "^3.0.1", + "any-signal": "^4.1.1", + "datastore-core": "^11.0.1", + "interface-datastore": "^9.0.1", + "it-merge": "^3.0.12", + "it-parallel": "^3.0.13", + "main-event": "^1.0.1", + "multiformats": "^13.4.0", + "p-defer": "^4.0.1", + "p-event": "^7.0.0", + "p-retry": "^7.0.0", + "progress-events": "^1.0.1", + "race-signal": "^2.0.0", + "uint8arrays": "^5.1.0" + } + }, "lilconfig": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", @@ -32301,6 +34424,12 @@ "sourcemap-codec": "^1.4.8" } }, + "main-event": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/main-event/-/main-event-1.0.1.tgz", + "integrity": "sha512-NWtdGrAca/69fm6DIVd8T9rtfDII4Q8NQbIbsKQq2VzS9eqOGYs8uaNQjcuaCq/d9H/o625aOTJX2Qoxzqw0Pw==", + "dev": true + }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -32839,6 +34968,17 @@ "global": "^4.3.2" } }, + 
"mortice": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/mortice/-/mortice-3.3.1.tgz", + "integrity": "sha512-t3oESfijIPGsmsdLEKjF+grHfrbnKSXflJtgb1wY14cjxZpS6GnhHRXTxxzCAoCCnq1YYfpEPwY3gjiCPhOufQ==", + "dev": true, + "requires": { + "abort-error": "^1.0.0", + "it-queue": "^1.1.0", + "main-event": "^1.0.0" + } + }, "mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -32878,6 +35018,12 @@ "varint": "^5.0.0" } }, + "multiformats": { + "version": "13.4.2", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-13.4.2.tgz", + "integrity": "sha512-eh6eHCrRi1+POZ3dA+Dq1C6jhP1GNtr9CRINMb67OKzqW9I5DUuZM/3jLPlzhgpGeiNUlEGEbkCYChXMCc/8DQ==", + "dev": true + }, "multihashes": { "version": "0.4.21", "resolved": "https://registry.npmjs.org/multihashes/-/multihashes-0.4.21.tgz", @@ -33434,6 +35580,29 @@ "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==" }, + "p-defer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-4.0.1.tgz", + "integrity": "sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==", + "dev": true + }, + "p-event": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-7.1.0.tgz", + "integrity": "sha512-/lkPs5W1aC3cp6vqZefpdosOn65J571sWodyfOQiF0+tmDCpU+H8Atwpu0vQROCVUlZuToDN5eyTLsMLLc54mg==", + "dev": true, + "requires": { + "p-timeout": "^7.0.1" + }, + "dependencies": { + "p-timeout": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-7.0.1.tgz", + "integrity": "sha512-AxTM2wDGORHGEkPCt8yqxOTMgpfbEHqF51f/5fJCmwFC3C/zNcGT63SymH2ttOAaiIws2zVg4+izQCjrakcwHg==", + "dev": true + } + } + }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", @@ -33477,6 
+35646,15 @@ "p-timeout": "^3.2.0" } }, + "p-retry": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.1.tgz", + "integrity": "sha512-J5ApzjyRkkf601HpEeykoiCvzHQjWxPAHhyjFcEUP2SWq0+35NKh8TLhpLw+Dkq5TZBFvUM6UigdE9hIVYTl5w==", + "dev": true, + "requires": { + "is-network-error": "^1.1.0" + } + }, "p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -34220,6 +36398,12 @@ "fromentries": "^1.2.0" } }, + "progress-events": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/progress-events/-/progress-events-1.0.1.tgz", + "integrity": "sha512-MOzLIwhpt64KIVN64h1MwdKWiyKFNc/S6BoYKPIVUHFg0/eIEyBulhWCgn678v/4c0ri3FdGuzXymNCv02MUIw==", + "dev": true + }, "promise-to-callback": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/promise-to-callback/-/promise-to-callback-1.0.0.tgz", @@ -34253,6 +36437,17 @@ "integrity": "sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==", "dev": true }, + "protons-runtime": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/protons-runtime/-/protons-runtime-5.6.0.tgz", + "integrity": "sha512-/Kde+sB9DsMFrddJT/UZWe6XqvL7SL5dbag/DBCElFKhkwDj7XKt53S+mzLyaDP5OqS0wXjV5SA572uWDaT0Hg==", + "dev": true, + "requires": { + "uint8-varint": "^2.0.2", + "uint8arraylist": "^2.4.3", + "uint8arrays": "^5.0.1" + } + }, "proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -34348,6 +36543,27 @@ "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==" }, + "race-event": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/race-event/-/race-event-1.6.1.tgz", + "integrity": "sha512-vi7WH5g5KoTFpu2mme/HqZiWH14XSOtg5rfp6raBskBHl7wnmy3F/biAIyY5MsK+BHWhoPhxtZ1Y2R7OHHaWyQ==", + "dev": true, + 
"requires": { + "abort-error": "^1.0.1" + } + }, + "race-signal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/race-signal/-/race-signal-2.0.0.tgz", + "integrity": "sha512-P31bLhE4ByBX/70QDXMutxnqgwrF1WUXea1O8DXuviAgkdbQ1iQMQotNgzJIBC9yUSn08u/acZrMUhgw7w6GpA==", + "dev": true + }, + "random-int": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/random-int/-/random-int-3.1.0.tgz", + "integrity": "sha512-h8CRz8cpvzj0hC/iH/1Gapgcl2TQ6xtnCpyOI5WvWfXf/yrDx2DOU+tD9rX23j36IF11xg1KqB9W11Z18JPMdw==", + "dev": true + }, "randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -34932,6 +37148,12 @@ } } }, + "retimeable-signal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/retimeable-signal/-/retimeable-signal-1.0.1.tgz", + "integrity": "sha512-Cy26CYfbWnYu8HMoJeDhaMpW/EYFIbne3vMf6G9RSrOyWYXbPehja/BEdzpqmM84uy2bfBD7NPZhoQ4GZEtgvg==", + "dev": true + }, "retry": { "version": "0.13.1", "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", @@ -36449,6 +38671,34 @@ "resolved": "https://registry.npmjs.org/uint32/-/uint32-0.2.1.tgz", "integrity": "sha512-d3i8kc/4s1CFW5g3FctmF1Bu2GVXGBMTn82JY2BW0ZtTtI8pRx1YWGPCFBwRF4uYVSJ7ua4y+qYEPqS+x+3w7Q==" }, + "uint8-varint": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/uint8-varint/-/uint8-varint-2.0.4.tgz", + "integrity": "sha512-FwpTa7ZGA/f/EssWAb5/YV6pHgVF1fViKdW8cWaEarjB8t7NyofSWBdOTyFPaGuUG4gx3v1O3PQ8etsiOs3lcw==", + "dev": true, + "requires": { + "uint8arraylist": "^2.0.0", + "uint8arrays": "^5.0.0" + } + }, + "uint8arraylist": { + "version": "2.4.8", + "resolved": "https://registry.npmjs.org/uint8arraylist/-/uint8arraylist-2.4.8.tgz", + "integrity": "sha512-vc1PlGOzglLF0eae1M8mLRTBivsvrGsdmJ5RbK3e+QRvRLOZfZhQROTwH/OfyF3+ZVUg9/8hE8bmKP2CvP9quQ==", + "dev": true, + "requires": { + "uint8arrays": "^5.0.1" + } + }, + "uint8arrays": { + "version": "5.1.0", + "resolved": 
"https://registry.npmjs.org/uint8arrays/-/uint8arrays-5.1.0.tgz", + "integrity": "sha512-vA6nFepEmlSKkMBnLBaUMVvAC4G3CTmO58C12y4sq6WPDOR7mOFYOi7GlrQ4djeSbP6JG9Pv9tJDM97PedRSww==", + "dev": true, + "requires": { + "multiformats": "^13.0.0" + } + }, "ultron": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", @@ -36515,6 +38765,12 @@ "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" }, + "unlimited-timeout": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unlimited-timeout/-/unlimited-timeout-0.1.0.tgz", + "integrity": "sha512-D4g+mxFeQGQHzCfnvij+R35ukJ0658Zzudw7j16p4tBBbNasKkKM4SocYxqhwT5xA7a9JYWDzKkEFyMlRi5sng==", + "dev": true + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -36562,6 +38818,12 @@ "resolved": "https://registry.npmjs.org/utf8/-/utf8-3.0.0.tgz", "integrity": "sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==" }, + "utf8-codec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/utf8-codec/-/utf8-codec-1.0.0.tgz", + "integrity": "sha512-S/QSLezp3qvG4ld5PUfXiH7mCFxLKjSVZRFkB3DOjgwHuJPFDkInAXc/anf7BAbHt/D38ozDzL+QMZ6/7gsI6w==", + "dev": true + }, "util": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", @@ -36653,6 +38915,30 @@ "defaults": "^1.0.3" } }, + "weald": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/weald/-/weald-1.1.1.tgz", + "integrity": "sha512-PaEQShzMCz8J/AD2N3dJMc1hTZWkJeLKS2NMeiVkV5KDHwgZe7qXLEzyodsT/SODxWDdXJJqocuwf3kHzcXhSQ==", + "dev": true, + "requires": { + "ms": "^3.0.0-canary.1", + "supports-color": "^10.0.0" + }, + "dependencies": { + "ms": { + "version": "3.0.0-canary.202508261828", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-3.0.0-canary.202508261828.tgz", + "integrity": "sha512-NotsCoUCIUkojWCzQff4ttdCfIPoA1UGZsyQbi7KmqkNRfKCrvga8JJi2PknHymHOuor0cJSn/ylj52Cbt2IrQ==", + "dev": true + }, + "supports-color": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz", + "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==", + "dev": true + } + } + }, "web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", @@ -37243,6 +39529,15 @@ "webidl-conversions": "^3.0.0" } }, + "wherearewe": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/wherearewe/-/wherearewe-2.0.1.tgz", + "integrity": "sha512-XUguZbDxCA2wBn2LoFtcEhXL6AXo+hVjGonwhSTTTU9SzbWG8Xu3onNIpzf9j/mYUcJQ0f+m37SzG77G851uFw==", + "dev": true, + "requires": { + "is-electron": "^2.2.0" + } + }, "which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index eef8a0146..078b52f75 100644 --- a/package.json +++ b/package.json @@ -1,18 +1,27 @@ { "name": "@oceanprotocol/lib", "source": "./src/index.ts", - "version": "6.1.2", + "version": "7.0.0-next.13", "description": "JavaScript client library for Ocean Protocol", "main": "./dist/lib.cjs", "umd:main": "dist/lib.umd.js", "module": "./dist/lib.module.mjs", "type": "module", - "export": { - "require": "./dist/lib.cjs", - "import": "./dist/lib.module.mjs", - "default": "./dist/lib.modern.mjs" + "exports": { + ".": { + "types": "./dist/types/index.d.ts", + "import": "./dist/lib.module.mjs", + "require": "./dist/lib.cjs", + "default": "./dist/lib.modern.mjs" + } }, "types": "./dist/types/index.d.ts", + "files": [ + "dist", + "!dist/*.map", + "README.md", + "LICENSE" + ], "scripts": { "start": "npm run clean && npm run build:metadata && tsc -w", "build": "npm run clean && npm run 
build:metadata && microbundle build --format modern,esm,cjs,umd --compress --tsconfig tsconfig.json", @@ -56,12 +65,8 @@ "web3": "^1.8.0" }, "dependencies": { - "@oasisprotocol/sapphire-paratime": "^1.3.2", - "@oceanprotocol/contracts": "^2.6.0", "@oceanprotocol/ddo-js": "^0.1.4", - "@rdfjs/dataset": "^2.0.2", - "@rdfjs/formats-common": "^3.1.0", - "@zazuko/env-node": "^2.1.4", + "@oasisprotocol/sapphire-paratime": "^1.3.2", "bignumber.js": "^9.3.1", "cross-fetch": "^4.0.0", "crypto-js": "^4.1.1", @@ -72,7 +77,18 @@ "jsonwebtoken": "^9.0.2" }, "devDependencies": { + "@chainsafe/libp2p-noise": "^17.0.0", + "@chainsafe/libp2p-yamux": "^8.0.1", "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@libp2p/bootstrap": "^12.0.14", + "@libp2p/circuit-relay-v2": "^4.1.7", + "@libp2p/identify": "^4.0.13", + "@libp2p/kad-dht": "^16.1.6", + "@libp2p/tcp": "^11.0.13", + "@libp2p/websockets": "^10.1.6", + "@multiformats/multiaddr": "^13.0.1", + "@oceanprotocol/contracts": "^2.6.0", + "libp2p": "^3.1.6", "@truffle/hdwallet-provider": "^2.0.14", "@types/chai": "^5.2.2", "@types/chai-spies": "^1.0.3", diff --git a/scripts/get-metadata.js b/scripts/get-metadata.js index 2118b76a2..dd14b2211 100755 --- a/scripts/get-metadata.js +++ b/scripts/get-metadata.js @@ -2,7 +2,7 @@ 'use strict'; import { execSync } from 'child_process'; -import packageInfo from '../package.json' assert { type: 'json' }; +import packageInfo from '../package.json' with { type: 'json' }; process.stdout.write( JSON.stringify( { diff --git a/src/@types/Compute.ts b/src/@types/Compute.ts index 5433b504c..b88b62b2d 100644 --- a/src/@types/Compute.ts +++ b/src/@types/Compute.ts @@ -110,6 +110,29 @@ export interface ComputeJob { } } +export interface ComputeJobPayment { + chainId: number + token: string + lockTx: string | null + claimTx: string | null + cancelTx: string | null + cost: number +} + +export interface NodeComputeJob extends ComputeJob { + environment?: string + stopRequested?: boolean + resources?: 
ComputeResourceRequest[] + isFree?: boolean + algoStartTimestamp?: string + algoStopTimestamp?: string + payment?: ComputeJobPayment + algoDuration?: number + queueMaxWaitTime?: number + jobIdHash?: string + maxJobDuration?: number +} + export interface ComputeOutputEncryption { encryptMethod: EncryptMethod.AES // in future we will support more ciphers key: string // AES symetric key @@ -168,3 +191,5 @@ export interface dockerRegistryAuth { password?: string auth?: string } + +export type ComputeResultStream = AsyncIterable diff --git a/src/@types/File.ts b/src/@types/File.ts index 31a903f6e..2dfcc0197 100644 --- a/src/@types/File.ts +++ b/src/@types/File.ts @@ -2,6 +2,19 @@ export interface HeadersObject { [key: string]: string } +export enum FileObjectType { + // eslint-disable-next-line no-unused-vars + URL = 'url', + // eslint-disable-next-line no-unused-vars + IPFS = 'ipfs', + // eslint-disable-next-line no-unused-vars + ARWEAVE = 'arweave', + // eslint-disable-next-line no-unused-vars + S3 = 's3', + // eslint-disable-next-line no-unused-vars + FTP = 'ftp' +} + export enum EncryptMethod { // eslint-disable-next-line no-unused-vars AES = 'AES', @@ -10,7 +23,7 @@ export enum EncryptMethod { } export interface BaseFileObject { - type: string + type: FileObjectType | string encryptedBy?: string encryptMethod?: EncryptMethod } diff --git a/src/@types/Provider.ts b/src/@types/Provider.ts index 7ee1e385b..f547ec5ea 100644 --- a/src/@types/Provider.ts +++ b/src/@types/Provider.ts @@ -42,6 +42,48 @@ export interface ServiceEndpoint { method: string urlPath: string } + +export interface NodeStatusProvider { + chainId: string + network: string +} + +export interface NodeStatusIndexer { + chainId: string + network: string + block: string +} + +export interface NodeStatus { + id: string + publicKey: string + friendlyName: string + address: string + version: string + http: boolean + p2p: boolean + provider: NodeStatusProvider[] + indexer: NodeStatusIndexer[] + 
escrowAddress: Record + supportedStorage: Record + platform: { + cpus: number + freemem: number + totalmem: number + loadavg: number[] + arch: string + machine: string + platform: string + osType: string + node: string + } + codeHash: string + allowedAdmins: { + addresses: string[] + accessLists: string[] | null + } + uptime: number +} export interface UserCustomParameters { [key: string]: any } @@ -85,3 +127,21 @@ export const PROTOCOL_COMMANDS = { GET_LOGS: 'getLogs', JOBS: 'jobs' } + +export interface NodeLogsParams { + logId?: string + startTime?: string + endTime?: string + maxLogs?: number + moduleName?: string + level?: string + page?: number +} + +export interface NodeLogEntry { + timestamp: string + level: string + moduleName: string + message: string + meta?: Record +} diff --git a/src/config/ConfigHelper.ts b/src/config/ConfigHelper.ts index 444bf6bfa..5f7ec7f71 100644 --- a/src/config/ConfigHelper.ts +++ b/src/config/ConfigHelper.ts @@ -182,21 +182,21 @@ export const KNOWN_CONFIDENTIAL_EVMS = [ 23295 // oasis_sapphire_testnet ] +export const getNodeEndpointConfig = (): Partial => { + if (process.env.NODE_ENDPOINT || process.env.NODE_URL) { + return { + oceanNodeUri: process.env.NODE_ENDPOINT || process.env.NODE_URL + } + } + + return {} +} + export class ConfigHelper { /* Load contract addresses from env ADDRESS_FILE (generated by ocean-contracts) */ public getAddressesFromEnv(network: string, customAddresses?: any): Partial { let configAddresses: Partial - const getUris = () => { - if (process.env.NODE_URL) { - return { - oceanNodeUri: process.env.NODE_URL - } - } - - return {} - } - if (customAddresses && customAddresses[network]) { const { FixedPrice, @@ -223,7 +223,7 @@ export class ConfigHelper { accessListFactory: AccessListFactory, escrow: Escrow, EnterpriseFeeCollector, - ...getUris() + ...getNodeEndpointConfig() } } else if ((DefaultContractsAddresses as { [key: string]: any })[network]) { const { @@ -251,7 +251,7 @@ export class ConfigHelper { 
accessListFactory: AccessListFactory, escrow: Escrow, EnterpriseFeeCollector, - ...getUris() + ...getNodeEndpointConfig() } } return configAddresses diff --git a/src/contracts/NFT.ts b/src/contracts/NFT.ts index 8c8d8ebe9..25f27460d 100644 --- a/src/contracts/NFT.ts +++ b/src/contracts/NFT.ts @@ -561,9 +561,15 @@ export class Nft extends SmartContract { if (!(await this.getNftPermissions(nftAddress, address)).updateMetadata) { throw new Error(`Caller is not Metadata updater`) } + // Indexer expects a bare peer ID, not a full multiaddr, for P2P decrypt routing. + let decryptorUrl = metadataDecryptorUrl + if (metadataDecryptorUrl?.includes('/p2p/')) { + const p2pMatch = metadataDecryptorUrl.match(/\/p2p\/([^/]+)/) + decryptorUrl = p2pMatch?.[1] ?? metadataDecryptorUrl + } const estGas = await nftContract.setMetaData.estimateGas( metadataState, - metadataDecryptorUrl, + decryptorUrl, metadataDecryptorAddress, flags, data, @@ -578,7 +584,7 @@ export class Nft extends SmartContract { this.config?.gasFeeMultiplier, nftContract.setMetaData, metadataState, - metadataDecryptorUrl, + decryptorUrl, metadataDecryptorAddress, flags, data, diff --git a/src/services/Aquarius.ts b/src/services/Aquarius.ts index 573f9d66e..5cbce7be2 100644 --- a/src/services/Aquarius.ts +++ b/src/services/Aquarius.ts @@ -5,6 +5,8 @@ import { Signer } from 'ethers' import { signRequest } from '../utils/SignatureUtils.js' import { Asset, DDO, DDOManager, ValidateMetadata } from '@oceanprotocol/ddo-js' import { PROTOCOL_COMMANDS } from '../@types/Provider.js' +import { isP2pUri, getAuthorization } from './providers/BaseProvider.js' +import { ProviderInstance } from './Provider.js' export interface SearchQuery { from?: number @@ -26,12 +28,6 @@ export class Aquarius { this.aquariusURL = aquariusURL } - // temp, untll we merge aquarius & provider - private getAuthorization(signerOrAuthToken: Signer | string): string | undefined { - const isAuthToken = typeof signerOrAuthToken === 'string' - return 
isAuthToken ? signerOrAuthToken : undefined - } - /** Resolves a DID * @param {string} did DID of the asset. * @param {AbortSignal} signal abort signal @@ -42,6 +38,11 @@ export class Aquarius { signal?: AbortSignal, authorization?: string ): Promise { + if (isP2pUri(this.aquariusURL)) { + const result = await ProviderInstance.resolveDdo(this.aquariusURL, did, signal) + if (result) return result as Asset + throw new Error('P2P request failed') + } const path = this.aquariusURL + '/api/aquarius/assets/ddo/' + did try { const response = await fetch(path, { @@ -83,16 +84,22 @@ export class Aquarius { LoggerInstance.warn('Max Limit exceeded, defaulting to 500 retries.') maxRetries = 500 } + const isP2p = isP2pUri(this.aquariusURL) do { try { - const path = this.aquariusURL + '/api/aquarius/assets/ddo/' + did - const response = await fetch(path, { - method: 'GET', - headers: { 'Content-Type': 'application/json', Authorization: authorization }, - signal - }) - if (response.ok) { - const ddo = await response.json() + let ddo: any + if (isP2p) { + ddo = await ProviderInstance.resolveDdo(this.aquariusURL, did, signal) + } else { + const path = this.aquariusURL + '/api/aquarius/assets/ddo/' + did + const response = await fetch(path, { + method: 'GET', + headers: { 'Content-Type': 'application/json', Authorization: authorization }, + signal + }) + if (response.ok) ddo = await response.json() + } + if (ddo) { const ddoInstance = DDOManager.getDDOClass(ddo) const { indexedMetadata } = ddoInstance.getAssetFields() if (txid) { @@ -126,6 +133,10 @@ export class Aquarius { signal?: AbortSignal, authorization?: string ): Promise { + if (isP2pUri(providerUrl)) { + return ProviderInstance.validateDdo(providerUrl, ddo, signer, signal) + } + const ddoValidateRoute = providerUrl + '/api/aquarius/assets/ddo/validate' const pathNonce = providerUrl + '/api/services/nonce' @@ -138,7 +149,6 @@ export class Aquarius { signal }) let { nonce } = await responseNonce.json() - 
console.log(`[getNonce] Consumer: ${publisherAddress} nonce: ${nonce}`) if (!nonce || nonce === null) { nonce = '0' } @@ -155,7 +165,7 @@ export class Aquarius { body: JSON.stringify(data), headers: { 'Content-Type': 'application/json', - Authorization: this.getAuthorization(signer) + Authorization: getAuthorization(signer) }, signal }) diff --git a/src/services/Provider.ts b/src/services/Provider.ts index 7784e32b5..42bc3b9b8 100644 --- a/src/services/Provider.ts +++ b/src/services/Provider.ts @@ -1,1632 +1,9 @@ -import fetch from 'cross-fetch' -import { Signer } from 'ethers' -import { LoggerInstance } from '../utils/Logger.js' -import { - StorageObject, - FileInfo, - ComputeJob, - ComputeOutput, - ComputeAlgorithm, - ComputeAsset, - ComputeEnvironment, - ProviderInitialize, - ProviderComputeInitializeResults, - ServiceEndpoint, - UserCustomParameters, - ComputeResourceRequest, - ComputePayment, - ComputeJobMetadata, - PolicyServerInitializeCommand, - PolicyServerPassthroughCommand, - dockerRegistryAuth -} from '../@types' -import { PROTOCOL_COMMANDS } from '../@types/Provider.js' -import { decodeJwt } from '../utils/Jwt.js' -import { eciesencrypt } from '../utils/eciesencrypt.js' -import { signRequest } from '../utils/SignatureUtils.js' - -export class Provider { - private async getConsumerAddress(signerOrAuthToken: Signer | string): Promise { - const isAuthToken = typeof signerOrAuthToken === 'string' - return isAuthToken - ? 
decodeJwt(signerOrAuthToken).address - : await signerOrAuthToken.getAddress() - } - - private async getSignature( - signerOrAuthToken: Signer | string, - nonce: string, - command: string - ): Promise { - const isAuthToken = typeof signerOrAuthToken === 'string' - if (isAuthToken) return null - const message = String( - String(await signerOrAuthToken.getAddress()) + String(nonce) + String(command) - ) - return signRequest(signerOrAuthToken, message) - } - - private getAuthorization(signerOrAuthToken: Signer | string): string | undefined { - const isAuthToken = typeof signerOrAuthToken === 'string' - return isAuthToken ? signerOrAuthToken : undefined - } - - /** - * Returns the provider endpoints - * @param {string} providerUri - the provider url - * @return {Promise} - */ - async getEndpoints(providerUri: string, authorization?: string): Promise { - try { - const endpoints = await this.getData(providerUri, authorization) - return await endpoints.json() - } catch (e) { - LoggerInstance.error('Finding the service endpoints failed:', e) - throw new Error('HTTP request failed calling Provider') - } - } - - /** - * Returns the node public key - * @return {string} The node public key - */ - private async getNodePublicKey(providerUri: string): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - return providerEndpoints.nodePublicKey - } - - /** - * This function returns the endpoint URL for a given service name. 
- * @param {ServiceEndpoint[]} servicesEndpoints - The array of service endpoints - * @param {string} serviceName - The name of the service - * @returns {ServiceEndpoint} The endpoint URL for the given service name - */ - getEndpointURL( - servicesEndpoints: ServiceEndpoint[], - serviceName: string - ): ServiceEndpoint { - if (!servicesEndpoints) return null - return servicesEndpoints.find( - (s) => s.serviceName.toLowerCase() === serviceName.toLowerCase() - ) as ServiceEndpoint - } - - /** - * This function returns an array of service endpoints for a given provider endpoint. - * @param {string} providerEndpoint - The provider endpoint - * @param {any} endpoints - The endpoints object - * @returns {ServiceEndpoint[]} An array of service endpoints - */ - public async getServiceEndpoints(providerEndpoint: string, endpoints: any) { - const serviceEndpoints: ServiceEndpoint[] = [] - for (const i in endpoints.serviceEndpoints) { - const endpoint: ServiceEndpoint = { - serviceName: i, - method: endpoints.serviceEndpoints[i][0], - urlPath: - providerEndpoint.replace(/\/+$/, '') + - '/' + - endpoints.serviceEndpoints[i][1].replace(/^\/+/, '') - } - serviceEndpoints.push(endpoint) - } - return serviceEndpoints - } - - /** - * Get current nonce from the provider. 
- * @param {string} providerUri provider uri address - * @param {string} consumerAddress Publisher address - * @param {AbortSignal} signal abort signal - * @param {string} providerEndpoints Identifier of the asset to be registered in ocean - * @param {string} serviceEndpoints document description object (DDO)= - * @return {Promise} urlDetails - */ - public async getNonce( - providerUri: string, - consumerAddress: string, - signal?: AbortSignal, - providerEndpoints?: any, - serviceEndpoints?: ServiceEndpoint[] - ): Promise { - if (!providerEndpoints) { - providerEndpoints = await this.getEndpoints(providerUri) - } - if (!serviceEndpoints) { - serviceEndpoints = await this.getServiceEndpoints(providerUri, providerEndpoints) - } - const path = this.getEndpointURL(serviceEndpoints, 'nonce') - ? this.getEndpointURL(serviceEndpoints, 'nonce').urlPath - : null - if (!path) return null - try { - const response = await fetch(path + `?userAddress=${consumerAddress}`, { - method: 'GET', - headers: { 'Content-Type': 'application/json' }, - signal - }) - const { nonce } = await response.json() - console.log(`[getNonce] Consumer: ${consumerAddress} nonce: ${nonce}`) - const sanitizedNonce = !nonce || nonce === null ? 
0 : Number(nonce) - return sanitizedNonce - } catch (e) { - LoggerInstance.error(e) - throw new Error(e.message) - } - } - - /** - * Encrypt data using the Provider's own symmetric key - * @param {string} data data in json format that needs to be sent , it can either be a DDO or a File array - * @param {number} chainId network's id so provider can choose the corresponding Signer object - * @param {string} providerUri provider uri address - * @param {AbortSignal} signal abort signal - * @return {Promise} urlDetails - */ - public async encrypt( - data: any, - chainId: number, - providerUri: string, - signerOrAuthToken: Signer | string, - policyServer?: any, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.ENCRYPT - ) - - let path = - (this.getEndpointURL(serviceEndpoints, 'encrypt') - ? this.getEndpointURL(serviceEndpoints, 'encrypt').urlPath - : null) + `?chainId=${chainId}` - if (!path) return null - path += `&nonce=${nonce}` - path += `&consumerAddress=${consumerAddress}` - path += `&signature=${signature}` - - try { - const response = await fetch(path, { - method: 'POST', - body: JSON.stringify(data), - headers: { 'Content-Type': 'application/octet-stream' }, - signal - }) - return await response.text() - } catch (e) { - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - } - - /** - * Get file details for a given DID and service ID. - * @param {string} did - The DID to check. - * @param {string} serviceId - The service ID to check. 
- * @param {string} providerUri - The URI of the provider. - * @param {boolean} [withChecksum=false] - Whether or not to include a checksum. - * @param {AbortSignal} [signal] - An optional abort signal. - * @returns {Promise} A promise that resolves with an array of file info objects. - */ - public async checkDidFiles( - did: string, - serviceId: string, - providerUri: string, - withChecksum: boolean = false, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const args = { did, serviceId, checksum: withChecksum } - const files: FileInfo[] = [] - const path = this.getEndpointURL(serviceEndpoints, 'fileinfo') - ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath - : null - if (!path) return null - let response - try { - response = await fetch(path, { - method: 'POST', - body: JSON.stringify(args), - headers: { 'Content-Type': 'application/json' }, - signal - }) - } catch (e) { - LoggerInstance.error('File info call failed: ') - LoggerInstance.error(e) - throw new Error(e) - } - if (response?.ok) { - const results: FileInfo[] = await response.json() - for (const result of results) { - files.push(result) - } - return files - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'File info call failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Get File details (if possible) - * @param {StorageObject} file one of the supported file structures - * @param {string} providerUri uri of the provider that will be used to check the file - * @param {boolean} [withChecksum=false] - Whether or not to include a checksum. - * @param {AbortSignal} [signal] - An optional abort signal. - * @returns {Promise} A promise that resolves with an array of file info objects. 
- */ - public async getFileInfo( - file: StorageObject, - providerUri: string, - withChecksum: boolean = false, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const args = { ...file, checksum: withChecksum } - const files: FileInfo[] = [] - const path = this.getEndpointURL(serviceEndpoints, 'fileinfo') - ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath - : null - if (!path) return null - let response - try { - response = await fetch(path, { - method: 'POST', - body: JSON.stringify(args), - headers: { 'Content-Type': 'application/json' }, - signal - }) - } catch (e) { - LoggerInstance.error('File info call failed: ') - LoggerInstance.error(e) - throw new Error(e) - } - if (response?.ok) { - const results: FileInfo[] = await response.json() - for (const result of results) { - files.push(result) - } - return files - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'File info call failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Returns compute environments from a provider. - * @param {string} providerUri - The URI of the provider. - * @param {AbortSignal} [signal] - An optional abort signal. - * @returns {Promise<{[chainId: number]: ComputeEnvironment[]}>} A promise that resolves with an object containing compute environments for each chain ID. 
- */ - public async getComputeEnvironments( - providerUri: string, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const path = this.getEndpointURL(serviceEndpoints, 'computeEnvironments')?.urlPath - if (!path) return null - let response - try { - response = await fetch(path, { - method: 'GET', - headers: { 'Content-Type': 'application/json' }, - signal - }) - } catch (e) { - LoggerInstance.error('Fetch compute env failed: ') - LoggerInstance.error(e) - throw new Error(e) - } - if (response?.ok) { - const result = response.json() - // chain is not part of response - // if (Array.isArray(result)) { - // const providerChain: number = providerEndpoints.chainId - // return { [providerChain]: result } - // } - return result - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Fetch compute env failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Initializes the provider for a service request. - * @param {string} did - The asset DID . - * @param {string} serviceId - The asset service ID. - * @param {number} fileIndex - The file index. - * @param {string} consumerAddress - The consumer address. - * @param {string} providerUri - The URI of the provider. - * @param {AbortSignal} [signal] - The abort signal if any. - * @param {UserCustomParameters} [userCustomParameters] - The custom parameters if any. - * @param {string} [computeEnv] - The compute environment if any. - * @param {number} [validUntil] - The validity time if any. - * @returns {Promise} A promise that resolves with ProviderInitialize response. 
- */ - public async initialize( - did: string, - serviceId: string, - fileIndex: number, - consumerAddress: string, - providerUri: string, - signal?: AbortSignal, - userCustomParameters?: UserCustomParameters, - computeEnv?: string, - validUntil?: number - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - let initializeUrl = this.getEndpointURL(serviceEndpoints, 'initialize') - ? this.getEndpointURL(serviceEndpoints, 'initialize').urlPath - : null - - if (!initializeUrl) return null - initializeUrl += `?documentId=${did}` - initializeUrl += `&serviceId=${serviceId}` - initializeUrl += `&fileIndex=${fileIndex}` - initializeUrl += `&consumerAddress=${consumerAddress}` - if (userCustomParameters) - initializeUrl += '&userdata=' + encodeURI(JSON.stringify(userCustomParameters)) - if (computeEnv) initializeUrl += '&environment=' + encodeURI(computeEnv) - if (validUntil) initializeUrl += '&validUntil=' + validUntil - let response - try { - response = await fetch(initializeUrl, { - method: 'GET', - headers: { 'Content-Type': 'application/json' }, - signal - }) - } catch (e) { - LoggerInstance.error('Provider initialized failed: ') - LoggerInstance.error(e) - throw new Error(`Provider initialize failed url: ${initializeUrl} `) - } - if (response?.status === 200) { - const results: ProviderInitialize = await response.json() - return results - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Provider initialized failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** Initializes the provider for a compute request. - * @param {ComputeAsset[]} assets The datasets array to initialize compute request. - * @param {ComputeAlgorithmber} algorithm The algorithm to use. - * @param {string} computeEnv The compute environment. 
- * @param {string} token The payment token address. - * @param {number} validUntil The job expiration date. - * @param {string} providerUri The provider URI. - * @param {SignerOrAuthToken} signerOrAuthToken Signer or auth token - * @param {ComputeResourceRequest[]} resources The resources to start compute job with. - * @param {number} chainId The chain used to do payments - * @param {any} policyServer Policy server data. - * @param {AbortSignal} signal abort signal - * @param {ComputeOutput} output The compute job output settings. - * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. - * @return {Promise} ProviderComputeInitialize data - */ - public async initializeCompute( - assets: ComputeAsset[], - algorithm: ComputeAlgorithm, - computeEnv: string, - token: string, - validUntil: number, - providerUri: string, - signerOrAuthToken: Signer | string, - resources: ComputeResourceRequest[], - chainId: number, - policyServer?: any, - signal?: AbortSignal, - output?: ComputeOutput, - dockerRegistryAuth?: dockerRegistryAuth - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const initializeUrl = this.getEndpointURL(serviceEndpoints, 'initializeCompute') - ? this.getEndpointURL(serviceEndpoints, 'initializeCompute').urlPath - : null - if (!initializeUrl) return null - - // Diff from V1. 
We might need a signature to get the files object, specially if dealing with confidential evm and template 4 - // otherwise it can be ignored - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - // same signed message as for start compute (consumer address + did[0] + nonce) - let signature - const isAuthToken = typeof signerOrAuthToken === 'string' - if (!isAuthToken) { - let signatureMessage = consumerAddress - signatureMessage += assets[0]?.documentId - signatureMessage += nonce - signature = await signRequest(signerOrAuthToken, signatureMessage) - } - - const providerData: Record = { - datasets: assets, - algorithm, - environment: computeEnv, - payment: { - chainId, - token, - resources - }, - maxJobDuration: validUntil, - consumerAddress, - signature - } - if (dockerRegistryAuth) { - const nodeKey = await this.getNodePublicKey(providerUri) - if (nodeKey) { - providerData.dockerRegistryAuth = eciesencrypt( - nodeKey, - JSON.stringify(dockerRegistryAuth) - ) - } - } - - if (policyServer) providerData.policyServer = policyServer - if (output) { - const nodeKey = await this.getNodePublicKey(providerUri) - if (nodeKey) { - providerData.output = eciesencrypt(nodeKey, JSON.stringify(output)) - } - } - let response - try { - console.log('Initialize compute url:', initializeUrl) - response = await fetch(initializeUrl, { - method: 'POST', - body: JSON.stringify(providerData), - headers: { - 'Content-Type': 'application/json', - Authorization: this.getAuthorization(signerOrAuthToken) - }, - signal - }) - console.log('Raw response:', response) - if (!response.ok) { - const errorText = await response.text() - throw new Error(`${errorText}`) - } - } catch (e) { - LoggerInstance.error('Initialize compute failed: ') - LoggerInstance.error(e) - throw new Error(`ComputeJob cannot be initialized: 
${e.message}.`) - } - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Initialize compute failed: ', - response.status, - response.statusText, - resolvedResponse - ) - LoggerInstance.error('Payload was:', JSON.stringify(providerData)) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Gets the download URL. - * @param {string} did - The DID. - * @param {string} serviceId - The service ID. - * @param {number} fileIndex - The file index. - * @param {string} transferTxId - The transfer transaction ID. - * @param {string} providerUri - The provider URI. - * @param {SignerOrAuthToken} signerOrAuthToken - The signer or auth token. - * @param {any} policyServer Policy server data. - * @param {UserCustomParameters} userCustomParameters - The user custom parameters. - * @returns {Promise} The download URL. - */ - public async getDownloadUrl( - did: string, - serviceId: string, - fileIndex: number, - transferTxId: string, - providerUri: string, - signerOrAuthToken: Signer | string, - policyServer?: any, - userCustomParameters?: UserCustomParameters - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const downloadUrl = this.getEndpointURL(serviceEndpoints, 'download') - ? 
this.getEndpointURL(serviceEndpoints, 'download').urlPath - : null - if (!downloadUrl) return null - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - null, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.DOWNLOAD - ) - let consumeUrl = downloadUrl - consumeUrl += `?fileIndex=${fileIndex}` - consumeUrl += `&documentId=${did}` - consumeUrl += `&transferTxId=${transferTxId}` - consumeUrl += `&serviceId=${serviceId}` - consumeUrl += `&consumerAddress=${consumerAddress}` - consumeUrl += `&nonce=${nonce}` - if (policyServer) { - consumeUrl += '&policyServer=' + encodeURI(JSON.stringify(policyServer)) - } - - consumeUrl += `&signature=${signature}` - if (userCustomParameters) - consumeUrl += '&userdata=' + encodeURI(JSON.stringify(userCustomParameters)) - return consumeUrl - } - - /** Instruct the provider to start a PAYED compute job - * @param {string} providerUri The provider URI. - * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer object or auth token. - * @param {string} computeEnv The compute environment. - * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) - * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. - * @param {number} maxJobDuration The compute job max execution time. - * @param {string} token The token address for compute payment. - * @param {ComputeResourceRequest} resources The resources to start compute job with. - * @param {chainId} chainId The chain used to do payments - * @param {ComputeJobMetadata} metadata The compute job metadata. Additional metadata to be stored in the database. - * @param {ComputeOutput} output The compute job output settings. - * @param {any} policyServer Policy server data. 
- * @param {AbortSignal} signal abort signal - * @param {number} queueMaxWaitTime Maximum time in seconds to wait in the compute queue if resources are not available - * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. - * @return {Promise} The compute job or jobs. - */ - public async computeStart( - providerUri: string, - signerOrAuthToken: Signer | string, - computeEnv: string, - datasets: ComputeAsset[], - algorithm: ComputeAlgorithm, - maxJobDuration: number, - token: string, - resources: ComputeResourceRequest[], - chainId: number, // network used by payment (only for payed compute jobs) - metadata?: ComputeJobMetadata, - additionalViewers?: string[], - output?: ComputeOutput, - policyServer?: any, - signal?: AbortSignal, - queueMaxWaitTime?: number, - dockerRegistryAuth?: dockerRegistryAuth - ): Promise { - console.log('called new compute start method...') - console.log('datasets: ', datasets) - console.log('algorithm: ', algorithm) - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - - const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart') - ? 
this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath - : null - - if (!computeStartUrl) { - LoggerInstance.error( - 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' - ) - return null - } - - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.COMPUTE_START - ) - const payload = Object() - payload.consumerAddress = consumerAddress - payload.signature = signature - payload.nonce = nonce - payload.environment = computeEnv - payload.maxJobDuration = maxJobDuration - payload.resources = resources - // kept for backwards compatibility (tests running against existing provider) - payload.dataset = datasets[0] - // new field for C2D v2 - payload.datasets = datasets - payload.algorithm = algorithm - payload.chainId = chainId - payload.payment = { - chainId, - token, - maxJobDuration - } - if (dockerRegistryAuth) { - const nodeKey = await this.getNodePublicKey(providerUri) - if (nodeKey) { - payload.dockerRegistryAuth = eciesencrypt( - nodeKey, - JSON.stringify(dockerRegistryAuth) - ) - } - } - if (resources) payload.resources = resources - if (metadata) payload.metadata = metadata - if (additionalViewers) payload.additionalViewers = additionalViewers - // if (additionalDatasets) payload.additionalDatasets = additionalDatasets - if (output) { - const nodeKey = await this.getNodePublicKey(providerUri) - if (nodeKey) { - payload.output = eciesencrypt(nodeKey, JSON.stringify(output)) - } - } - if (policyServer) payload.policyServer = policyServer - if (queueMaxWaitTime) payload.queueMaxWaitTime = queueMaxWaitTime - let response - try { - response = await fetch(computeStartUrl, { - method: 'POST', - body: JSON.stringify(payload), - headers: { - 'Content-Type': 
'application/json', - Authorization: this.getAuthorization(signerOrAuthToken) - }, - signal - }) - } catch (e) { - LoggerInstance.error('Compute start failed:') - LoggerInstance.error(e) - LoggerInstance.error('Payload was:', payload) - throw new Error('HTTP request failed calling Provider') - } - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Compute start failed: ', - response.status, - response.statusText, - resolvedResponse - ) - LoggerInstance.error('Payload was:', payload) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** Instruct the provider to start a FREE compute job - * @param {string} providerUri The provider URI. - * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer object or auth token. - * @param {string} computeEnv The compute environment. - * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) - * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. - * @param {ComputeResourceRequest} resources The resources to start compute job with. - * @param {ComputeJobMetadata} metadata The compute job metadata. Additional metadata to be stored in the database. - * @param {ComputeOutput} output The compute job output settings. - * @param {any} policyServer Policy server data. - * @param {AbortSignal} signal abort signal - * @param {number} queueMaxWaitTime Maximum time in seconds to wait in the compute queue if resources are not available - * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. - * @return {Promise} The compute job or jobs. 
- */ - public async freeComputeStart( - providerUri: string, - signerOrAuthToken: Signer | string, - computeEnv: string, - datasets: ComputeAsset[], - algorithm: ComputeAlgorithm, - resources?: ComputeResourceRequest[], - metadata?: ComputeJobMetadata, - additionalViewers?: string[], - output?: ComputeOutput, - policyServer?: any, - signal?: AbortSignal, - queueMaxWaitTime?: number, - dockerRegistryAuth?: dockerRegistryAuth - ): Promise { - console.log('called new free compute start method...') - console.log('datasets: ', datasets) - console.log('algorithm: ', algorithm) - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - - const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute') - ? this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath - : null - - if (!computeStartUrl) { - LoggerInstance.error( - 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' - ) - return null - } - - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.FREE_COMPUTE_START - ) - const payload = Object() - payload.consumerAddress = consumerAddress - payload.signature = signature - payload.nonce = nonce - payload.environment = computeEnv - payload.resources = resources - // kept for backwards compatibility (tests running against existing provider) - payload.dataset = datasets[0] - // new field for C2D v2 - payload.datasets = datasets - payload.algorithm = algorithm - if (metadata) payload.metadata = metadata - if (additionalViewers) payload.additionalViewers = additionalViewers - if (dockerRegistryAuth) { - const nodeKey = await 
this.getNodePublicKey(providerUri) - if (nodeKey) { - payload.dockerRegistryAuth = eciesencrypt( - nodeKey, - JSON.stringify(dockerRegistryAuth) - ) - } - } - // if (additionalDatasets) payload.additionalDatasets = additionalDatasets - if (output) { - const nodeKey = await this.getNodePublicKey(providerUri) - if (nodeKey) { - payload.output = eciesencrypt(nodeKey, JSON.stringify(output)) - } - } - - if (policyServer) payload.policyServer = policyServer - if (queueMaxWaitTime) payload.queueMaxWaitTime = queueMaxWaitTime - let response - try { - response = await fetch(computeStartUrl, { - method: 'POST', - body: JSON.stringify(payload), - headers: { - 'Content-Type': 'application/json', - Authorization: this.getAuthorization(signerOrAuthToken) - }, - signal - }) - } catch (e) { - LoggerInstance.error('Compute start failed:') - LoggerInstance.error(e) - LoggerInstance.error('Payload was:', payload) - throw new Error('HTTP request failed calling Provider') - } - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Compute start failed: ', - response.status, - response.statusText, - resolvedResponse - ) - LoggerInstance.error('Payload was:', payload) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * - * @param providerUri provider URL - * @param signerOrAuthToken signer or auth token - * @param jobId jobId - * @param signal abort signal - * @returns logs response - */ - public async computeStreamableLogs( - providerUri: string, - signerOrAuthToken: Signer | string, - jobId: string, - signal?: AbortSignal - ): Promise { - const isAuthToken = typeof signerOrAuthToken === 'string' - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - - const computeStreamableLogs = this.getEndpointURL( - serviceEndpoints, - 'computeStreamableLogs' - ) - ? 
this.getEndpointURL(serviceEndpoints, 'computeStreamableLogs').urlPath - : null - - if (!computeStreamableLogs) { - LoggerInstance.error( - 'Compute start failed: Cannot get proper computeStreamableLogs route (perhaps not implemented on provider?)' - ) - return null - } - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - let url = `?consumerAddress=${consumerAddress}&jobId=${jobId}` - // Is signer, add signature and nonce - if (!isAuthToken) { - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS - ) - url += `&signature=${signature}` - url += `&nonce=${nonce}` - } - - let response - try { - response = await fetch(computeStreamableLogs + url, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - Authorization: this.getAuthorization(signerOrAuthToken) - }, - signal - }) - console.log('Raw response:', response) - } catch (e) { - LoggerInstance.error('computeStreamableLogs failed:') - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - if (response?.ok || response?.status === 200) { - // do not handle the response here - console.log('Response body:', response.body) - return response.body - } - LoggerInstance.error( - 'computeStreamableLogs failed: ', - response.status, - response.statusText, - await response.json() - ) - return null - } - - public async getComputeStartRoutes( - providerUri: string, - isFreeCompute: boolean = false - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - let computeStartUrl = null - if (isFreeCompute) { - computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute') - ? 
this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath - : null - } else { - computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart') - ? this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath - : null - } - return computeStartUrl - } - - /** Instruct the provider to Stop the execution of a to stop a compute job. - * @param {string} jobId the compute job id - * @param {string} providerUri The provider URI. - * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer or auth token. - * @param {string} agreementId The agreement id. - * @param {AbortSignal} signal abort signal - * @return {Promise} - */ - public async computeStop( - jobId: string, - providerUri: string, - signerOrAuthToken: Signer | string, - agreementId?: string, - signal?: AbortSignal - ): Promise { - const isAuthToken = typeof signerOrAuthToken === 'string' - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const computeStopUrl = this.getEndpointURL(serviceEndpoints, 'computeStop') - ? this.getEndpointURL(serviceEndpoints, 'computeStop').urlPath - : null - - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.COMPUTE_STOP - ) - const queryParams = new URLSearchParams() - queryParams.set('consumerAddress', consumerAddress) - queryParams.set('nonce', nonce) - queryParams.set('jobId', jobId) - if (!isAuthToken) { - queryParams.set('signature', signature) - } - - if (agreementId) queryParams.set('agreementId', agreementId) - - const queryString = queryParams.toString() - if (!queryString) return null - let response - try { - response = await fetch(computeStopUrl + '?' 
+ queryString, { - method: 'PUT', - headers: { - 'Content-Type': 'application/json', - Authorization: this.getAuthorization(signerOrAuthToken) - }, - signal - }) - } catch (e) { - LoggerInstance.error('Compute stop failed:') - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Compute stop failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** Get compute status for a specific jobId/documentId/owner. - * @param {string} providerUri The URI of the provider we want to query - * @param {string} consumerAddress The consumer ethereum address - * @param {string} jobId The ID of a compute job. - * @param {string} agreementId The ID of the service agreement (tx id) - * @param {AbortSignal} signal abort signal - * @return {Promise} - */ - public async computeStatus( - providerUri: string, - consumerAddress: string, - jobId?: string, - agreementId?: string, - signal?: AbortSignal, - authorization?: string - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const computeStatusUrl = this.getEndpointURL(serviceEndpoints, 'computeStatus') - ? 
this.getEndpointURL(serviceEndpoints, 'computeStatus').urlPath - : null - - let url = `?consumerAddress=${consumerAddress}` - url += (agreementId && `&agreementId=${agreementId}`) || '' // ${this.noZeroX(agreementId)} #https://github.com/oceanprotocol/ocean.js/issues/1892 - url += (jobId && `&jobId=${jobId}`) || '' - - if (!computeStatusUrl) return null - let response - try { - console.log('computeStatusUrl: ', computeStatusUrl + url) - response = await fetch(computeStatusUrl + url, { - method: 'GET', - headers: { 'Content-Type': 'application/json', Authorization: authorization }, - signal - }) - } catch (e) { - LoggerInstance.error('Get compute status failed') - LoggerInstance.error(e) - throw new Error(e) - } - if (response?.ok) { - const params = await response.json() - return params - } - LoggerInstance.error( - 'Get compute status failed:', - response.status, - response.statusText - ) - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Get compute status failed:', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** Get compute result url - * @param {string} providerUri The URI of the provider we want to query - * @param {SignerOrAuthToken} signerOrAuthToken signer or auth token - * @param {string} jobId The ID of a compute job. - * @param {number} index Result index - * @return {Promise} - */ - public async getComputeResultUrl( - providerUri: string, - signerOrAuthToken: Signer | string, - jobId: string, - index: number - ): Promise { - const isAuthToken = typeof signerOrAuthToken === 'string' - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const computeResultUrl = this.getEndpointURL(serviceEndpoints, 'computeResult') - ? 
this.getEndpointURL(serviceEndpoints, 'computeResult').urlPath - : null - - const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - null, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - const signature = await this.getSignature( - signerOrAuthToken, - nonce, - PROTOCOL_COMMANDS.COMPUTE_GET_RESULT - ) - if (!computeResultUrl) return null - let resultUrl = computeResultUrl - resultUrl += `?consumerAddress=${consumerAddress}` - resultUrl += `&jobId=${jobId}` - resultUrl += `&index=${index.toString()}` - if (!isAuthToken) { - resultUrl += `&nonce=${nonce}` - resultUrl += `&signature=${signature}` - } - return resultUrl - } - - /** Generates an auth token - * @param {Signer} consumer consumer Signer wallet object - * @param {string} providerUri The URI of the provider we want to query - * @param {AbortSignal} signal abort signal - * @return {Promise} - */ - public async generateAuthToken( - consumer: Signer, - providerUri: string, - signal?: AbortSignal - ): Promise { - const consumerAddress = await consumer.getAddress() - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const url = this.getEndpointURL(serviceEndpoints, 'generateAuthToken').urlPath || null - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signature = await this.getSignature( - consumer, - nonce, - PROTOCOL_COMMANDS.CREATE_AUTH_TOKEN - ) - - try { - const response = await fetch(url, { - method: 'POST', - body: JSON.stringify({ - address: consumerAddress, - signature, - nonce - }), - headers: { 'Content-Type': 'application/json' }, - signal - }) - - if (!response?.ok) { - throw new Error( - `Failed to generate auth token: ${response.status} ${response.statusText}` - ) - } - 
const params = await response.json() - return params?.token - } catch (e) { - LoggerInstance.error('Generate auth token failed:') - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - } - - /** Generates an auth token - * @param {Signer} consumer consumer Signer wallet object - * @param {string} token The auth token to invalidate - * @param {string} providerUri The URI of the provider we want to query - * @param {AbortSignal} signal abort signal - * @return {Promise} - */ - public async invalidateAuthToken( - consumer: Signer, - token: string, - providerUri: string, - signal?: AbortSignal - ): Promise<{ success: boolean }> { - const consumerAddress = await consumer.getAddress() - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const url = - this.getEndpointURL(serviceEndpoints, 'invalidateAuthToken').urlPath || null - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signatureMessage = consumerAddress + nonce - const signature = await signRequest(consumer, signatureMessage) - - try { - const response = await fetch(url, { - method: 'POST', - body: JSON.stringify({ - address: consumerAddress, - signature, - token, - nonce - }), - headers: { 'Content-Type': 'application/json' }, - signal - }) - - if (!response?.ok) { - throw new Error( - `Failed to invalidate auth token: ${response.status} ${response.statusText}` - ) - } - const params = await response.json() - return params - } catch (e) { - LoggerInstance.error('Generate auth token failed:') - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - } - - /** Check for a valid provider at URL - * @param {String} url provider uri address - * @param {AbortSignal} signal abort signal - * @return {Promise} valid or not - */ - public 
async isValidProvider(url: string, signal?: AbortSignal): Promise { - try { - const response = await fetch(url, { - method: 'GET', - headers: { 'Content-Type': 'application/json' }, - signal - }) - if (response?.ok) { - const params = await response.json() - if (params && (params.providerAddress || params.providerAddresses)) return true - } - return false - } catch (error) { - LoggerInstance.error(`Error validating provider: ${error.message}`) - return false - } - } - - /** Sends a PolicyServer request to node to be passthrough to PS - * @param {string} providerUri The provider URI. - * @param {PolicyServerPassthroughCommand} request The request to be passed through to the Policy Server. - * @param {AbortSignal} signal abort signal - */ - public async PolicyServerPassthrough( - providerUri: string, - request: PolicyServerPassthroughCommand, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const initializeUrl = this.getEndpointURL(serviceEndpoints, 'PolicyServerPassthrough') - ? 
this.getEndpointURL(serviceEndpoints, 'PolicyServerPassthrough').urlPath - : null - if (!initializeUrl) return null - - let response - try { - response = await fetch(initializeUrl, { - method: 'POST', - body: JSON.stringify(request), - headers: { - 'Content-Type': 'application/json' - }, - signal - }) - if (!response.ok) { - const errorText = await response.text() - throw new Error(`${errorText}`) - } - } catch (e) { - LoggerInstance.error('PolicyServerPassthrough failed: ') - LoggerInstance.error(e) - throw new Error(`PolicyServerPassthrough failed: ${e.message}.`) - } - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'PolicyServerPassthrough failed: ', - response.status, - response.statusText, - resolvedResponse - ) - LoggerInstance.error('Payload was:', JSON.stringify(request)) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** Initialize Policy Server verification - * @param {string} providerUri The provider URI. - * @param {PolicyServerInitializeCommand} request The request to be sent to the Policy Server. - * @param {AbortSignal} signal abort signal - */ - public async initializePSVerification( - providerUri: string, - request: PolicyServerInitializeCommand, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - const initializeUrl = this.getEndpointURL( - serviceEndpoints, - 'initializePSVerification' - ) - ? 
this.getEndpointURL(serviceEndpoints, 'initializePSVerification').urlPath - : null - if (!initializeUrl) return null - - let response - try { - response = await fetch(initializeUrl, { - method: 'POST', - body: JSON.stringify(request), - headers: { - 'Content-Type': 'application/json' - }, - signal - }) - if (!response.ok) { - const errorText = await response.text() - throw new Error(`${errorText}`) - } - } catch (e) { - LoggerInstance.error('initializePSVerification failed: ') - LoggerInstance.error(e) - throw new Error(`initializePSVerification failed: ${e.message}.`) - } - if (response?.ok) { - const params = await response.json() - return params - } - const resolvedResponse = await response.json() - LoggerInstance.error( - 'initializePSVerification failed: ', - response.status, - response.statusText, - resolvedResponse - ) - LoggerInstance.error('Payload was:', JSON.stringify(request)) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Download node logs as an admin. - * @param {string} providerUri - The provider URI. - * @param {Signer} signer - The admin signer. - * @param {string} startTime - Start time (epoch ms) to get logs from. - * @param {string} endTime - End time (epoch ms) to get logs to. - * @param {number} [maxLogs] - Maximum number of logs to retrieve (default: 100, max: 1000). - * @param {string} [moduleName] - Filter logs by module name. - * @param {string} [level] - Filter logs by log level. - * @param {number} [page] - Pagination page number. - * @param {AbortSignal} [signal] - An optional abort signal. - * @returns {Promise} The logs response body stream. 
- */ - public async downloadNodeLogs( - providerUri: string, - signer: Signer, - startTime: string, - endTime: string, - maxLogs?: number, - moduleName?: string, - level?: string, - page?: number, - signal?: AbortSignal - ): Promise { - const providerEndpoints = await this.getEndpoints(providerUri) - const serviceEndpoints = await this.getServiceEndpoints( - providerUri, - providerEndpoints - ) - - const logsUrl = this.getEndpointURL(serviceEndpoints, 'logs') - ? this.getEndpointURL(serviceEndpoints, 'logs').urlPath - : null - - if (!logsUrl) { - LoggerInstance.error( - 'Download node logs failed: Cannot get proper logs route (perhaps not implemented on provider?)' - ) - return null - } - const consumerAddress = await signer.getAddress() - const nonce = ( - (await this.getNonce( - providerUri, - consumerAddress, - signal, - providerEndpoints, - serviceEndpoints - )) + 1 - ).toString() - - const signature = await this.getSignature(signer, nonce, PROTOCOL_COMMANDS.GET_LOGS) - let url = logsUrl + `?startTime=${startTime}&endTime=${endTime}` - if (maxLogs) url += `&maxLogs=${maxLogs}` - if (moduleName) url += `&moduleName=${moduleName}` - if (level) url += `&level=${level}` - if (page) url += `&page=${page}` - - let response - try { - response = await fetch(url, { - method: 'POST', - body: JSON.stringify({ - signature, - nonce, - address: consumerAddress - }), - headers: { 'Content-Type': 'application/json' }, - signal - }) - } catch (e) { - LoggerInstance.error('Download node logs failed:') - LoggerInstance.error(e) - throw new Error('HTTP request failed calling Provider') - } - - if (response?.ok) { - return response.body - } - - const resolvedResponse = await response.json() - LoggerInstance.error( - 'Download node logs failed: ', - response.status, - response.statusText, - resolvedResponse - ) - throw new Error(JSON.stringify(resolvedResponse)) - } - - /** - * Private method that removes the leading 0x from a string. - * @param {string} input - The input string. 
- * @returns The transformed string. - */ - private noZeroX(input: string): string { - return this.zeroXTransformer(input, false) - } - - /** - * Private method that removes the leading 0x from a string. - * @param {string} input - The input string. - * @param {boolean} zeroOutput - Whether to include 0x in the output if the input is valid and zeroOutput is true. - * @returns The transformed string. - */ - private zeroXTransformer(input = '', zeroOutput: boolean): string { - const { valid, output } = this.inputMatch( - input, - /^(?:0x)*([a-f0-9]+)$/i, - 'zeroXTransformer' - ) - return (zeroOutput && valid ? '0x' : '') + output - } - - /** - * Private method that matches an input string against a regular expression and returns the first capture group. - * @param {string} input - The input string to match. - * @param {RegExp} regexp - The regular expression to match against. - * @param {string} conversorName - The name of the method calling this function. - * @returns An object with two properties: `valid` (a boolean indicating whether the input matched the regular expression) and `output` (the first capture group of the match, or the original input if there was no match). - */ - private inputMatch( - input: string, - regexp: RegExp, - conversorName: string - ): { valid: boolean; output: string } { - if (typeof input !== 'string') { - LoggerInstance.debug('Not input string:') - LoggerInstance.debug(input) - throw new Error(`[${conversorName}] Expected string, input type: ${typeof input}`) - } - const match = input.match(regexp) - if (!match) { - LoggerInstance.warn(`[${conversorName}] Input transformation failed.`) - return { valid: false, output: input } - } - return { valid: true, output: match[1] } - } - - /** - * Private method that fetches data from a URL using the GET method. - * @param {string} url - The URL to fetch data from. - * @returns A Promise that resolves to a Response object. 
- */ - private async getData(url: string, authorization?: string): Promise { - return fetch(url, { - method: 'GET', - headers: { - 'Content-type': 'application/json', - Authorization: authorization - } - }) - } -} - -export const ProviderInstance = new Provider() +import { BaseProvider } from './providers/BaseProvider.js' +export { + BaseProvider as Provider, + isP2pUri, + OCEAN_P2P_PROTOCOL, + type P2PConfig +} from './providers/BaseProvider.js' + +export const ProviderInstance = new BaseProvider() diff --git a/src/services/providers/BaseProvider.ts b/src/services/providers/BaseProvider.ts new file mode 100644 index 000000000..d5ab03655 --- /dev/null +++ b/src/services/providers/BaseProvider.ts @@ -0,0 +1,534 @@ +import { peerIdFromString } from '@libp2p/peer-id' +import { multiaddr, type Multiaddr } from '@multiformats/multiaddr' +import { Signer } from 'ethers' +import { + StorageObject, + FileInfo, + ComputeJob, + ComputeOutput, + ComputeAlgorithm, + ComputeAsset, + ComputeEnvironment, + ComputeResultStream, + ProviderInitialize, + ProviderComputeInitializeResults, + ServiceEndpoint, + UserCustomParameters, + ComputeResourceRequest, + ComputeJobMetadata, + PolicyServerInitializeCommand, + PolicyServerPassthroughCommand, + dockerRegistryAuth, + DownloadResponse, + NodeStatus, + NodeComputeJob, + NodeLogsParams, + NodeLogEntry +} from '../../@types/index.js' +import { type DDO, type ValidateMetadata } from '@oceanprotocol/ddo-js' +import { decodeJwt } from '../../utils/Jwt.js' +import { signRequest } from '../../utils/SignatureUtils.js' +import { HttpProvider } from './HttpProvider.js' +import { P2pProvider, type P2PConfig } from './P2pProvider.js' + +export { OCEAN_P2P_PROTOCOL, type P2PConfig } from './P2pProvider.js' + +export async function getConsumerAddress( + signerOrAuthToken: Signer | string +): Promise { + return typeof signerOrAuthToken === 'string' + ? 
decodeJwt(signerOrAuthToken).address + : signerOrAuthToken.getAddress() +} + +export async function getSignature( + signerOrAuthToken: Signer | string, + nonce: string, + command: string +): Promise { + if (typeof signerOrAuthToken === 'string') return null + const message = String( + String(await signerOrAuthToken.getAddress()) + String(nonce) + String(command) + ) + return signRequest(signerOrAuthToken, message) +} + +export function getAuthorization(signerOrAuthToken: Signer | string): string | undefined { + return typeof signerOrAuthToken === 'string' ? signerOrAuthToken : undefined +} + +export function isP2pUri(nodeUri: string | Multiaddr[]): boolean { + if (Array.isArray(nodeUri)) return true + if (!nodeUri) return false + try { + multiaddr(nodeUri) + return true + } catch {} + try { + peerIdFromString(nodeUri) + return true + } catch { + return false + } +} + +export class BaseProvider { + private httpProvider = new HttpProvider() + private p2pProvider = new P2pProvider() + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected getImpl(nodeUri: string | Multiaddr[]): any { + if (Array.isArray(nodeUri)) return this.p2pProvider + return isP2pUri(nodeUri) ? 
this.p2pProvider : this.httpProvider + } + + public async getNonce( + nodeUri: string | Multiaddr[], + consumerAddress: string, + signal?: AbortSignal, + providerEndpoints?: any, + serviceEndpoints?: ServiceEndpoint[] + ): Promise { + return this.getImpl(nodeUri).getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + ) + } + + public async encrypt( + data: any, + chainId: number, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + policyServer?: any, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).encrypt( + data, + chainId, + nodeUri, + signerOrAuthToken, + policyServer, + signal + ) + } + + public async checkDidFiles( + did: string, + serviceId: string, + nodeUri: string | Multiaddr[], + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).checkDidFiles( + did, + serviceId, + nodeUri, + withChecksum, + signal + ) + } + + public async getFileInfo( + file: StorageObject, + nodeUri: string | Multiaddr[], + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).getFileInfo(file, nodeUri, withChecksum, signal) + } + + public async getComputeEnvironments( + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).getComputeEnvironments(nodeUri, signal) + } + + public async initialize( + did: string, + serviceId: string, + fileIndex: number, + consumerAddress: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal, + userCustomParameters?: UserCustomParameters, + computeEnv?: string, + validUntil?: number + ): Promise { + return this.getImpl(nodeUri).initialize( + did, + serviceId, + fileIndex, + consumerAddress, + nodeUri, + signal, + userCustomParameters, + computeEnv, + validUntil + ) + } + + public async initializeCompute( + assets: ComputeAsset[], + algorithm: ComputeAlgorithm, + computeEnv: string, + token: string, + validUntil: number, + 
nodeUri: string | Multiaddr[], + consumerAddress: string, + resources: ComputeResourceRequest[], + chainId: number, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuthData?: dockerRegistryAuth, + output?: ComputeOutput + ): Promise { + return this.getImpl(nodeUri).initializeCompute( + assets, + algorithm, + computeEnv, + token, + validUntil, + nodeUri, + consumerAddress, + resources, + chainId, + policyServer, + signal, + queueMaxWaitTime, + dockerRegistryAuthData, + output + ) + } + + public async getDownloadUrl( + did: string, + serviceId: string, + fileIndex: number, + transferTxId: string, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + policyServer?: any, + userCustomParameters?: UserCustomParameters + ): Promise { + return this.getImpl(nodeUri).getDownloadUrl( + did, + serviceId, + fileIndex, + transferTxId, + nodeUri, + signerOrAuthToken, + policyServer, + userCustomParameters + ) + } + + public async computeStart( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + maxJobDuration: number, + token: string, + resources: ComputeResourceRequest[], + chainId: number, + metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + return this.getImpl(nodeUri).computeStart( + nodeUri, + signerOrAuthToken, + computeEnv, + datasets, + algorithm, + maxJobDuration, + token, + resources, + chainId, + metadata, + additionalViewers, + output, + policyServer, + signal, + queueMaxWaitTime, + dockerRegistryAuth + ) + } + + public async freeComputeStart( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + resources?: ComputeResourceRequest[], + 
metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + return this.getImpl(nodeUri).freeComputeStart( + nodeUri, + signerOrAuthToken, + computeEnv, + datasets, + algorithm, + resources, + metadata, + additionalViewers, + output, + policyServer, + signal, + queueMaxWaitTime, + dockerRegistryAuth + ) + } + + public async computeStreamableLogs( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).computeStreamableLogs( + nodeUri, + signerOrAuthToken, + jobId, + signal + ) + } + + public async computeStop( + jobId: string, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).computeStop( + jobId, + nodeUri, + signerOrAuthToken, + agreementId, + signal + ) + } + + public async computeStatus( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId?: string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).computeStatus( + nodeUri, + signerOrAuthToken, + jobId, + agreementId, + signal + ) + } + + public async getComputeResultUrl( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + index: number + ): Promise { + return this.getImpl(nodeUri).getComputeResultUrl( + nodeUri, + signerOrAuthToken, + jobId, + index + ) + } + + public async getComputeResult( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + index: number, + offset: number = 0 + ): Promise { + return this.getImpl(nodeUri).getComputeResult( + nodeUri, + signerOrAuthToken, + jobId, + index, + offset + ) + } + + public async generateAuthToken( + consumer: Signer, + nodeUri: string | Multiaddr[], + 
signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).generateAuthToken(consumer, nodeUri, signal) + } + + public async generateSignedAuthToken( + address: string, + signature: string, + nonce: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + return this.p2pProvider.generateSignedAuthToken( + address, + signature, + nonce, + nodeUri, + signal + ) + } + + public async invalidateAuthToken( + consumer: Signer, + token: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise<{ success: boolean }> { + return this.getImpl(nodeUri).invalidateAuthToken(consumer, token, nodeUri, signal) + } + + public async resolveDdo( + nodeUri: string | Multiaddr[], + did: string, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).resolveDdo(nodeUri, did, signal) + } + + public async validateDdo( + nodeUri: string | Multiaddr[], + ddo: DDO, + signer: Signer, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).validateDdo(nodeUri, ddo, signer, signal) + } + + public async isValidProvider( + url: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + return this.getImpl(url).isValidProvider(url, signal) + } + + public async PolicyServerPassthrough( + nodeUri: string | Multiaddr[], + request: PolicyServerPassthroughCommand, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).PolicyServerPassthrough(nodeUri, request, signal) + } + + public async initializePSVerification( + nodeUri: string | Multiaddr[], + request: PolicyServerInitializeCommand, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).initializePSVerification(nodeUri, request, signal) + } + + public async downloadNodeLogs( + nodeUri: string | Multiaddr[], + signer: Signer, + startTime: string, + endTime: string, + maxLogs?: number, + moduleName?: string, + level?: string, + page?: number, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).downloadNodeLogs( + nodeUri, + 
signer, + startTime, + endTime, + maxLogs, + moduleName, + level, + page, + signal + ) + } + + public async getNodeStatus( + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).getNodeStatus(nodeUri, signal) + } + + public async getNodeJobs( + nodeUri: string | Multiaddr[], + fromTimestamp?: number, + signal?: AbortSignal + ): Promise { + return this.getImpl(nodeUri).getNodeJobs(nodeUri, fromTimestamp, signal) + } + + public async setupP2P(config: P2PConfig): Promise { + return this.p2pProvider.setupP2P(config) + } + + public getLibp2pNode() { + return this.p2pProvider.getLibp2pNode() + } + + public async getDiscoveredNodes(): Promise< + Array<{ peerId: string; multiaddrs: string[] }> + > { + return this.p2pProvider.getDiscoveredNodes() + } + + public async getMultiaddrFromPeerId(peerId: string): Promise { + return this.p2pProvider.getMultiaddrFromPeerId(peerId) + } + + /** + * Fetch node logs via P2P with a pre-signed payload. + * For auto-signed log fetching (HTTP or P2P), use downloadNodeLogs(). 
+ */ + public async fetchNodeLogs( + nodeUri: string | Multiaddr[], + address: string, + signature: string, + nonce: string, + logParams?: NodeLogsParams + ): Promise { + return this.p2pProvider.fetchNodeLogs(nodeUri, address, signature, nonce, logParams) + } + + public async fetchConfig( + nodeUri: string | Multiaddr[], + payload: Record + ): Promise { + return this.p2pProvider.fetchConfig(nodeUri, payload) + } + + public async pushConfig( + nodeUri: string | Multiaddr[], + payload: Record + ): Promise { + return this.p2pProvider.pushConfig(nodeUri, payload) + } +} diff --git a/src/services/providers/HttpProvider.ts b/src/services/providers/HttpProvider.ts new file mode 100644 index 000000000..effb010ed --- /dev/null +++ b/src/services/providers/HttpProvider.ts @@ -0,0 +1,1590 @@ +import fetch from 'cross-fetch' +import { Signer } from 'ethers' +import { LoggerInstance } from '../../utils/Logger.js' +import { + StorageObject, + FileInfo, + ComputeJob, + ComputeOutput, + ComputeAlgorithm, + ComputeAsset, + ComputeEnvironment, + ProviderInitialize, + ProviderComputeInitializeResults, + ServiceEndpoint, + UserCustomParameters, + ComputeResourceRequest, + ComputeJobMetadata, + PolicyServerInitializeCommand, + PolicyServerPassthroughCommand, + dockerRegistryAuth, + ComputeResultStream, + NodeStatus, + NodeComputeJob, + NodeLogEntry +} from '../../@types/index.js' +import { PROTOCOL_COMMANDS } from '../../@types/Provider.js' +import { type DDO, type ValidateMetadata } from '@oceanprotocol/ddo-js' +import { eciesencrypt } from '../../utils/eciesencrypt.js' +import { signRequest } from '../../utils/SignatureUtils.js' +import { getConsumerAddress, getSignature, getAuthorization } from './BaseProvider.js' + +export class HttpProvider { + protected getConsumerAddress(s: Signer | string) { + return getConsumerAddress(s) + } + + protected getSignature(s: Signer | string, nonce: string, command: string) { + return getSignature(s, nonce, command) + } + + protected 
getAuthorization(s: Signer | string) { + return getAuthorization(s) + } + + /** + * Returns the provider endpoints + * @param {string} nodeUri - the provider url + * @return {Promise} + */ + async getEndpoints(nodeUri: string, authorization?: string): Promise { + try { + const endpoints = await this.getData(nodeUri, authorization) + return await endpoints.json() + } catch (e) { + LoggerInstance.error('Finding the service endpoints failed:', e) + throw new Error('HTTP request failed calling Provider') + } + } + + public async getNodeStatus(nodeUri: string, signal?: AbortSignal): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const endpoint = this.getEndpointURL(serviceEndpoints, 'directCommand') + if (!endpoint?.urlPath) return null + try { + const response = await fetch(endpoint.urlPath, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ command: PROTOCOL_COMMANDS.STATUS }), + signal + }) + if (response?.ok) return response.json() + return null + } catch (e) { + LoggerInstance.error('getNodeStatus failed:', e) + return null + } + } + + public async getNodeJobs( + nodeUri: string, + fromTimestamp?: number, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const endpoint = this.getEndpointURL(serviceEndpoints, 'jobs') + if (!endpoint?.urlPath) return [] + let url = endpoint.urlPath + if (fromTimestamp) url += `?fromTimestamp=${fromTimestamp}` + try { + const response = await fetch(url, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + if (response?.ok) { + const data = await response.json() + return Array.isArray(data?.jobs) ? 
data.jobs : [] + } + return [] + } catch (e) { + LoggerInstance.error('getNodeJobs failed:', e) + return [] + } + } + + /** + * Returns the node public key + * @return {string} The node public key + */ + private async getNodePublicKey(nodeUri: string): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + return providerEndpoints.nodePublicKey + } + + /** + * This function returns the endpoint URL for a given service name. + * @param {ServiceEndpoint[]} servicesEndpoints - The array of service endpoints + * @param {string} serviceName - The name of the service + * @returns {ServiceEndpoint} The endpoint URL for the given service name + */ + getEndpointURL( + servicesEndpoints: ServiceEndpoint[], + serviceName: string + ): ServiceEndpoint { + if (!servicesEndpoints) return null + return servicesEndpoints.find( + (s) => s.serviceName.toLowerCase() === serviceName.toLowerCase() + ) as ServiceEndpoint + } + + /** + * This function returns an array of service endpoints for a given provider endpoint. + * @param {string} providerEndpoint - The provider endpoint + * @param {any} endpoints - The endpoints object + * @returns {ServiceEndpoint[]} An array of service endpoints + */ + public async getServiceEndpoints(providerEndpoint: string, endpoints: any) { + const serviceEndpoints: ServiceEndpoint[] = [] + for (const i in endpoints.serviceEndpoints) { + const endpoint: ServiceEndpoint = { + serviceName: i, + method: endpoints.serviceEndpoints[i][0], + urlPath: + providerEndpoint.replace(/\/+$/, '') + + '/' + + endpoints.serviceEndpoints[i][1].replace(/^\/+/, '') + } + serviceEndpoints.push(endpoint) + } + return serviceEndpoints + } + + /** + * Get current nonce from the provider. 
+ * @param {string} nodeUri provider uri address + * @param {string} consumerAddress Publisher address + * @param {AbortSignal} signal abort signal + * @param {string} providerEndpoints Identifier of the asset to be registered in ocean + * @param {string} serviceEndpoints document description object (DDO)= + * @return {Promise} urlDetails + */ + public async getNonce( + nodeUri: string, + consumerAddress: string, + signal?: AbortSignal, + providerEndpoints?: any, + serviceEndpoints?: ServiceEndpoint[] + ): Promise { + if (!providerEndpoints) { + providerEndpoints = await this.getEndpoints(nodeUri) + } + if (!serviceEndpoints) { + serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + } + const path = this.getEndpointURL(serviceEndpoints, 'nonce') + ? this.getEndpointURL(serviceEndpoints, 'nonce').urlPath + : null + if (!path) return null + try { + const response = await fetch(path + `?userAddress=${consumerAddress}`, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + const { nonce } = await response.json() + const sanitizedNonce = !nonce || nonce === null ? 
0 : Number(nonce) + return sanitizedNonce + } catch (e) { + LoggerInstance.error(e) + throw new Error(e.message) + } + } + + /** + * Encrypt data using the Provider's own symmetric key + * @param {string} data data in json format that needs to be sent , it can either be a DDO or a File array + * @param {number} chainId network's id so provider can choose the corresponding Signer object + * @param {string} nodeUri provider uri address + * @param {AbortSignal} signal abort signal + * @return {Promise} urlDetails + */ + public async encrypt( + data: any, + chainId: number, + nodeUri: string, + signerOrAuthToken: Signer | string, + policyServer?: any, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.ENCRYPT + ) + + let path = + (this.getEndpointURL(serviceEndpoints, 'encrypt') + ? this.getEndpointURL(serviceEndpoints, 'encrypt').urlPath + : null) + `?chainId=${chainId}` + if (!path) return null + path += `&nonce=${nonce}` + path += `&consumerAddress=${consumerAddress}` + path += `&signature=${signature}` + + try { + const response = await fetch(path, { + method: 'POST', + body: JSON.stringify(data), + headers: { 'Content-Type': 'application/octet-stream' }, + signal + }) + return await response.text() + } catch (e) { + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + } + + /** + * Get file details for a given DID and service ID. + * @param {string} did - The DID to check. + * @param {string} serviceId - The service ID to check. + * @param {string} nodeUri - The URI of the provider. 
+ * @param {boolean} [withChecksum=false] - Whether or not to include a checksum. + * @param {AbortSignal} [signal] - An optional abort signal. + * @returns {Promise} A promise that resolves with an array of file info objects. + */ + public async checkDidFiles( + did: string, + serviceId: string, + nodeUri: string, + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const args = { did, serviceId, checksum: withChecksum } + const files: FileInfo[] = [] + const path = this.getEndpointURL(serviceEndpoints, 'fileinfo') + ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath + : null + if (!path) return null + let response + try { + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(args), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('File info call failed: ') + LoggerInstance.error(e) + throw new Error(e) + } + if (response?.ok) { + const results: FileInfo[] = await response.json() + for (const result of results) { + files.push(result) + } + return files + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'File info call failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * Get File details (if possible) + * @param {StorageObject} file one of the supported file structures + * @param {string} nodeUri uri of the provider that will be used to check the file + * @param {boolean} [withChecksum=false] - Whether or not to include a checksum. + * @param {AbortSignal} [signal] - An optional abort signal. + * @returns {Promise} A promise that resolves with an array of file info objects. 
+ */ + public async getFileInfo( + file: StorageObject, + nodeUri: string, + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const args = { ...file, checksum: withChecksum } + const files: FileInfo[] = [] + const path = this.getEndpointURL(serviceEndpoints, 'fileinfo') + ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath + : null + if (!path) return null + let response + try { + response = await fetch(path, { + method: 'POST', + body: JSON.stringify(args), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('File info call failed: ') + LoggerInstance.error(e) + throw new Error(e) + } + if (response?.ok) { + const results: FileInfo[] = await response.json() + for (const result of results) { + files.push(result) + } + return files + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'File info call failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * Returns compute environments from a provider. + * @param {string} nodeUri - The URI of the provider. + * @param {AbortSignal} [signal] - An optional abort signal. + * @returns {Promise} A promise that resolves with compute environments. 
+ */ + public async getComputeEnvironments( + nodeUri: string, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const path = this.getEndpointURL(serviceEndpoints, 'computeEnvironments')?.urlPath + if (!path) return null + let response + try { + response = await fetch(path, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Fetch compute env failed: ') + LoggerInstance.error(e) + throw new Error(e) + } + if (response?.ok) { + const result = response.json() + return result + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Fetch compute env failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * Initializes the provider for a service request. + * @param {string} did - The asset DID . + * @param {string} serviceId - The asset service ID. + * @param {number} fileIndex - The file index. + * @param {string} consumerAddress - The consumer address. + * @param {string} nodeUri - The URI of the provider. + * @param {AbortSignal} [signal] - The abort signal if any. + * @param {UserCustomParameters} [userCustomParameters] - The custom parameters if any. + * @param {string} [computeEnv] - The compute environment if any. + * @param {number} [validUntil] - The validity time if any. + * @returns {Promise} A promise that resolves with ProviderInitialize response. 
+ */ + public async initialize( + did: string, + serviceId: string, + fileIndex: number, + consumerAddress: string, + nodeUri: string, + signal?: AbortSignal, + userCustomParameters?: UserCustomParameters, + computeEnv?: string, + validUntil?: number + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + let initializeUrl = this.getEndpointURL(serviceEndpoints, 'initialize') + ? this.getEndpointURL(serviceEndpoints, 'initialize').urlPath + : null + + if (!initializeUrl) return null + initializeUrl += `?documentId=${did}` + initializeUrl += `&serviceId=${serviceId}` + initializeUrl += `&fileIndex=${fileIndex}` + initializeUrl += `&consumerAddress=${consumerAddress}` + if (userCustomParameters) + initializeUrl += '&userdata=' + encodeURI(JSON.stringify(userCustomParameters)) + if (computeEnv) initializeUrl += '&environment=' + encodeURI(computeEnv) + if (validUntil) initializeUrl += '&validUntil=' + validUntil + let response + try { + response = await fetch(initializeUrl, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Provider initialized failed: ') + LoggerInstance.error(e) + throw new Error(`Provider initialize failed url: ${initializeUrl} `) + } + if (response?.status === 200) { + const results: ProviderInitialize = await response.json() + return results + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Provider initialized failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** Initializes the provider for a compute request. + * @param {ComputeAsset[]} assets The datasets array to initialize compute request. + * @param {ComputeAlgorithmber} algorithm The algorithm to use. + * @param {string} computeEnv The compute environment. 
+ * @param {string} token The payment token address. + * @param {number} validUntil The job expiration date. + * @param {string} nodeUri The provider URI. + * @param {SignerOrAuthToken} signerOrAuthToken Signer or auth token + * @param {ComputeResourceRequest[]} resources The resources to start compute job with. + * @param {number} chainId The chain used to do payments + * @param {any} policyServer Policy server data. + * @param {AbortSignal} signal abort signal + * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. + * @return {Promise} ProviderComputeInitialize data + */ + public async initializeCompute( + assets: ComputeAsset[], + algorithm: ComputeAlgorithm, + computeEnv: string, + token: string, + validUntil: number, + nodeUri: string, + consumerAddress: string, + resources: ComputeResourceRequest[], + chainId: number, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuthData?: dockerRegistryAuth, + output?: ComputeOutput + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const initializeUrl = this.getEndpointURL(serviceEndpoints, 'initializeCompute') + ? 
this.getEndpointURL(serviceEndpoints, 'initializeCompute').urlPath + : null + if (!initializeUrl) return null + + const providerData: Record = { + datasets: assets, + algorithm, + environment: computeEnv, + payment: { + chainId, + token, + resources + }, + maxJobDuration: validUntil, + consumerAddress + } + if (policyServer) providerData.policyServer = policyServer + if (queueMaxWaitTime) providerData.queueMaxWaitTime = queueMaxWaitTime + if (dockerRegistryAuthData) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + providerData.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuthData) + ) + } + } + if (output) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) providerData.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + + let response + try { + response = await fetch(initializeUrl, { + method: 'POST', + body: JSON.stringify(providerData), + headers: { + 'Content-Type': 'application/json' + }, + signal + }) + if (!response.ok) { + const errorText = await response.text() + throw new Error(`${errorText}`) + } + } catch (e) { + LoggerInstance.error('Initialize compute failed: ') + LoggerInstance.error(e) + throw new Error(`ComputeJob cannot be initialized: ${e.message}.`) + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Initialize compute failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', JSON.stringify(providerData)) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * Gets the download URL. + * @param {string} did - The DID. + * @param {string} serviceId - The service ID. + * @param {number} fileIndex - The file index. + * @param {string} transferTxId - The transfer transaction ID. + * @param {string} nodeUri - The provider URI. 
+ * @param {SignerOrAuthToken} signerOrAuthToken - The signer or auth token. + * @param {any} policyServer Policy server data. + * @param {UserCustomParameters} userCustomParameters - The user custom parameters. + * @returns {Promise} The download URL. + */ + public async getDownloadUrl( + did: string, + serviceId: string, + fileIndex: number, + transferTxId: string, + nodeUri: string, + signerOrAuthToken: Signer | string, + policyServer?: any, + userCustomParameters?: UserCustomParameters + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const downloadUrl = this.getEndpointURL(serviceEndpoints, 'download') + ? this.getEndpointURL(serviceEndpoints, 'download').urlPath + : null + if (!downloadUrl) return null + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + null, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.DOWNLOAD + ) + let consumeUrl = downloadUrl + consumeUrl += `?fileIndex=${fileIndex}` + consumeUrl += `&documentId=${did}` + consumeUrl += `&transferTxId=${transferTxId}` + consumeUrl += `&serviceId=${serviceId}` + consumeUrl += `&consumerAddress=${consumerAddress}` + consumeUrl += `&nonce=${nonce}` + if (policyServer) { + consumeUrl += '&policyServer=' + encodeURI(JSON.stringify(policyServer)) + } + + consumeUrl += `&signature=${signature}` + if (userCustomParameters) + consumeUrl += '&userdata=' + encodeURI(JSON.stringify(userCustomParameters)) + return consumeUrl + } + + /** Instruct the provider to start a PAYED compute job + * @param {string} nodeUri The provider URI. + * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer object or auth token. + * @param {string} computeEnv The compute environment. 
+ * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) + * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. + * @param {number} maxJobDuration The compute job max execution time. + * @param {string} token The token address for compute payment. + * @param {ComputeResourceRequest} resources The resources to start compute job with. + * @param {chainId} chainId The chain used to do payments + * @param {ComputeJobMetadata} metadata The compute job metadata. Additional metadata to be stored in the database. + * @param {ComputeOutput} output The compute job output settings. + * @param {any} policyServer Policy server data. + * @param {AbortSignal} signal abort signal + * @param {number} queueMaxWaitTime Maximum time in seconds to wait in the compute queue if resources are not available + * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. + * @return {Promise} The compute job or jobs. + */ + public async computeStart( + nodeUri: string, + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + maxJobDuration: number, + token: string, + resources: ComputeResourceRequest[], + chainId: number, + metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + + const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart') + ? 
this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath + : null + + if (!computeStartUrl) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' + ) + return null + } + + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_START + ) + const payload = Object() + payload.consumerAddress = consumerAddress + payload.signature = signature + payload.nonce = nonce + payload.environment = computeEnv + payload.maxJobDuration = maxJobDuration + payload.resources = resources + // kept for backwards compatibility (tests running against existing provider) + payload.dataset = datasets[0] + // new field for C2D v2 + payload.datasets = datasets + payload.algorithm = algorithm + payload.chainId = chainId + payload.payment = { + chainId, + token, + maxJobDuration, + resources + } + if (dockerRegistryAuth) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + payload.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuth) + ) + } + } + if (resources) payload.resources = resources + if (metadata) payload.metadata = metadata + if (additionalViewers) payload.additionalViewers = additionalViewers + if (output) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + payload.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + } + if (policyServer) payload.policyServer = policyServer + if (queueMaxWaitTime) payload.queueMaxWaitTime = queueMaxWaitTime + let response + try { + response = await fetch(computeStartUrl, { + method: 'POST', + body: JSON.stringify(payload), + headers: { + 'Content-Type': 'application/json', + Authorization: 
this.getAuthorization(signerOrAuthToken) + }, + signal + }) + } catch (e) { + LoggerInstance.error('Compute start failed:') + LoggerInstance.error(e) + LoggerInstance.error('Payload was:', payload) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Compute start failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', payload) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** Instruct the provider to start a FREE compute job + * @param {string} nodeUri The provider URI. + * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer object or auth token. + * @param {string} computeEnv The compute environment. + * @param {ComputeAsset} datasets The dataset to start compute on + additionalDatasets (the additional datasets if that is the case) + * @param {ComputeAlgorithm} algorithm The algorithm to start compute with. + * @param {ComputeResourceRequest} resources The resources to start compute job with. + * @param {ComputeJobMetadata} metadata The compute job metadata. Additional metadata to be stored in the database. + * @param {ComputeOutput} output The compute job output settings. + * @param {any} policyServer Policy server data. + * @param {AbortSignal} signal abort signal + * @param {number} queueMaxWaitTime Maximum time in seconds to wait in the compute queue if resources are not available + * @param {dockerRegistryAuth} dockerRegistryAuth Docker registry authentication data. + * @return {Promise} The compute job or jobs. 
+ */ + public async freeComputeStart( + nodeUri: string, + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + resources?: ComputeResourceRequest[], + metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + + const computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute') + ? this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath + : null + + if (!computeStartUrl) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStart route (perhaps not implemented on provider?)' + ) + return null + } + + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.FREE_COMPUTE_START + ) + const payload = Object() + payload.consumerAddress = consumerAddress + payload.signature = signature + payload.nonce = nonce + payload.environment = computeEnv + payload.resources = resources + // kept for backwards compatibility (tests running against existing provider) + payload.dataset = datasets[0] + // new field for C2D v2 + payload.datasets = datasets + payload.algorithm = algorithm + if (metadata) payload.metadata = metadata + if (additionalViewers) payload.additionalViewers = additionalViewers + if (dockerRegistryAuth) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + payload.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuth) + ) + } + } + if (output) { 
+ const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + payload.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + } + + if (policyServer) payload.policyServer = policyServer + if (queueMaxWaitTime) payload.queueMaxWaitTime = queueMaxWaitTime + let response + try { + response = await fetch(computeStartUrl, { + method: 'POST', + body: JSON.stringify(payload), + headers: { + 'Content-Type': 'application/json', + Authorization: this.getAuthorization(signerOrAuthToken) + }, + signal + }) + } catch (e) { + LoggerInstance.error('Compute start failed:') + LoggerInstance.error(e) + LoggerInstance.error('Payload was:', payload) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Compute start failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', payload) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * @param nodeUri provider URL + * @param signerOrAuthToken signer or auth token + * @param jobId jobId + * @param signal abort signal + * @returns logs response + */ + public async computeStreamableLogs( + nodeUri: string, + signerOrAuthToken: Signer | string, + jobId: string, + signal?: AbortSignal + ): Promise { + const isAuthToken = typeof signerOrAuthToken === 'string' + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + + const computeStreamableLogs = this.getEndpointURL( + serviceEndpoints, + 'computeStreamableLogs' + ) + ? 
this.getEndpointURL(serviceEndpoints, 'computeStreamableLogs').urlPath + : null + + if (!computeStreamableLogs) { + LoggerInstance.error( + 'Compute start failed: Cannot get proper computeStreamableLogs route (perhaps not implemented on provider?)' + ) + return null + } + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + let url = `?consumerAddress=${consumerAddress}&jobId=${jobId}` + // Is signer, add signature and nonce + if (!isAuthToken) { + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS + ) + url += `&signature=${signature}` + url += `&nonce=${nonce}` + } + + let response + try { + response = await fetch(computeStreamableLogs + url, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + Authorization: this.getAuthorization(signerOrAuthToken) + }, + signal + }) + } catch (e) { + LoggerInstance.error('computeStreamableLogs failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + if (response?.ok || response?.status === 200) { + return response.body + } + LoggerInstance.error( + 'computeStreamableLogs failed: ', + response.status, + response.statusText, + await response.json() + ) + return null + } + + /** Instruct the provider to Stop the execution of a to stop a compute job. + * @param {string} jobId the compute job id + * @param {string} nodeUri The provider URI. + * @param {SignerOrAuthToken} signerOrAuthToken The consumer signer or auth token. + * @param {string} agreementId The agreement id. 
+ * @param {AbortSignal} signal abort signal + * @return {Promise} + */ + public async computeStop( + jobId: string, + nodeUri: string, + signerOrAuthToken: Signer | string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + const isAuthToken = typeof signerOrAuthToken === 'string' + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const computeStopUrl = this.getEndpointURL(serviceEndpoints, 'computeStop') + ? this.getEndpointURL(serviceEndpoints, 'computeStop').urlPath + : null + + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_STOP + ) + const queryParams = new URLSearchParams() + queryParams.set('consumerAddress', consumerAddress) + queryParams.set('nonce', nonce) + queryParams.set('jobId', jobId) + if (!isAuthToken) { + queryParams.set('signature', signature) + } + + if (agreementId) queryParams.set('agreementId', agreementId) + + const queryString = queryParams.toString() + if (!queryString) return null + let response + try { + response = await fetch(computeStopUrl + '?' 
+ queryString, { + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + Authorization: this.getAuthorization(signerOrAuthToken) + }, + signal + }) + } catch (e) { + LoggerInstance.error('Compute stop failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Compute stop failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** Get compute status for a specific jobId/documentId/owner. + * @param {string} nodeUri The URI of the provider we want to query + * @param {string} consumerAddress The consumer ethereum address + * @param {string} jobId The ID of a compute job. + * @param {string} agreementId The ID of the service agreement (tx id) + * @param {AbortSignal} signal abort signal + * @return {Promise} + */ + public async computeStatus( + nodeUri: string, + signerOrAuthToken: Signer | string, + jobId?: string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + const consumerAddress = await getConsumerAddress(signerOrAuthToken) + const authorization = getAuthorization(signerOrAuthToken) + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const computeStatusUrl = this.getEndpointURL(serviceEndpoints, 'computeStatus') + ? 
this.getEndpointURL(serviceEndpoints, 'computeStatus').urlPath + : null + + let url = `?consumerAddress=${consumerAddress}` + url += (agreementId && `&agreementId=${agreementId}`) || '' + url += (jobId && `&jobId=${jobId}`) || '' + + if (!computeStatusUrl) return null + let response + try { + response = await fetch(computeStatusUrl + url, { + method: 'GET', + headers: { 'Content-Type': 'application/json', Authorization: authorization }, + signal + }) + } catch (e) { + LoggerInstance.error('Get compute status failed') + LoggerInstance.error(e) + throw new Error(e) + } + if (response?.ok) { + const params = await response.json() + return params + } + LoggerInstance.error( + 'Get compute status failed:', + response.status, + response.statusText + ) + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Get compute status failed:', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** Get compute result url + * @param {string} nodeUri The URI of the provider we want to query + * @param {SignerOrAuthToken} signerOrAuthToken signer or auth token + * @param {string} jobId The ID of a compute job. + * @param {number} index Result index + * @return {Promise} + */ + public async getComputeResultUrl( + nodeUri: string, + signerOrAuthToken: Signer | string, + jobId: string, + index: number + ): Promise { + const isAuthToken = typeof signerOrAuthToken === 'string' + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const computeResultUrl = this.getEndpointURL(serviceEndpoints, 'computeResult') + ? 
this.getEndpointURL(serviceEndpoints, 'computeResult').urlPath + : null + + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + null, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_GET_RESULT + ) + if (!computeResultUrl) return null + let resultUrl = computeResultUrl + resultUrl += `?consumerAddress=${consumerAddress}` + resultUrl += `&jobId=${jobId}` + resultUrl += `&index=${index.toString()}` + if (!isAuthToken) { + resultUrl += `&nonce=${nonce}` + resultUrl += `&signature=${signature}` + } + return resultUrl + } + + public async getComputeResult( + nodeUri: string, + signerOrAuthToken: Signer | string, + jobId: string, + index: number, + offset: number = 0 + ): Promise { + const resultUrl = await this.getComputeResultUrl( + nodeUri, + signerOrAuthToken, + jobId, + index + ) + if (!resultUrl) throw new Error('Could not retrieve compute result URL') + const response = await fetch(resultUrl, { + headers: offset > 0 ? 
{ Range: `bytes=${offset}-` } : {} + }) + if (!response.ok) + throw new Error(`Failed to fetch compute result: ${response.status}`) + return response.body as unknown as ComputeResultStream + } + + /** Generates an auth token + * @param {Signer} consumer consumer Signer wallet object + * @param {string} nodeUri The URI of the provider we want to query + * @param {AbortSignal} signal abort signal + * @return {Promise} + */ + public async generateAuthToken( + consumer: Signer, + nodeUri: string, + signal?: AbortSignal + ): Promise { + const consumerAddress = await consumer.getAddress() + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const url = this.getEndpointURL(serviceEndpoints, 'generateAuthToken').urlPath || null + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signature = await this.getSignature( + consumer, + nonce, + PROTOCOL_COMMANDS.CREATE_AUTH_TOKEN + ) + + try { + const response = await fetch(url, { + method: 'POST', + body: JSON.stringify({ + address: consumerAddress, + signature, + nonce + }), + headers: { 'Content-Type': 'application/json' }, + signal + }) + + if (!response?.ok) { + throw new Error( + `Failed to generate auth token: ${response.status} ${response.statusText}` + ) + } + const params = await response.json() + return params?.token + } catch (e) { + LoggerInstance.error('Generate auth token failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + } + + /** Invalidates an auth token + * @param {Signer} consumer consumer Signer wallet object + * @param {string} token The auth token to invalidate + * @param {string} nodeUri The URI of the provider we want to query + * @param {AbortSignal} signal abort signal + * @return {Promise<{ success: boolean }>} + */ + public async invalidateAuthToken( + 
consumer: Signer, + token: string, + nodeUri: string, + signal?: AbortSignal + ): Promise<{ success: boolean }> { + const consumerAddress = await consumer.getAddress() + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const url = + this.getEndpointURL(serviceEndpoints, 'invalidateAuthToken').urlPath || null + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signatureMessage = consumerAddress + nonce + const signature = await signRequest(consumer, signatureMessage) + + try { + const response = await fetch(url, { + method: 'POST', + body: JSON.stringify({ + address: consumerAddress, + signature, + token, + nonce + }), + headers: { 'Content-Type': 'application/json' }, + signal + }) + + if (!response?.ok) { + throw new Error( + `Failed to invalidate auth token: ${response.status} ${response.statusText}` + ) + } + const params = await response.json() + return params + } catch (e) { + LoggerInstance.error('Invalidate auth token failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + } + + /** Check for a valid provider at URL + * @param {string} url provider uri address + * @param {AbortSignal} signal abort signal + * @return {Promise<boolean>} valid or not + */ + public async isValidProvider(url: string, signal?: AbortSignal): Promise<boolean> { + try { + const response = await fetch(url, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + if (response?.ok) { + const params = await response.json() + if (params && (params.providerAddress || params.providerAddresses)) return true + } + return false + } catch (error) { + LoggerInstance.error(`Error validating provider: ${error.message}`) + return false + } + } + + /** Sends a PolicyServer request to the node to be passed through to the Policy Server + * @param {string} nodeUri The
provider URI. + * @param {PolicyServerPassthroughCommand} request The request to be passed through to the Policy Server. + * @param {AbortSignal} signal abort signal + */ + public async PolicyServerPassthrough( + nodeUri: string, + request: PolicyServerPassthroughCommand, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const initializeUrl = this.getEndpointURL(serviceEndpoints, 'PolicyServerPassthrough') + ? this.getEndpointURL(serviceEndpoints, 'PolicyServerPassthrough').urlPath + : null + if (!initializeUrl) return null + + let response + try { + response = await fetch(initializeUrl, { + method: 'POST', + body: JSON.stringify(request), + headers: { + 'Content-Type': 'application/json' + }, + signal + }) + if (!response.ok) { + const errorText = await response.text() + throw new Error(`${errorText}`) + } + } catch (e) { + LoggerInstance.error('PolicyServerPassthrough failed: ') + LoggerInstance.error(e) + throw new Error(`PolicyServerPassthrough failed: ${e.message}.`) + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'PolicyServerPassthrough failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', JSON.stringify(request)) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** Initialize Policy Server verification + * @param {string} nodeUri The provider URI. + * @param {PolicyServerInitializeCommand} request The request to be sent to the Policy Server. 
+ * @param {AbortSignal} signal abort signal + */ + public async initializePSVerification( + nodeUri: string, + request: PolicyServerInitializeCommand, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + const initializeUrl = this.getEndpointURL( + serviceEndpoints, + 'initializePSVerification' + ) + ? this.getEndpointURL(serviceEndpoints, 'initializePSVerification').urlPath + : null + if (!initializeUrl) return null + + let response + try { + response = await fetch(initializeUrl, { + method: 'POST', + body: JSON.stringify(request), + headers: { + 'Content-Type': 'application/json' + }, + signal + }) + if (!response.ok) { + const errorText = await response.text() + throw new Error(`${errorText}`) + } + } catch (e) { + LoggerInstance.error('initializePSVerification failed: ') + LoggerInstance.error(e) + throw new Error(`initializePSVerification failed: ${e.message}.`) + } + if (response?.ok) { + const params = await response.json() + return params + } + const resolvedResponse = await response.json() + LoggerInstance.error( + 'initializePSVerification failed: ', + response.status, + response.statusText, + resolvedResponse + ) + LoggerInstance.error('Payload was:', JSON.stringify(request)) + throw new Error(JSON.stringify(resolvedResponse)) + } + + /** + * Download node logs as an admin. + * @param {string} nodeUri - The provider URI. + * @param {Signer} signer - The admin signer. + * @param {string} startTime - Start time (epoch ms) to get logs from. + * @param {string} endTime - End time (epoch ms) to get logs to. + * @param {number} [maxLogs] - Maximum number of logs to retrieve (default: 100, max: 1000). + * @param {string} [moduleName] - Filter logs by module name. + * @param {string} [level] - Filter logs by log level. + * @param {number} [page] - Pagination page number. 
+ * @param {AbortSignal} [signal] - An optional abort signal. + * @returns {Promise} The logs response body stream. + */ + public async downloadNodeLogs( + nodeUri: string, + signer: Signer, + startTime: string, + endTime: string, + maxLogs?: number, + moduleName?: string, + level?: string, + page?: number, + signal?: AbortSignal + ): Promise { + const providerEndpoints = await this.getEndpoints(nodeUri) + const serviceEndpoints = await this.getServiceEndpoints(nodeUri, providerEndpoints) + + const logsUrl = this.getEndpointURL(serviceEndpoints, 'logs') + ? this.getEndpointURL(serviceEndpoints, 'logs').urlPath + : null + + if (!logsUrl) { + LoggerInstance.error( + 'Download node logs failed: Cannot get proper logs route (perhaps not implemented on provider?)' + ) + return null + } + const consumerAddress = await signer.getAddress() + const nonce = ( + (await this.getNonce( + nodeUri, + consumerAddress, + signal, + providerEndpoints, + serviceEndpoints + )) + 1 + ).toString() + + const signature = await this.getSignature(signer, nonce, PROTOCOL_COMMANDS.GET_LOGS) + let url = logsUrl + `?startTime=${startTime}&endTime=${endTime}` + if (maxLogs) url += `&maxLogs=${maxLogs}` + if (moduleName) url += `&moduleName=${moduleName}` + if (level) url += `&level=${level}` + if (page) url += `&page=${page}` + + let response + try { + response = await fetch(url, { + method: 'POST', + body: JSON.stringify({ + signature, + nonce, + address: consumerAddress + }), + headers: { 'Content-Type': 'application/json' }, + signal + }) + } catch (e) { + LoggerInstance.error('Download node logs failed:') + LoggerInstance.error(e) + throw new Error('HTTP request failed calling Provider') + } + + if (response?.ok) { + return response.body + } + + const resolvedResponse = await response.json() + LoggerInstance.error( + 'Download node logs failed: ', + response.status, + response.statusText, + resolvedResponse + ) + throw new Error(JSON.stringify(resolvedResponse)) + } + + private noZeroX(input: 
string): string { + return this.zeroXTransformer(input, false) + } + + private zeroXTransformer(input = '', zeroOutput: boolean): string { + const { valid, output } = this.inputMatch( + input, + /^(?:0x)*([a-f0-9]+)$/i, + 'zeroXTransformer' + ) + return (zeroOutput && valid ? '0x' : '') + output + } + + private inputMatch( + input: string, + regexp: RegExp, + conversorName: string + ): { valid: boolean; output: string } { + if (typeof input !== 'string') { + LoggerInstance.debug('Not input string:') + LoggerInstance.debug(input) + throw new Error(`[${conversorName}] Expected string, input type: ${typeof input}`) + } + const match = input.match(regexp) + if (!match) { + LoggerInstance.warn(`[${conversorName}] Input transformation failed.`) + return { valid: false, output: input } + } + return { valid: true, output: match[1] } + } + + public async resolveDdo( + nodeUri: string, + did: string, + signal?: AbortSignal + ): Promise { + const path = nodeUri + '/api/aquarius/assets/ddo/' + did + const response = await fetch(path, { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + signal + }) + if (response.ok) return response.json() + return null + } + + public async validateDdo( + nodeUri: string, + ddo: DDO, + signer: Signer, + signal?: AbortSignal + ): Promise { + const publisherAddress = await signer.getAddress() + const nonceResp = await ( + await this.getData(`${nodeUri}/api/services/nonce?userAddress=${publisherAddress}`) + ).json() + const nonce = (Number(nonceResp.nonce ?? 
0) + 1).toString() + const message = publisherAddress + nonce + PROTOCOL_COMMANDS.VALIDATE_DDO + const signature = await signRequest(signer, message) + const response = await fetch(`${nodeUri}/api/aquarius/assets/ddo/validate`, { + method: 'POST', + body: JSON.stringify({ ddo, publisherAddress, nonce, signature }), + headers: { 'Content-Type': 'application/json' }, + signal + }) + if (!response.ok) return null + const j = await response.json() + return { + valid: true, + hash: j.hash, + proof: { validatorAddress: j.publicKey, r: j.r[0], s: j.s[0], v: j.v } + } as ValidateMetadata + } + + private async getData(url: string, authorization?: string): Promise { + return fetch(url, { + method: 'GET', + headers: { + 'Content-type': 'application/json', + Authorization: authorization + } + }) + } +} diff --git a/src/services/providers/P2pProvider.ts b/src/services/providers/P2pProvider.ts new file mode 100644 index 000000000..447d40a6a --- /dev/null +++ b/src/services/providers/P2pProvider.ts @@ -0,0 +1,1488 @@ +import { type Libp2p, type Libp2pOptions, createLibp2p } from 'libp2p' +import { noise } from '@chainsafe/libp2p-noise' +import { yamux } from '@chainsafe/libp2p-yamux' +import { webSockets } from '@libp2p/websockets' +import { tcp } from '@libp2p/tcp' +import { circuitRelayTransport } from '@libp2p/circuit-relay-v2' +import { bootstrap } from '@libp2p/bootstrap' +import { identify } from '@libp2p/identify' +import { EventTypes, KadDHT, kadDHT } from '@libp2p/kad-dht' +import { ping } from '@libp2p/ping' +import { peerIdFromString } from '@libp2p/peer-id' +import { lpStream, UnexpectedEOFError } from '@libp2p/utils' +import type { Connection } from '@libp2p/interface' +import { multiaddr, type Multiaddr } from '@multiformats/multiaddr' +import { Signer } from 'ethers' +import { sleep } from '../../utils/General.js' +import { LoggerInstance } from '../../utils/Logger.js' +import { + StorageObject, + FileInfo, + ComputeJob, + ComputeOutput, + ComputeAlgorithm, + 
ComputeAsset, + ComputeEnvironment, + ProviderInitialize, + ProviderComputeInitializeResults, + UserCustomParameters, + ComputeResourceRequest, + ComputeJobMetadata, + PolicyServerInitializeCommand, + PolicyServerPassthroughCommand, + dockerRegistryAuth, + DownloadResponse, + ComputeResultStream, + NodeStatus, + NodeComputeJob +} from '../../@types/index.js' +import { PROTOCOL_COMMANDS, NodeLogsParams, NodeLogEntry } from '../../@types/Provider.js' +import { type DDO, type ValidateMetadata } from '@oceanprotocol/ddo-js' +import { signRequest } from '../../utils/SignatureUtils.js' +import { getConsumerAddress, getSignature, getAuthorization } from './BaseProvider.js' +import { eciesencrypt } from '../../utils/eciesencrypt.js' + +export const OCEAN_P2P_PROTOCOL = '/ocean/nodes/1.0.0' +const OCEAN_DHT_PROTOCOL = '/ocean/nodes/1.0.0/kad/1.0.0' +const DEFAULT_MAX_RETRIES = 5 +const DEFAULT_RETRY_DELAY_MS = 1000 +const DEFAULT_DIAL_TIMEOUT_MS = 10_000 + +// Ocean Protocol public bootstrap nodes (WebSocket addresses) +const DEFAULT_BOOTSTRAP_PEERS = [ + '/dns4/bootstrap1.oncompute.ai/tcp/9001/ws/p2p/16Uiu2HAmLhRDqfufZiQnxvQs2XHhd6hwkLSPfjAQg1gH8wgRixiP', + '/dns4/bootstrap2.oncompute.ai/tcp/9001/ws/p2p/16Uiu2HAmHwzeVw7RpGopjZe6qNBJbzDDBdqtrSk7Gcx1emYsfgL4', + '/dns4/bootstrap3.oncompute.ai/tcp/9001/ws/p2p/16Uiu2HAmBKSeEP3v4tYEPsZsZv9VELinyMCsrVTJW9BvQeFXx28U', + '/dns4/bootstrap4.oncompute.ai/tcp/9001/ws/p2p/16Uiu2HAmSTVTArioKm2wVcyeASHYEsnx2ZNq467Z4GMDU4ErEPom' +] + +export interface P2PConfig { + /** + * Bootstrap peer multiaddrs for DHT peer discovery. + * Required when dialing bare peer IDs; defaults to Ocean Protocol's + * public bootstrap nodes. Ignored if `libp2p.peerDiscovery` is set. + */ + bootstrapPeers?: string[] + /** Timeout per dial + stream operation in ms. Default: 10000 */ + dialTimeout?: number + /** Max retry attempts on connection errors. Default: 5 */ + maxRetries?: number + /** Base delay between retries in ms. 
Default: 1000 */ + retryDelay?: number + /** + * Timeout for DHT peer lookup when dialing a bare peer ID, in ms. Default: 60000. + * Intentionally separate from dialTimeout — DHT resolution needs more time than + * a direct dial. Once a peer is found and connected, subsequent calls skip this. + */ + dhtLookupTimeout?: number + + /** + * Enable TCP transport in addition to WebSockets. Default: false. + * Required in Node.js/Electron environments to reach nodes over plain TCP. + * Do NOT enable in browser builds — TCP is not available in browsers. + */ + enableTcp?: boolean + /** + * Full libp2p node configuration. Fields provided here override ocean.js + * defaults (transports, encrypters, services, connectionManager, etc.). + * Unset fields keep ocean.js defaults. + */ + + libp2p?: Partial<Libp2pOptions> +} + +export class P2pProvider { + private p2pConfig: P2PConfig = {} + private libp2pNode: Libp2p | null = null + /** + * Configure the internal libp2p node used for P2P transport. + * Call this once before making P2P requests, e.g.: + * ProviderInstance.setupP2P({ bootstrapPeers: ['/ip4/1.2.3.4/tcp/9000/ws/p2p/16Uiu2...'] }) + * + * Required when using bare peer IDs as nodeUri — the bootstrap peers + * provide DHT entry points so the peer can be located. + */ + public async setupP2P(config: P2PConfig): Promise<void> { + this.p2pConfig = config + if (this.libp2pNode) { + Promise.resolve(this.libp2pNode.stop()).catch(() => {}) + this.libp2pNode = null + } + await this.getOrCreateLibp2pNode() + } + + public async getMultiaddrFromPeerId(peerId: string): Promise<string> { + const appendedPeerId = (addr: string) => + addr.includes('/p2p/') ?
addr : `${addr}/p2p/${peerId}` + const node = await this.getOrCreateLibp2pNode() + + // Check existing connections — remoteAddr.toString() gives the full multiaddr + const connection = node + .getConnections() + .find((c) => c.remotePeer.toString() === peerId) + if (connection?.remoteAddr) { + const addr = connection.remoteAddr.toString() + return appendedPeerId(addr) + } + + // Check peerStore (populated by peer:discovery, DHT, and connections) + try { + const peerData = await node.peerStore.get(peerIdFromString(peerId)) + if (peerData?.addresses?.length > 0) { + const addr = peerData.addresses[0].multiaddr.toString() + return appendedPeerId(addr) + } + } catch {} + + // DHT lookup as last resort + const dht = node.services.dht as KadDHT + for await (const event of dht.findPeer(peerIdFromString(peerId), { + signal: AbortSignal.timeout(20000) + })) { + if (event.type === EventTypes.FINAL_PEER && event.peer.multiaddrs.length > 0) { + const addr = event.peer.multiaddrs[0].toString() + return appendedPeerId(addr) + } + } + + throw new Error(`No multiaddrs found for peer id ${peerId}`) + } + + /** Returns the underlying libp2p node instance, or null if P2P is not initialized. */ + public getLibp2pNode(): Libp2p | null { + return this.libp2pNode ?? null + } + + /** Returns all peers known to the peerStore (discovered via bootstrap, DHT, or connections).
*/ + public async getDiscoveredNodes(): Promise< + Array<{ peerId: string; multiaddrs: string[] }> + > { + if (!this.libp2pNode) return [] + const allPeers = await this.libp2pNode.peerStore.all() + return allPeers.map((peer) => ({ + peerId: peer.id.toString(), + multiaddrs: peer.addresses.map((a) => a.multiaddr.toString()) + })) + } + + private bufToHex(val: any): string { + if (typeof val === 'string') { + try { + val = JSON.parse(val) + } catch { + return val + } + } + if (val?.type === 'Buffer' && Array.isArray(val.data)) { + return Buffer.from(val.data).toString() + } + if (val instanceof Uint8Array || Buffer.isBuffer(val)) { + return Buffer.from(val).toString() + } + return val + } + + private async getOrCreateLibp2pNode(): Promise { + if (this.libp2pNode) return this.libp2pNode + + const bootstrapAddrs = (this.p2pConfig.bootstrapPeers ?? DEFAULT_BOOTSTRAP_PEERS).map( + multiaddr + ) + + const node = await createLibp2p({ + addresses: { listen: [] }, + transports: [ + webSockets(), + circuitRelayTransport(), + ...(this.p2pConfig.enableTcp ? [tcp()] : []) + ], + connectionEncrypters: [noise()], + streamMuxers: [yamux()], + peerDiscovery: [ + ...(bootstrapAddrs.length > 0 + ? [bootstrap({ list: bootstrapAddrs.map(String), timeout: 10000 })] + : []) + ], + services: { + identify: identify(), + ping: ping(), + dht: kadDHT({ protocol: OCEAN_DHT_PROTOCOL, clientMode: true }) + }, + // Without this we are blocking connection to plain ws - the bundler thinks we are in a browser. + // This also applies to local nodes. + // Browsers will still block connection if transport is not secure. + connectionGater: { denyDialMultiaddr: () => false }, + connectionManager: { maxConnections: 100 }, + connectionMonitor: { abortConnectionOnPingFailure: false }, + // User-supplied config overrides all defaults above. + // Cast needed: services generics can't be inferred through a Partial spread. 
+ ...(this.p2pConfig.libp2p as any) + }) + + await node.start() + node.addEventListener('peer:discovery', (evt: any) => { + const peerInfo = evt.detail + if (!peerInfo?.id) return + const peerId = peerInfo.id.toString() + if ( + node.getConnections().length < 100 && + node.getConnections(peerInfo.id).length === 0 + ) { + node + .dial(peerInfo.id, { signal: AbortSignal.timeout(10000) }) + .catch((err: Error) => { + LoggerInstance.debug( + `Failed to dial discovered peer ${peerId}: ${err.message}` + ) + }) + } + }) + + this.libp2pNode = node + return node + } + + private toUint8Array(chunk: Uint8Array | { subarray(): Uint8Array }): Uint8Array { + return chunk instanceof Uint8Array ? chunk : chunk.subarray() + } + + private isDialable(ma: Multiaddr): boolean { + // Node.js can dial any transport (TCP, WS, WSS) + if (typeof window === 'undefined') return true + + // Browsers on HTTPS pages can only use WSS/TLS + const str = ma.toString() + return str.includes('/tls/sni') + } + + private peerIdFromMultiaddr(ma: Multiaddr): string | null { + const parts = ma.toString().split('/p2p/') + if (parts.length <= 1) return null + // Strip trailing protocol components like /p2p-circuit + const raw = parts[parts.length - 1] + return raw.split('/')[0] || null + } + + private async getConnection( + nodeUri: string | Multiaddr[], + signal: AbortSignal + ): Promise { + const node = await this.getOrCreateLibp2pNode() + + if (Array.isArray(nodeUri)) { + const dialable = nodeUri.filter((ma) => this.isDialable(ma)) + + if (dialable.length > 0) { + LoggerInstance.debug(`[P2P] dial array: ${dialable.length} dialable addrs`) + try { + const conn = await node.dial(dialable, { signal }) + LoggerInstance.debug(`[P2P] dial array SUCCESS via ${conn.remoteAddr}`) + return conn + } catch (err: any) { + LoggerInstance.debug(`[P2P] dial array failed: ${err.message}`) + } + } + + for (const ma of nodeUri) { + const pid = this.peerIdFromMultiaddr(ma) + if (pid) { + LoggerInstance.debug(`[P2P] Not 
multiaddrs, fallback to peerId: ${pid}`) + return this.dialByPeerId(node, pid, signal) + } + } + throw new Error('No valid addresses and no peer ID in multiaddrs') + } + + try { + const ma = multiaddr(nodeUri) + if (this.isDialable(ma)) { + LoggerInstance.debug(`[P2P] dial single addr: ${ma}`) + try { + const conn = await node.dial(ma, { signal }) + LoggerInstance.debug(`[P2P] dial single SUCCESS via ${conn.remoteAddr}`) + return conn + } catch (err: any) { + LoggerInstance.debug(`[P2P] dial single failed: ${err.message}`) + } + } + const pid = this.peerIdFromMultiaddr(ma) + if (pid) { + LoggerInstance.debug(`[P2P] single fallback -> dialByPeerId ${pid}`) + return this.dialByPeerId(node, pid, signal) + } + throw new Error(`Cannot dial address: ${nodeUri}`) + } catch (err: any) { + if (err.message?.includes('Cannot dial')) throw err + } + + LoggerInstance.debug(`[P2P] bare peerId -> dialByPeerId ${nodeUri}`) + return this.dialByPeerId(node, nodeUri, signal) + } + + private async dialByPeerId( + node: Libp2p, + peerIdStr: string, + signal: AbortSignal + ): Promise { + const peerId = peerIdFromString(peerIdStr) + + const existing = node.getConnections(peerId).filter((c) => c.status === 'open') + if (existing.length > 0) { + LoggerInstance.debug( + `[P2P] ${peerIdStr}: reusing existing connection via ${existing[0].remoteAddr}` + ) + return existing[0] + } + + // Wait briefly for bootstrap if node just started (0 connections) + if (node.getConnections().length === 0) { + LoggerInstance.debug( + `[P2P] ${peerIdStr}: no connections yet, waiting for bootstrap...` + ) + await sleep(3000) + const after = node.getConnections(peerId) + if (after.length > 0) { + LoggerInstance.debug(`[P2P] ${peerIdStr}: connected during bootstrap wait`) + return after[0] + } + } + + const seen = new Set() + const allAddrs: Multiaddr[] = [] + const addAddr = (ma: Multiaddr) => { + const key = ma.toString() + if (!seen.has(key)) { + seen.add(key) + allAddrs.push(ma) + } + } + + try { + const 
peerData = await node.peerStore.get(peerId) + if (peerData?.addresses) { + for (const addr of peerData.addresses) { + addAddr(addr.multiaddr) + } + LoggerInstance.debug( + `[P2P] ${peerIdStr}: ${peerData.addresses.length} peerStore addrs` + ) + } + } catch { + LoggerInstance.debug(`[P2P] ${peerIdStr}: not in peerStore`) + } + + const knownDialable = allAddrs.filter((ma) => this.isDialable(ma)) + if (knownDialable.length === 0) { + LoggerInstance.debug( + `[P2P] ${peerIdStr}: no dialable addrs in peerStore, querying DHT...` + ) + try { + const dhtSignal = AbortSignal.timeout(this.p2pConfig.dhtLookupTimeout ?? 60_000) + const peerInfo = await node.peerRouting.findPeer(peerId, { signal: dhtSignal }) + for (const ma of peerInfo.multiaddrs) addAddr(ma) + LoggerInstance.debug( + `[P2P] ${peerIdStr}: DHT returned ${peerInfo.multiaddrs.length} addrs` + ) + } catch (err: any) { + LoggerInstance.debug(`[P2P] ${peerIdStr}: DHT findPeer failed: ${err.message}`) + } + } else { + LoggerInstance.debug( + `[P2P] ${peerIdStr}: ${knownDialable.length} dialable addrs from peerStore, skipping DHT` + ) + } + + const dialable = allAddrs + .filter((ma) => this.isDialable(ma)) + .map((ma) => { + const str = ma.toString() + return str.includes('/p2p/') ? 
ma : multiaddr(`${str}/p2p/${peerIdStr}`) + }) + + LoggerInstance.debug( + `[P2P] ${peerIdStr}: ${dialable.length}/${allAddrs.length} addrs are dialable` + ) + + if (dialable.length > 0) { + LoggerInstance.debug(`[P2P] ${peerIdStr}: dialing ${dialable.map(String)}`) + try { + const conn = await node.dial(dialable, { signal }) + LoggerInstance.debug( + `[P2P] ${peerIdStr}: SUCCESS via ${conn.remoteAddr} (limited=${ + conn.limits != null + })` + ) + return conn + } catch (err: any) { + LoggerInstance.debug(`[P2P] ${peerIdStr}: direct dial failed: ${err.message}`) + } + } + + LoggerInstance.debug(`[P2P] ${peerIdStr}: last resort dial by peerId`) + try { + const conn = await node.dial(peerId, { signal: AbortSignal.timeout(10_000) }) + LoggerInstance.debug( + `[P2P] ${peerIdStr}: peerId dial SUCCESS via ${conn.remoteAddr} (limited=${ + conn.limits != null + })` + ) + return conn + } catch { + throw new Error( + `Cannot reach peer ${peerIdStr}. ` + + (allAddrs.length > 0 + ? `Found addrs: ${allAddrs.map(String).join(', ')} (none dialable). ` + : 'No addresses found. ') + + `Active connections: ${node.getConnections().length}.` + ) + } + } + + protected getConsumerAddress(s: Signer | string) { + return getConsumerAddress(s) + } + + protected getSignature(s: Signer | string, nonce: string, command: string) { + return getSignature(s, nonce, command) + } + + private async getNodePublicKey(nodeUri: string | Multiaddr[]): Promise { + const endpoints = await this.getEndpoints(nodeUri) + return endpoints?.nodePublicKey + } + + protected getAuthorization(s: Signer | string) { + return getAuthorization(s) + } + + private async dialAndStream( + nodeUri: string | Multiaddr[], + payload: Record, + signal?: AbortSignal + ): Promise<{ + lp: ReturnType + firstBytes: Uint8Array + connection: Connection + }> { + const opSignal = + signal ?? AbortSignal.timeout(this.p2pConfig.dialTimeout ?? 
DEFAULT_DIAL_TIMEOUT_MS) + const connection = await this.getConnection(nodeUri, opSignal) + try { + const stream = await connection.newStream(OCEAN_P2P_PROTOCOL, { + signal: opSignal, + runOnLimitedConnection: true + }) + const lp = lpStream(stream) + + await lp.write(new TextEncoder().encode(JSON.stringify(payload)), { + signal: opSignal + }) + await stream.close() + + const firstChunk = await lp.read({ signal: opSignal }) + const firstBytes = this.toUint8Array(firstChunk) + + return { lp, firstBytes, connection } + } catch (err: any) { + // Evict the connection so retries get a fresh one + try { + connection.abort(new Error('stream failed')) + } catch {} + throw err + } + } + + private async sendP2pCommand( + nodeUri: string | Multiaddr[], + command: string, + body: Record, + signerOrAuthToken?: Signer | string | null, + signal?: AbortSignal, + retrialNumber: number = 0 + ): Promise { + try { + const payload = { + command, + authorization: signerOrAuthToken + ? this.getAuthorization(signerOrAuthToken) + : undefined, + ...body + } + + const { lp, firstBytes } = await this.dialAndStream(nodeUri, payload, signal) + + if (!firstBytes.length) { + throw new Error('Gateway node error: no response from peer') + } + + const statusText = new TextDecoder().decode(firstBytes) + try { + const status = JSON.parse(statusText) + if (typeof status?.httpStatus === 'number' && status.httpStatus >= 400) { + throw new Error(status.error ?? `Gateway node error: ${status.httpStatus}`) + } + } catch {} // NOTE(review): this bare catch also swallows the httpStatus >= 400 Error thrown just above — confirm intent, or rethrow when the parse succeeded + + if ( + command === PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS || + command === PROTOCOL_COMMANDS.COMPUTE_GET_RESULT + ) { + const streamableChunks = (async function* () { + try { + while (true) { + const chunk = await lp.read({ + signal: AbortSignal.timeout(DEFAULT_DIAL_TIMEOUT_MS) + }) + yield chunk instanceof Uint8Array ?
chunk : chunk.subarray() + } + } catch (e) { + if (!(e instanceof UnexpectedEOFError)) { + throw e + } + } + })() + return streamableChunks + } + + const chunks: Uint8Array[] = [firstBytes] + try { + while (true) { + const chunk = await lp.read({ + signal: AbortSignal.timeout( + this.p2pConfig.dialTimeout ?? DEFAULT_DIAL_TIMEOUT_MS + ) + }) + chunks.push(this.toUint8Array(chunk)) + } + } catch (e) { + if (!(e instanceof UnexpectedEOFError)) { + throw e + } + } + + let response: unknown + for (let i = 0; i < chunks.length; i++) { + const text = new TextDecoder().decode(chunks[i]) + try { + response = JSON.parse(text) + } catch { + response = chunks[i] + } + } + + const res = response as Record | null + if (typeof res?.httpStatus === 'number' && res.httpStatus >= 400) { + throw new Error( + typeof res.error === 'string' ? res.error : JSON.stringify(res.error) + ) + } + + const errText = (typeof response === 'string' ? response : res?.error) ?? '' + if ( + errText.includes('Cannot connect to peer') && + retrialNumber < (this.p2pConfig.maxRetries ?? DEFAULT_MAX_RETRIES) + ) { + await new Promise((resolve) => + setTimeout(resolve, this.p2pConfig.retryDelay ?? DEFAULT_RETRY_DELAY_MS) + ) + return this.sendP2pCommand( + nodeUri, + command, + body, + signerOrAuthToken, + signal, + retrialNumber + 1 + ) + } + + return response + } catch (err: any) { + const msg: string = err?.message ?? '' + if ( + (msg.includes('closed') || msg.includes('reset')) && + retrialNumber < (this.p2pConfig.maxRetries ?? DEFAULT_MAX_RETRIES) + ) { + LoggerInstance.debug( + `[P2P] Stream reset/closed on attempt ${retrialNumber + 1}, retrying...` + ) + + // Connection already evicted by dialAndStream catch block. + // Brief delay ensures libp2p fully cleans up before retry. 
+ await sleep(1000) + return this.sendP2pCommand( + nodeUri, + command, + body, + signerOrAuthToken, + signal, + retrialNumber + 1 + ) + } + throw new Error(`P2P command error: ${msg}`) + } + } + + /** + * Returns node status via P2P STATUS command. + * @param {string} nodeUri - multiaddr of the node + */ + async getEndpoints(nodeUri: string | Multiaddr[]): Promise { + try { + return await this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.STATUS, {}) + } catch (e) { + LoggerInstance.error('P2P getEndpoints (STATUS) failed:', e) + throw e + } + } + + public async getNodeStatus( + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + return this.getEndpoints(nodeUri) + } + + public async getNodeJobs( + nodeUri: string | Multiaddr[], + fromTimestamp?: number, + signal?: AbortSignal + ): Promise { + try { + const body: Record = {} + if (fromTimestamp) body.fromTimestamp = fromTimestamp.toString() + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.JOBS, + body, + null, + signal + ) + return Array.isArray(result) ? result : [] + } catch (e) { + LoggerInstance.error('P2P getNodeJobs failed:', e) + return [] + } + } + + /** + * Get current nonce from the node via P2P. + */ + public async getNonce( + nodeUri: string | Multiaddr[], + consumerAddress: string, + signal?: AbortSignal + ): Promise { + try { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.NONCE, + { address: consumerAddress }, + null, + signal + ) + // ocean-node may return a plain number or { nonce: number } + const nonceValue = + typeof result === 'number' ? result : result?.nonce ?? result ?? 0 + return !nonceValue || nonceValue === null ? 0 : Number(nonceValue) + } catch (e) { + LoggerInstance.error('P2P getNonce failed:', e) + throw e + } + } + + /** + * Encrypt data via P2P ENCRYPT command. 
+ */ + public async encrypt( + data: any, + chainId: number, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + _policyServer?: any, + signal?: AbortSignal + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.ENCRYPT + ) + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.ENCRYPT, + { + chainId, + nonce, + consumerAddress, + signature, + blob: typeof data === 'string' ? data : JSON.stringify(data) + }, + signerOrAuthToken, + signal + ) + return this.bufToHex(result) + } + + /** + * Get file details for a given DID and service ID via P2P. + */ + public async checkDidFiles( + did: string, + serviceId: string, + nodeUri: string | Multiaddr[], + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.FILE_INFO, + { did, serviceId, checksum: withChecksum }, + null, + signal + ) + return Array.isArray(result) ? result : [result] + } + + /** + * Get File details via P2P. + */ + public async getFileInfo( + file: StorageObject, + nodeUri: string | Multiaddr[], + withChecksum: boolean = false, + signal?: AbortSignal + ): Promise { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.FILE_INFO, + { file, type: (file as any).type, checksum: withChecksum }, + null, + signal + ) + return Array.isArray(result) ? result : [result] + } + + /** + * Returns compute environments via P2P. + */ + public async getComputeEnvironments( + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_GET_ENVIRONMENTS, + {}, + null, + signal + ) + return Array.isArray(result) ? 
result : [result] + } + + /** + * Initializes the provider for a service (download) request via P2P. + */ + public async initialize( + did: string, + serviceId: string, + fileIndex: number, + consumerAddress: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal, + userCustomParameters?: UserCustomParameters, + computeEnv?: string, + validUntil?: number + ): Promise { + const body: Record = { + ddoId: did, + serviceId, + consumerAddress + } + if (userCustomParameters) body.userdata = userCustomParameters + if (computeEnv) body.environment = computeEnv + if (validUntil) body.validUntil = validUntil + return this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.GET_FEES, body, null, signal) + } + + /** + * Initializes compute request via P2P. No auth required -- the node only + * validates parameters and applies rate limits. + */ + public async initializeCompute( + assets: ComputeAsset[], + algorithm: ComputeAlgorithm, + computeEnv: string, + token: string, + validUntil: number, + nodeUri: string | Multiaddr[], + consumerAddress: string, + resources: ComputeResourceRequest[], + chainId: number, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuthData?: dockerRegistryAuth, + output?: ComputeOutput + ): Promise { + const body: Record = { + datasets: assets, + algorithm, + environment: computeEnv, + payment: { chainId, token, resources }, + maxJobDuration: validUntil, + consumerAddress + } + if (policyServer) body.policyServer = policyServer + if (queueMaxWaitTime) body.queueMaxWaitTime = queueMaxWaitTime + if (dockerRegistryAuthData) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) { + body.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuthData) + ) + } + } + if (output) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) body.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + + return this.sendP2pCommand( + nodeUri, + 
PROTOCOL_COMMANDS.COMPUTE_INITIALIZE, + body, + null, + signal + ) + } + + /** + * Sends a DOWNLOAD command to the peer via P2P, reads the binary stream + * directly from the lpStream, and returns a DownloadResponse. + * The node decrypts the service file and streams raw file data back. + */ + public async getDownloadUrl( + did: string, + serviceId: string, + fileIndex: number, + transferTxId: string, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + policyServer?: any, + userCustomParameters?: UserCustomParameters + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress)) + 1).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.DOWNLOAD + ) + + const payload: Record = { + command: PROTOCOL_COMMANDS.DOWNLOAD, + authorization: this.getAuthorization(signerOrAuthToken), + fileIndex, + documentId: did, + transferTxId, + serviceId, + consumerAddress, + nonce, + signature + } + if (policyServer) payload.policyServer = policyServer + if (userCustomParameters) payload.userData = userCustomParameters + + const { lp, firstBytes } = await this.dialAndStream(nodeUri, payload) + + // First lp frame is the status JSON (if present). Some nodes send binary data + // directly without a status prefix — in that case JSON.parse throws SyntaxError + // and we treat the frame as the start of file data. + const statusText = new TextDecoder().decode(firstBytes) + let status: { httpStatus?: number; error?: string } | null = null + try { + status = JSON.parse(statusText) + } catch { + // Not JSON — first frame is file data, fall through to chunk collection + } + if (status && typeof status.httpStatus === 'number' && status.httpStatus >= 400) { + throw new Error(status.error ?? `P2P download error: ${status.httpStatus}`) + } + + // Collect binary file data. If the first frame wasn't a status JSON, it's data. 
+ const chunks: Buffer[] = status === null ? [Buffer.from(firstBytes)] : [] + try { + while (true) { + const chunk = await lp.read({ + signal: AbortSignal.timeout( + this.p2pConfig.dialTimeout ?? DEFAULT_DIAL_TIMEOUT_MS + ) + }) + chunks.push(Buffer.from(this.toUint8Array(chunk))) + } + } catch (e) { + if (!(e instanceof UnexpectedEOFError)) { + throw e + } + } + + const combined = Buffer.concat(chunks) + return { + data: combined.buffer.slice( + combined.byteOffset, + combined.byteOffset + combined.byteLength + ) as ArrayBuffer, + filename: `file${fileIndex}` + } + } + + /** + * Start a paid compute job via P2P. + */ + public async computeStart( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + maxJobDuration: number, + token: string, + resources: ComputeResourceRequest[], + chainId: number, + metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_START + ) + + const body: Record = { + environment: computeEnv, + dataset: datasets[0], + datasets, + algorithm, + maxJobDuration, + feeToken: token, + resources, + chainId, + payment: { chainId, token, maxJobDuration, resources }, + consumerAddress, + nonce, + signature + } + if (metadata) body.metadata = metadata + if (additionalViewers) body.additionalViewers = additionalViewers + if (policyServer) body.policyServer = policyServer + if (queueMaxWaitTime) body.queueMaxWaitTime = queueMaxWaitTime + if (dockerRegistryAuth) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if 
(nodeKey) + body.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuth) + ) + } + if (output) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) body.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_START, + body, + signerOrAuthToken, + signal + ) + return Array.isArray(result) ? result : result + } + + /** + * Start a free compute job via P2P. + */ + public async freeComputeStart( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + computeEnv: string, + datasets: ComputeAsset[], + algorithm: ComputeAlgorithm, + resources?: ComputeResourceRequest[], + metadata?: ComputeJobMetadata, + additionalViewers?: string[], + output?: ComputeOutput, + policyServer?: any, + signal?: AbortSignal, + queueMaxWaitTime?: number, + dockerRegistryAuth?: dockerRegistryAuth + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.FREE_COMPUTE_START + ) + + const body: Record = { + environment: computeEnv, + dataset: datasets[0], + datasets, + algorithm, + resources, + consumerAddress, + nonce, + signature + } + if (metadata) body.metadata = metadata + if (additionalViewers) body.additionalViewers = additionalViewers + if (policyServer) body.policyServer = policyServer + if (queueMaxWaitTime) body.queueMaxWaitTime = queueMaxWaitTime + if (dockerRegistryAuth) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) + body.encryptedDockerRegistryAuth = eciesencrypt( + nodeKey, + JSON.stringify(dockerRegistryAuth) + ) + } + if (output) { + const nodeKey = await this.getNodePublicKey(nodeUri) + if (nodeKey) body.output = eciesencrypt(nodeKey, JSON.stringify(output)) + } + + const result = 
await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.FREE_COMPUTE_START, + body, + signerOrAuthToken, + signal + ) + return Array.isArray(result) ? result : result + } + + /** + * Get streamable compute logs via P2P. Returns an async generator of Uint8Array chunks. + */ + public async computeStreamableLogs( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + signal?: AbortSignal + ): Promise { + const isAuthToken = typeof signerOrAuthToken === 'string' + if (isAuthToken) { + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS, + { jobId }, + signerOrAuthToken, + signal + ) + } + + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS + ) + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_GET_STREAMABLE_LOGS, + { jobId, consumerAddress, nonce, signature }, + signerOrAuthToken, + signal + ) + } + + /** + * Stop a compute job via P2P. + */ + public async computeStop( + jobId: string, + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + + const signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_STOP + ) + + const body: Record = { jobId, consumerAddress, nonce, signature } + if (agreementId) body.agreementId = agreementId + + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_STOP, + body, + signerOrAuthToken, + signal + ) + } + + /** + * Get compute status via P2P. 
+ */ + public async computeStatus( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId?: string, + agreementId?: string, + signal?: AbortSignal + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const body: Record = { consumerAddress } + if (jobId) body.jobId = jobId + if (agreementId) body.agreementId = agreementId + + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_GET_STATUS, + body, + signerOrAuthToken, + signal + ) + } + + /** + * Get compute result as an async generator of Uint8Array chunks via P2P. + * Supports resumable downloads via `offset` (byte position to resume from). + */ + public async getComputeResult( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + index: number, + offset: number = 0 + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const payload: Record = { + command: PROTOCOL_COMMANDS.COMPUTE_GET_RESULT, + jobId, + index, + offset, + consumerAddress + } + + if (typeof signerOrAuthToken === 'string') { + payload.authorization = signerOrAuthToken + } else { + const nonce = ((await this.getNonce(nodeUri, consumerAddress)) + 1).toString() + payload.nonce = nonce + payload.signature = await this.getSignature( + signerOrAuthToken, + nonce, + PROTOCOL_COMMANDS.COMPUTE_GET_RESULT + ) + } + + const { lp, firstBytes } = await this.dialAndStream(nodeUri, payload) + + // First frame is always a status JSON + const status = JSON.parse(new TextDecoder().decode(firstBytes)) + if (typeof status?.httpStatus === 'number' && status.httpStatus >= 400) { + throw new Error(status.error ?? `P2P compute result error: ${status.httpStatus}`) + } + + const dialTimeout = this.p2pConfig.dialTimeout ?? DEFAULT_DIAL_TIMEOUT_MS + return (async function* () { + try { + while (true) { + const chunk = await lp.read({ signal: AbortSignal.timeout(dialTimeout) }) + yield chunk instanceof Uint8Array ? 
chunk : chunk.subarray() + } + } catch (e) { + if (!(e instanceof UnexpectedEOFError)) throw e + } + })() + } + + public async getComputeResultUrl( + nodeUri: string | Multiaddr[], + signerOrAuthToken: Signer | string, + jobId: string, + index: number + ): Promise { + const consumerAddress = await this.getConsumerAddress(signerOrAuthToken) + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.COMPUTE_GET_RESULT, + { jobId, index, consumerAddress }, + signerOrAuthToken + ) + return result + } + + /** + * Generate an auth token via P2P (auto-signs with Signer). + */ + public async generateAuthToken( + consumer: Signer, + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + const address = await consumer.getAddress() + const nonce = ((await this.getNonce(nodeUri, address, signal)) + 1).toString() + const signature = await this.getSignature( + consumer, + nonce, + PROTOCOL_COMMANDS.CREATE_AUTH_TOKEN + ) + + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.CREATE_AUTH_TOKEN, + { address, signature, nonce }, + null, + signal + ) + return result?.token ?? result + } + + /** + * Generate an auth token from a pre-signed request (no Signer needed). + */ + public async generateSignedAuthToken( + address: string, + signature: string, + nonce: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.CREATE_AUTH_TOKEN, + { address, signature, nonce }, + null, + signal + ) + return result?.token ?? result + } + + /** + * Resolve a DDO by DID via P2P GET_DDO command. + */ + public async resolveDdo( + nodeUri: string | Multiaddr[], + did: string, + signal?: AbortSignal + ): Promise { + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.GET_DDO, + { id: did }, + null, + signal + ) + } + + /** + * Validate a DDO via P2P VALIDATE_DDO command. 
+ */ + public async validateDdo( + nodeUri: string | Multiaddr[], + ddo: DDO, + signer: Signer, + signal?: AbortSignal + ): Promise { + const publisherAddress = await signer.getAddress() + const nonce = ( + (await this.getNonce(nodeUri, publisherAddress, signal)) + 1 + ).toString() + const message = publisherAddress + nonce + PROTOCOL_COMMANDS.VALIDATE_DDO + const sig = await signRequest(signer, message) + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.VALIDATE_DDO, + { ddo, publisherAddress, nonce, signature: sig }, + null, + signal + ) + if (!result || result.error) return null + return { + valid: true, + hash: this.bufToHex(result.hash), + proof: { + validatorAddress: this.bufToHex(result.publicKey), + r: this.bufToHex(result.r?.[0] ?? result.r), + s: this.bufToHex(result.s?.[0] ?? result.s), + v: result.v + } + } as ValidateMetadata + } + + /** + * Invalidate an auth token via P2P. + */ + public async invalidateAuthToken( + consumer: Signer, + token: string, + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise<{ success: boolean }> { + const consumerAddress = await consumer.getAddress() + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + const signatureMessage = consumerAddress + nonce + const signature = await signRequest(consumer, signatureMessage) + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.INVALIDATE_AUTH_TOKEN, + { address: consumerAddress, signature, token, nonce }, + null, + signal + ) + } + + /** + * Check if a P2P node is reachable by calling STATUS. 
+ */ + public async isValidProvider( + nodeUri: string | Multiaddr[], + signal?: AbortSignal + ): Promise { + try { + const result = await this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.STATUS, + {}, + null, + signal + ) + // STATUS response uses 'address' (ETH addr) while HTTP root uses 'providerAddress' + return !!( + result && + (result.address || result.providerAddress || result.providerAddresses) + ) + } catch { + return false + } + } + + /** + * PolicyServer passthrough via P2P. + */ + public async PolicyServerPassthrough( + nodeUri: string | Multiaddr[], + request: PolicyServerPassthroughCommand, + signal?: AbortSignal + ): Promise { + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.POLICY_SERVER_PASSTHROUGH, + { ...request }, + null, + signal + ) + } + + /** + * Initialize Policy Server verification via P2P. + */ + public async initializePSVerification( + nodeUri: string | Multiaddr[], + request: PolicyServerInitializeCommand, + signal?: AbortSignal + ): Promise { + return this.sendP2pCommand( + nodeUri, + PROTOCOL_COMMANDS.POLICY_SERVER_PASSTHROUGH, + { ...request }, + null, + signal + ) + } + + /** + * Download node logs via P2P. 
+ */ + public async downloadNodeLogs( + nodeUri: string | Multiaddr[], + signer: Signer, + startTime: string, + endTime: string, + maxLogs?: number, + moduleName?: string, + level?: string, + page?: number, + signal?: AbortSignal + ): Promise { + const consumerAddress = await signer.getAddress() + const nonce = ((await this.getNonce(nodeUri, consumerAddress, signal)) + 1).toString() + const signature = await this.getSignature(signer, nonce, PROTOCOL_COMMANDS.GET_LOGS) + + const body: Record = { + startTime, + endTime, + signature, + nonce, + address: consumerAddress + } + if (maxLogs) body.maxLogs = maxLogs + if (moduleName) body.moduleName = moduleName + if (level) body.level = level + if (page) body.page = page + + return this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.GET_LOGS, body, signer, signal) + } + + /** + * Fetch node logs via P2P with a pre-signed payload. + * P2P only — use downloadNodeLogs() for the auto-signed variant. + */ + public async fetchNodeLogs( + nodeUri: string | Multiaddr[], + address: string, + signature: string, + nonce: string, + logParams?: NodeLogsParams + ): Promise { + return this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.GET_LOGS, { + address, + signature, + nonce, + ...logParams + }) + } + + /** + * Fetch node configuration via P2P. Accepts a pre-signed payload — + * the caller is responsible for nonce retrieval and signing. + */ + public async fetchConfig( + nodeUri: string | Multiaddr[], + payload: Record + ): Promise { + return this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.FETCH_CONFIG, payload) + } + + /** + * Push node configuration via P2P. Accepts a pre-signed payload — + * the caller is responsible for nonce retrieval and signing. 
+ */ + public async pushConfig( + nodeUri: string | Multiaddr[], + payload: Record + ): Promise { + return this.sendP2pCommand(nodeUri, PROTOCOL_COMMANDS.PUSH_CONFIG, payload) + } +} diff --git a/src/utils/FetchHelper.ts b/src/utils/FetchHelper.ts index f0a95f374..9d04a1149 100644 --- a/src/utils/FetchHelper.ts +++ b/src/utils/FetchHelper.ts @@ -29,16 +29,21 @@ export function downloadFileBrowser(url: string): void { } /** - * Triggers a file download from the specified URL when called from a browser context. - * @param {string} url - The URL of the file to download - * @param {number} [index] - The file index - * @returns {Promise} - A Promise that resolves when the file has been downloaded + * Downloads a file from a URL, or passes through an already-collected DownloadResponse + * (returned by P2P transport). + * @param {string | DownloadResponse} urlOrData - HTTP URL or pre-collected P2P response + * @param {number} [index] - The file index, used as fallback filename for URL downloads + * @returns {Promise} */ export async function downloadFile( - url: string, + urlOrData: string | DownloadResponse, index?: number ): Promise { - const response = await fetch(url) + if (typeof urlOrData !== 'string') { + return urlOrData + } + + const response = await fetch(urlOrData) if (!response.ok) { throw new Error('Response error.') } @@ -49,7 +54,7 @@ export async function downloadFile( .match(/attachment;filename=(.+)/)[1] } catch { try { - filename = url.split('/').pop() + filename = urlOrData.split('/').pop() } catch { filename = `file${index}` } diff --git a/test/config.ts b/test/config.ts index d74d41001..565662b2f 100644 --- a/test/config.ts +++ b/test/config.ts @@ -1,9 +1,12 @@ import { JsonRpcProvider, Signer } from 'ethers' import fs from 'fs' import { homedir } from 'os' -import { ConfigHelper, configHelperNetworks } from '../src/config/index.js' +import { + ConfigHelper, + configHelperNetworks, + getNodeEndpointConfig +} from '../src/config/index.js' import { 
LoggerInstance, LogLevel } from '../src/utils/index.js' - LoggerInstance.setLevel(LogLevel.Error) export interface Addresses { @@ -34,9 +37,7 @@ export const getTestConfig = async (signer: Signer) => { const network = await signer.provider?.getNetwork() const config = new ConfigHelper().getConfig(Number(network?.chainId)) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } + Object.assign(config, getNodeEndpointConfig()) return config } diff --git a/test/integration/Auth.test.ts b/test/integration/Auth.test.ts index 0d3c9a97d..78305b9b6 100644 --- a/test/integration/Auth.test.ts +++ b/test/integration/Auth.test.ts @@ -11,9 +11,6 @@ describe('Auth token tests', async () => { before(async () => { account = (await provider.getSigner(0)) as Signer config = await getTestConfig(account) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } providerUrl = config?.oceanNodeUri }) diff --git a/test/integration/CodeExamples.test.ts b/test/integration/CodeExamples.test.ts index 0689cb6b0..246c05808 100644 --- a/test/integration/CodeExamples.test.ts +++ b/test/integration/CodeExamples.test.ts @@ -104,6 +104,7 @@ import { sendTx, ConfigHelper, configHelperNetworks, + getNodeEndpointConfig, amountToUnits, getEventFromTx, LoggerInstance @@ -206,9 +207,7 @@ describe('Marketplace flow tests', async () => { const config = new ConfigHelper().getConfig( parseInt(String((await publisherAccount.provider.getNetwork()).chainId)) ) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } + Object.assign(config, getNodeEndpointConfig()) aquarius = new Aquarius(config?.oceanNodeUri) providerUrl = config?.oceanNodeUri addresses = JSON.parse( diff --git a/test/integration/ComputeExamples.test.ts b/test/integration/ComputeExamples.test.ts index a34e1ba9f..057f46760 100644 --- a/test/integration/ComputeExamples.test.ts +++ b/test/integration/ComputeExamples.test.ts @@ -143,6 +143,7 @@ import { sendTx, configHelperNetworks, 
ConfigHelper, + getNodeEndpointConfig, getEventFromTx, amountToUnits, isDefined, @@ -280,8 +281,6 @@ let resolvedAlgorithmDdo: DDO let computeJobId: string let agreementId: string -let computeRoutePath: string -let hasFreeComputeSupport: boolean /// ``` /// ### 4.3 Helper methods @@ -438,9 +437,7 @@ describe('Compute-to-data example tests', async () => { const config = new ConfigHelper().getConfig( parseInt(String((await publisherAccount.provider.getNetwork()).chainId)) ) - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } + Object.assign(config, getNodeEndpointConfig()) aquariusInstance = new Aquarius(config?.oceanNodeUri) providerUrl = config?.oceanNodeUri addresses = JSON.parse( @@ -622,126 +619,99 @@ describe('Compute-to-data example tests', async () => { assert(computeEnv, 'Cannot find the free compute env') /// --> - /// - - /// Let's have 5 minute of compute access - /// ```Typescript - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - - /// ``` - /// Let's prepare the dataset and algorithm assets to be used in the compute job - /// ```Typescript - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id - } - ] - - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id, - meta: resolvedAlgorithmDdo.metadata.algorithm + /// Let's have 5 minute of compute access + /// ```Typescript + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + + /// ``` + /// Let's prepare the dataset and algorithm assets to be used in the compute job + /// ```Typescript + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id } - /// ``` + ] - /// Let's start the free compute job - /// ```Typescript - const computeJobs = await 
ProviderInstance.freeComputeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets, - algo - ) - /// ``` - - /// - - /// Let's save the compute job it, we re going to use later - /// ```Typescript - computeJobId = computeJobs[0].jobId - // eslint-disable-next-line prefer-destructuring - agreementId = computeJobs[0].agreementId - /// ``` - /// + + /// Let's save the compute job it, we re going to use later + /// ```Typescript + computeJobId = computeJobs[0].jobId + // eslint-disable-next-line prefer-destructuring + agreementId = computeJobs[0].agreementId + /// ``` + /// /// ## 11. Check compute status and get download compute results URL it('11.1 Check compute status', async () => { /// - /// You can also add various delays so you see the various states of the compute job - /// ```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId, - agreementId - ) - /// ``` - /// - /// Now, let's see the current status of the previously started computer job - /// ```Typescript - console.log('Current status of the compute job: ', jobStatus) - /// ``` - /// + /// You can also add various delays so you see the various states of the compute job + /// ```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + consumerAccount, + computeJobId, + agreementId + ) + /// ``` + /// + /// Now, let's see the current status of the previously started computer job + /// ```Typescript + console.log('Current status of the compute job: ', jobStatus) + /// ``` + /// it('11.2 Get download compute results URL', async () => { /// - - /// ```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) - /// ``` - /// - /// Let's check the compute results url for the specified index - /// ```Typescript - console.log(`Compute results URL: ${downloadURL}`) - /// ``` - /// + /// 
```Typescript + await sleep(10000) + const downloadURL = await ProviderInstance.getComputeResultUrl( + providerUrl, + consumerAccount, + computeJobId, + 0 + ) + /// ``` + /// + /// Let's check the compute results url for the specified index + /// ```Typescript + console.log(`Compute results URL: ${downloadURL}`) + /// ``` + /// @@ -764,227 +734,203 @@ describe('Compute-to-data example tests', async () => { /// - - /// Let's have 5 minute of compute access - /// ```Typescript - - const mytime = new Date() - const computeMinutes = 5 - mytime.setMinutes(mytime.getMinutes() + computeMinutes) - const computeValidUntil = Math.floor(mytime.getTime() / 1000) - - /// ``` - - /// Let's prepare the dataset and algorithm assets to be used in the compute job - /// ```Typescript - const resources: ComputeResourceRequest[] = [ - { - id: 'cpu', - amount: 2 - }, - { - id: 'ram', - amount: 2 - }, - { - id: 'disk', - amount: 0 - } - ] - const assets: ComputeAsset[] = [ - { - documentId: resolvedDatasetDdo.id, - serviceId: resolvedDatasetDdo.services[0].id - } - ] - const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] - const algo: ComputeAlgorithm = { - documentId: resolvedAlgorithmDdo.id, - serviceId: resolvedAlgorithmDdo.services[0].id, - meta: resolvedAlgorithmDdo.metadata.algorithm + + /// Let's have 5 minute of compute access + /// ```Typescript + + const mytime = new Date() + const computeMinutes = 5 + mytime.setMinutes(mytime.getMinutes() + computeMinutes) + const computeValidUntil = Math.floor(mytime.getTime() / 1000) + + /// ``` + + /// Let's prepare the dataset and algorithm assets to be used in the compute job + /// ```Typescript + const resources: ComputeResourceRequest[] = [ + { + id: 'cpu', + amount: 2 + }, + { + id: 'ram', + amount: 2 + }, + { + id: 'disk', + amount: 0 } - /// ``` - - /// Triggering initialize compute to see payment options - /// ```Typescript - const providerInitializeComputeResults = await ProviderInstance.initializeCompute( - 
assets, - algo, - computeEnv.id, - paymentToken, - computeValidUntil, - providerUrl, - consumerAccount, - resources, - Number(chainId) - ) + ] + const assets: ComputeAsset[] = [ + { + documentId: resolvedDatasetDdo.id, + serviceId: resolvedDatasetDdo.services[0].id + } + ] + const dtAddressArray = [resolvedDatasetDdo.services[0].datatokenAddress] + const algo: ComputeAlgorithm = { + documentId: resolvedAlgorithmDdo.id, + serviceId: resolvedAlgorithmDdo.services[0].id, + meta: resolvedAlgorithmDdo.metadata.algorithm + } + /// ``` - console.log( - 'providerInitializeComputeResults = ', - JSON.stringify(providerInitializeComputeResults) - ) + /// Triggering initialize compute to see payment options + /// ```Typescript + const providerInitializeComputeResults = await ProviderInstance.initializeCompute( + assets, + algo, + computeEnv.id, + paymentToken, + computeValidUntil, + providerUrl, + await consumerAccount.getAddress(), + resources, + Number(chainId) + ) - /// ``` + console.log( + 'providerInitializeComputeResults = ', + JSON.stringify(providerInitializeComputeResults) + ) - /// + /// ``` - /// Let's check funds for escrow payment - /// ```Typescript - const escrow = new EscrowContract( - getAddress(providerInitializeComputeResults.payment.escrowAddress), - consumerAccount - ) - const paymentTokenPublisher = new Datatoken(publisherAccount) - const balancePublisherPaymentToken = await paymentTokenPublisher.balance( - paymentToken, - await publisherAccount.getAddress() - ) - assert( - new BigNumber(parseEther(balancePublisherPaymentToken)).isGreaterThan(0), - 'Balance should be higher than 0' - ) - const tx = await publisherAccount.sendTransaction({ - to: computeEnv.consumerAddress, - value: parseEther('1.5') - }) - await tx.wait() - - await paymentTokenPublisher.transfer( - paymentToken, - getAddress(computeEnv.consumerAddress), - (Number(balancePublisherPaymentToken) / 2).toString() - ) - const amountToDeposit = ( - providerInitializeComputeResults.payment.amount 
* 2 - ).toString() - await escrow.verifyFundsForEscrowPayment( - paymentToken, - computeEnv.consumerAddress, - await unitsToAmount(consumerAccount, paymentToken, amountToDeposit), - providerInitializeComputeResults.payment.amount.toString(), - providerInitializeComputeResults.payment.minLockSeconds.toString(), - '10' - ) - /// ``` + /// + + /// Let's check funds for escrow payment + /// ```Typescript + const escrow = new EscrowContract( + getAddress(providerInitializeComputeResults.payment.escrowAddress), + consumerAccount + ) + const paymentTokenPublisher = new Datatoken(publisherAccount) + const balancePublisherPaymentToken = await paymentTokenPublisher.balance( + paymentToken, + await publisherAccount.getAddress() + ) + assert( + new BigNumber(parseEther(balancePublisherPaymentToken)).isGreaterThan(0), + 'Balance should be higher than 0' + ) + const tx = await publisherAccount.sendTransaction({ + to: computeEnv.consumerAddress, + value: parseEther('1.5') + }) + await tx.wait() + + await paymentTokenPublisher.transfer( + paymentToken, + getAddress(computeEnv.consumerAddress), + (Number(balancePublisherPaymentToken) / 2).toString() + ) + const amountToDeposit = ( + providerInitializeComputeResults.payment.amount * 2 + ).toString() + await escrow.verifyFundsForEscrowPayment( + paymentToken, + computeEnv.consumerAddress, + await unitsToAmount(consumerAccount, paymentToken, amountToDeposit), + providerInitializeComputeResults.payment.amount.toString(), + providerInitializeComputeResults.payment.minLockSeconds.toString(), + '10' + ) + /// ``` - /// Let's order assets - /// ```Typescript + /// Let's order assets + /// ```Typescript - algo.transferTxId = await handleOrder( - providerInitializeComputeResults.algorithm, - resolvedAlgorithmDdo.services[0].datatokenAddress, + algo.transferTxId = await handleOrder( + providerInitializeComputeResults.algorithm, + resolvedAlgorithmDdo.services[0].datatokenAddress, + consumerAccount, + computeEnv.consumerAddress, + 0 + ) + for 
(let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { + assets[i].transferTxId = await handleOrder( + providerInitializeComputeResults.datasets[i], + dtAddressArray[i], consumerAccount, computeEnv.consumerAddress, 0 ) - for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) { - assets[i].transferTxId = await handleOrder( - providerInitializeComputeResults.datasets[i], - dtAddressArray[i], - consumerAccount, - computeEnv.consumerAddress, - 0 - ) - } - /// ``` - - /// Let's start compute job - /// ```Typescript - const computeJobs = await ProviderInstance.computeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets, - algo, - computeValidUntil, - paymentToken, - resources, - Number(chainId) - ) - /// ``` - - /// - - /// Let's save the compute job it, we re going to use later - /// ```Typescript - computeJobId = computeJobs[0].jobId - /// ``` - /// + + /// Let's save the compute job id, we're going to use it later + /// ```Typescript + computeJobId = computeJobs[0].jobId + /// ``` + /// /// ## 13.
Check paid compute job status and get download compute results URL it('13.1 Check compute status for paid compute job', async () => { /// - /// You can also add various delays so you see the various states of the compute job - /// ```Typescript - const jobStatus = await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - computeJobId - ) - /// ``` - /// - /// Now, let's see the current status of the previously started computer job - /// ```Typescript - console.log('Current status of the compute job: ', jobStatus) - /// ``` - /// + /// You can also add various delays so you see the various states of the compute job + /// ```Typescript + const jobStatus = await ProviderInstance.computeStatus( + providerUrl, + consumerAccount, + computeJobId + ) + /// ``` + /// + /// Now, let's see the current status of the previously started compute job + /// ```Typescript + console.log('Current status of the compute job: ', jobStatus) + /// ``` + /// it('13.2 Get download compute results URL', async () => { /// + /// --> - /// ```Typescript - await sleep(10000) - const downloadURL = await ProviderInstance.getComputeResultUrl( - providerUrl, - consumerAccount, - computeJobId, - 0 - ) - /// ``` - /// - /// Let's check the compute results url for the specified index - /// ```Typescript - console.log(`Compute results URL: ${downloadURL}`) - /// ``` - /// + /// Let's check the compute results url for the specified index + /// ```Typescript + console.log(`Compute results URL: ${downloadURL}`) + /// ``` + /// diff --git a/test/integration/ComputeFlow.test.ts b/test/integration/ComputeFlow.test.ts index 4c85b9eb4..8c0a42c99 100644 --- a/test/integration/ComputeFlow.test.ts +++ b/test/integration/ComputeFlow.test.ts @@ -48,13 +48,10 @@ let resolvedDdoWithNoTimeout let resolvedAlgoDdoWith2mTimeout let resolvedAlgoDdoWithNoTimeout -let freeEnvDatasetTxId -let freeEnvAlgoTxId let paidEnvDatasetTxId let paidEnvAlgoTxId let computeValidUntil let escrow:
EscrowContract -let freeComputeRouteSupport = null const computeJobDuration = 60 * 15 // 15 minutes let computeMinutes: number @@ -245,33 +242,12 @@ function delay(interval: number) { }).timeout(interval + 200) } -async function waitTillJobEnds(): Promise { - return new Promise((resolve) => { - const interval = setInterval(async () => { - const jobStatus = (await ProviderInstance.computeStatus( - providerUrl, - await consumerAccount.getAddress(), - freeComputeJobId - )) as ComputeJob - if (jobStatus?.[0]?.status === 70) { - clearInterval(interval) - resolve(jobStatus.status) - } - }, 10000) - }) -} - describe('Compute flow tests', async () => { before(async () => { publisherAccount = (await provider.getSigner(0)) as Signer consumerAccount = (await provider.getSigner(1)) as Signer config = await getTestConfig(publisherAccount) aquarius = new Aquarius(config?.oceanNodeUri) - - if (process.env.NODE_URL) { - config.oceanNodeUri = process.env.NODE_URL - } - providerUrl = config?.oceanNodeUri addresses = getAddresses() paymentToken = addresses.Ocean @@ -443,37 +419,22 @@ describe('Compute flow tests', async () => { meta: resolvedAlgoDdoWith2mTimeout.metadata.algorithm } - freeComputeRouteSupport = await ProviderInstance.getComputeStartRoutes( + const computeJobs = await ProviderInstance.freeComputeStart( providerUrl, - true + consumerAccount, + computeEnv.id, + assets, + algo ) - if (freeComputeRouteSupport) { - const computeJobs = await ProviderInstance.freeComputeStart( - providerUrl, - consumerAccount, - computeEnv.id, - assets, - algo - ) - console.log('Compute jobs: ', computeJobs) - freeEnvDatasetTxId = assets[0].transferTxId - freeEnvAlgoTxId = algo.transferTxId - assert(computeJobs, 'Cannot start compute job') - freeComputeJobId = computeJobs[0].jobId - } else { - assert( - freeComputeRouteSupport === null, - 'Cannot start free compute job. 
provider at ' + - providerUrl + - ' does not implement freeCompute route' - ) - } + console.log('Compute jobs: ', computeJobs) + assert(computeJobs, 'Cannot start compute job') + freeComputeJobId = computeJobs[0].jobId }).timeout(40000) it('Check compute status', async () => { const jobStatus = (await ProviderInstance.computeStatus( providerUrl, - await consumerAccount.getAddress(), + consumerAccount, freeComputeJobId )) as ComputeJob assert(jobStatus, 'Cannot retrieve compute status!') @@ -528,7 +489,7 @@ describe('Compute flow tests', async () => { paymentToken, computeValidUntil, providerUrl, - consumerAccount, + await consumerAccount.getAddress(), resources, Number(chainId) ) @@ -577,7 +538,7 @@ describe('Compute flow tests', async () => { paymentToken, computeValidUntil, providerUrl, - consumerAccount, + await consumerAccount.getAddress(), resources, Number(chainId) ) @@ -702,7 +663,7 @@ describe('Compute flow tests', async () => { it('Check compute status', async () => { const jobStatus = (await ProviderInstance.computeStatus( providerUrl, - await consumerAccount.getAddress(), + consumerAccount, paidComputeJobId, resolvedDdoWith2mTimeout.id )) as ComputeJob @@ -753,7 +714,7 @@ describe('Compute flow tests', async () => { paymentToken, computeJobDuration, providerUrl, - consumerAccount, + await consumerAccount.getAddress(), resources, Number(chainId) ) @@ -797,11 +758,14 @@ describe('Compute flow tests', async () => { }) // move to reuse Orders - const delayTimeout = Math.max( - resolvedDdoWith2mTimeout.services[0].timeout * 1000 + 1000, - resolvedAlgoDdoWith2mTimeout.services[0].timeout * 1000 + 1000 - ) - delay(delayTimeout) + it('should delay', function (done) { + const delayTimeout = Math.max( + resolvedDdoWith2mTimeout.services[0].timeout * 1000 + 1000, + resolvedAlgoDdoWith2mTimeout.services[0].timeout * 1000 + 1000 + ) + this.timeout(delayTimeout + 200) + setTimeout(() => done(), delayTimeout) + }) it('should start a computeJob using the paid resources, 
by paying the assets providerFees (reuseOrder) and paying escrow lock for max job duration', async () => { const { chainId } = await consumerAccount.provider.getNetwork() @@ -845,7 +809,7 @@ describe('Compute flow tests', async () => { paymentToken, computeValidUntil, providerUrl, - consumerAccount, + await consumerAccount.getAddress(), resources, Number(chainId) ) @@ -921,7 +885,7 @@ describe('Compute flow tests', async () => { it('Check compute status', async () => { const jobStatus = (await ProviderInstance.computeStatus( providerUrl, - await consumerAccount.getAddress(), + consumerAccount, freeComputeJobId, resolvedDdoWith2mTimeout.id )) as ComputeJob @@ -937,4 +901,19 @@ describe('Compute flow tests', async () => { ) assert(downloadURL, 'Provider getComputeResultUrl failed!') }) + + it('Get compute result as stream', async () => { + const stream = await ProviderInstance.getComputeResult( + providerUrl, + consumerAccount, + freeComputeJobId, + 0 + ) + assert(stream, 'getComputeResult returned no stream') + let totalBytes = 0 + for await (const chunk of stream) { + totalBytes += chunk.length + } + assert(totalBytes > 0, 'getComputeResult stream returned no bytes') + }).timeout(60000) }) diff --git a/test/integration/Provider.test.ts b/test/integration/Provider.test.ts index 34b41a304..dcac4d69c 100644 --- a/test/integration/Provider.test.ts +++ b/test/integration/Provider.test.ts @@ -1,13 +1,12 @@ import { assert } from 'chai' import { getTestConfig, provider } from '../config.js' -import { Config, Provider } from '../../src/index.js' +import { Config, ProviderInstance } from '../../src/index.js' import { Signer } from 'ethers' import { FileInfo } from '../../src/@types/index.js' describe('Provider tests', async () => { let config: Config let signer: Signer - let providerInstance: Provider before(async () => { signer = (await provider.getSigner(0)) as Signer @@ -15,21 +14,21 @@ describe('Provider tests', async () => { }) it('Initialize Ocean', async () => { - 
providerInstance = new Provider() + // ProviderInstance is the shared singleton, already warmed up by _P2PWarmup for P2P mode }) it('Alice tests invalid provider', async () => { - const valid = await providerInstance.isValidProvider('http://example.net') + const valid = await ProviderInstance.isValidProvider('http://example.net') assert(valid === false) }) it('Alice tests valid provider', async () => { - const valid = await providerInstance.isValidProvider(config.oceanNodeUri) + const valid = await ProviderInstance.isValidProvider(config.oceanNodeUri) assert(valid === true) }) it('Alice checks URL fileinfo', async () => { - const fileinfo: FileInfo[] = await providerInstance.getFileInfo( + const fileinfo: FileInfo[] = await ProviderInstance.getFileInfo( { type: 'url', url: 'https://raw.githubusercontent.com/oceanprotocol/ocean.js/refs/heads/main/README.md', @@ -41,7 +40,7 @@ describe('Provider tests', async () => { }) it('Alice checks Arweave fileinfo', async () => { - const fileinfo: FileInfo[] = await providerInstance.getFileInfo( + const fileinfo: FileInfo[] = await ProviderInstance.getFileInfo( { type: 'arweave', transactionId: 'a4qJoQZa1poIv5guEzkfgZYSAD0uYm7Vw4zm_tCswVQ' @@ -52,12 +51,26 @@ describe('Provider tests', async () => { }) it('Alice tests compute environments', async () => { - const computeEnvs = await providerInstance.getComputeEnvironments(config.oceanNodeUri) + const computeEnvs = await ProviderInstance.getComputeEnvironments(config.oceanNodeUri) assert(computeEnvs, 'No Compute environments found') }) + it('Alice tests getNodeStatus', async () => { + const status = await ProviderInstance.getNodeStatus(config.oceanNodeUri) + assert(status, 'No status returned') + assert(status.id, 'Status missing id') + assert(status.address, 'Status missing address') + assert(status.version, 'Status missing version') + assert(Array.isArray(status.provider), 'Status missing provider array') + }) + + it('Alice tests getNodeJobs', async () => { + const jobs = await 
ProviderInstance.getNodeJobs(config.oceanNodeUri) + assert(Array.isArray(jobs), 'Jobs should be an array') + }) + it('Alice tests getNonce', async () => { - const nonce = await providerInstance.getNonce( + const nonce = await ProviderInstance.getNonce( config.oceanNodeUri, '0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e' ) diff --git a/test/integration/PublishEditConsume.test.ts b/test/integration/PublishEditConsume.test.ts index c40f1c589..f00c46695 100644 --- a/test/integration/PublishEditConsume.test.ts +++ b/test/integration/PublishEditConsume.test.ts @@ -15,8 +15,7 @@ import { sendTx, transfer, amountToUnits, - StorageObject, - AssetFiles + StorageObject } from '../../src/index.js' import { createAssetHelper, orderAsset, updateAssetMetadata } from './helpers.js' import { DDO } from '@oceanprotocol/ddo-js' @@ -56,8 +55,6 @@ let resolvedGraphqlAssetDdoAfterUpdate let urlOrderTx let arwaveOrderTx let ipfsOrderTx -let onchainOrderTx -let grapqlOrderTx const urlFile: StorageObject = { type: 'url', diff --git a/test/integration/_P2PWarmup.test.ts b/test/integration/_P2PWarmup.test.ts new file mode 100644 index 000000000..6bda93cc4 --- /dev/null +++ b/test/integration/_P2PWarmup.test.ts @@ -0,0 +1,35 @@ +import { assert } from 'chai' +import { isP2pUri, ProviderInstance, getNodeEndpointConfig } from '../../src/index.js' + +// This suite runs first (underscore prefix sorts before all letters). +// It warms the libp2p node - every subsequent test reuses the connection. +describe('P2P connection warmup', () => { + it('should connect to the P2P node', async function () { + const nodeUrl = getNodeEndpointConfig().oceanNodeUri + if (!nodeUrl || !isP2pUri(nodeUrl)) { + this.skip() + } + + this.timeout(60000) + + const bootstrapPeers = nodeUrl.startsWith('/') + ? 
[nodeUrl] + : [`/ip4/172.15.0.5/tcp/9001/ws/p2p/${nodeUrl}`] + + await ProviderInstance.setupP2P({ bootstrapPeers }) + while ( + (await ProviderInstance.getDiscoveredNodes()).length === 0 || + !(await ProviderInstance.getDiscoveredNodes()).find( + (node) => node.peerId === nodeUrl + ) + ) { + console.log(`Waiting for P2P node to be discovered...`) + await new Promise((resolve) => setTimeout(resolve, 2000)) + } + + console.log('P2P node discovered, checking if it is reachable...') + const ok = await ProviderInstance.isValidProvider(nodeUrl) + assert.ok(ok, 'P2P node should be reachable') + console.log('P2P node reachable') + }) +})