diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml index fa2ba7cb..11ca9fda 100644 --- a/.github/workflows/cleanup.yml +++ b/.github/workflows/cleanup.yml @@ -42,7 +42,7 @@ on: jobs: del_runs: - runs-on: self-hosted + runs-on: ubicloud-standard-2 steps: - name: Delete workflow runs uses: Mattraks/delete-workflow-runs@v2 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 6d9db1f1..c2a53d6e 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -13,7 +13,7 @@ jobs: # This job only runs for pull request comments name: PR comment if: ${{ github.event.issue.pull_request }} - runs-on: ubuntu-latest + runs-on: ubicloud-standard-2 steps: - name: Send Notification uses: appleboy/telegram-action@master @@ -26,7 +26,7 @@ jobs: pull_requests_and_review: name: Pull request action or review if: ${{ !github.event.issue.pull_request }} - runs-on: ubuntu-latest + runs-on: ubicloud-standard-2 steps: - name: Send Notification uses: appleboy/telegram-action@master diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 40229036..5a107cac 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -6,7 +6,7 @@ name: tests jobs: clippy: name: Actions - clippy - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - uses: actions/checkout@v4 with: @@ -23,7 +23,7 @@ jobs: - run: cargo clippy --lib --target wasm32-unknown-unknown -- -D warnings coverage: name: Actions - unit tests coverage - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - uses: actions/checkout@v4 with: @@ -32,21 +32,21 @@ jobs: run: sudo apt-get update && sudo apt-get install -y pkg-config libssl-dev - uses: actions-rs/toolchain@v1 with: - toolchain: nightly + toolchain: 1.81.0 profile: minimal override: true - name: Install cargo-tarpaulin - run: cargo install cargo-tarpaulin --version 0.29.1 - - run: cargo tarpaulin --exclude-files "packages/*" --exclude-files "*/proto/*" - --exclude-files "contracts/price-provider/*" --exclude-files - "contracts/auto-withdrawer/*" --exclude-files + run: cargo install cargo-tarpaulin --version 0.29.1 --locked + - run: cargo tarpaulin --verbose --exclude-files "packages/*" --exclude-files + "*/proto/*" --exclude-files "contracts/price-provider/*" + --exclude-files "contracts/auto-withdrawer/*" --exclude-files "contracts/hook-tester/*" --exclude-files "contracts/astroport-exchange-handler/*" --exclude-files "contracts/proposal-votes-poc/*" --exclude-files "contracts/rewards-manager/*" --exclude-files - "contracts/puppeteer-authz/*" --exclude-files "contracts/validators-stats/*" --exclude-files - "contracts/provider-proposals-poc/*" --exclude-files "*schema*" --out + "contracts/provider-proposals-poc/*" --exclude-files + "contracts/redemption-rate-adapter/*" --exclude-files "*schema*" --out Xml --output-dir ./ - name: Produce the coverage report uses: insightsengineering/coverage-action@v2 @@ -54,33 +54,33 @@ jobs: path: ./cobertura.xml threshold: 45 fail: true - publish: true + publish: false diff: false coverage-summary-title: Code Coverage Summary rustfmt: name: Actions - rustfmt - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - uses: actions/checkout@v4 with: fetch-depth: 1 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.75.0 + toolchain: 1.81.0 components: rustfmt profile: minimal override: true - run: cargo fmt -- --check unit-test: name: Actions - unit test - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - uses: actions/checkout@v4 with: fetch-depth: 1 - uses: 
actions-rs/toolchain@v1 with: - toolchain: 1.75.0 + toolchain: 1.81.0 profile: minimal - run: cargo fetch --verbose - run: cargo build @@ -89,7 +89,7 @@ jobs: RUST_BACKTRACE: 1 lint-test: name: Actions - integration tests lint - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Setup node uses: actions/setup-node@v4 @@ -104,7 +104,7 @@ jobs: run: cd integration_tests && yarn --ignore-engines && yarn lint images-prepare: name: Actions - images prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -136,14 +136,14 @@ jobs: yarn build-images artifacts-prepare: name: Actions - artifacts prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - uses: actions/checkout@v4 with: fetch-depth: 1 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.75.0 + toolchain: 1.81.0 profile: minimal override: true - run: make compile @@ -152,90 +152,6 @@ jobs: with: path: artifacts key: ${{ runner.os }}-${{ github.sha }} - test-poc-provider-proposals: - name: test:poc-provider-proposals Integration Tests - needs: - - images-prepare - - artifacts-prepare - runs-on: self-hosted - steps: - - name: Upgrade docker compose to use v2 - run: sudo curl -L - "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-$(uname - -s)-$(uname -m)" -o /usr/local/bin/docker-compose && sudo chmod +x - /usr/local/bin/docker-compose - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: 20.12.2 - - name: Install Yarn - run: npm install -g yarn - - name: Log in to Private Registry - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USER }} - password: ${{ secrets.DOCKER_TOKEN }} - - name: Clean volumes - run: docker volume prune -f - - name: Download images - run: | - cd integration_tests - yarn build-images - - name: Download artifacts - uses: actions/cache@v4 - with: - path: artifacts - key: ${{ runner.os }}-${{ github.sha }} - - name: Run test test:poc-provider-proposals - run: cd integration_tests && yarn && yarn test:poc-provider-proposals - - name: Cleanup resources - if: always() - run: | - docker stop -t0 $(docker ps -a -q) || true - docker container prune -f || true - docker volume rm $(docker volume ls -q) || true - test-poc-proposal-votes: - name: test:poc-proposal-votes Integration Tests - needs: - - images-prepare - - artifacts-prepare - runs-on: self-hosted - steps: - - name: Upgrade docker compose to use v2 - run: sudo curl -L - "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-$(uname - -s)-$(uname -m)" -o /usr/local/bin/docker-compose && sudo chmod +x - /usr/local/bin/docker-compose - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: 20.12.2 - - name: Install Yarn - run: npm install -g yarn - - name: Log in to Private Registry - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USER }} - password: ${{ secrets.DOCKER_TOKEN }} - - name: Clean volumes - run: docker volume prune -f - - name: Download images - run: | - cd integration_tests - yarn build-images - - name: Download artifacts - uses: actions/cache@v4 - with: - path: artifacts - key: ${{ runner.os }}-${{ github.sha }} - - name: Run test test:poc-proposal-votes - run: cd integration_tests && yarn && yarn test:poc-proposal-votes - name: Cleanup resources if: always() run: | @@ -247,7 +163,7 @@ jobs: needs: - images-prepare - 
artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -292,7 +208,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -337,7 +253,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -377,57 +293,12 @@ jobs: docker stop -t0 $(docker ps -a -q) || true docker container prune -f || true docker volume rm $(docker volume ls -q) || true - test-pump-multi: - name: test:pump-multi Integration Tests - needs: - - images-prepare - - artifacts-prepare - runs-on: self-hosted - steps: - - name: Upgrade docker compose to use v2 - run: sudo curl -L - "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-$(uname - -s)-$(uname -m)" -o /usr/local/bin/docker-compose && sudo chmod +x - /usr/local/bin/docker-compose - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: 20.12.2 - - name: Install Yarn - run: npm install -g yarn - - name: Log in to Private Registry - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USER }} - password: ${{ secrets.DOCKER_TOKEN }} - - name: Clean volumes - run: docker volume prune -f - - name: Download images - run: | - cd integration_tests - yarn build-images - - name: Download artifacts - uses: actions/cache@v4 - with: - path: artifacts - key: ${{ runner.os }}-${{ github.sha }} - - name: Run test test:pump-multi - run: cd integration_tests && yarn && yarn test:pump-multi - - name: Cleanup resources - if: always() - run: | - docker stop -t0 $(docker ps -a -q) || true - docker container prune -f || true - docker volume rm $(docker volume ls -q) || true test-validators-stats: name: test:validators-stats Integration Tests needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -472,7 +343,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -517,7 +388,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -562,7 +433,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -607,7 +478,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L @@ -652,7 +523,7 @@ jobs: needs: - images-prepare - artifacts-prepare - runs-on: self-hosted + runs-on: ubicloud-standard-4 steps: - name: Upgrade docker compose to use v2 run: sudo curl -L diff --git a/contracts/puppeteer/src/contract.rs b/contracts/puppeteer/src/contract.rs index 6b02cf59..f27fa099 100644 --- a/contracts/puppeteer/src/contract.rs +++ b/contracts/puppeteer/src/contract.rs @@ -1,11 +1,14 @@ use crate::proto::{ cosmos::base::v1beta1::Coin as ProtoCoin, + gaia::liquid::v1beta1::{ + MsgRedeemTokensForShares, MsgRedeemTokensForSharesResponse, MsgTokenizeShares, + MsgTokenizeSharesResponse, + }, 
     liquidstaking::{
         distribution::v1beta1::MsgWithdrawDelegatorReward,
         staking::v1beta1::{
             MsgBeginRedelegate, MsgBeginRedelegateResponse, MsgDelegateResponse,
-            MsgRedeemTokensforShares, MsgRedeemTokensforSharesResponse, MsgTokenizeShares,
-            MsgTokenizeSharesResponse, MsgUndelegateResponse,
+            MsgUndelegateResponse,
         },
     },
 };
@@ -875,14 +878,14 @@ fn execute_redeem_shares(
     let delegator = puppeteer_base.ica.get_address(deps.storage)?;
     let any_msgs = items
         .iter()
-        .map(|one| MsgRedeemTokensforShares {
+        .map(|one| MsgRedeemTokensForShares {
             delegator_address: delegator.to_string(),
             amount: Some(ProtoCoin {
                 denom: one.remote_denom.to_string(),
                 amount: one.amount.to_string(),
             }),
         })
-        .map(|msg| prepare_any_msg(msg, "/cosmos.staking.v1beta1.MsgRedeemTokensForShares"))
+        .map(|msg| prepare_any_msg(msg, "/gaia.liquid.v1beta1.MsgRedeemTokensForShares"))
         .collect::<NeutronResult<Vec<ProtobufAny>>>()?;
     let submsg = compose_submsg(
         deps.branch(),
@@ -1104,7 +1107,7 @@
                 },
             )
         }
-        "/cosmos.staking.v1beta1.MsgTokenizeShares" => {
+        "/gaia.liquid.v1beta1.MsgTokenizeShares" => {
            let out: MsgTokenizeSharesResponse = decode_message_response(&item.data)?;
             ResponseAnswer::TokenizeSharesResponse(
                 drop_puppeteer_base::proto::MsgTokenizeSharesResponse {
@@ -1126,8 +1129,8 @@
                 drop_puppeteer_base::proto::MsgGrantResponse {},
             )
         }
-        "/cosmos.staking.v1beta1.MsgRedeemTokensForShares" => {
-            let out: MsgRedeemTokensforSharesResponse = decode_message_response(&item.data)?;
+        "/gaia.liquid.v1beta1.MsgRedeemTokensForShares" => {
+            let out: MsgRedeemTokensForSharesResponse = decode_message_response(&item.data)?;
             ResponseAnswer::RedeemTokensforSharesResponse(
                 drop_puppeteer_base::proto::MsgRedeemTokensforSharesResponse {
                     amount: out.amount.map(convert_coin).transpose()?,
diff --git a/contracts/puppeteer/src/proto.rs b/contracts/puppeteer/src/proto.rs
index c652b36b..9e00bb99 100644
--- a/contracts/puppeteer/src/proto.rs
+++ b/contracts/puppeteer/src/proto.rs
@@ -23,3 +23,14 @@ pub mod liquidstaking {
         }
     }
 }
+
+pub mod gaia {
+    pub mod liquid {
+        pub mod module {
+            include!("./proto/gaia.liquid.module.v1.rs");
+        }
+        pub mod v1beta1 {
+            include!("./proto/gaia.liquid.v1beta1.rs");
+        }
+    }
+}
diff --git a/contracts/puppeteer/src/proto/gaia.liquid.module.v1.rs b/contracts/puppeteer/src/proto/gaia.liquid.module.v1.rs
new file mode 100644
index 00000000..9ea11fb0
--- /dev/null
+++ b/contracts/puppeteer/src/proto/gaia.liquid.module.v1.rs
@@ -0,0 +1,17 @@
+// @generated
+// This file is @generated by prost-build.
+/// Module is the config object of the liquid module.
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Module {
+    /// authority defines the custom module authority. If not set, defaults to the
+    /// governance module.
+    #[prost(string, tag="1")]
+    pub authority: ::prost::alloc::string::String,
+    /// bech32_prefix_validator is the bech32 validator prefix for the app.
+    #[prost(string, tag="2")]
+    pub bech32_prefix_validator: ::prost::alloc::string::String,
+    /// bech32_prefix_consensus is the bech32 consensus node prefix for the app.
+    #[prost(string, tag="3")]
+    pub bech32_prefix_consensus: ::prost::alloc::string::String,
+}
+// @@protoc_insertion_point(module)
diff --git a/contracts/puppeteer/src/proto/gaia.liquid.v1beta1.rs b/contracts/puppeteer/src/proto/gaia.liquid.v1beta1.rs
new file mode 100644
index 00000000..0dd2270f
--- /dev/null
+++ b/contracts/puppeteer/src/proto/gaia.liquid.v1beta1.rs
@@ -0,0 +1,391 @@
+// @generated
+// This file is @generated by prost-build.
+/// Params defines the parameters for the x/liquid module. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Params { + /// global_liquid_staking_cap represents a cap on the portion of stake that + /// comes from liquid staking providers + #[prost(string, tag = "8")] + pub global_liquid_staking_cap: ::prost::alloc::string::String, + /// validator_liquid_staking_cap represents a cap on the portion of stake that + /// comes from liquid staking providers for a specific validator + #[prost(string, tag = "9")] + pub validator_liquid_staking_cap: ::prost::alloc::string::String, +} +/// TokenizeShareRecord represents a tokenized delegation +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TokenizeShareRecord { + #[prost(uint64, tag = "1")] + pub id: u64, + #[prost(string, tag = "2")] + pub owner: ::prost::alloc::string::String, + /// module account take the role of delegator + #[prost(string, tag = "3")] + pub module_account: ::prost::alloc::string::String, + /// validator delegated to for tokenize share record creation + #[prost(string, tag = "4")] + pub validator: ::prost::alloc::string::String, +} +/// PendingTokenizeShareAuthorizations stores a list of addresses that have their +/// tokenize share enablement in progress +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PendingTokenizeShareAuthorizations { + #[prost(string, repeated, tag = "1")] + pub addresses: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// TokenizeShareRecordReward represents the properties of tokenize share +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TokenizeShareRecordReward { + #[prost(uint64, tag = "1")] + pub record_id: u64, + #[prost(message, repeated, tag = "2")] + pub reward: ::prost::alloc::vec::Vec, +} +/// LiquidValidator is the storage layout for details about a validator's liquid +/// stake. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct LiquidValidator { + /// operator_address defines the address of the validator's operator; bech + /// encoded in JSON. + #[prost(string, tag = "1")] + pub operator_address: ::prost::alloc::string::String, + /// Number of shares either tokenized or owned by a liquid staking provider + #[prost(string, tag = "3")] + pub liquid_shares: ::prost::alloc::string::String, +} +/// TokenizeShareLockStatus indicates whether the address is able to tokenize +/// shares +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum TokenizeShareLockStatus { + /// UNSPECIFIED defines an empty tokenize share lock status + Unspecified = 0, + /// LOCKED indicates the account is locked and cannot tokenize shares + Locked = 1, + /// UNLOCKED indicates the account is unlocked and can tokenize shares + Unlocked = 2, + /// LOCK_EXPIRING indicates the account is unable to tokenize shares, but + /// will be able to tokenize shortly (after 1 unbonding period) + LockExpiring = 3, +} +impl TokenizeShareLockStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + TokenizeShareLockStatus::Unspecified => "TOKENIZE_SHARE_LOCK_STATUS_UNSPECIFIED", + TokenizeShareLockStatus::Locked => "TOKENIZE_SHARE_LOCK_STATUS_LOCKED", + TokenizeShareLockStatus::Unlocked => "TOKENIZE_SHARE_LOCK_STATUS_UNLOCKED", + TokenizeShareLockStatus::LockExpiring => "TOKENIZE_SHARE_LOCK_STATUS_LOCK_EXPIRING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TOKENIZE_SHARE_LOCK_STATUS_UNSPECIFIED" => Some(Self::Unspecified), + "TOKENIZE_SHARE_LOCK_STATUS_LOCKED" => Some(Self::Locked), + "TOKENIZE_SHARE_LOCK_STATUS_UNLOCKED" => Some(Self::Unlocked), + "TOKENIZE_SHARE_LOCK_STATUS_LOCK_EXPIRING" => Some(Self::LockExpiring), + _ => None, + } + } +} +/// GenesisState defines the liquid module's genesis state. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GenesisState { + /// params defines all the parameters of related to deposit. + #[prost(message, optional, tag = "1")] + pub params: ::core::option::Option, + /// store tokenize share records to provide reward to record owners + #[prost(message, repeated, tag = "9")] + pub tokenize_share_records: ::prost::alloc::vec::Vec, + /// last tokenize share record id, used for next share record id calculation + #[prost(uint64, tag = "10")] + pub last_tokenize_share_record_id: u64, + /// total number of liquid staked tokens at genesis + #[prost(bytes = "vec", tag = "11")] + pub total_liquid_staked_tokens: ::prost::alloc::vec::Vec, + /// tokenize shares locks at genesis + #[prost(message, repeated, tag = "12")] + pub tokenize_share_locks: ::prost::alloc::vec::Vec, +} +/// TokenizeSharesLock required for specifying account locks at genesis +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TokenizeShareLock { + /// Address of the account that is locked + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// Status of the lock (LOCKED or LOCK_EXPIRING) + #[prost(string, tag = "2")] + pub status: ::prost::alloc::string::String, + /// Completion time if the lock is expiring + #[prost(message, optional, tag = "3")] + pub completion_time: ::core::option::Option<::prost_types::Timestamp>, +} +/// QueryLiquidValidatorRequest is the request type for the Query/LiquidValidator RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryLiquidValidatorRequest { + #[prost(string, tag = "1")] + pub validator_addr: ::prost::alloc::string::String, +} +/// QueryLiquidValidatorResponse is the response type for the Query/LiquidValidator RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryLiquidValidatorResponse { + #[prost(message, optional, tag = "1")] + pub liquid_validator: ::core::option::Option, +} +/// QueryParamsRequest is request type for the Query/Params RPC method. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryParamsRequest {} +/// QueryParamsResponse is response type for the Query/Params RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryParamsResponse { + /// params holds all the parameters of this module. + #[prost(message, optional, tag = "1")] + pub params: ::core::option::Option, +} +/// QueryTokenizeShareRecordByIdRequest is request type for the +/// Query/QueryTokenizeShareRecordById RPC method. 
+#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordByIdRequest { + #[prost(uint64, tag = "1")] + pub id: u64, +} +/// QueryTokenizeShareRecordByIdRequest is response type for the +/// Query/QueryTokenizeShareRecordById RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordByIdResponse { + #[prost(message, optional, tag = "1")] + pub record: ::core::option::Option, +} +/// QueryTokenizeShareRecordByDenomRequest is request type for the +/// Query/QueryTokenizeShareRecordByDenom RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordByDenomRequest { + #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, +} +/// QueryTokenizeShareRecordByDenomResponse is response type for the +/// Query/QueryTokenizeShareRecordByDenom RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordByDenomResponse { + #[prost(message, optional, tag = "1")] + pub record: ::core::option::Option, +} +/// QueryTokenizeShareRecordsOwnedRequest is request type for the +/// Query/QueryTokenizeShareRecordsOwned RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordsOwnedRequest { + #[prost(string, tag = "1")] + pub owner: ::prost::alloc::string::String, +} +/// QueryTokenizeShareRecordsOwnedResponse is response type for the +/// Query/QueryTokenizeShareRecordsOwned RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordsOwnedResponse { + #[prost(message, repeated, tag = "1")] + pub records: ::prost::alloc::vec::Vec, +} +/// QueryAllTokenizeShareRecordsRequest is request type for the +/// Query/QueryAllTokenizeShareRecords RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllTokenizeShareRecordsRequest { + /// pagination defines an optional pagination for the request. + #[prost(message, optional, tag = "1")] + pub pagination: + ::core::option::Option, +} +/// QueryAllTokenizeShareRecordsResponse is response type for the +/// Query/QueryAllTokenizeShareRecords RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllTokenizeShareRecordsResponse { + #[prost(message, repeated, tag = "1")] + pub records: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + #[prost(message, optional, tag = "2")] + pub pagination: + ::core::option::Option, +} +/// QueryLastTokenizeShareRecordIdRequest is request type for the +/// Query/QueryLastTokenizeShareRecordId RPC method. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryLastTokenizeShareRecordIdRequest {} +/// QueryLastTokenizeShareRecordIdResponse is response type for the +/// Query/QueryLastTokenizeShareRecordId RPC method. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryLastTokenizeShareRecordIdResponse { + #[prost(uint64, tag = "1")] + pub id: u64, +} +/// QueryTotalTokenizeSharedAssetsRequest is request type for the +/// Query/QueryTotalTokenizeSharedAssets RPC method. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryTotalTokenizeSharedAssetsRequest {} +/// QueryTotalTokenizeSharedAssetsResponse is response type for the +/// Query/QueryTotalTokenizeSharedAssets RPC method. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTotalTokenizeSharedAssetsResponse { + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, +} +/// QueryTotalLiquidStakedRequest is request type for the +/// Query/QueryQueryTotalLiquidStaked RPC method. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryTotalLiquidStaked {} +/// QueryTotalLiquidStakedResponse is response type for the +/// Query/QueryQueryTotalLiquidStaked RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTotalLiquidStakedResponse { + #[prost(string, tag = "1")] + pub tokens: ::prost::alloc::string::String, +} +/// QueryTokenizeShareLockInfo queries the tokenize share lock information +/// associated with given account +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareLockInfo { + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, +} +/// QueryTokenizeShareLockInfoResponse is the response from the +/// QueryTokenizeShareLockInfo query +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareLockInfoResponse { + #[prost(string, tag = "1")] + pub status: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub expiration_time: ::prost::alloc::string::String, +} +/// QueryTokenizeShareRecordRewardRequest is the request type for the +/// Query/TokenizeShareRecordReward RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordRewardRequest { + #[prost(string, tag = "1")] + pub owner_address: ::prost::alloc::string::String, +} +/// QueryTokenizeShareRecordRewardResponse is the response type for the +/// Query/TokenizeShareRecordReward RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTokenizeShareRecordRewardResponse { + /// rewards defines all the rewards accrued by a delegator. + #[prost(message, repeated, tag = "1")] + pub rewards: ::prost::alloc::vec::Vec, + /// total defines the sum of all the rewards. + #[prost(message, repeated, tag = "2")] + pub total: ::prost::alloc::vec::Vec, +} +/// MsgUpdateParams is the Msg/UpdateParams request type. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgUpdateParams { + /// authority is the address that controls the module (defaults to x/gov unless + /// overwritten). + #[prost(string, tag = "1")] + pub authority: ::prost::alloc::string::String, + /// params defines the x/liquid parameters to update. + /// + /// NOTE: All parameters must be supplied. + #[prost(message, optional, tag = "2")] + pub params: ::core::option::Option, +} +/// MsgUpdateParamsResponse defines the response structure for executing a +/// MsgUpdateParams message. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgUpdateParamsResponse {} +/// MsgTokenizeShares tokenizes a delegation +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgTokenizeShares { + #[prost(string, tag = "1")] + pub delegator_address: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub validator_address: ::prost::alloc::string::String, + #[prost(message, optional, tag = "3")] + pub amount: ::core::option::Option, + #[prost(string, tag = "4")] + pub tokenized_share_owner: ::prost::alloc::string::String, +} +/// MsgTokenizeSharesResponse defines the Msg/MsgTokenizeShares response type. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgTokenizeSharesResponse { + #[prost(message, optional, tag = "1")] + pub amount: ::core::option::Option, +} +/// MsgRedeemTokensForShares redeems a tokenized share back into a native +/// delegation +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgRedeemTokensForShares { + #[prost(string, tag = "1")] + pub delegator_address: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub amount: ::core::option::Option, +} +/// MsgRedeemTokensForSharesResponse defines the Msg/MsgRedeemTokensForShares +/// response type. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgRedeemTokensForSharesResponse { + #[prost(message, optional, tag = "1")] + pub amount: ::core::option::Option, +} +/// MsgTransferTokenizeShareRecord transfer a tokenize share record +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgTransferTokenizeShareRecord { + #[prost(uint64, tag = "1")] + pub tokenize_share_record_id: u64, + #[prost(string, tag = "2")] + pub sender: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub new_owner: ::prost::alloc::string::String, +} +/// MsgTransferTokenizeShareRecordResponse defines the +/// Msg/MsgTransferTokenizeShareRecord response type. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgTransferTokenizeShareRecordResponse {} +/// MsgDisableTokenizeShares prevents the tokenization of shares for a given +/// address +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgDisableTokenizeShares { + #[prost(string, tag = "1")] + pub delegator_address: ::prost::alloc::string::String, +} +/// MsgDisableTokenizeSharesResponse defines the Msg/DisableTokenizeShares +/// response type. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgDisableTokenizeSharesResponse {} +/// MsgEnableTokenizeShares re-enables tokenization of shares for a given address +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgEnableTokenizeShares { + #[prost(string, tag = "1")] + pub delegator_address: ::prost::alloc::string::String, +} +/// MsgEnableTokenizeSharesResponse defines the Msg/EnableTokenizeShares response +/// type. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgEnableTokenizeSharesResponse { + #[prost(message, optional, tag = "1")] + pub completion_time: ::core::option::Option<::prost_types::Timestamp>, +} +/// MsgWithdrawTokenizeShareRecordReward withdraws tokenize share rewards for a +/// specific record +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgWithdrawTokenizeShareRecordReward { + #[prost(string, tag = "1")] + pub owner_address: ::prost::alloc::string::String, + #[prost(uint64, tag = "2")] + pub record_id: u64, +} +/// MsgWithdrawTokenizeShareRecordReward defines the +/// Msg/WithdrawTokenizeShareRecordReward response type. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgWithdrawTokenizeShareRecordRewardResponse {} +/// MsgWithdrawAllTokenizeShareRecordReward withdraws tokenize share rewards or +/// all records owned by the designated owner +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgWithdrawAllTokenizeShareRecordReward { + #[prost(string, tag = "1")] + pub owner_address: ::prost::alloc::string::String, +} +/// MsgWithdrawAllTokenizeShareRecordRewardResponse defines the +/// Msg/WithdrawTokenizeShareRecordReward response type. 
+#[derive(Clone, Copy, PartialEq, ::prost::Message)]
+pub struct MsgWithdrawAllTokenizeShareRecordRewardResponse {}
+// @@protoc_insertion_point(module)
diff --git a/contracts/puppeteer/src/proto/gaia.metaprotocols.rs b/contracts/puppeteer/src/proto/gaia.metaprotocols.rs
new file mode 100644
index 00000000..2b5e88dd
--- /dev/null
+++ b/contracts/puppeteer/src/proto/gaia.metaprotocols.rs
@@ -0,0 +1,19 @@
+// @generated
+// This file is @generated by prost-build.
+/// ExtensionData is a data structure that can be used in transaction extensions.
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct ExtensionData {
+    /// protocol_id is the identifier of the protocol
+    /// the field is not used internally but it is validated for correctness
+    #[prost(string, tag="1")]
+    pub protocol_id: ::prost::alloc::string::String,
+    /// protocol_version is the identifier of the protocol version
+    /// the field is not used internally but it is validated for correctness
+    #[prost(string, tag="2")]
+    pub protocol_version: ::prost::alloc::string::String,
+    /// arbitrary bytes data that can be used to store any data
+    /// the field is not used internally but it is validated and must be provided
+    #[prost(bytes="vec", tag="3")]
+    pub data: ::prost::alloc::vec::Vec<u8>,
+}
+// @@protoc_insertion_point(module)
diff --git a/contracts/puppeteer/src/tests.rs b/contracts/puppeteer/src/tests.rs
index 45b470b9..4947deec 100644
--- a/contracts/puppeteer/src/tests.rs
+++ b/contracts/puppeteer/src/tests.rs
@@ -493,7 +493,7 @@ fn test_execute_redeem_share() {
     let mut buf = Vec::with_capacity(msg.encoded_len());
     msg.encode(&mut buf).unwrap();
     let any_msg = neutron_sdk::bindings::types::ProtobufAny {
-        type_url: "/cosmos.staking.v1beta1.MsgRedeemTokensForShares".to_string(),
+        type_url: "/gaia.liquid.v1beta1.MsgRedeemTokensForShares".to_string(),
         value: Binary::from(buf),
     };
     assert_eq!(
diff --git a/integration_tests/dockerfiles/gaia/Dockerfile b/integration_tests/dockerfiles/gaia/Dockerfile
index 96a6938f..b7cf2900 100644
--- a/integration_tests/dockerfiles/gaia/Dockerfile
+++ b/integration_tests/dockerfiles/gaia/Dockerfile
@@ -1,21 +1,28 @@
-ARG IMG_TAG=latest
-
-FROM golang:1.20-alpine AS builder
+FROM golang:1.23-alpine AS builder
 WORKDIR /src/app/
+ENV PACKAGES="curl build-base make git libc-dev bash file gcc linux-headers eudev-dev"
+RUN apk add --no-cache $PACKAGES
+
+ARG WASMVM_VERSION=v2.2.3
+ADD https://github.com/CosmWasm/wasmvm/releases/download/${WASMVM_VERSION}/libwasmvm_muslc.aarch64.a /lib/libwasmvm_muslc.aarch64.a
+ADD https://github.com/CosmWasm/wasmvm/releases/download/${WASMVM_VERSION}/libwasmvm_muslc.x86_64.a /lib/libwasmvm_muslc.x86_64.a
+RUN sha256sum /lib/libwasmvm_muslc.aarch64.a | grep 6641730781bb1adc4bdf04a1e0f822b9ad4fb8ed57dcbbf575527e63b791ae41
+RUN sha256sum /lib/libwasmvm_muslc.x86_64.a | grep 32503fe35a7be202c5f7c3051497d6e4b3cd83079a61f5a0bf72a2a455b6d820
+RUN cp "/lib/libwasmvm_muslc.$(uname -m).a" /lib/libwasmvm_muslc.a
+
+
 COPY go.mod go.sum* ./
 RUN go mod download
-COPY . .
-ENV PACKAGES curl make git libc-dev bash gcc linux-headers eudev-dev python3
-RUN apk add --no-cache $PACKAGES
-RUN CGO_ENABLED=0 make install
+
+COPY . .
+RUN LEDGER_ENABLED=false LINK_STATICALLY=true BUILD_TAGS=muslc make build
+RUN echo "Ensuring binary is statically linked ..." \
+    && file /src/app/build/gaiad | grep "statically linked"
 
-FROM golang:1.20-alpine
-ARG IMG_TAG
-RUN apk add --no-cache bash jq
-COPY --from=builder /go/bin/gaiad /usr/local/bin/
+FROM alpine:latest
+RUN apk add --no-cache build-base jq bash
+COPY --from=builder /src/app/build/gaiad /usr/local/bin/
 EXPOSE 26656 26657 1317 9090
 USER 0
 ENTRYPOINT ["gaiad"]
-
diff --git a/integration_tests/dockerfiles/gaia/build.sh b/integration_tests/dockerfiles/gaia/build.sh
index 77c67d52..1917a9a8 100755
--- a/integration_tests/dockerfiles/gaia/build.sh
+++ b/integration_tests/dockerfiles/gaia/build.sh
@@ -2,26 +2,14 @@
 DIR="$(dirname $0)"
 cd $DIR
 VERSION=$(cat ../../package.json | jq -r '.version')
-git clone https://github.com/cosmos/gaia.git -b v14.1.0
-cp ./Dockerfile ./gaia
 if [[ "$CI" == "true" ]]; then
   VERSION="_$VERSION"
   ORG=neutronorg/lionco-contracts:
 else
   VERSION=":$VERSION"
-  new_replace="github.com/cosmos/ibc-go/v4 v4.4.2 => github.com/ratik/ibc-go/v4 v4.4.3-0.20231115171220-5c22b66cfa8c"
-  gomod_file="gaia/go.mod"
-  cp "$gomod_file" "$gomod_file.bak"
-  awk -v new_replace="$new_replace" '
-  BEGIN { replace_block=0; added=0 }
-  /replace[[:space:]]*\(/ { replace_block=1 }
-  /^[[:space:]]*\)/ { if(replace_block) { print new_replace; added=1; replace_block=0 } }
-  { print }
-  END { if(!added) { print "replace ("; print new_replace; print ")" } }
-  ' "$gomod_file.bak" > "$gomod_file"
-  cd gaia
-  go mod tidy
-  cd ..
 fi
+git clone https://github.com/cosmos/gaia.git -b v24.0.0-rc1
+cp ./Dockerfile ./gaia
+
 docker build gaia -t ${ORG}gaia-test${VERSION}
-rm -rf ./gaia
\ No newline at end of file
+rm -rf ./gaia
diff --git a/integration_tests/dockerfiles/gaia/options.json b/integration_tests/dockerfiles/gaia/options.json
new file mode 100644
index 00000000..6aca8297
--- /dev/null
+++ b/integration_tests/dockerfiles/gaia/options.json
@@ -0,0 +1,7 @@
+{
+  "commands": {
+    "addGenesisAccount": "genesis add-genesis-account",
+    "gentx": "genesis gentx",
+    "collectGenTx": "genesis collect-gentxs"
+  }
+}
diff --git a/integration_tests/dockerfiles/hermes/Dockerfile.aarch64 b/integration_tests/dockerfiles/hermes/Dockerfile.aarch64
new file mode 100644
index 00000000..faf9f09b
--- /dev/null
+++ b/integration_tests/dockerfiles/hermes/Dockerfile.aarch64
@@ -0,0 +1,29 @@
+FROM ubuntu:20.04
+ARG HERMES_VERSION=v1.12.0
+ENV DEBIAN_FRONTEND=noninteractive
+WORKDIR /app
+
+RUN apt-get update && apt-get install -y wget ca-certificates && \
+    rm -rf /var/lib/apt/lists/* && \
+    apt-get clean && \
+    ARCH=$(uname -m) && \
+    case "$ARCH" in \
+      x86_64) PLATFORM="x86_64" ;; \
+      aarch64) PLATFORM="aarch64" ;; \
+      *) echo "Unsupported architecture: $ARCH" && exit 1 ;; \
+    esac && \
+    TARNAME="hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu.tar.gz" && \
+    wget "https://github.com/informalsystems/hermes/releases/download/${HERMES_VERSION}/${TARNAME}" && \
+    tar -xf "$TARNAME" && \
+    # determine the extracted binary name
+    if [ -f "hermes" ]; then \
+      mv hermes /bin/hermes ; \
+    elif [ -f "hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu" ]; then \
+      mv "hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu" /bin/hermes ; \
+    else \
+      echo "Hermes binary not found after extraction!" && exit 1 ; \
+    fi && \
+    chmod +x /bin/hermes && \
+    rm -rf "$TARNAME"
+
+ENTRYPOINT ["hermes", "start"]
\ No newline at end of file
diff --git a/integration_tests/dockerfiles/hermes/Dockerfile.x86_64 b/integration_tests/dockerfiles/hermes/Dockerfile.x86_64
new file mode 100644
index 00000000..ef140da6
--- /dev/null
+++ b/integration_tests/dockerfiles/hermes/Dockerfile.x86_64
@@ -0,0 +1,29 @@
+FROM ubuntu:24.04
+ARG HERMES_VERSION=v1.12.0
+ENV DEBIAN_FRONTEND=noninteractive
+WORKDIR /app
+
+RUN apt-get update && apt-get install -y wget ca-certificates && \
+    rm -rf /var/lib/apt/lists/* && \
+    apt-get clean && \
+    ARCH=$(uname -m) && \
+    case "$ARCH" in \
+      x86_64) PLATFORM="x86_64" ;; \
+      aarch64) PLATFORM="aarch64" ;; \
+      *) echo "Unsupported architecture: $ARCH" && exit 1 ;; \
+    esac && \
+    TARNAME="hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu.tar.gz" && \
+    wget "https://github.com/informalsystems/hermes/releases/download/${HERMES_VERSION}/${TARNAME}" && \
+    tar -xf "$TARNAME" && \
+    # determine the extracted binary name
+    if [ -f "hermes" ]; then \
+      mv hermes /bin/hermes ; \
+    elif [ -f "hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu" ]; then \
+      mv "hermes-${HERMES_VERSION}-${PLATFORM}-unknown-linux-gnu" /bin/hermes ; \
+    else \
+      echo "Hermes binary not found after extraction!" && exit 1 ; \
+    fi && \
+    chmod +x /bin/hermes && \
+    rm -rf "$TARNAME"
+
+ENTRYPOINT ["hermes", "start"]
\ No newline at end of file
diff --git a/integration_tests/dockerfiles/hermes/build.sh b/integration_tests/dockerfiles/hermes/build.sh
index 4e8971f7..59df3989 100755
--- a/integration_tests/dockerfiles/hermes/build.sh
+++ b/integration_tests/dockerfiles/hermes/build.sh
@@ -8,5 +8,13 @@ if [[ "$CI" == "true" ]]; then
 else
   VERSION=":$VERSION"
 fi
-docker build . -t ${ORG}hermes-test${VERSION}
+ARCH=$(uname -m)
+
+if [ "$ARCH" = "arm64" ] || [ "$ARCH" = "aarch64" ]; then
+  DOCKERFILE="Dockerfile.aarch64"
+else
+  DOCKERFILE="Dockerfile.x86_64"
+fi
+docker build -f $DOCKERFILE -t ${ORG}hermes-test${VERSION} .
+
diff --git a/integration_tests/dockerfiles/neutron-query-relayer/build.sh b/integration_tests/dockerfiles/neutron-query-relayer/build.sh
index d5ee13a9..6f72e5e9 100755
--- a/integration_tests/dockerfiles/neutron-query-relayer/build.sh
+++ b/integration_tests/dockerfiles/neutron-query-relayer/build.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 DIR="$(dirname $0)"
 cd $DIR
-git clone -b foxpy/low-submission-margin-period https://github.com/neutron-org/neutron-query-relayer
+git clone -b feat/low-submission-margin-period-updated https://github.com/neutron-org/neutron-query-relayer
 VERSION=$(cat ../../package.json | jq -r '.version')
 if [[ "$CI" == "true" ]]; then
   VERSION="_$VERSION"
diff --git a/integration_tests/package.json b/integration_tests/package.json
index d53dfe9c..2e1dcb8f 100644
--- a/integration_tests/package.json
+++ b/integration_tests/package.json
@@ -1,6 +1,6 @@
 {
   "name": "drop-cosmos-integration-tests",
-  "version": "1.0.4",
+  "version": "1.0.43",
   "main": "vitest",
   "license": "MIT",
   "scripts": {
diff --git a/integration_tests/src/testSuite.ts b/integration_tests/src/testSuite.ts
index 92196367..97be2f50 100644
--- a/integration_tests/src/testSuite.ts
+++ b/integration_tests/src/testSuite.ts
@@ -1,6 +1,10 @@
 import cosmopark, { CosmoparkConfig } from '@neutron-org/cosmopark';
 import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing';
-import { StargateClient } from '@cosmjs/stargate';
+import {
+  QueryClient,
+  setupIbcExtension,
+  StargateClient,
+} from '@cosmjs/stargate';
 import { Client as NeutronClient } from '@neutron-org/client-ts';
 import { waitFor } from './helpers/waitFor';
 import { sleep } from './helpers/sleep';
@@ -10,6 +14,8 @@ import {
   CosmoparkRelayer,
 } from '@neutron-org/cosmopark/lib/types';
 import { Suite } from 'vitest';
+import { Tendermint34Client } from '@cosmjs/tendermint-rpc';
+
 const packageJSON = require(`${__dirname}/../package.json`);
 const VERSION = (process.env.CI ? '_' : ':') + packageJSON.version;
 const ORG = process.env.CI ? 'neutronorg/lionco-contracts:' : '';
@@ -31,7 +37,13 @@ const TIMEOUT = 10_000;
 
 const redefinedParams =
   process.env.REMOTE_CHAIN_OPTS && fs.existsSync(process.env.REMOTE_CHAIN_OPTS)
     ?
JSON.parse(fs.readFileSync(process.env.REMOTE_CHAIN_OPTS).toString()) - : {}; + : { + commands: { + addGenesisAccount: 'genesis add-genesis-account', + gentx: 'genesis gentx', + collectGenTx: 'genesis collect-gentxs', + }, + }; const networkConfigs = { lsm: { @@ -51,6 +63,11 @@ const networkConfigs = { 'app_state.slashing.params.slash_fraction_downtime': '0.1', 'app_state.staking.params.validator_bond_factor': '10', 'app_state.staking.params.unbonding_time': '1814400s', + 'app_state.feemarket.params.beta': '0.0', + 'app_state.feemarket.params.max_learning_rate': '0.0', + 'app_state.feemarket.params.min_learning_rate': '0.0', + 'app_state.feemarket.params.min_base_gas_price': '0.001', + 'app_state.feemarket.state.base_gas_price': '0.001', 'app_state.mint.minter.inflation': '0.9', 'app_state.mint.params.inflation_max': '0.95', 'app_state.mint.params.inflation_min': '0.5', @@ -86,13 +103,7 @@ const networkConfigs = { trace: true, validators: 2, commands: redefinedParams.commands, - validators_balance: [ - '1900000000', - '100000000', - '100000000', - '100000000', - '100000000', - ], + validators_balance: ['1900000000', '100000000'], genesis_opts: redefinedParams.genesisOpts || { 'app_state.slashing.params.downtime_jail_duration': '10s', 'app_state.slashing.params.signed_blocks_window': '10', @@ -100,6 +111,11 @@ const networkConfigs = { 'app_state.slashing.params.slash_fraction_downtime': '0.1', 'app_state.staking.params.validator_bond_factor': '10', 'app_state.staking.params.unbonding_time': '1814400s', + 'app_state.feemarket.params.beta': '0.0', + 'app_state.feemarket.params.max_learning_rate': '0.0', + 'app_state.feemarket.params.min_learning_rate': '0.0', + 'app_state.feemarket.params.min_base_gas_price': '0.001', + 'app_state.feemarket.state.base_gas_price': '0.001', 'app_state.mint.minter.inflation': '0.9', 'app_state.mint.params.inflation_max': '0.95', 'app_state.mint.params.inflation_min': '0.5', @@ -126,6 +142,80 @@ const networkConfigs = { `/opt/init-gaia.sh > /opt/init-gaia.log 2>&1`, ], }, + neutronv2: { + binary: 'neutrond', + chain_id: 'ntrntest', + denom: 'untrn', + image: `${ORG}neutronv2-test${VERSION}`, + prefix: 'neutron', + loglevel: 'debug', + trace: true, + public: true, + validators: 2, + validators_balance: ['1900000000', '100000000', '100000000'], + upload: [ + './artifacts/contracts', + './artifacts/contracts_thirdparty', + './artifacts/scripts/init-neutrond.sh', + ], + post_init: ['CHAINID=ntrntest CHAIN_DIR=/opt /opt/init-neutrond.sh'], + genesis_opts: { + 'app_state.crisis.constant_fee.denom': 'untrn', + }, + config_opts: { + 'consensus.timeout_commit': '500ms', + 'consensus.timeout_propose': '500ms', + }, + app_opts: { + 'api.enable': 'true', + 'api.address': 'tcp://0.0.0.0:1317', + 'api.swagger': 'true', + 'grpc.enable': 'true', + 'grpc.address': '0.0.0.0:9090', + 'minimum-gas-prices': '0.0025untrn', + 'rosetta.enable': 'true', + 'telemetry.prometheus-retention-time': 1000, + }, + }, + initia: { + binary: redefinedParams.binary || 'initiad', + chain_id: 'testinitia', + denom: redefinedParams.denom || 'uinit', + image: `${ORG}${process.env.REMOTE_CHAIN ?? 
'initia-test'}${VERSION}`, + prefix: redefinedParams.prefix || 'init', + trace: true, + validators: 2, + commands: redefinedParams.commands, + validators_balance: ['100000000', '100000000'], + genesis_opts: redefinedParams.genesisOpts || { + 'app_state.slashing.params.downtime_jail_duration': '10s', + 'app_state.slashing.params.signed_blocks_window': '10', + 'app_state.slashing.params.min_signed_per_window': '0.9', + 'app_state.slashing.params.slash_fraction_downtime': '0.1', + 'app_state.staking.params.validator_bond_factor': '10', + 'app_state.staking.params.unbonding_time': '1814400s', + 'app_state.mint.minter.inflation': '0.9', + 'app_state.mint.params.inflation_max': '0.95', + 'app_state.mint.params.inflation_min': '0.5', + 'app_state.interchainaccounts.host_genesis_state.params.allow_messages': [ + '*', + ], + }, + config_opts: { + 'rpc.laddr': 'tcp://0.0.0.0:26657', + }, + app_opts: { + 'api.enable': true, + 'api.address': 'tcp://0.0.0.0:1317', + 'api.swagger': true, + 'grpc.enable': true, + 'grpc.address': '0.0.0.0:9090', + 'minimum-gas-prices': redefinedParams.denom + ? `0${redefinedParams.denom}` + : '0uinit', + 'rosetta.enable': true, + }, + }, neutron: { binary: 'neutrond', chain_id: 'ntrntest', @@ -167,9 +257,9 @@ const relayersConfig = { balance: '1000000000', binary: 'hermes', config: { - 'chains.0.gas_multiplier': 1.2, + 'chains.0.gas_multiplier': 1.8, 'chains.0.trusting_period': '112h0m0s', - 'chains.1.gas_multiplier': 1.2, + 'chains.1.gas_multiplier': 1.8, 'chains.1.trusting_period': '168h0m0s', }, image: `${ORG}hermes-test${VERSION}`, @@ -182,6 +272,11 @@ const relayersConfig = { image: `${ORG}neutron-query-relayer-test${VERSION}`, log_level: 'debug', type: 'neutron', + environment: { + RELAYER_NEUTRON_CHAIN_DENOM: 'untrn', + RELAYER_NEUTRON_CHAIN_MAX_GAS_PRICE: 1000, + RELAYER_NEUTRON_CHAIN_GAS_PRICE_MULTIPLIER: 1.1, + }, }, }; @@ -252,7 +347,23 @@ const awaitNeutronChannels = (rest: string, rpc: string): Promise => await sleep(10000); return false; } - }, 100_000); + }, 500_000); + +export const awaitTargetChannels = (rpc: string): Promise => + waitFor(async () => { + try { + const tmClient = await Tendermint34Client.connect(rpc); + const client = QueryClient.withExtensions(tmClient, setupIbcExtension); + const res = await client.ibc.channel.allChannels(); + if (res.channels.length > 0 && res.channels[0].state === 3) { + return true; + } + await sleep(10000); + } catch (e) { + await sleep(10000); + return false; + } + }, 500_000); export const generateWallets = (): Promise> => keys.reduce( @@ -377,6 +488,7 @@ export const setupPark = async ( mnemonic: wallets.neutronqueryrelayer, } as any); } + const instance = await cosmopark.create(config); await Promise.all( Object.entries(instance.ports).map(([network, ports]) => diff --git a/integration_tests/src/testcases/auto-withdrawer.test.ts b/integration_tests/src/testcases/auto-withdrawer.test.ts index d100ee46..46884c3d 100644 --- a/integration_tests/src/testcases/auto-withdrawer.test.ts +++ b/integration_tests/src/testcases/auto-withdrawer.test.ts @@ -193,6 +193,7 @@ describe('Auto withdrawer', () => { }); it('transfer tokens to neutron', async () => { + await sleep(15000); // wait for the chain to be ready context.gaiaUserAddress = ( await context.gaiaWallet.getAccounts() )[0].address; @@ -398,7 +399,7 @@ describe('Auto withdrawer', () => { account.address, res.codeId, { - sdk_version: process.env.SDK_VERSION || '0.46.0', + sdk_version: process.env.SDK_VERSION || '0.49.0', code_ids: { core_code_id: context.codeIds.core, 
token_code_id: context.codeIds.token, @@ -620,6 +621,10 @@ describe('Auto withdrawer', () => { expect(ica).toHaveLength(65); expect(ica.startsWith('cosmos')).toBeTruthy(); context.icaAddress = ica; + await context.park.executeInNetwork( + 'gaia', + `${context.park.config.networks['gaia'].binary} tx bank send demo1 ${context.icaAddress} 10000stake --fees 10000stake --keyring-backend=test --home=/opt --fees 10000stake -y --output json`, + ); }); it('set puppeteer ICA to the staker', async () => { @@ -960,7 +965,7 @@ describe('Auto withdrawer', () => { const { gaiaClient } = context; const res = await gaiaClient.getBalance(context.icaAddress, 'stake'); ica.balance = parseInt(res.amount); - expect(ica.balance).toEqual(0); + expect(ica.balance).toEqual(10000); }); it('deploy pump', async () => { const { client, account, neutronUserAddress } = context; @@ -1113,7 +1118,7 @@ describe('Auto withdrawer', () => { 'stake', ); const balance = parseInt(res.amount); - expect(0).toEqual(ica.balance); + expect(ica.balance).toEqual(10000); ica.balance = balance; }); it('wait for balances to come', async () => { @@ -1267,7 +1272,7 @@ describe('Auto withdrawer', () => { 'stake', ); const newBalance = parseInt(res.amount); - expect(newBalance).toBeGreaterThan(balance); + expect(newBalance).toBeGreaterThan(balance - 10000); }); it('wait for balance to update', async () => { const { remote_height: currentHeight } = diff --git a/integration_tests/src/testcases/core-slashing.test.ts b/integration_tests/src/testcases/core-slashing.test.ts index 34aafa8e..f90cd79a 100644 --- a/integration_tests/src/testcases/core-slashing.test.ts +++ b/integration_tests/src/testcases/core-slashing.test.ts @@ -196,6 +196,7 @@ describe('Core Slashing', () => { }); it('transfer tokens to neutron', async () => { + await sleep(15000); // wait for the chain to be ready context.gaiaUserAddress = ( await context.gaiaWallet.getAccounts() )[0].address; @@ -402,7 +403,7 @@ describe('Core Slashing', () => { account.address, res.codeId, { - sdk_version: process.env.SDK_VERSION || '0.46.0', + sdk_version: process.env.SDK_VERSION || '0.49.0', local_denom: 'untrn', code_ids: { core_code_id: context.codeIds.core, @@ -586,6 +587,10 @@ describe('Core Slashing', () => { expect(ica).toHaveLength(65); expect(ica.startsWith('cosmos')).toBeTruthy(); context.icaAddress = ica; + await context.park.executeInNetwork( + 'gaia', + `${context.park.config.networks['gaia'].binary} tx bank send demo1 ${context.icaAddress} 10000stake --fees 10000stake --keyring-backend=test --home=/opt --fees 10000stake -y --output json`, + ); }); it('set puppeteer ICA to the staker', async () => { const res = await context.factoryContractClient.adminExecute( diff --git a/integration_tests/src/testcases/core.test.ts b/integration_tests/src/testcases/core.test.ts index 1ba32524..35a8a8d9 100644 --- a/integration_tests/src/testcases/core.test.ts +++ b/integration_tests/src/testcases/core.test.ts @@ -198,6 +198,7 @@ describe('Core', () => { }); it('transfer tokens to neutron', async () => { + await sleep(15000); // wait for the chain to be ready context.gaiaUserAddress = ( await context.gaiaWallet.getAccounts() )[0].address; @@ -405,7 +406,7 @@ describe('Core', () => { account.address, res.codeId, { - sdk_version: process.env.SDK_VERSION || '0.46.0', + sdk_version: process.env.SDK_VERSION || '0.49.0', local_denom: 'untrn', code_ids: { core_code_id: context.codeIds.core, @@ -623,6 +624,11 @@ describe('Core', () => { expect(ica).toHaveLength(65); 
expect(ica.startsWith('cosmos')).toBeTruthy(); context.stakerIcaAddress = ica; + await context.park.executeInNetwork( + 'gaia', + `${context.park.config.networks['gaia'].binary} tx bank send demo1 ${context.stakerIcaAddress} 10000stake --fees 10000stake --keyring-backend=test --home=/opt --fees 10000stake -y --output json`, + ); + await sleep(5000); }); it('setup ICA for rewards pump', async () => { const { rewardsPumpContractClient, neutronUserAddress } = context; @@ -676,6 +682,12 @@ describe('Core', () => { expect(ica).toHaveLength(65); expect(ica.startsWith('cosmos')).toBeTruthy(); context.icaAddress = ica; + + await context.park.executeInNetwork( + 'gaia', + `${context.park.config.networks['gaia'].binary} tx bank send demo1 ${context.icaAddress} 10000stake --fees 10000stake --keyring-backend=test --home=/opt --fees 10000stake -y --output json`, + ); + await sleep(5000); }); it('set puppeteer ICA to the staker', async () => { const res = await context.factoryContractClient.adminExecute( @@ -707,6 +719,7 @@ describe('Core', () => { ); expect(res.transactionHash).toHaveLength(64); }); + it('grant staker to delegate funds from puppeteer ICA and set up rewards receiver', async () => { const { neutronUserAddress } = context; const res = await context.factoryContractClient.adminExecute( @@ -756,6 +769,12 @@ describe('Core', () => { return res.status === 'idle'; }, 100_000); }); + + // it('delegate from puppeteer ICA', async () => { + // await context.park.executeInNetwork('gaia', 'gaiad'); + // process.exit(-1); + // }); + it('verify grant', async () => { const res = await context.park.executeInNetwork( 'gaia', @@ -934,7 +953,7 @@ describe('Core', () => { context.validatorAddress = (await wallet.getAccounts())[0].address; const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking delegate ${context.validatorAddress} 1000000stake --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --output json`, + `gaiad tx staking delegate ${context.validatorAddress} 1000000stake --from ${context.gaiaUserAddress} --fees 10000stake --yes --chain-id testgaia --home=/opt --keyring-backend=test --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -945,7 +964,7 @@ describe('Core', () => { it('tokenize share on gaia side', async () => { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking tokenize-share ${context.validatorAddress} 600000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx liquid tokenize-share ${context.validatorAddress} 600000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --fees 10000stake --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -965,7 +984,7 @@ describe('Core', () => { it('transfer tokenized share to neutron', async () => { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 600000${context.validatorAddress}/1 --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 600000${context.validatorAddress}/1 --from ${context.gaiaUserAddress} --fees 
10000stake --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1093,7 +1112,7 @@ describe('Core', () => { const { gaiaClient } = context; const res = await gaiaClient.getBalance(context.icaAddress, 'stake'); ica.balance = parseInt(res.amount); - expect(ica.balance).toEqual(0); + expect(ica.balance).toEqual(10000); }); it('deploy pump', async () => { const { client, account, neutronUserAddress } = context; @@ -1213,7 +1232,7 @@ describe('Core', () => { ); expect(balances).toEqual([ { - amount: '1000000', + amount: '1010000', denom: context.park.config.networks.gaia.denom, }, ]); @@ -1293,7 +1312,7 @@ describe('Core', () => { context.park.config.networks.gaia.denom, ); const balance = parseInt(res.amount); - expect(balance).toEqual(0); + expect(balance).toEqual(10000); }); it('wait delegations', async () => { await waitFor(async () => { @@ -1624,7 +1643,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking delegate ${context.validatorAddress} 100000stake --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --output json`, + `gaiad tx staking delegate ${context.validatorAddress} 100000stake --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1635,7 +1654,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking delegate ${context.secondValidatorAddress} 100000stake --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --output json`, + `gaiad tx staking delegate ${context.secondValidatorAddress} 100000stake --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1648,7 +1667,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking tokenize-share ${context.validatorAddress} 60000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx liquid tokenize-share ${context.validatorAddress} 60000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1670,7 +1689,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx staking tokenize-share ${context.secondValidatorAddress} 60000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx liquid tokenize-share ${context.secondValidatorAddress} 60000stake ${context.gaiaUserAddress} --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1694,7 +1713,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - 
`gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 60000${context.validatorAddress}/2 --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 60000${context.validatorAddress}/2 --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -1706,7 +1725,7 @@ describe('Core', () => { { const res = await context.park.executeInNetwork( 'gaia', - `gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 60000${context.secondValidatorAddress}/3 --from ${context.gaiaUserAddress} --yes --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, + `gaiad tx ibc-transfer transfer transfer channel-0 ${context.neutronUserAddress} 60000${context.secondValidatorAddress}/3 --from ${context.gaiaUserAddress} --yes --fees 10000stake --chain-id testgaia --home=/opt --keyring-backend=test --gas auto --gas-adjustment 2 --output json`, ); expect(res.exitCode).toBe(0); const out = JSON.parse(res.out); @@ -2573,7 +2592,7 @@ describe('Core', () => { ); expect(balances).toEqual([ { - amount: '1010000', + amount: '1020000', denom: context.park.config.networks.gaia.denom, }, ]);