diff --git a/.github/workflows/push-docker-snapshot-v2.yaml b/.github/workflows/push-docker-snapshot-v2.yaml index ff49d8a..cdab8dd 100644 --- a/.github/workflows/push-docker-snapshot-v2.yaml +++ b/.github/workflows/push-docker-snapshot-v2.yaml @@ -3,7 +3,7 @@ name: Deploy v2 prod images to GHCR on: push: branches: - - 'feature/add-more-events' + - 'feature/identity-related-log-filters' jobs: docker-publish: diff --git a/.github/workflows/test-v2.yaml b/.github/workflows/test-v2.yaml index 0b3d842..350aa80 100644 --- a/.github/workflows/test-v2.yaml +++ b/.github/workflows/test-v2.yaml @@ -18,7 +18,7 @@ jobs: if: "!startsWith(github.ref, 'refs/tags/')" strategy: matrix: - go-version: [1.25.x] + go-version: [1.26.x] os: [ubuntu-latest] runs-on: ${{ matrix.os }} steps: @@ -39,11 +39,11 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.25.x + go-version: 1.26.x - name: Run golangci-lint uses: golangci/golangci-lint-action@v8 with: - version: v2.6 + version: v2.11.1 working-directory: v2 govulncheck: runs-on: ubuntu-latest @@ -52,6 +52,6 @@ jobs: - id: govulncheck uses: golang/govulncheck-action@v1 with: - go-version-input: '1.25' # Change this to your project's Go version + go-version-input: '1.26' # Change this to your project's Go version go-package: ./... 
work-dir: v2 \ No newline at end of file diff --git a/v2/Dockerfile b/v2/Dockerfile index 15b5d32..b988388 100644 --- a/v2/Dockerfile +++ b/v2/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.25 AS builder +FROM golang:1.26 AS builder ENV CGO_ENABLED=0 WORKDIR /src diff --git a/v2/api/archive-query-service/v2/messages.pb.go b/v2/api/archive-query-service/v2/messages.pb.go index 9ee16ab..6c2d5b7 100644 --- a/v2/api/archive-query-service/v2/messages.pb.go +++ b/v2/api/archive-query-service/v2/messages.pb.go @@ -655,7 +655,23 @@ func (x *GetTransactionsForTickResponse) GetTransactions() []*Transaction { return nil } -// Range +// Range filter +// +// | Name | Type | Necessity | Description | +// |-----------|--------|-----------|-------------------------------------------| +// | gt | string | optional | Greater than. | +// | gte | string | optional | Greater than or equal to. | +// | lt | string | optional | Less than. | +// | lte | string | optional | Less than or equal to. | +// +// One lower bound and one upper bound can be specified. One bound is needed. A range with size of 0 or 1 is not allowed. +// +// # Examples +// +// ``` +// "amount": { "gt": "1000000" } +// "tickNumber": { "gte": "25563000", "lte": "28300000" } +// ``` type Range struct { state protoimpl.MessageState `protogen:"open.v1"` // Types that are valid to be assigned to LowerBound: @@ -784,20 +800,92 @@ func (*Range_Lt) isRange_UpperBound() {} func (*Range_Lte) isRange_UpperBound() {} +// Should Filters +// +// One should filter can contain multiple terms and ranges. It needs at least two query clauses. +// At least one of them has to match by default. See term and range filter documentation for examples. 
+// +// # Example +// +// ``` +// "should": [ +// +// { +// "terms": { +// "source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID", +// "destination": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" +// } +// } +// +// ] +// ``` +type ShouldFilter struct { + state protoimpl.MessageState `protogen:"open.v1"` + Terms map[string]string `protobuf:"bytes,1,rep,name=terms,proto3" json:"terms,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Ranges map[string]*Range `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ShouldFilter) Reset() { + *x = ShouldFilter{} + mi := &file_messages_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ShouldFilter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ShouldFilter) ProtoMessage() {} + +func (x *ShouldFilter) ProtoReflect() protoreflect.Message { + mi := &file_messages_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ShouldFilter.ProtoReflect.Descriptor instead. 
+func (*ShouldFilter) Descriptor() ([]byte, []int) { + return file_messages_proto_rawDescGZIP(), []int{11} +} + +func (x *ShouldFilter) GetTerms() map[string]string { + if x != nil { + return x.Terms + } + return nil +} + +func (x *ShouldFilter) GetRanges() map[string]*Range { + if x != nil { + return x.Ranges + } + return nil +} + // GetTransactionsForIdentityRequest type GetTransactionsForIdentityRequest struct { state protoimpl.MessageState `protogen:"open.v1"` Identity string `protobuf:"bytes,1,opt,name=identity,proto3" json:"identity,omitempty"` Filters map[string]string `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Ranges map[string]*Range `protobuf:"bytes,3,rep,name=ranges,proto3" json:"ranges,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Pagination *Pagination `protobuf:"bytes,4,opt,name=pagination,proto3" json:"pagination,omitempty"` + Exclude map[string]string `protobuf:"bytes,3,rep,name=exclude,proto3" json:"exclude,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Ranges map[string]*Range `protobuf:"bytes,6,rep,name=ranges,proto3" json:"ranges,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Pagination *Pagination `protobuf:"bytes,9,opt,name=pagination,proto3" json:"pagination,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *GetTransactionsForIdentityRequest) Reset() { *x = GetTransactionsForIdentityRequest{} - mi := &file_messages_proto_msgTypes[11] + mi := &file_messages_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -809,7 +897,7 @@ func (x *GetTransactionsForIdentityRequest) String() string { func (*GetTransactionsForIdentityRequest) ProtoMessage() {} func (x *GetTransactionsForIdentityRequest) ProtoReflect() protoreflect.Message { - mi 
:= &file_messages_proto_msgTypes[11] + mi := &file_messages_proto_msgTypes[12] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -822,7 +910,7 @@ func (x *GetTransactionsForIdentityRequest) ProtoReflect() protoreflect.Message // Deprecated: Use GetTransactionsForIdentityRequest.ProtoReflect.Descriptor instead. func (*GetTransactionsForIdentityRequest) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{11} + return file_messages_proto_rawDescGZIP(), []int{12} } func (x *GetTransactionsForIdentityRequest) GetIdentity() string { @@ -839,6 +927,13 @@ func (x *GetTransactionsForIdentityRequest) GetFilters() map[string]string { return nil } +func (x *GetTransactionsForIdentityRequest) GetExclude() map[string]string { + if x != nil { + return x.Exclude + } + return nil +} + func (x *GetTransactionsForIdentityRequest) GetRanges() map[string]*Range { if x != nil { return x.Ranges @@ -865,7 +960,7 @@ type Hits struct { func (x *Hits) Reset() { *x = Hits{} - mi := &file_messages_proto_msgTypes[12] + mi := &file_messages_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -877,7 +972,7 @@ func (x *Hits) String() string { func (*Hits) ProtoMessage() {} func (x *Hits) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[12] + mi := &file_messages_proto_msgTypes[13] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -890,7 +985,7 @@ func (x *Hits) ProtoReflect() protoreflect.Message { // Deprecated: Use Hits.ProtoReflect.Descriptor instead. 
func (*Hits) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{12} + return file_messages_proto_rawDescGZIP(), []int{13} } func (x *Hits) GetTotal() uint32 { @@ -926,7 +1021,7 @@ type GetTransactionsForIdentityResponse struct { func (x *GetTransactionsForIdentityResponse) Reset() { *x = GetTransactionsForIdentityResponse{} - mi := &file_messages_proto_msgTypes[13] + mi := &file_messages_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -938,7 +1033,7 @@ func (x *GetTransactionsForIdentityResponse) String() string { func (*GetTransactionsForIdentityResponse) ProtoMessage() {} func (x *GetTransactionsForIdentityResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[13] + mi := &file_messages_proto_msgTypes[14] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -951,7 +1046,7 @@ func (x *GetTransactionsForIdentityResponse) ProtoReflect() protoreflect.Message // Deprecated: Use GetTransactionsForIdentityResponse.ProtoReflect.Descriptor instead. 
func (*GetTransactionsForIdentityResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{13} + return file_messages_proto_rawDescGZIP(), []int{14} } func (x *GetTransactionsForIdentityResponse) GetValidForTick() uint32 { @@ -985,7 +1080,7 @@ type GetTickDataRequest struct { func (x *GetTickDataRequest) Reset() { *x = GetTickDataRequest{} - mi := &file_messages_proto_msgTypes[14] + mi := &file_messages_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -997,7 +1092,7 @@ func (x *GetTickDataRequest) String() string { func (*GetTickDataRequest) ProtoMessage() {} func (x *GetTickDataRequest) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[14] + mi := &file_messages_proto_msgTypes[15] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1010,7 +1105,7 @@ func (x *GetTickDataRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTickDataRequest.ProtoReflect.Descriptor instead. 
func (*GetTickDataRequest) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{14} + return file_messages_proto_rawDescGZIP(), []int{15} } func (x *GetTickDataRequest) GetTickNumber() uint32 { @@ -1030,7 +1125,7 @@ type GetTickDataResponse struct { func (x *GetTickDataResponse) Reset() { *x = GetTickDataResponse{} - mi := &file_messages_proto_msgTypes[15] + mi := &file_messages_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1042,7 +1137,7 @@ func (x *GetTickDataResponse) String() string { func (*GetTickDataResponse) ProtoMessage() {} func (x *GetTickDataResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[15] + mi := &file_messages_proto_msgTypes[16] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1055,7 +1150,7 @@ func (x *GetTickDataResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetTickDataResponse.ProtoReflect.Descriptor instead. 
func (*GetTickDataResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{15} + return file_messages_proto_rawDescGZIP(), []int{16} } func (x *GetTickDataResponse) GetTickData() *TickData { @@ -1075,7 +1170,7 @@ type GetProcessedTickIntervalsResponse struct { func (x *GetProcessedTickIntervalsResponse) Reset() { *x = GetProcessedTickIntervalsResponse{} - mi := &file_messages_proto_msgTypes[16] + mi := &file_messages_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1087,7 +1182,7 @@ func (x *GetProcessedTickIntervalsResponse) String() string { func (*GetProcessedTickIntervalsResponse) ProtoMessage() {} func (x *GetProcessedTickIntervalsResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[16] + mi := &file_messages_proto_msgTypes[17] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1100,7 +1195,7 @@ func (x *GetProcessedTickIntervalsResponse) ProtoReflect() protoreflect.Message // Deprecated: Use GetProcessedTickIntervalsResponse.ProtoReflect.Descriptor instead. 
func (*GetProcessedTickIntervalsResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{16} + return file_messages_proto_rawDescGZIP(), []int{17} } func (x *GetProcessedTickIntervalsResponse) GetProcessedTickIntervals() []*ProcessedTickInterval { @@ -1122,7 +1217,7 @@ type GetLastProcessedTickResponse struct { func (x *GetLastProcessedTickResponse) Reset() { *x = GetLastProcessedTickResponse{} - mi := &file_messages_proto_msgTypes[17] + mi := &file_messages_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1134,7 +1229,7 @@ func (x *GetLastProcessedTickResponse) String() string { func (*GetLastProcessedTickResponse) ProtoMessage() {} func (x *GetLastProcessedTickResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[17] + mi := &file_messages_proto_msgTypes[18] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1147,7 +1242,7 @@ func (x *GetLastProcessedTickResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetLastProcessedTickResponse.ProtoReflect.Descriptor instead. 
func (*GetLastProcessedTickResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{17} + return file_messages_proto_rawDescGZIP(), []int{18} } func (x *GetLastProcessedTickResponse) GetTickNumber() uint32 { @@ -1181,7 +1276,7 @@ type GetComputorListsForEpochRequest struct { func (x *GetComputorListsForEpochRequest) Reset() { *x = GetComputorListsForEpochRequest{} - mi := &file_messages_proto_msgTypes[18] + mi := &file_messages_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1193,7 +1288,7 @@ func (x *GetComputorListsForEpochRequest) String() string { func (*GetComputorListsForEpochRequest) ProtoMessage() {} func (x *GetComputorListsForEpochRequest) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[18] + mi := &file_messages_proto_msgTypes[19] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1206,7 +1301,7 @@ func (x *GetComputorListsForEpochRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetComputorListsForEpochRequest.ProtoReflect.Descriptor instead. 
func (*GetComputorListsForEpochRequest) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{18} + return file_messages_proto_rawDescGZIP(), []int{19} } func (x *GetComputorListsForEpochRequest) GetEpoch() uint32 { @@ -1229,7 +1324,7 @@ type ComputorList struct { func (x *ComputorList) Reset() { *x = ComputorList{} - mi := &file_messages_proto_msgTypes[19] + mi := &file_messages_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1241,7 +1336,7 @@ func (x *ComputorList) String() string { func (*ComputorList) ProtoMessage() {} func (x *ComputorList) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[19] + mi := &file_messages_proto_msgTypes[20] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1254,7 +1349,7 @@ func (x *ComputorList) ProtoReflect() protoreflect.Message { // Deprecated: Use ComputorList.ProtoReflect.Descriptor instead. 
func (*ComputorList) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{19} + return file_messages_proto_rawDescGZIP(), []int{20} } func (x *ComputorList) GetEpoch() uint32 { @@ -1295,7 +1390,7 @@ type GetComputorListsForEpochResponse struct { func (x *GetComputorListsForEpochResponse) Reset() { *x = GetComputorListsForEpochResponse{} - mi := &file_messages_proto_msgTypes[20] + mi := &file_messages_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1307,7 +1402,7 @@ func (x *GetComputorListsForEpochResponse) String() string { func (*GetComputorListsForEpochResponse) ProtoMessage() {} func (x *GetComputorListsForEpochResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[20] + mi := &file_messages_proto_msgTypes[21] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1320,7 +1415,7 @@ func (x *GetComputorListsForEpochResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetComputorListsForEpochResponse.ProtoReflect.Descriptor instead. 
func (*GetComputorListsForEpochResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{20} + return file_messages_proto_rawDescGZIP(), []int{21} } func (x *GetComputorListsForEpochResponse) GetComputorsLists() []*ComputorList { @@ -1340,7 +1435,7 @@ type HealthResponse struct { func (x *HealthResponse) Reset() { *x = HealthResponse{} - mi := &file_messages_proto_msgTypes[21] + mi := &file_messages_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1352,7 +1447,7 @@ func (x *HealthResponse) String() string { func (*HealthResponse) ProtoMessage() {} func (x *HealthResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[21] + mi := &file_messages_proto_msgTypes[22] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1365,7 +1460,7 @@ func (x *HealthResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use HealthResponse.ProtoReflect.Descriptor instead. 
func (*HealthResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{21} + return file_messages_proto_rawDescGZIP(), []int{22} } func (x *HealthResponse) GetStatus() string { @@ -1387,7 +1482,7 @@ type QuTransferData struct { func (x *QuTransferData) Reset() { *x = QuTransferData{} - mi := &file_messages_proto_msgTypes[22] + mi := &file_messages_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1399,7 +1494,7 @@ func (x *QuTransferData) String() string { func (*QuTransferData) ProtoMessage() {} func (x *QuTransferData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[22] + mi := &file_messages_proto_msgTypes[23] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1412,7 +1507,7 @@ func (x *QuTransferData) ProtoReflect() protoreflect.Message { // Deprecated: Use QuTransferData.ProtoReflect.Descriptor instead. func (*QuTransferData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{22} + return file_messages_proto_rawDescGZIP(), []int{23} } func (x *QuTransferData) GetSource() string { @@ -1451,7 +1546,7 @@ type AssetIssuanceData struct { func (x *AssetIssuanceData) Reset() { *x = AssetIssuanceData{} - mi := &file_messages_proto_msgTypes[23] + mi := &file_messages_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1463,7 +1558,7 @@ func (x *AssetIssuanceData) String() string { func (*AssetIssuanceData) ProtoMessage() {} func (x *AssetIssuanceData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[23] + mi := &file_messages_proto_msgTypes[24] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1476,7 +1571,7 @@ func (x *AssetIssuanceData) ProtoReflect() protoreflect.Message { // Deprecated: Use AssetIssuanceData.ProtoReflect.Descriptor 
instead. func (*AssetIssuanceData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{23} + return file_messages_proto_rawDescGZIP(), []int{24} } func (x *AssetIssuanceData) GetAssetIssuer() string { @@ -1535,7 +1630,7 @@ type AssetOwnershipChangeData struct { func (x *AssetOwnershipChangeData) Reset() { *x = AssetOwnershipChangeData{} - mi := &file_messages_proto_msgTypes[24] + mi := &file_messages_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1547,7 +1642,7 @@ func (x *AssetOwnershipChangeData) String() string { func (*AssetOwnershipChangeData) ProtoMessage() {} func (x *AssetOwnershipChangeData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[24] + mi := &file_messages_proto_msgTypes[25] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1560,7 +1655,7 @@ func (x *AssetOwnershipChangeData) ProtoReflect() protoreflect.Message { // Deprecated: Use AssetOwnershipChangeData.ProtoReflect.Descriptor instead. 
func (*AssetOwnershipChangeData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{24} + return file_messages_proto_rawDescGZIP(), []int{25} } func (x *AssetOwnershipChangeData) GetSource() string { @@ -1612,7 +1707,7 @@ type AssetPossessionChangeData struct { func (x *AssetPossessionChangeData) Reset() { *x = AssetPossessionChangeData{} - mi := &file_messages_proto_msgTypes[25] + mi := &file_messages_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1624,7 +1719,7 @@ func (x *AssetPossessionChangeData) String() string { func (*AssetPossessionChangeData) ProtoMessage() {} func (x *AssetPossessionChangeData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[25] + mi := &file_messages_proto_msgTypes[26] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1637,7 +1732,7 @@ func (x *AssetPossessionChangeData) ProtoReflect() protoreflect.Message { // Deprecated: Use AssetPossessionChangeData.ProtoReflect.Descriptor instead. 
func (*AssetPossessionChangeData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{25} + return file_messages_proto_rawDescGZIP(), []int{26} } func (x *AssetPossessionChangeData) GetSource() string { @@ -1687,7 +1782,7 @@ type BurningData struct { func (x *BurningData) Reset() { *x = BurningData{} - mi := &file_messages_proto_msgTypes[26] + mi := &file_messages_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1699,7 +1794,7 @@ func (x *BurningData) String() string { func (*BurningData) ProtoMessage() {} func (x *BurningData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[26] + mi := &file_messages_proto_msgTypes[27] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1712,7 +1807,7 @@ func (x *BurningData) ProtoReflect() protoreflect.Message { // Deprecated: Use BurningData.ProtoReflect.Descriptor instead. func (*BurningData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{26} + return file_messages_proto_rawDescGZIP(), []int{27} } func (x *BurningData) GetSource() string { @@ -1748,7 +1843,7 @@ type ContractReserveDeductionData struct { func (x *ContractReserveDeductionData) Reset() { *x = ContractReserveDeductionData{} - mi := &file_messages_proto_msgTypes[27] + mi := &file_messages_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1760,7 +1855,7 @@ func (x *ContractReserveDeductionData) String() string { func (*ContractReserveDeductionData) ProtoMessage() {} func (x *ContractReserveDeductionData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[27] + mi := &file_messages_proto_msgTypes[28] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1773,7 +1868,7 @@ func (x *ContractReserveDeductionData) ProtoReflect() 
protoreflect.Message { // Deprecated: Use ContractReserveDeductionData.ProtoReflect.Descriptor instead. func (*ContractReserveDeductionData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{27} + return file_messages_proto_rawDescGZIP(), []int{28} } func (x *ContractReserveDeductionData) GetDeductedAmount() uint64 { @@ -1807,7 +1902,7 @@ type SmartContractMessageData struct { func (x *SmartContractMessageData) Reset() { *x = SmartContractMessageData{} - mi := &file_messages_proto_msgTypes[28] + mi := &file_messages_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1819,7 +1914,7 @@ func (x *SmartContractMessageData) String() string { func (*SmartContractMessageData) ProtoMessage() {} func (x *SmartContractMessageData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[28] + mi := &file_messages_proto_msgTypes[29] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1832,7 +1927,7 @@ func (x *SmartContractMessageData) ProtoReflect() protoreflect.Message { // Deprecated: Use SmartContractMessageData.ProtoReflect.Descriptor instead. 
func (*SmartContractMessageData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{28} + return file_messages_proto_rawDescGZIP(), []int{29} } func (x *SmartContractMessageData) GetEmittingContractIndex() uint64 { @@ -1858,7 +1953,7 @@ type CustomMessageData struct { func (x *CustomMessageData) Reset() { *x = CustomMessageData{} - mi := &file_messages_proto_msgTypes[29] + mi := &file_messages_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1870,7 +1965,7 @@ func (x *CustomMessageData) String() string { func (*CustomMessageData) ProtoMessage() {} func (x *CustomMessageData) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[29] + mi := &file_messages_proto_msgTypes[30] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1883,7 +1978,7 @@ func (x *CustomMessageData) ProtoReflect() protoreflect.Message { // Deprecated: Use CustomMessageData.ProtoReflect.Descriptor instead. func (*CustomMessageData) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{29} + return file_messages_proto_rawDescGZIP(), []int{30} } func (x *CustomMessageData) GetValue() uint64 { @@ -1922,7 +2017,7 @@ type Event struct { func (x *Event) Reset() { *x = Event{} - mi := &file_messages_proto_msgTypes[30] + mi := &file_messages_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1934,7 +2029,7 @@ func (x *Event) String() string { func (*Event) ProtoMessage() {} func (x *Event) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[30] + mi := &file_messages_proto_msgTypes[31] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1947,7 +2042,7 @@ func (x *Event) ProtoReflect() protoreflect.Message { // Deprecated: Use Event.ProtoReflect.Descriptor instead. 
func (*Event) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{30} + return file_messages_proto_rawDescGZIP(), []int{31} } func (x *Event) GetEpoch() uint32 { @@ -2148,14 +2243,17 @@ func (*Event_CustomMessage) isEvent_EventData() {} type GetEventsRequest struct { state protoimpl.MessageState `protogen:"open.v1"` Filters map[string]string `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Pagination *Pagination `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` + Exclude map[string]string `protobuf:"bytes,2,rep,name=exclude,proto3" json:"exclude,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Should []*ShouldFilter `protobuf:"bytes,3,rep,name=should,proto3" json:"should,omitempty"` + Ranges map[string]*Range `protobuf:"bytes,5,rep,name=ranges,proto3" json:"ranges,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Pagination *Pagination `protobuf:"bytes,9,opt,name=pagination,proto3" json:"pagination,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *GetEventsRequest) Reset() { *x = GetEventsRequest{} - mi := &file_messages_proto_msgTypes[31] + mi := &file_messages_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2167,7 +2265,7 @@ func (x *GetEventsRequest) String() string { func (*GetEventsRequest) ProtoMessage() {} func (x *GetEventsRequest) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[31] + mi := &file_messages_proto_msgTypes[32] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2180,7 +2278,7 @@ func (x *GetEventsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetEventsRequest.ProtoReflect.Descriptor instead. 
func (*GetEventsRequest) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{31} + return file_messages_proto_rawDescGZIP(), []int{32} } func (x *GetEventsRequest) GetFilters() map[string]string { @@ -2190,6 +2288,27 @@ func (x *GetEventsRequest) GetFilters() map[string]string { return nil } +func (x *GetEventsRequest) GetExclude() map[string]string { + if x != nil { + return x.Exclude + } + return nil +} + +func (x *GetEventsRequest) GetShould() []*ShouldFilter { + if x != nil { + return x.Should + } + return nil +} + +func (x *GetEventsRequest) GetRanges() map[string]*Range { + if x != nil { + return x.Ranges + } + return nil +} + func (x *GetEventsRequest) GetPagination() *Pagination { if x != nil { return x.Pagination @@ -2208,7 +2327,7 @@ type GetEventsResponse struct { func (x *GetEventsResponse) Reset() { *x = GetEventsResponse{} - mi := &file_messages_proto_msgTypes[32] + mi := &file_messages_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2220,7 +2339,7 @@ func (x *GetEventsResponse) String() string { func (*GetEventsResponse) ProtoMessage() {} func (x *GetEventsResponse) ProtoReflect() protoreflect.Message { - mi := &file_messages_proto_msgTypes[32] + mi := &file_messages_proto_msgTypes[33] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2233,7 +2352,7 @@ func (x *GetEventsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetEventsResponse.ProtoReflect.Descriptor instead. 
func (*GetEventsResponse) Descriptor() ([]byte, []int) { - return file_messages_proto_rawDescGZIP(), []int{32} + return file_messages_proto_rawDescGZIP(), []int{33} } func (x *GetEventsResponse) GetHits() *Hits { @@ -2302,11 +2421,11 @@ const file_messages_proto_rawDesc = "" + "\x1bGetTransactionByHashRequest\x126\n" + "\x04hash\x18\x01 \x01(\tB\"\xbaG\x1f\x92\x02\x1cThe hash of the transaction.R\x04hash\"\x91\x01\n" + "\x1cGetTransactionByHashResponse\x12q\n" + - "\vtransaction\x18\x01 \x01(\v2 .qubic.v2.archive.pb.TransactionB-\xbaG*\x92\x02'The transaction for the requested hash.R\vtransaction\"\x95\x06\n" + + "\vtransaction\x18\x01 \x01(\v2 .qubic.v2.archive.pb.TransactionB-\xbaG*\x92\x02'The transaction for the requested hash.R\vtransaction\"\xa8\x06\n" + "\x1dGetTransactionsForTickRequest\x12S\n" + "\vtick_number\x18\x01 \x01(\rB2\xbaG/\x92\x02,The tick number to get the transactions for.R\n" + - "tickNumber\x12\xbf\x01\n" + - "\afilters\x18\x02 \x03(\v2?.qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntryBd\xbaGa\x92\x02^Filters restrict the results by single values. Allowed: source, destination, amount, inputTypeR\afilters\x12\xb4\x01\n" + + "tickNumber\x12\xd2\x01\n" + + "\afilters\x18\x02 \x03(\v2?.qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntryBw\xbaGt\x92\x02qInclude filters: the value must appear in the matching documents. Allowed: source, destination, amount, inputTypeR\afilters\x12\xb4\x01\n" + "\x06ranges\x18\x03 \x03(\v2>.qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntryB\\\xbaGY\x92\x02VRanges restrict the results by a maximum and minimum value. 
Allowed: amount, inputTypeR\x06ranges\x1a:\n" + "\fFiltersEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + @@ -2328,16 +2447,30 @@ const file_messages_proto_rawDesc = "" + "Less than.H\x01R\x02lt\x12-\n" + "\x03lte\x18\x04 \x01(\tB\x19\xbaG\x16\x92\x02\x13Less than or equal.H\x01R\x03lteB\r\n" + "\vlower_boundB\r\n" + - "\vupper_bound\"\xdf\a\n" + + "\vupper_bound\"\xaa\x03\n" + + "\fShouldFilter\x12{\n" + + "\x05terms\x18\x01 \x03(\v2,.qubic.v2.archive.pb.ShouldFilter.TermsEntryB7\xbaG4\x92\x021Or filters: One or more of the values must match.R\x05terms\x12\x8b\x01\n" + + "\x06ranges\x18\x02 \x03(\v2-.qubic.v2.archive.pb.ShouldFilter.RangesEntryBD\xbaGA\x92\x02>Ranges restrict the results by a maximum and/or minimum value.R\x06ranges\x1a8\n" + + "\n" + + "TermsEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1aU\n" + + "\vRangesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + + "\x05value\x18\x02 \x01(\v2\x1a.qubic.v2.archive.pb.RangeR\x05value:\x028\x01\"\xde\t\n" + "!GetTransactionsForIdentityRequest\x12\x86\x01\n" + - "\bidentity\x18\x01 \x01(\tBj\xbaGg\x92\x02dThe identity to get the transactions for. Incoming and outgoing transactions are queried by default.R\bidentity\x12\x93\x01\n" + - "\afilters\x18\x02 \x03(\v2C.qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntryB4\xbaG1\x92\x02.Filters restrict the results by single values.R\afilters\x12\x9d\x01\n" + - "\x06ranges\x18\x03 \x03(\v2B.qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntryBA\xbaG>\x92\x02;Ranges restrict the results by a maximum and minimum value.R\x06ranges\x12d\n" + + "\bidentity\x18\x01 \x01(\tBj\xbaGg\x92\x02dThe identity to get the transactions for. 
Incoming and outgoing transactions are queried by default.R\bidentity\x12\xa6\x01\n" + + "\afilters\x18\x02 \x03(\v2C.qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntryBG\xbaGD\x92\x02AInclude filters: the value must appear in the matching documents.R\afilters\x12\xaa\x01\n" + + "\aexclude\x18\x03 \x03(\v2C.qubic.v2.archive.pb.GetTransactionsForIdentityRequest.ExcludeEntryBK\xbaGH\x92\x02EExclude filters: the value must not appear in the matching documents.R\aexclude\x12\xa0\x01\n" + + "\x06ranges\x18\x06 \x03(\v2B.qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntryBD\xbaGA\x92\x02>Ranges restrict the results by a maximum and/or minimum value.R\x06ranges\x12d\n" + "\n" + - "pagination\x18\x04 \x01(\v2\x1f.qubic.v2.archive.pb.PaginationB#\xbaG \x92\x02\x1dOptional paging information .R\n" + + "pagination\x18\t \x01(\v2\x1f.qubic.v2.archive.pb.PaginationB#\xbaG \x92\x02\x1dOptional paging information .R\n" + "pagination\x1a:\n" + "\fFiltersEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a:\n" + + "\fExcludeEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1aU\n" + "\vRangesEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + @@ -2452,15 +2585,24 @@ const file_messages_proto_rawDesc = "" + "\n" + "event_dataB\x13\n" + "\x11_transaction_hashB\x0e\n" + - "\f_raw_payload\"\xed\x03\n" + - "\x10GetEventsRequest\x12\xb2\x01\n" + - "\afilters\x18\x01 \x03(\v22.qubic.v2.archive.pb.GetEventsRequest.FiltersEntryBd\xbaGa\x92\x02^Filters restrict the results by single values. 
Allowed: transactionHash, tickNumber, eventTypeR\afilters\x12c\n" + + "\f_raw_payload\"\xe1\a\n" + + "\x10GetEventsRequest\x12\x7f\n" + + "\afilters\x18\x01 \x03(\v22.qubic.v2.archive.pb.GetEventsRequest.FiltersEntryB1\xbaG.\x92\x02+Include filters: all the values must match.R\afilters\x12\x88\x01\n" + + "\aexclude\x18\x02 \x03(\v22.qubic.v2.archive.pb.GetEventsRequest.ExcludeEntryB:\xbaG7\x92\x024Exclude filters: all the values must must not match.R\aexclude\x12v\n" + + "\x06should\x18\x03 \x03(\v2!.qubic.v2.archive.pb.ShouldFilterB;\xbaG8\x92\x025Should filters: one or more of the values must match.R\x06should\x12\x8f\x01\n" + + "\x06ranges\x18\x05 \x03(\v21.qubic.v2.archive.pb.GetEventsRequest.RangesEntryBD\xbaGA\x92\x02>Ranges restrict the results by a maximum and/or minimum value.R\x06ranges\x12c\n" + "\n" + - "pagination\x18\x02 \x01(\v2\x1f.qubic.v2.archive.pb.PaginationB\"\xbaG\x1f\x92\x02\x1cOptional paging information.R\n" + + "pagination\x18\t \x01(\v2\x1f.qubic.v2.archive.pb.PaginationB\"\xbaG\x1f\x92\x02\x1cOptional paging information.R\n" + "pagination\x1a:\n" + "\fFiltersEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + - "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01:\x82\x01\xbaG\x7f:}\x12{filters:\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1a:\n" + + "\fExcludeEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\x1aU\n" + + "\vRangesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x120\n" + + "\x05value\x18\x02 \x01(\v2\x1a.qubic.v2.archive.pb.RangeR\x05value:\x028\x01:\x82\x01\xbaG\x7f:}\x12{filters:\n" + " transactionHash: zvqvtjzvgwgpegmalkkjedhbdrnckqcfthpzfqzxbcljttljzidmvaxalxyz\n" + "pagination:\n" + " offset: 0\n" + @@ -2481,7 +2623,7 @@ func file_messages_proto_rawDescGZIP() []byte { return file_messages_proto_rawDescData } -var file_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 38) +var file_messages_proto_msgTypes = 
make([]protoimpl.MessageInfo, 44) var file_messages_proto_goTypes = []any{ (*LastProcessedTick)(nil), // 0: qubic.v2.archive.pb.LastProcessedTick (*NextAvailableTick)(nil), // 1: qubic.v2.archive.pb.NextAvailableTick @@ -2494,66 +2636,80 @@ var file_messages_proto_goTypes = []any{ (*GetTransactionsForTickRequest)(nil), // 8: qubic.v2.archive.pb.GetTransactionsForTickRequest (*GetTransactionsForTickResponse)(nil), // 9: qubic.v2.archive.pb.GetTransactionsForTickResponse (*Range)(nil), // 10: qubic.v2.archive.pb.Range - (*GetTransactionsForIdentityRequest)(nil), // 11: qubic.v2.archive.pb.GetTransactionsForIdentityRequest - (*Hits)(nil), // 12: qubic.v2.archive.pb.Hits - (*GetTransactionsForIdentityResponse)(nil), // 13: qubic.v2.archive.pb.GetTransactionsForIdentityResponse - (*GetTickDataRequest)(nil), // 14: qubic.v2.archive.pb.GetTickDataRequest - (*GetTickDataResponse)(nil), // 15: qubic.v2.archive.pb.GetTickDataResponse - (*GetProcessedTickIntervalsResponse)(nil), // 16: qubic.v2.archive.pb.GetProcessedTickIntervalsResponse - (*GetLastProcessedTickResponse)(nil), // 17: qubic.v2.archive.pb.GetLastProcessedTickResponse - (*GetComputorListsForEpochRequest)(nil), // 18: qubic.v2.archive.pb.GetComputorListsForEpochRequest - (*ComputorList)(nil), // 19: qubic.v2.archive.pb.ComputorList - (*GetComputorListsForEpochResponse)(nil), // 20: qubic.v2.archive.pb.GetComputorListsForEpochResponse - (*HealthResponse)(nil), // 21: qubic.v2.archive.pb.HealthResponse - (*QuTransferData)(nil), // 22: qubic.v2.archive.pb.QuTransferData - (*AssetIssuanceData)(nil), // 23: qubic.v2.archive.pb.AssetIssuanceData - (*AssetOwnershipChangeData)(nil), // 24: qubic.v2.archive.pb.AssetOwnershipChangeData - (*AssetPossessionChangeData)(nil), // 25: qubic.v2.archive.pb.AssetPossessionChangeData - (*BurningData)(nil), // 26: qubic.v2.archive.pb.BurningData - (*ContractReserveDeductionData)(nil), // 27: qubic.v2.archive.pb.ContractReserveDeductionData - (*SmartContractMessageData)(nil), // 28: 
qubic.v2.archive.pb.SmartContractMessageData - (*CustomMessageData)(nil), // 29: qubic.v2.archive.pb.CustomMessageData - (*Event)(nil), // 30: qubic.v2.archive.pb.Event - (*GetEventsRequest)(nil), // 31: qubic.v2.archive.pb.GetEventsRequest - (*GetEventsResponse)(nil), // 32: qubic.v2.archive.pb.GetEventsResponse - nil, // 33: qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntry - nil, // 34: qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry - nil, // 35: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntry - nil, // 36: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry - nil, // 37: qubic.v2.archive.pb.GetEventsRequest.FiltersEntry + (*ShouldFilter)(nil), // 11: qubic.v2.archive.pb.ShouldFilter + (*GetTransactionsForIdentityRequest)(nil), // 12: qubic.v2.archive.pb.GetTransactionsForIdentityRequest + (*Hits)(nil), // 13: qubic.v2.archive.pb.Hits + (*GetTransactionsForIdentityResponse)(nil), // 14: qubic.v2.archive.pb.GetTransactionsForIdentityResponse + (*GetTickDataRequest)(nil), // 15: qubic.v2.archive.pb.GetTickDataRequest + (*GetTickDataResponse)(nil), // 16: qubic.v2.archive.pb.GetTickDataResponse + (*GetProcessedTickIntervalsResponse)(nil), // 17: qubic.v2.archive.pb.GetProcessedTickIntervalsResponse + (*GetLastProcessedTickResponse)(nil), // 18: qubic.v2.archive.pb.GetLastProcessedTickResponse + (*GetComputorListsForEpochRequest)(nil), // 19: qubic.v2.archive.pb.GetComputorListsForEpochRequest + (*ComputorList)(nil), // 20: qubic.v2.archive.pb.ComputorList + (*GetComputorListsForEpochResponse)(nil), // 21: qubic.v2.archive.pb.GetComputorListsForEpochResponse + (*HealthResponse)(nil), // 22: qubic.v2.archive.pb.HealthResponse + (*QuTransferData)(nil), // 23: qubic.v2.archive.pb.QuTransferData + (*AssetIssuanceData)(nil), // 24: qubic.v2.archive.pb.AssetIssuanceData + (*AssetOwnershipChangeData)(nil), // 25: qubic.v2.archive.pb.AssetOwnershipChangeData + (*AssetPossessionChangeData)(nil), // 26: 
qubic.v2.archive.pb.AssetPossessionChangeData + (*BurningData)(nil), // 27: qubic.v2.archive.pb.BurningData + (*ContractReserveDeductionData)(nil), // 28: qubic.v2.archive.pb.ContractReserveDeductionData + (*SmartContractMessageData)(nil), // 29: qubic.v2.archive.pb.SmartContractMessageData + (*CustomMessageData)(nil), // 30: qubic.v2.archive.pb.CustomMessageData + (*Event)(nil), // 31: qubic.v2.archive.pb.Event + (*GetEventsRequest)(nil), // 32: qubic.v2.archive.pb.GetEventsRequest + (*GetEventsResponse)(nil), // 33: qubic.v2.archive.pb.GetEventsResponse + nil, // 34: qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntry + nil, // 35: qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry + nil, // 36: qubic.v2.archive.pb.ShouldFilter.TermsEntry + nil, // 37: qubic.v2.archive.pb.ShouldFilter.RangesEntry + nil, // 38: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntry + nil, // 39: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.ExcludeEntry + nil, // 40: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry + nil, // 41: qubic.v2.archive.pb.GetEventsRequest.FiltersEntry + nil, // 42: qubic.v2.archive.pb.GetEventsRequest.ExcludeEntry + nil, // 43: qubic.v2.archive.pb.GetEventsRequest.RangesEntry } var file_messages_proto_depIdxs = []int32{ 2, // 0: qubic.v2.archive.pb.GetTransactionByHashResponse.transaction:type_name -> qubic.v2.archive.pb.Transaction - 33, // 1: qubic.v2.archive.pb.GetTransactionsForTickRequest.filters:type_name -> qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntry - 34, // 2: qubic.v2.archive.pb.GetTransactionsForTickRequest.ranges:type_name -> qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry + 34, // 1: qubic.v2.archive.pb.GetTransactionsForTickRequest.filters:type_name -> qubic.v2.archive.pb.GetTransactionsForTickRequest.FiltersEntry + 35, // 2: qubic.v2.archive.pb.GetTransactionsForTickRequest.ranges:type_name -> 
qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry 2, // 3: qubic.v2.archive.pb.GetTransactionsForTickResponse.transactions:type_name -> qubic.v2.archive.pb.Transaction - 35, // 4: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.filters:type_name -> qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntry - 36, // 5: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.ranges:type_name -> qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry - 5, // 6: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.pagination:type_name -> qubic.v2.archive.pb.Pagination - 12, // 7: qubic.v2.archive.pb.GetTransactionsForIdentityResponse.hits:type_name -> qubic.v2.archive.pb.Hits - 2, // 8: qubic.v2.archive.pb.GetTransactionsForIdentityResponse.transactions:type_name -> qubic.v2.archive.pb.Transaction - 3, // 9: qubic.v2.archive.pb.GetTickDataResponse.tick_data:type_name -> qubic.v2.archive.pb.TickData - 4, // 10: qubic.v2.archive.pb.GetProcessedTickIntervalsResponse.processed_tick_intervals:type_name -> qubic.v2.archive.pb.ProcessedTickInterval - 19, // 11: qubic.v2.archive.pb.GetComputorListsForEpochResponse.computors_lists:type_name -> qubic.v2.archive.pb.ComputorList - 22, // 12: qubic.v2.archive.pb.Event.qu_transfer:type_name -> qubic.v2.archive.pb.QuTransferData - 23, // 13: qubic.v2.archive.pb.Event.asset_issuance:type_name -> qubic.v2.archive.pb.AssetIssuanceData - 24, // 14: qubic.v2.archive.pb.Event.asset_ownership_change:type_name -> qubic.v2.archive.pb.AssetOwnershipChangeData - 25, // 15: qubic.v2.archive.pb.Event.asset_possession_change:type_name -> qubic.v2.archive.pb.AssetPossessionChangeData - 26, // 16: qubic.v2.archive.pb.Event.burning:type_name -> qubic.v2.archive.pb.BurningData - 27, // 17: qubic.v2.archive.pb.Event.contract_reserve_deduction:type_name -> qubic.v2.archive.pb.ContractReserveDeductionData - 28, // 18: qubic.v2.archive.pb.Event.smart_contract_message:type_name -> 
qubic.v2.archive.pb.SmartContractMessageData - 29, // 19: qubic.v2.archive.pb.Event.custom_message:type_name -> qubic.v2.archive.pb.CustomMessageData - 37, // 20: qubic.v2.archive.pb.GetEventsRequest.filters:type_name -> qubic.v2.archive.pb.GetEventsRequest.FiltersEntry - 5, // 21: qubic.v2.archive.pb.GetEventsRequest.pagination:type_name -> qubic.v2.archive.pb.Pagination - 12, // 22: qubic.v2.archive.pb.GetEventsResponse.hits:type_name -> qubic.v2.archive.pb.Hits - 30, // 23: qubic.v2.archive.pb.GetEventsResponse.events:type_name -> qubic.v2.archive.pb.Event - 10, // 24: qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range - 10, // 25: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range - 26, // [26:26] is the sub-list for method output_type - 26, // [26:26] is the sub-list for method input_type - 26, // [26:26] is the sub-list for extension type_name - 26, // [26:26] is the sub-list for extension extendee - 0, // [0:26] is the sub-list for field type_name + 36, // 4: qubic.v2.archive.pb.ShouldFilter.terms:type_name -> qubic.v2.archive.pb.ShouldFilter.TermsEntry + 37, // 5: qubic.v2.archive.pb.ShouldFilter.ranges:type_name -> qubic.v2.archive.pb.ShouldFilter.RangesEntry + 38, // 6: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.filters:type_name -> qubic.v2.archive.pb.GetTransactionsForIdentityRequest.FiltersEntry + 39, // 7: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.exclude:type_name -> qubic.v2.archive.pb.GetTransactionsForIdentityRequest.ExcludeEntry + 40, // 8: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.ranges:type_name -> qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry + 5, // 9: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.pagination:type_name -> qubic.v2.archive.pb.Pagination + 13, // 10: qubic.v2.archive.pb.GetTransactionsForIdentityResponse.hits:type_name -> qubic.v2.archive.pb.Hits + 2, 
// 11: qubic.v2.archive.pb.GetTransactionsForIdentityResponse.transactions:type_name -> qubic.v2.archive.pb.Transaction + 3, // 12: qubic.v2.archive.pb.GetTickDataResponse.tick_data:type_name -> qubic.v2.archive.pb.TickData + 4, // 13: qubic.v2.archive.pb.GetProcessedTickIntervalsResponse.processed_tick_intervals:type_name -> qubic.v2.archive.pb.ProcessedTickInterval + 20, // 14: qubic.v2.archive.pb.GetComputorListsForEpochResponse.computors_lists:type_name -> qubic.v2.archive.pb.ComputorList + 23, // 15: qubic.v2.archive.pb.Event.qu_transfer:type_name -> qubic.v2.archive.pb.QuTransferData + 24, // 16: qubic.v2.archive.pb.Event.asset_issuance:type_name -> qubic.v2.archive.pb.AssetIssuanceData + 25, // 17: qubic.v2.archive.pb.Event.asset_ownership_change:type_name -> qubic.v2.archive.pb.AssetOwnershipChangeData + 26, // 18: qubic.v2.archive.pb.Event.asset_possession_change:type_name -> qubic.v2.archive.pb.AssetPossessionChangeData + 27, // 19: qubic.v2.archive.pb.Event.burning:type_name -> qubic.v2.archive.pb.BurningData + 28, // 20: qubic.v2.archive.pb.Event.contract_reserve_deduction:type_name -> qubic.v2.archive.pb.ContractReserveDeductionData + 29, // 21: qubic.v2.archive.pb.Event.smart_contract_message:type_name -> qubic.v2.archive.pb.SmartContractMessageData + 30, // 22: qubic.v2.archive.pb.Event.custom_message:type_name -> qubic.v2.archive.pb.CustomMessageData + 41, // 23: qubic.v2.archive.pb.GetEventsRequest.filters:type_name -> qubic.v2.archive.pb.GetEventsRequest.FiltersEntry + 42, // 24: qubic.v2.archive.pb.GetEventsRequest.exclude:type_name -> qubic.v2.archive.pb.GetEventsRequest.ExcludeEntry + 11, // 25: qubic.v2.archive.pb.GetEventsRequest.should:type_name -> qubic.v2.archive.pb.ShouldFilter + 43, // 26: qubic.v2.archive.pb.GetEventsRequest.ranges:type_name -> qubic.v2.archive.pb.GetEventsRequest.RangesEntry + 5, // 27: qubic.v2.archive.pb.GetEventsRequest.pagination:type_name -> qubic.v2.archive.pb.Pagination + 13, // 28: 
qubic.v2.archive.pb.GetEventsResponse.hits:type_name -> qubic.v2.archive.pb.Hits + 31, // 29: qubic.v2.archive.pb.GetEventsResponse.events:type_name -> qubic.v2.archive.pb.Event + 10, // 30: qubic.v2.archive.pb.GetTransactionsForTickRequest.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range + 10, // 31: qubic.v2.archive.pb.ShouldFilter.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range + 10, // 32: qubic.v2.archive.pb.GetTransactionsForIdentityRequest.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range + 10, // 33: qubic.v2.archive.pb.GetEventsRequest.RangesEntry.value:type_name -> qubic.v2.archive.pb.Range + 34, // [34:34] is the sub-list for method output_type + 34, // [34:34] is the sub-list for method input_type + 34, // [34:34] is the sub-list for extension type_name + 34, // [34:34] is the sub-list for extension extendee + 0, // [0:34] is the sub-list for field type_name } func init() { file_messages_proto_init() } @@ -2567,7 +2723,7 @@ func file_messages_proto_init() { (*Range_Lt)(nil), (*Range_Lte)(nil), } - file_messages_proto_msgTypes[30].OneofWrappers = []any{ + file_messages_proto_msgTypes[31].OneofWrappers = []any{ (*Event_QuTransfer)(nil), (*Event_AssetIssuance)(nil), (*Event_AssetOwnershipChange)(nil), @@ -2583,7 +2739,7 @@ func file_messages_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_messages_proto_rawDesc), len(file_messages_proto_rawDesc)), NumEnums: 0, - NumMessages: 38, + NumMessages: 44, NumExtensions: 0, NumServices: 0, }, diff --git a/v2/api/archive-query-service/v2/messages.proto b/v2/api/archive-query-service/v2/messages.proto index 9fc6031..b4021df 100644 --- a/v2/api/archive-query-service/v2/messages.proto +++ b/v2/api/archive-query-service/v2/messages.proto @@ -75,7 +75,7 @@ message GetTransactionsForTickRequest { }; // the tick number to get the transactions for. 
uint32 tick_number = 1 [(openapi.v3.property) = {description:"The tick number to get the transactions for."}]; - map filters = 2 [(openapi.v3.property) = {description:"Filters restrict the results by single values. Allowed: source, destination, amount, inputType"}]; + map filters = 2 [(openapi.v3.property) = {description:"Include filters: the value must appear in the matching documents. Allowed: source, destination, amount, inputType"}]; map ranges = 3 [(openapi.v3.property) = {description:"Ranges restrict the results by a maximum and minimum value. Allowed: amount, inputType"}]; } @@ -84,7 +84,23 @@ message GetTransactionsForTickResponse { repeated Transaction transactions = 1 [(openapi.v3.property) = {description:"The transactions for the requested tick number."}]; } -// Range +// Range filter +// +// | Name | Type | Necessity | Description | +// |-----------|--------|-----------|-------------------------------------------| +// | gt | string | optional | Greater than. | +// | gte | string | optional | Greater than or equal to. | +// | lt | string | optional | Less than. | +// | lte | string | optional | Less than or equal to. | +// +// One lower bound and one upper bound can be specified. One bound is needed. A range with size of 0 or 1 is not allowed. +// +// Examples +// +// ``` +// "amount": { "gt": "1000000" } +// "tickNumber": { "gte": "25563000", "lte": "28300000" } +// ``` message Range { oneof lower_bound { string gt = 1 [(openapi.v3.property) = {description:"Greater than."}]; @@ -96,6 +112,28 @@ message Range { } } +// Should Filters +// +// One should filter can contain multiple terms and ranges. It needs at least two query clauses. +// At least one of them has to match by default. See term and range filter documentation for examples. 
+// +// Example +// +// ``` +// "should": [ +// { +// "terms": { +// "source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID", +// "destination": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" +// } +// } +// ] +// ``` +message ShouldFilter { + map terms = 1 [(openapi.v3.property) = {description:"Or filters: One or more of the values must match."}]; + map ranges = 2 [(openapi.v3.property) = {description:"Ranges restrict the results by a maximum and/or minimum value."}]; +} + // GetTransactionsForIdentityRequest message GetTransactionsForIdentityRequest { option (openapi.v3.schema) = { @@ -105,9 +143,10 @@ message GetTransactionsForIdentityRequest { }; string identity = 1 [(openapi.v3.property) = {description:"The identity to get the transactions for. Incoming and outgoing transactions are queried by default."}]; - map filters = 2 [(openapi.v3.property) = {description:"Filters restrict the results by single values."}]; - map ranges = 3 [(openapi.v3.property) = {description:"Ranges restrict the results by a maximum and minimum value."}]; - Pagination pagination = 4 [(openapi.v3.property) = {description:"Optional paging information ."}]; + map filters = 2 [(openapi.v3.property) = {description:"Include filters: the value must appear in the matching documents."}]; + map exclude = 3 [(openapi.v3.property) = {description:"Exclude filters: the value must not appear in the matching documents."}]; + map ranges = 6 [(openapi.v3.property) = {description:"Ranges restrict the results by a maximum and/or minimum value."}]; + Pagination pagination = 9 [(openapi.v3.property) = {description:"Optional paging information ."}]; } // Hits @@ -262,8 +301,11 @@ message GetEventsRequest { yaml: "filters:\n transactionHash: zvqvtjzvgwgpegmalkkjedhbdrnckqcfthpzfqzxbcljttljzidmvaxalxyz\npagination:\n offset: 0\n size: 10" }; }; - map filters = 1 [(openapi.v3.property) = {description:"Filters restrict the results by single values. 
Allowed: transactionHash, tickNumber, eventType"}]; - Pagination pagination = 2 [(openapi.v3.property) = {description:"Optional paging information."}]; + map filters = 1 [(openapi.v3.property) = {description:"Include filters: all the values must match."}]; + map exclude = 2 [(openapi.v3.property) = {description:"Exclude filters: all the values must must not match."}]; + repeated ShouldFilter should = 3 [(openapi.v3.property) = {description:"Should filters: one or more of the values must match."}]; + map ranges = 5 [(openapi.v3.property) = {description:"Ranges restrict the results by a maximum and/or minimum value."}]; + Pagination pagination = 9 [(openapi.v3.property) = {description:"Optional paging information."}]; } // GetEventsResponse diff --git a/v2/api/archive-query-service/v2/query_services.openapi.yaml b/v2/api/archive-query-service/v2/query_services.openapi.yaml index 56f316b..72e4ad9 100644 --- a/v2/api/archive-query-service/v2/query_services.openapi.yaml +++ b/v2/api/archive-query-service/v2/query_services.openapi.yaml @@ -33,8 +33,65 @@ paths: post: tags: - Events (Beta) - summary: Get Events - description: Query event logs with optional filters. Beta endpoint. + summary: Get Event Logs + description: "Query event logs with optional filters.\n\n ## Please note: Beta\ + \ version – may be subject to incompatible changes.\n\n ### Request structure\n\ + \n | Name | Type | Necessity | Description \ + \ |\n |------------|--------------------|-----------|----------------------------------------------------------------|\n\ + \ | filters | map | optional | The filter value must appear\ + \ in the matching documents. |\n | exclude | map\ + \ | optional | The filter value must not appear in the matching documents.\ + \ |\n | should | repeated ShouldFilter | optional | At least one of\ + \ the filter values must match. Needs multiple terms/ranges. |\n | ranges\ + \ | map | optional | Filters that restrict results to\ + \ a value range. 
|\n | pagination | Pagination | optional\ + \ | Allows to specify the first record and number of records. |\n\n\ + \ ### Filters\n\n A filter property must only be used in one filter. See below\ + \ for allowed properties.\n\n #### Include filter properties\n\n | Name \ + \ | Type | Format | Description \ + \ |\n |-----------------|---------|----------|---------------------------------------------------------|\n\ + \ | source | string | 60 character identity, up to 5, comma separated.\ + \ | Only find logs with this source. |\n | destination | string | 60 character\ + \ identity, up to 5, comma separated. | Only find logs with this destination.\ + \ |\n | transactionHash | string | Hash | Only find events for the\ + \ specified transaction hash. |\n | tickNumber | string | Numeric\ + \ | Only find events for the specified tick number. |\n | logType\ + \ | string | Numeric | Only find events with the specified type\ + \ (0,1,2,3,8,13).|\n\n #### Exclude filter properties\n\n | Name \ + \ | Type | Format | Description \ + \ |\n |-----------------|---------|----------|---------------------------------------------------------|\n\ + \ | source | string | 60 character identity, up to 5, comma separated.\ + \ | Only find logs with another source. |\n | destination | string | 60 character\ + \ identity, up to 5, comma separated. | Only find logs with another destination.\ + \ |\n\n #### Should filter term properties\n\n | Name | Type\ + \ | Format | Description |\n\ + \ |-----------------|---------|----------|---------------------------------------------------------|\n\ + \ | source | string | 60 character identity, up to 5, comma separated.\ + \ | Find logs with this source. |\n | destination | string | 60 character\ + \ identity, up to 5, comma separated. | Find logs with this source. |\n\ + \ | amount | string | Numeric | Find logs with this amount. |\n | numberOfShares\ + \ | string | Numeric | Find logs with these number of shares. 
|\n\n ####\ + \ Should filter range properties\n\n | Name | Type | Format\ + \ | Description |\n |-----------------|---------|----------|---------------------------------------------------------|\n\ + \ | amount | string | Numeric | Find logs in the amount range. |\n |\ + \ numberOfShares | string | Numeric | Find logs in the number of shares range.\ + \ |\n\n #### Range filter properties\n\n Ranges restrict the results to a\ + \ range of values by defining a maximum and/or minimum value.\n\n | Name \ + \ | Type | Format | Description \ + \ |\n |------------|--------|------------------------------------------|---------------------------------------------|\n\ + \ | amount | string | Numeric | Only\ + \ find logs in the amount range. |\n | numberOfShares | string | Numeric\ + \ | Only find logs withing the number of shares\ + \ range. |\n | tickNumber | string | Numeric \ + \ | Only find logs in the tick range. |\n | timestamp | string\ + \ | Numeric (Unix Timestamp in milliseconds) | Only find logs in the time\ + \ range. |\n\n ### Pagination\n\n | Name | Type | Necessity | Description\ + \ \ + \ |\n |--------|--------|-----------|-----------------------------------------------------------------------------------------------------|\n\ + \ | offset | uint32 | optional | The offset of the first record to return.\ + \ Defaults to zero (first record). Maximum offset is 10000. |\n | size |\ + \ uint32 | optional | Defaults to 10. Maximum size is 1000. Zero value is\ + \ ignored (uses default). |" operationId: ArchiveQueryService_GetEvents requestBody: content: @@ -138,57 +195,43 @@ paths: \ |\n |------------|--------------------|-----------|--------------------------------------------------------------------------------|\n\ \ | identity | string | required | 60 characters uppercase\ \ identity. |\n | filters \ - \ | map | optional | Filters that restrict results to single\ - \ value. |\n | ranges | map\ - \ | optional | Filters that restrict results to a value range. 
\ - \ |\n | pagination | Pagination | optional \ - \ | Allows to specify the first record and the number of records to be retrieved.\ - \ |\n\n Without filters and ranges all transactions from and to that identity\ - \ ordered by tick number descending are returned.\n\n ### Filters\n\n Filters\ - \ restrict the results by single values.\n\n #### Allowed properties\n\n |\ - \ Name | Type | Format | Description \ - \ |\n |-------------|---------|------------------------|--------------------------------------------------------------------|\n\ + \ | map | optional | The filter value must appear in the matching\ + \ documents. |\n | exclude | map\ + \ | optional | The filter value must not appear in the matching documents.\ + \ |\n | ranges | map | optional | Filters\ + \ that restrict results to a value range. |\n\ + \ | pagination | Pagination | optional | Allows to specify the first\ + \ record and the number of records to be retrieved. |\n\n Without filters\ + \ and ranges all transactions from and to that identity ordered by tick number\ + \ descending are returned.\n\n ### Filters\n\n A filter property must only\ + \ be used in one filter. See below for allowed properties.\n\n #### Include\ + \ filter properties\n\n | Name | Type | Format |\ + \ Description |\n |-------------|---------|------------------------|--------------------------------------------------------------------|\n\ \ | source | string | 60 character identity, up to 5, comma separated.\ - \ | Only find transactions that were sent from the specified identities. |\n\ - \ | source-exclude | string | 60 character identity, up to 5, comma separated.\ - \ | Only find transactions that were not sent from the specified identities.\ - \ |\n | destination | string | 60 character identity, up to 5, comma separated.\ - \ | Only find transactions that were sent to the specified identities. \ - \ |\n | destination-exclude | string | 60 character identity, up to 5, comma\ - \ separated. 
| Only find transactions that were not sent to the specified\ - \ identities. |\n | amount | string | Numeric | Only\ - \ find transactions with the specified amount. |\n | inputType\ - \ | string | Numeric | Only find transactions with the specified\ - \ input type. |\n | tickNumber | string | Numeric \ - \ | Only find transactions with the specified tick number. \ - \ |\n\n source` and `source-exclude` are mutually exclusive.\n destination`\ - \ and `destination-exclude` are mutually exclusive.\n\n #### Examples\n\n\ - \ ```\n \"source\": \"IIJHZSNPDRYYXCQBWNGKBSWYYDCARTYPOBXGOXZEVEZMMWYHPBVXZLJARRCB\"\ - ,\n \"destination\": \"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB\"\ - \n \"amount\": \"1000000\"\n \"inputType\": \"0\"\n ```\n\n ### Ranges\n\n\ - \ Ranges restrict the results by a range of values. On range per property\ - \ is supported.\n\n #### Allowed properties\n\n | Name | Type | Format\ - \ | Description \ - \ |\n |------------|--------|------------------------------------------|---------------------------------------------|\n\ + \ | Only find transactions with the specified source. |\n | destination |\ + \ string | 60 character identity, up to 5, comma separated. | Only find transactions\ + \ with the specified destination. |\n | amount | string | Numeric \ + \ | Only find transactions with the specified amount. \ + \ |\n | inputType | string | Numeric | Only find\ + \ transactions with the specified input type. |\n | tickNumber\ + \ | string | Numeric | Only find transactions with the specified\ + \ tick number. |\n\n #### Exclude filter properties\n\n | Name\ + \ | Type | Format | Description \ + \ |\n |-----------------|---------|----------|---------------------------------------------------------|\n\ + \ | source | string | 60 character identity, up to 5, comma separated.\ + \ | Only find transactions with another source. |\n | destination | string\ + \ | 60 character identity, up to 5, comma separated. 
| Only find transactions\ + \ with another destination. |\n\n #### Range filter properties\n\n Ranges\ + \ restrict the results by a range of values. One range per property is supported.\n\ + \n | Name | Type | Format | Description\ + \ |\n |------------|--------|------------------------------------------|---------------------------------------------|\n\ \ | amount | string | Numeric | Only\ \ find transactions in amount range. |\n | tickNumber | string | Numeric\ \ | Only find transactions in tick range.\ \ |\n | inputType | string | Numeric \ \ | Only find transactions in input type range. |\n | timestamp | string\ \ | Numeric (Unix Timestamp in milliseconds) | Only find transactions in time\ - \ range. |\n\n #### Range definition\n\n A range with size of 0 or 1\ - \ is not allowed.\n\n | Name | Type | Necessity | Description \ - \ |\n |-----------|--------|-----------|-------------------------------------------|\n\ - \ | field | string | required | Name of the field you wish to search for.\ - \ |\n | gt | string | optional | Greater than. \ - \ |\n | gte | string | optional | Greater than or equal to.\ - \ |\n | lt | string | optional | Less than. \ - \ |\n | lte | string | optional | Less than\ - \ or equal to. |\n\n Only one lower bound and one upper\ - \ bound can be specified.\n\n #### Examples\n\n ```\n \"amount\": { \"gt\"\ - : \"1000000\" }\n \"tickNumber\": { \"gte\": \"25563000\", \"lte\": \"28300000\"\ - \ }\n \"inputType\": { \"gt\": \"0\" }\n \"timestamp\": { \"lt\": \"1757376000000\"\ - \ }\n ```\n\n ### Pagination\n\n | Name | Type | Necessity | Description\ + \ range. 
|\n\n ### Pagination\n\n | Name | Type | Necessity | Description\ \ \ \ |\n |--------|--------|-----------|-----------------------------------------------------------------------------------------------------|\n\ \ | offset | uint32 | optional | The offset of the first record to return.\ @@ -435,8 +478,22 @@ components: type: object additionalProperties: type: string - description: 'Filters restrict the results by single values. Allowed: transactionHash, - tickNumber, eventType' + description: 'Include filters: all the values must match.' + exclude: + type: object + additionalProperties: + type: string + description: 'Exclude filters: all the values must must not match.' + should: + type: array + items: + $ref: '#/components/schemas/ShouldFilter' + description: 'Should filters: one or more of the values must match.' + ranges: + type: object + additionalProperties: + $ref: '#/components/schemas/Range' + description: Ranges restrict the results by a maximum and/or minimum value. pagination: $ref: '#/components/schemas/Pagination' description: GetEventsRequest @@ -525,12 +582,18 @@ components: type: object additionalProperties: type: string - description: Filters restrict the results by single values. + description: 'Include filters: the value must appear in the matching documents.' + exclude: + type: object + additionalProperties: + type: string + description: 'Exclude filters: the value must not appear in the matching + documents.' ranges: type: object additionalProperties: $ref: '#/components/schemas/Range' - description: Ranges restrict the results by a maximum and minimum value. + description: Ranges restrict the results by a maximum and/or minimum value. pagination: $ref: '#/components/schemas/Pagination' description: GetTransactionsForIdentityRequest @@ -567,8 +630,8 @@ components: type: object additionalProperties: type: string - description: 'Filters restrict the results by single values. 
Allowed: source, - destination, amount, inputType' + description: 'Include filters: the value must appear in the matching documents. + Allowed: source, destination, amount, inputType' ranges: type: object additionalProperties: @@ -665,7 +728,36 @@ components: lte: type: string description: Less than or equal. - description: Range + description: "Range filter\n\n | Name | Type | Necessity | Description\ + \ |\n |-----------|--------|-----------|-------------------------------------------|\n\ + \ | gt | string | optional | Greater than. \ + \ |\n | gte | string | optional | Greater than or equal to. \ + \ |\n | lt | string | optional | Less than. \ + \ |\n | lte | string | optional | Less than\ + \ or equal to. |\n\n One lower bound and one upper bound\ + \ can be specified. One bound is needed. A range with size of 0 or 1 is not\ + \ allowed.\n\n Examples\n\n ```\n \"amount\": { \"gt\": \"1000000\" }\n \"\ + tickNumber\": { \"gte\": \"25563000\", \"lte\": \"28300000\" }\n ```" + ShouldFilter: + type: object + properties: + terms: + type: object + additionalProperties: + type: string + description: 'Or filters: One or more of the values must match.' + ranges: + type: object + additionalProperties: + $ref: '#/components/schemas/Range' + description: Ranges restrict the results by a maximum and/or minimum value. + description: "Should Filters\n\n One should filter can contain multiple terms\ + \ and ranges. It needs at least two query clauses.\n At least one of them\ + \ has to match by default. 
See term and range filter documentation for examples.\n\ + \n Example\n\n ```\n \"should\": [\n {\n \"terms\": {\n \"source\"\ + : \"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID\",\n \ + \ \"destination\": \"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID\"\ + \n }\n }\n ]\n ```" SmartContractMessageData: type: object properties: diff --git a/v2/api/archive-query-service/v2/query_services.pb.go b/v2/api/archive-query-service/v2/query_services.pb.go index 3d7f7b8..6cce98f 100644 --- a/v2/api/archive-query-service/v2/query_services.pb.go +++ b/v2/api/archive-query-service/v2/query_services.pb.go @@ -27,7 +27,7 @@ var File_query_services_proto protoreflect.FileDescriptor const file_query_services_proto_rawDesc = "" + "\n" + - "\x14query_services.proto\x12\x13qubic.v2.archive.pb\x1a\x0emessages.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1bopenapiv3/annotations.proto2\xfd\x0e\n" + + "\x14query_services.proto\x12\x13qubic.v2.archive.pb\x1a\x0emessages.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1bopenapiv3/annotations.proto2\xc9\x0e\n" + "\x13ArchiveQueryService\x12\xfb\x01\n" + "\x14GetTransactionByHash\x120.qubic.v2.archive.pb.GetTransactionByHashRequest\x1a1.qubic.v2.archive.pb.GetTransactionByHashResponse\"~\xbaGN\n" + "\fTransactions\x12\x17Get Transaction By Hash\x1a%Get a single transaction by its hash.\x82\xd3\xe4\x93\x02':\x01*b\vtransaction\"\x15/getTransactionByHash\x12\xdf\x01\n" + @@ -42,10 +42,9 @@ const file_query_services_proto_rawDesc = "" + "\x14GetLastProcessedTick\x12\x16.google.protobuf.Empty\x1a1.qubic.v2.archive.pb.GetLastProcessedTickResponse\"B\xbaG\"\n" + "\aArchive\x12\x17Get Last Processed Tick\x82\xd3\xe4\x93\x02\x17\x12\x15/getLastProcessedTick\x12\xd3\x01\n" + "\x19GetProcessedTickIntervals\x12\x16.google.protobuf.Empty\x1a6.qubic.v2.archive.pb.GetProcessedTickIntervalsResponse\"f\xbaG'\n" + - "\aArchive\x12\x1cGet Processed Tick 
Intervals\x82\xd3\xe4\x93\x026b\x18processed_tick_intervals\x12\x1a/getProcessedTickIntervals\x12\xc7\x01\n" + - "\tGetEvents\x12%.qubic.v2.archive.pb.GetEventsRequest\x1a&.qubic.v2.archive.pb.GetEventsResponse\"k\xbaGS\n" + - "\rEvents (Beta)\x12\n" + - "Get Events\x1a6Query event logs with optional filters. Beta endpoint.\x82\xd3\xe4\x93\x02\x0f:\x01*\"\n" + + "\aArchive\x12\x1cGet Processed Tick Intervals\x82\xd3\xe4\x93\x026b\x18processed_tick_intervals\x12\x1a/getProcessedTickIntervals\x12\x93\x01\n" + + "\tGetEvents\x12%.qubic.v2.archive.pb.GetEventsRequest\x1a&.qubic.v2.archive.pb.GetEventsResponse\"7\xbaG\x1f\n" + + "\rEvents (Beta)\x12\x0eGet Event Logs\x82\xd3\xe4\x93\x02\x0f:\x01*\"\n" + "/getEvents\x12\xcd\x01\n" + "\tGetHealth\x12\x16.google.protobuf.Empty\x1a#.qubic.v2.archive.pb.HealthResponse\"\x82\x01\xbaGp\x12\n" + "Get Health\x1abHealth check. This is for internal use only and can change any time. Do not rely on this endpoint.\x82\xd3\xe4\x93\x02\t\x12\a/healthB\xfe\x03\xbaG\xce\x03\x12H\n" + diff --git a/v2/api/archive-query-service/v2/query_services.proto b/v2/api/archive-query-service/v2/query_services.proto index e869ee4..aaf076f 100644 --- a/v2/api/archive-query-service/v2/query_services.proto +++ b/v2/api/archive-query-service/v2/query_services.proto @@ -134,7 +134,8 @@ service ArchiveQueryService { // | Name | Type | Necessity | Description | // |------------|--------------------|-----------|--------------------------------------------------------------------------------| // | identity | string | required | 60 characters uppercase identity. | - // | filters | map | optional | Filters that restrict results to single value. | + // | filters | map | optional | The filter value must appear in the matching documents. | + // | exclude | map | optional | The filter value must not appear in the matching documents. | // | ranges | map | optional | Filters that restrict results to a value range. 
| // | pagination | Pagination | optional | Allows to specify the first record and the number of records to be retrieved. | // @@ -142,37 +143,28 @@ service ArchiveQueryService { // // ### Filters // - // Filters restrict the results by single values. + // A filter property must only be used in one filter. See below for allowed properties. // - // #### Allowed properties + // #### Include filter properties // // | Name | Type | Format | Description | // |-------------|---------|------------------------|--------------------------------------------------------------------| - // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent from the specified identities. | - // | source-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent from the specified identities. | - // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent to the specified identities. | - // | destination-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent to the specified identities. | + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified destination. | // | amount | string | Numeric | Only find transactions with the specified amount. | // | inputType | string | Numeric | Only find transactions with the specified input type. | // | tickNumber | string | Numeric | Only find transactions with the specified tick number. | // - // source` and `source-exclude` are mutually exclusive. - // destination` and `destination-exclude` are mutually exclusive. 
+ // #### Exclude filter properties // - // #### Examples - // - // ``` - // "source": "IIJHZSNPDRYYXCQBWNGKBSWYYDCARTYPOBXGOXZEVEZMMWYHPBVXZLJARRCB", - // "destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" - // "amount": "1000000" - // "inputType": "0" - // ``` + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with another source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with another destination. | // - // ### Ranges + // #### Range filter properties // - // Ranges restrict the results by a range of values. On range per property is supported. - // - // #### Allowed properties + // Ranges restrict the results by a range of values. One range per property is supported. // // | Name | Type | Format | Description | // |------------|--------|------------------------------------------|---------------------------------------------| @@ -181,29 +173,6 @@ service ArchiveQueryService { // | inputType | string | Numeric | Only find transactions in input type range. | // | timestamp | string | Numeric (Unix Timestamp in milliseconds) | Only find transactions in time range. | // - // #### Range definition - // - // A range with size of 0 or 1 is not allowed. - // - // | Name | Type | Necessity | Description | - // |-----------|--------|-----------|-------------------------------------------| - // | field | string | required | Name of the field you wish to search for. | - // | gt | string | optional | Greater than. | - // | gte | string | optional | Greater than or equal to. | - // | lt | string | optional | Less than. | - // | lte | string | optional | Less than or equal to. | - // - // Only one lower bound and one upper bound can be specified. 
- // - // #### Examples - // - // ``` - // "amount": { "gt": "1000000" } - // "tickNumber": { "gte": "25563000", "lte": "28300000" } - // "inputType": { "gt": "0" } - // "timestamp": { "lt": "1757376000000" } - // ``` - // // ### Pagination // // | Name | Type | Necessity | Description | @@ -283,27 +252,78 @@ service ArchiveQueryService { }; } - // Query event logs with optional filters. Beta endpoint. + // Query event logs with optional filters. + // + // ## Please note: Beta version – may be subject to incompatible changes. // // ### Request structure // // | Name | Type | Necessity | Description | // |------------|--------------------|-----------|----------------------------------------------------------------| - // | filters | map | optional | Filters that restrict results to single value. | + // | filters | map | optional | The filter value must appear in the matching documents. | + // | exclude | map | optional | The filter value must not appear in the matching documents. | + // | should | repeated ShouldFilter | optional | At least one of the filter values must match. Needs multiple terms/ranges. | + // | ranges | map | optional | Filters that restrict results to a value range. | // | pagination | Pagination | optional | Allows to specify the first record and number of records. | // // ### Filters // + // A filter property must only be used in one filter. See below for allowed properties. + // + // #### Include filter properties + // // | Name | Type | Format | Description | // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find logs with this source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find logs with this destination. | // | transactionHash | string | Hash | Only find events for the specified transaction hash. 
|
   // | tickNumber      | string  | Numeric  | Only find events for the specified tick number.         |
   // | logType         | string  | Numeric  | Only find events with the specified type (0,1,2,3,8,13).|
+  //
+  // #### Exclude filter properties
+  //
+  // | Name            | Type    | Format   | Description                                             |
+  // |-----------------|---------|----------|---------------------------------------------------------|
+  // | source          | string  | 60 character identity, up to 5, comma separated. | Only find logs with another source. |
+  // | destination     | string  | 60 character identity, up to 5, comma separated. | Only find logs with another destination. |
+  //
+  // #### Should filter term properties
+  //
+  // | Name            | Type    | Format   | Description                                             |
+  // |-----------------|---------|----------|---------------------------------------------------------|
+  // | source          | string  | 60 character identity, up to 5, comma separated. | Find logs with this source. |
+  // | destination     | string  | 60 character identity, up to 5, comma separated. | Find logs with this destination. |
+  // | amount          | string  | Numeric  | Find logs with this amount. |
+  // | numberOfShares  | string  | Numeric  | Find logs with this number of shares. |
+  //
+  // #### Should filter range properties
+  //
+  // | Name            | Type    | Format   | Description                                             |
+  // |-----------------|---------|----------|---------------------------------------------------------|
+  // | amount          | string  | Numeric  | Find logs in the amount range. |
+  // | numberOfShares  | string  | Numeric  | Find logs in the number of shares range. |
+  //
+  // #### Range filter properties
+  //
+  // Ranges restrict the results to a range of values by defining a maximum and/or minimum value.
+  //
+  // | Name       | Type   | Format                                   | Description                                 |
+  // |------------|--------|------------------------------------------|---------------------------------------------|
+  // | amount     | string | Numeric                                  | Only find logs in the amount range.         
|
+  // | numberOfShares | string | Numeric                              | Only find logs within the number of shares range. |
+  // | tickNumber | string | Numeric                                  | Only find logs in the tick range.           |
+  // | timestamp  | string | Numeric (Unix Timestamp in milliseconds) | Only find logs in the time range.           |
+  //
+  // ### Pagination
+  //
+  // | Name   | Type   | Necessity | Description                                                                                         |
+  // |--------|--------|-----------|-----------------------------------------------------------------------------------------------------|
+  // | offset | uint32 | optional  | The offset of the first record to return. Defaults to zero (first record). Maximum offset is 10000. |
+  // | size   | uint32 | optional  | Defaults to 10. Maximum size is 1000. Zero value is ignored (uses default). |
   rpc GetEvents(GetEventsRequest) returns (GetEventsResponse) {
     option (openapi.v3.operation) = {
       tags: ["Events (Beta)"]
-      summary: "Get Events"
-      description: "Query event logs with optional filters. Beta endpoint."
+      summary: "Get Event Logs"
     };
     option (google.api.http) = {
diff --git a/v2/api/archive-query-service/v2/query_services_grpc.pb.go b/v2/api/archive-query-service/v2/query_services_grpc.pb.go
index e511680..6baf179 100644
--- a/v2/api/archive-query-service/v2/query_services_grpc.pb.go
+++ b/v2/api/archive-query-service/v2/query_services_grpc.pb.go
@@ -1,6 +1,6 @@
 // Code generated by protoc-gen-go-grpc. DO NOT EDIT.
 // versions:
-// - protoc-gen-go-grpc v1.6.0
+// - protoc-gen-go-grpc v1.6.1
 // - protoc v3.21.12
 // source: query_services.proto
 
@@ -95,7 +95,8 @@ type ArchiveQueryServiceClient interface {
	// | Name       | Type               | Necessity | Description                                                                    |
	// |------------|--------------------|-----------|--------------------------------------------------------------------------------|
	// | identity   | string             | required  | 60 characters uppercase identity.                                              |
-	// | filters    | map                | optional  | Filters that restrict results to single value.                                 |
+	// | filters    | map                | optional  | The filter value must appear in the matching documents. 
| + // | exclude | map | optional | The filter value must not appear in the matching documents. | // | ranges | map | optional | Filters that restrict results to a value range. | // | pagination | Pagination | optional | Allows to specify the first record and the number of records to be retrieved. | // @@ -103,37 +104,28 @@ type ArchiveQueryServiceClient interface { // // ### Filters // - // Filters restrict the results by single values. + // A filter property must only be used in one filter. See below for allowed properties. // - // #### Allowed properties + // #### Include filter properties // // | Name | Type | Format | Description | // |-------------|---------|------------------------|--------------------------------------------------------------------| - // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent from the specified identities. | - // | source-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent from the specified identities. | - // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent to the specified identities. | - // | destination-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent to the specified identities. | + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified destination. | // | amount | string | Numeric | Only find transactions with the specified amount. | // | inputType | string | Numeric | Only find transactions with the specified input type. | // | tickNumber | string | Numeric | Only find transactions with the specified tick number. | // - // source` and `source-exclude` are mutually exclusive. 
- // destination` and `destination-exclude` are mutually exclusive. + // #### Exclude filter properties // - // #### Examples - // - // ``` - // "source": "IIJHZSNPDRYYXCQBWNGKBSWYYDCARTYPOBXGOXZEVEZMMWYHPBVXZLJARRCB", - // "destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" - // "amount": "1000000" - // "inputType": "0" - // ``` + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with another source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with another destination. | // - // ### Ranges + // #### Range filter properties // - // Ranges restrict the results by a range of values. On range per property is supported. - // - // #### Allowed properties + // Ranges restrict the results by a range of values. One range per property is supported. // // | Name | Type | Format | Description | // |------------|--------|------------------------------------------|---------------------------------------------| @@ -142,29 +134,6 @@ type ArchiveQueryServiceClient interface { // | inputType | string | Numeric | Only find transactions in input type range. | // | timestamp | string | Numeric (Unix Timestamp in milliseconds) | Only find transactions in time range. | // - // #### Range definition - // - // A range with size of 0 or 1 is not allowed. - // - // | Name | Type | Necessity | Description | - // |-----------|--------|-----------|-------------------------------------------| - // | field | string | required | Name of the field you wish to search for. | - // | gt | string | optional | Greater than. | - // | gte | string | optional | Greater than or equal to. | - // | lt | string | optional | Less than. | - // | lte | string | optional | Less than or equal to. 
| - // - // Only one lower bound and one upper bound can be specified. - // - // #### Examples - // - // ``` - // "amount": { "gt": "1000000" } - // "tickNumber": { "gte": "25563000", "lte": "28300000" } - // "inputType": { "gt": "0" } - // "timestamp": { "lt": "1757376000000" } - // ``` - // // ### Pagination // // | Name | Type | Necessity | Description | @@ -189,22 +158,74 @@ type ArchiveQueryServiceClient interface { // that need to process all available ticks. For most users it is enough to switch to a new // interval by using the `get last processed tick` endpoint. GetProcessedTickIntervals(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetProcessedTickIntervalsResponse, error) - // Query event logs with optional filters. Beta endpoint. + // Query event logs with optional filters. + // + // ## Please note: Beta version – may be subject to incompatible changes. // // ### Request structure // // | Name | Type | Necessity | Description | // |------------|--------------------|-----------|----------------------------------------------------------------| - // | filters | map | optional | Filters that restrict results to single value. | + // | filters | map | optional | The filter value must appear in the matching documents. | + // | exclude | map | optional | The filter value must not appear in the matching documents. | + // | should | repeated ShouldFilter | optional | At least one of the filter values must match. Needs multiple terms/ranges. | + // | ranges | map | optional | Filters that restrict results to a value range. | // | pagination | Pagination | optional | Allows to specify the first record and number of records. | // // ### Filters // + // A filter property must only be used in one filter. See below for allowed properties. 
+ // + // #### Include filter properties + // // | Name | Type | Format | Description | // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find logs with this source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find logs with this destination. | // | transactionHash | string | Hash | Only find events for the specified transaction hash. | // | tickNumber | string | Numeric | Only find events for the specified tick number. | // | logType | string | Numeric | Only find events with the specified type (0,1,2,3,8,13).| + // + // #### Exclude filter properties + // + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find logs with another source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find logs with another destination. | + // + // #### Should filter term properties + // + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Find logs with this source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Find logs with this source. | + // | amount | string | Numeric | Find logs with this amount. | + // | numberOfShares | string | Numeric | Find logs with these number of shares. | + // + // #### Should filter range properties + // + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | amount | string | Numeric | Find logs in the amount range. 
| + // | numberOfShares | string | Numeric | Find logs in the number of shares range. | + // + // #### Range filter properties + // + // Ranges restrict the results to a range of values by defining a maximum and/or minimum value. + // + // | Name | Type | Format | Description | + // |------------|--------|------------------------------------------|---------------------------------------------| + // | amount | string | Numeric | Only find logs in the amount range. | + // | numberOfShares | string | Numeric | Only find logs withing the number of shares range. | + // | tickNumber | string | Numeric | Only find logs in the tick range. | + // | timestamp | string | Numeric (Unix Timestamp in milliseconds) | Only find logs in the time range. | + // + // ### Pagination + // + // | Name | Type | Necessity | Description | + // |--------|--------|-----------|-----------------------------------------------------------------------------------------------------| + // | offset | uint32 | optional | The offset of the first record to return. Defaults to zero (first record). Maximum offset is 10000. | + // | size | uint32 | optional | Defaults to 10. Maximum size is 1000. Zero value is ignored (uses default). | GetEvents(ctx context.Context, in *GetEventsRequest, opts ...grpc.CallOption) (*GetEventsResponse, error) GetHealth(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*HealthResponse, error) } @@ -371,7 +392,8 @@ type ArchiveQueryServiceServer interface { // | Name | Type | Necessity | Description | // |------------|--------------------|-----------|--------------------------------------------------------------------------------| // | identity | string | required | 60 characters uppercase identity. | - // | filters | map | optional | Filters that restrict results to single value. | + // | filters | map | optional | The filter value must appear in the matching documents. 
| + // | exclude | map | optional | The filter value must not appear in the matching documents. | // | ranges | map | optional | Filters that restrict results to a value range. | // | pagination | Pagination | optional | Allows to specify the first record and the number of records to be retrieved. | // @@ -379,37 +401,28 @@ type ArchiveQueryServiceServer interface { // // ### Filters // - // Filters restrict the results by single values. + // A filter property must only be used in one filter. See below for allowed properties. // - // #### Allowed properties + // #### Include filter properties // // | Name | Type | Format | Description | // |-------------|---------|------------------------|--------------------------------------------------------------------| - // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent from the specified identities. | - // | source-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent from the specified identities. | - // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions that were sent to the specified identities. | - // | destination-exclude | string | 60 character identity, up to 5, comma separated. | Only find transactions that were not sent to the specified identities. | + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with the specified destination. | // | amount | string | Numeric | Only find transactions with the specified amount. | // | inputType | string | Numeric | Only find transactions with the specified input type. | // | tickNumber | string | Numeric | Only find transactions with the specified tick number. | // - // source` and `source-exclude` are mutually exclusive. 
- // destination` and `destination-exclude` are mutually exclusive. + // #### Exclude filter properties // - // #### Examples - // - // ``` - // "source": "IIJHZSNPDRYYXCQBWNGKBSWYYDCARTYPOBXGOXZEVEZMMWYHPBVXZLJARRCB", - // "destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" - // "amount": "1000000" - // "inputType": "0" - // ``` + // | Name | Type | Format | Description | + // |-----------------|---------|----------|---------------------------------------------------------| + // | source | string | 60 character identity, up to 5, comma separated. | Only find transactions with another source. | + // | destination | string | 60 character identity, up to 5, comma separated. | Only find transactions with another destination. | // - // ### Ranges + // #### Range filter properties // - // Ranges restrict the results by a range of values. On range per property is supported. - // - // #### Allowed properties + // Ranges restrict the results by a range of values. One range per property is supported. // // | Name | Type | Format | Description | // |------------|--------|------------------------------------------|---------------------------------------------| @@ -418,29 +431,6 @@ type ArchiveQueryServiceServer interface { // | inputType | string | Numeric | Only find transactions in input type range. | // | timestamp | string | Numeric (Unix Timestamp in milliseconds) | Only find transactions in time range. | // - // #### Range definition - // - // A range with size of 0 or 1 is not allowed. - // - // | Name | Type | Necessity | Description | - // |-----------|--------|-----------|-------------------------------------------| - // | field | string | required | Name of the field you wish to search for. | - // | gt | string | optional | Greater than. | - // | gte | string | optional | Greater than or equal to. | - // | lt | string | optional | Less than. | - // | lte | string | optional | Less than or equal to. 
| - // - // Only one lower bound and one upper bound can be specified. - // - // #### Examples - // - // ``` - // "amount": { "gt": "1000000" } - // "tickNumber": { "gte": "25563000", "lte": "28300000" } - // "inputType": { "gt": "0" } - // "timestamp": { "lt": "1757376000000" } - // ``` - // // ### Pagination // // | Name | Type | Necessity | Description | @@ -465,22 +455,74 @@ type ArchiveQueryServiceServer interface { // that need to process all available ticks. For most users it is enough to switch to a new // interval by using the `get last processed tick` endpoint. GetProcessedTickIntervals(context.Context, *emptypb.Empty) (*GetProcessedTickIntervalsResponse, error) - // Query event logs with optional filters. Beta endpoint. + // Query event logs with optional filters. + // + // ## Please note: Beta version – may be subject to incompatible changes. // // ### Request structure // // | Name | Type | Necessity | Description | // |------------|--------------------|-----------|----------------------------------------------------------------| - // | filters | map | optional | Filters that restrict results to single value. | + // | filters | map | optional | The filter value must appear in the matching documents. | + // | exclude | map | optional | The filter value must not appear in the matching documents. | + // | should | repeated ShouldFilter | optional | At least one of the filter values must match. Needs multiple terms/ranges. | + // | ranges | map | optional | Filters that restrict results to a value range. | // | pagination | Pagination | optional | Allows to specify the first record and number of records. | // // ### Filters // + // A filter property must only be used in one filter. See below for allowed properties. 
+ //
+ // #### Include filter properties
+ //
// | Name | Type | Format | Description |
// |-----------------|---------|----------|---------------------------------------------------------|
+ // | source | string | 60 character identity, up to 5, comma separated. | Only find logs with this source. |
+ // | destination | string | 60 character identity, up to 5, comma separated. | Only find logs with this destination. |
// | transactionHash | string | Hash | Only find events for the specified transaction hash. |
// | tickNumber | string | Numeric | Only find events for the specified tick number. |
// | logType | string | Numeric | Only find events with the specified type (0,1,2,3,8,13).|
+ //
+ // #### Exclude filter properties
+ //
+ // | Name | Type | Format | Description |
+ // |-----------------|---------|----------|---------------------------------------------------------|
+ // | source | string | 60 character identity, up to 5, comma separated. | Only find logs with another source. |
+ // | destination | string | 60 character identity, up to 5, comma separated. | Only find logs with another destination. |
+ //
+ // #### Should filter term properties
+ //
+ // | Name | Type | Format | Description |
+ // |-----------------|---------|----------|---------------------------------------------------------|
+ // | source | string | 60 character identity, up to 5, comma separated. | Find logs with this source. |
+ // | destination | string | 60 character identity, up to 5, comma separated. | Find logs with this destination. |
+ // | amount | string | Numeric | Find logs with this amount. |
+ // | numberOfShares | string | Numeric | Find logs with this number of shares. |
+ //
+ // #### Should filter range properties
+ //
+ // | Name | Type | Format | Description |
+ // |-----------------|---------|----------|---------------------------------------------------------|
+ // | amount | string | Numeric | Find logs in the amount range. 
| + // | numberOfShares | string | Numeric | Find logs in the number of shares range. |
+ //
+ // #### Range filter properties
+ //
+ // Ranges restrict the results to a range of values by defining a maximum and/or minimum value.
+ //
+ // | Name | Type | Format | Description |
+ // |------------|--------|------------------------------------------|---------------------------------------------|
+ // | amount | string | Numeric | Only find logs in the amount range. |
+ // | numberOfShares | string | Numeric | Only find logs within the number of shares range. |
+ // | tickNumber | string | Numeric | Only find logs in the tick range. |
+ // | timestamp | string | Numeric (Unix Timestamp in milliseconds) | Only find logs in the time range. |
+ //
+ // ### Pagination
+ //
+ // | Name | Type | Necessity | Description |
+ // |--------|--------|-----------|-----------------------------------------------------------------------------------------------------|
+ // | offset | uint32 | optional | The offset of the first record to return. Defaults to zero (first record). Maximum offset is 10000. |
+ // | size | uint32 | optional | Defaults to 10. Maximum size is 1000. Zero value is ignored (uses default). 
| GetEvents(context.Context, *GetEventsRequest) (*GetEventsResponse, error) GetHealth(context.Context, *emptypb.Empty) (*HealthResponse, error) mustEmbedUnimplementedArchiveQueryServiceServer() diff --git a/v2/domain/events.go b/v2/domain/events.go index 8dbe2c1..0700733 100644 --- a/v2/domain/events.go +++ b/v2/domain/events.go @@ -10,7 +10,7 @@ import ( //go:generate go tool go.uber.org/mock/mockgen -destination=mock/events.mock.go -package=mock -source events.go type EventsRepository interface { - GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) ([]*api.Event, *entities.Hits, error) + GetEvents(ctx context.Context, filters entities.Filters, from, size uint32) ([]*api.Event, *entities.Hits, error) } type EventsService struct { @@ -21,7 +21,7 @@ func NewEventsService(repo EventsRepository) *EventsService { return &EventsService{repo: repo} } -func (s *EventsService) GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) (*entities.EventsResult, error) { +func (s *EventsService) GetEvents(ctx context.Context, filters entities.Filters, from, size uint32) (*entities.EventsResult, error) { events, hits, err := s.repo.GetEvents(ctx, filters, from, size) if err != nil { return nil, err diff --git a/v2/domain/events_test.go b/v2/domain/events_test.go index ae60a51..5bef60d 100644 --- a/v2/domain/events_test.go +++ b/v2/domain/events_test.go @@ -30,7 +30,9 @@ func TestEventsService_GetEvents_Success(t *testing.T) { } expectedHits := &entities.Hits{Total: 2, Relation: "eq"} - filters := map[string][]string{"transactionHash": {"hash1"}} + filters := entities.Filters{ + Include: map[string][]string{"transactionHash": {"hash1"}}, + } mockRepo.EXPECT().GetEvents(gomock.Any(), filters, uint32(0), uint32(10)). Return(expectedEvents, expectedHits, nil) @@ -49,7 +51,7 @@ func TestEventsService_GetEvents_RepoError(t *testing.T) { mockRepo.EXPECT().GetEvents(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()). 
Return(nil, nil, fmt.Errorf("connection refused")) - result, err := service.GetEvents(context.Background(), nil, 0, 10) + result, err := service.GetEvents(context.Background(), entities.Filters{}, 0, 10) require.Error(t, err) assert.Nil(t, result) assert.Contains(t, err.Error(), "connection refused") @@ -64,7 +66,7 @@ func TestEventsService_GetEvents_EmptyResult(t *testing.T) { mockRepo.EXPECT().GetEvents(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()). Return([]*api.Event{}, &entities.Hits{Total: 0, Relation: "eq"}, nil) - result, err := service.GetEvents(context.Background(), nil, 0, 10) + result, err := service.GetEvents(context.Background(), entities.Filters{}, 0, 10) require.NoError(t, err) assert.Empty(t, result.Events) assert.Equal(t, 0, result.Hits.Total) diff --git a/v2/domain/mock/events.mock.go b/v2/domain/mock/events.mock.go index db52606..73edb7a 100644 --- a/v2/domain/mock/events.mock.go +++ b/v2/domain/mock/events.mock.go @@ -43,7 +43,7 @@ func (m *MockEventsRepository) EXPECT() *MockEventsRepositoryMockRecorder { } // GetEvents mocks base method. -func (m *MockEventsRepository) GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) ([]*api.Event, *entities.Hits, error) { +func (m *MockEventsRepository) GetEvents(ctx context.Context, filters entities.Filters, from, size uint32) ([]*api.Event, *entities.Hits, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetEvents", ctx, filters, from, size) ret0, _ := ret[0].([]*api.Event) diff --git a/v2/domain/mock/transactions.mock.go b/v2/domain/mock/transactions.mock.go index d177e31..784ed7d 100644 --- a/v2/domain/mock/transactions.mock.go +++ b/v2/domain/mock/transactions.mock.go @@ -58,9 +58,9 @@ func (mr *MockTransactionRepositoryMockRecorder) GetTransactionByHash(ctx, hash } // GetTransactionsForIdentity mocks base method. 
-func (m *MockTransactionRepository) GetTransactionsForIdentity(ctx context.Context, identity string, maxTick uint32, filters map[string][]string, ranges map[string][]*entities.Range, from, size uint32) ([]*api.Transaction, *entities.Hits, error) { +func (m *MockTransactionRepository) GetTransactionsForIdentity(ctx context.Context, identity string, maxTick uint32, filters entities.Filters, from, size uint32) ([]*api.Transaction, *entities.Hits, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransactionsForIdentity", ctx, identity, maxTick, filters, ranges, from, size) + ret := m.ctrl.Call(m, "GetTransactionsForIdentity", ctx, identity, maxTick, filters, from, size) ret0, _ := ret[0].([]*api.Transaction) ret1, _ := ret[1].(*entities.Hits) ret2, _ := ret[2].(error) @@ -68,13 +68,13 @@ func (m *MockTransactionRepository) GetTransactionsForIdentity(ctx context.Conte } // GetTransactionsForIdentity indicates an expected call of GetTransactionsForIdentity. -func (mr *MockTransactionRepositoryMockRecorder) GetTransactionsForIdentity(ctx, identity, maxTick, filters, ranges, from, size any) *gomock.Call { +func (mr *MockTransactionRepositoryMockRecorder) GetTransactionsForIdentity(ctx, identity, maxTick, filters, from, size any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionsForIdentity", reflect.TypeOf((*MockTransactionRepository)(nil).GetTransactionsForIdentity), ctx, identity, maxTick, filters, ranges, from, size) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionsForIdentity", reflect.TypeOf((*MockTransactionRepository)(nil).GetTransactionsForIdentity), ctx, identity, maxTick, filters, from, size) } // GetTransactionsForTickNumber mocks base method. 
-func (m *MockTransactionRepository) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) { +func (m *MockTransactionRepository) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetTransactionsForTickNumber", ctx, tickNumber, filters, ranges) ret0, _ := ret[0].([]*api.Transaction) diff --git a/v2/domain/repository/elastic/computors.go b/v2/domain/repository/elastic/computors.go index 8b61403..79bf980 100644 --- a/v2/domain/repository/elastic/computors.go +++ b/v2/domain/repository/elastic/computors.go @@ -37,7 +37,7 @@ func (r *ArchiveRepository) GetComputorsListsForEpoch(ctx context.Context, epoch } var result computorsListSearchResponse - err = r.performElasticSearch(ctx, r.clIndex, &query, &result) + err = performElasticSearch(ctx, r.esClient, r.clIndex, &query, &result) if err != nil { return nil, fmt.Errorf("performing elasting search: %w", err) } diff --git a/v2/domain/repository/elastic/elastic.go b/v2/domain/repository/elastic/elastic.go deleted file mode 100644 index f706635..0000000 --- a/v2/domain/repository/elastic/elastic.go +++ /dev/null @@ -1,50 +0,0 @@ -package elastic - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "sync/atomic" - - "github.com/elastic/go-elasticsearch/v8" -) - -type ArchiveRepository struct { - esClient *elasticsearch.Client - ConsecutiveElasticErrorCount atomic.Int32 - TotalElasticErrorCount atomic.Int32 - txIndex string - tickDataIndex string - clIndex string -} - -func NewArchiveRepository(txIndex, tickDataIndex, clIndex string, esClient *elasticsearch.Client) *ArchiveRepository { - return &ArchiveRepository{ - txIndex: txIndex, - tickDataIndex: tickDataIndex, - esClient: esClient, - clIndex: clIndex, - } -} - -func (r *ArchiveRepository) 
performElasticSearch(ctx context.Context, index string, query *bytes.Buffer, result any) error { - res, err := r.esClient.Search( - r.esClient.Search.WithContext(ctx), - r.esClient.Search.WithIndex(index), - r.esClient.Search.WithBody(query), - ) - if err != nil { - return fmt.Errorf("performing search: %w", err) - } - defer res.Body.Close() - if res.IsError() { - return fmt.Errorf("got error response from data store: %s", res.String()) - } - - if err = json.NewDecoder(res.Body).Decode(&result); err != nil { - return fmt.Errorf("decoding response: %w", err) - } - - return nil -} diff --git a/v2/domain/repository/elastic/events.go b/v2/domain/repository/elastic/events.go index 08f62a3..a2957b4 100644 --- a/v2/domain/repository/elastic/events.go +++ b/v2/domain/repository/elastic/events.go @@ -1,9 +1,7 @@ package elastic import ( - "bytes" "context" - "encoding/json" "fmt" "strings" @@ -70,26 +68,16 @@ type eventsSearchResponse struct { } `json:"hits"` } -func (r *EventsRepository) GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) ([]*api.Event, *entities.Hits, error) { - query := createEventsQuery(filters, from, size) - - res, err := r.esClient.Search( - r.esClient.Search.WithContext(ctx), - r.esClient.Search.WithIndex(r.eventIndex), - r.esClient.Search.WithBody(strings.NewReader(query)), - ) +func (r *EventsRepository) GetEvents(ctx context.Context, filters entities.Filters, from, size uint32) ([]*api.Event, *entities.Hits, error) { + query, err := createEventsQuery(filters, from, size) if err != nil { - return nil, nil, fmt.Errorf("performing search: %w", err) - } - defer res.Body.Close() - - if res.IsError() { - return nil, nil, fmt.Errorf("error response from data store: %s", res.String()) + return nil, nil, fmt.Errorf("creating events query: %w", err) } var result eventsSearchResponse - if err = json.NewDecoder(res.Body).Decode(&result); err != nil { - return nil, nil, fmt.Errorf("decoding response: %w", err) + err = 
performElasticSearch(ctx, r.esClient, r.eventIndex, strings.NewReader(query), &result) + if err != nil { + return nil, nil, fmt.Errorf("performing elastic search: %w", err) } hits := &entities.Hits{ @@ -100,37 +88,55 @@ func (r *EventsRepository) GetEvents(ctx context.Context, filters map[string][]s return eventHitsToAPIEvents(result.Hits.Hits), hits, nil } -func createEventsQuery(filters map[string][]string, from, size uint32) string { - filterStrings := make([]string, 0, len(filters)) - - keys := getSortedKeys(filters) - for _, k := range keys { - esField := k - if k == "logType" { - esField = "type" - } - if len(filters[k]) == 1 { - filterStrings = append(filterStrings, fmt.Sprintf(`{"term":{"%s":"%s"}}`, esField, filters[k][0])) - } +func createEventsQuery(filters entities.Filters, from, size uint32) (string, error) { + filterStrings := make([]string, 0, len(filters.Include)) + + // append include filters to filter section + filterStrings = append(filterStrings, getFilterStrings(filters.Include)...) + + // append range filters to filter section + rangeFilterStrings, err := getRangeFilterStrings(filters.Ranges) + if err != nil { + return "", err } + filterStrings = append(filterStrings, rangeFilterStrings...) - filterClause := "" - if len(filterStrings) > 0 { - filterClause = strings.Join(filterStrings, ",") + // append should filters to filter section + shouldFilterStrings, err := getShouldFilterStrings(filters.Should) + if err != nil { + return "", fmt.Errorf("creating should filters: %w", err) } + filterStrings = append(filterStrings, shouldFilterStrings...) 
- var buf bytes.Buffer - buf.WriteString(fmt.Sprintf(`{ + // exclude filters + excludeFilterStrings := getFilterStrings(filters.Exclude) + + // empty bool query clause + boolClause := make([]string, 0, 2) + + // append include filters if not empty + filterClause := strings.Join(filterStrings, ",") + if len(filterClause) > 0 { + filterClause = fmt.Sprintf(`"filter": [%s]`, filterClause) + boolClause = append(boolClause, filterClause) + } + + // append exclude filters if not empty + mustNotClause := strings.Join(excludeFilterStrings, ",") + if len(mustNotClause) > 0 { + mustNotClause = fmt.Sprintf(`"must_not": [%s]`, mustNotClause) + boolClause = append(boolClause, mustNotClause) + } + + query := fmt.Sprintf(`{ "query": { - "bool": { - "filter": [%s] - } + "bool": {%s} }, "sort": [{"tickNumber":{"order":"desc"}},{"logId":{"order":"asc"}}], "from": %d, "size": %d, "track_total_hits": %d - }`, filterClause, from, size, maxTrackTotalHits)) - - return buf.String() + }`, strings.Join(boolClause, ","), from, size, maxTrackTotalHits) + // log.Printf("[DEBUG] %s", query) + return query, nil } diff --git a/v2/domain/repository/elastic/events_integration_test.go b/v2/domain/repository/elastic/events_integration_test.go index 5da0c0f..ece234b 100644 --- a/v2/domain/repository/elastic/events_integration_test.go +++ b/v2/domain/repository/elastic/events_integration_test.go @@ -10,6 +10,7 @@ import ( "github.com/elastic/go-elasticsearch/v8" "github.com/google/go-cmp/cmp" + "github.com/qubic/archive-query-service/v2/entities" "github.com/qubic/archive-query-service/v2/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -29,7 +30,7 @@ var testEvent1 = event{ LogDigest: "digest1", Type: 0, Source: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", - Destination: "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB", + Destination: "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID", Amount: 1000, } @@ -68,6 +69,9 @@ 
var testEvent4 = event{ LogID: 4, LogDigest: "digest4", Type: 3, + Source: "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID", + Destination: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", + NumberOfShares: 666, } var testEvent5 = event{ @@ -202,14 +206,16 @@ func (s *eventsSuite) indexEvent(esClient *elasticsearch.Client, ev event, docID } func (s *eventsSuite) Test_GetEvents_NoFilters() { - events, hits, err := s.repo.GetEvents(s.ctx, nil, 0, 10) + events, hits, err := s.repo.GetEvents(s.ctx, entities.Filters{}, 0, 10) require.NoError(s.T(), err, "getting events without filters") assert.Len(s.T(), events, 6) assert.Equal(s.T(), 6, hits.Total) } func (s *eventsSuite) Test_GetEvents_FilterByTransactionHash() { - filters := map[string][]string{"transactionHash": {"txhash1"}} + filters := entities.Filters{ + Include: map[string][]string{"transactionHash": {"txhash1"}}, + } events, hits, err := s.repo.GetEvents(s.ctx, filters, 0, 10) require.NoError(s.T(), err, "getting events by transaction hash") assert.Len(s.T(), events, 2) @@ -221,7 +227,9 @@ func (s *eventsSuite) Test_GetEvents_FilterByTransactionHash() { } func (s *eventsSuite) Test_GetEvents_FilterByTickNumber() { - filters := map[string][]string{"tickNumber": {"15001"}} + filters := entities.Filters{ + Include: map[string][]string{"tickNumber": {"15001"}}, + } events, hits, err := s.repo.GetEvents(s.ctx, filters, 0, 10) require.NoError(s.T(), err, "getting events by tick number") require.Len(s.T(), events, 1) @@ -233,7 +241,9 @@ func (s *eventsSuite) Test_GetEvents_FilterByTickNumber() { } func (s *eventsSuite) Test_GetEvents_FilterByEventType() { - filters := map[string][]string{"logType": {"8"}} + filters := entities.Filters{ + Include: map[string][]string{"logType": {"8"}}, + } events, hits, err := s.repo.GetEvents(s.ctx, filters, 0, 10) require.NoError(s.T(), err, "getting events by event type") require.Len(s.T(), events, 1) @@ -242,9 +252,11 @@ func (s *eventsSuite) 
Test_GetEvents_FilterByEventType() { } func (s *eventsSuite) Test_GetEvents_CombinedFilters() { - filters := map[string][]string{ - "transactionHash": {"txhash1"}, - "logType": {"0"}, + filters := entities.Filters{ + Include: map[string][]string{ + "transactionHash": {"txhash1"}, + "logType": {"0"}, + }, } events, hits, err := s.repo.GetEvents(s.ctx, filters, 0, 10) require.NoError(s.T(), err, "getting events with combined filters") @@ -256,13 +268,13 @@ func (s *eventsSuite) Test_GetEvents_CombinedFilters() { func (s *eventsSuite) Test_GetEvents_Pagination() { // Get first page of 2 - events1, hits1, err := s.repo.GetEvents(s.ctx, nil, 0, 2) + events1, hits1, err := s.repo.GetEvents(s.ctx, entities.Filters{}, 0, 2) require.NoError(s.T(), err, "getting first page") assert.Len(s.T(), events1, 2) assert.Equal(s.T(), 6, hits1.Total) // Get second page of 2 - events2, hits2, err := s.repo.GetEvents(s.ctx, nil, 2, 2) + events2, hits2, err := s.repo.GetEvents(s.ctx, entities.Filters{}, 2, 2) require.NoError(s.T(), err, "getting second page") assert.Len(s.T(), events2, 2) assert.Equal(s.T(), 6, hits2.Total) @@ -272,9 +284,44 @@ func (s *eventsSuite) Test_GetEvents_Pagination() { } func (s *eventsSuite) Test_GetEvents_NoResults() { - filters := map[string][]string{"transactionHash": {"nonexistent"}} + filters := entities.Filters{ + Include: map[string][]string{"transactionHash": {"nonexistent"}}, + } events, hits, err := s.repo.GetEvents(s.ctx, filters, 0, 10) require.NoError(s.T(), err, "getting events with no results") assert.Len(s.T(), events, 0) assert.Equal(s.T(), 0, hits.Total) } + +func (s *eventsSuite) Test_GetEvents_WithRangeFilter() { + ranges := map[string][]entities.Range{ + "amount": { + {Operation: "gte", Value: "900"}, + {Operation: "lte", Value: "1100"}, + }, + } + events, hits, err := s.repo.GetEvents(s.ctx, entities.Filters{Ranges: ranges}, 0, 10) + require.NoError(s.T(), err, "getting events with range filter") + require.Len(s.T(), events, 1) + 
require.Equal(s.T(), 1, hits.Total) + assert.Equal(s.T(), 1000, int(events[0].GetQuTransfer().GetAmount())) +} + +func (s *eventsSuite) Test_GetEvents_WithShouldFilter() { + should := []entities.ShouldFilter{ + {Terms: map[string][]string{ + "source": {"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + "destination": {"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + }}, + {Ranges: map[string][]entities.Range{ + "numberOfShares": {{Operation: "gte", Value: "1"}, {Operation: "lte", Value: "100000"}}, + "amount": {{Operation: "gt", Value: "1"}, {Operation: "lt", Value: "160000"}}, + }}, + } + events, hits, err := s.repo.GetEvents(s.ctx, entities.Filters{Should: should}, 0, 10) + require.NoError(s.T(), err, "getting events with should filter") + require.Len(s.T(), events, 2) + require.Equal(s.T(), 2, hits.Total) + assert.Equal(s.T(), 1, int(events[1].GetLogId())) + assert.Equal(s.T(), 4, int(events[0].GetLogId())) +} diff --git a/v2/domain/repository/elastic/events_test.go b/v2/domain/repository/elastic/events_test.go index 99f3c8b..c124179 100644 --- a/v2/domain/repository/elastic/events_test.go +++ b/v2/domain/repository/elastic/events_test.go @@ -4,22 +4,23 @@ import ( "encoding/json" "testing" + "github.com/qubic/archive-query-service/v2/entities" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func Test_createEventsQuery_noFilters(t *testing.T) { - query := createEventsQuery(nil, 0, 10) + query, err := createEventsQuery(entities.Filters{}, 0, 10) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err, "query should be valid JSON") // Verify query structure q := parsed["query"].(map[string]any) boolQuery := q["bool"].(map[string]any) - filters := boolQuery["filter"].([]any) - assert.Empty(t, filters, "no filters should be present") + assert.Empty(t, boolQuery, "no filters should be 
present") assert.Equal(t, float64(0), parsed["from"]) assert.Equal(t, float64(10), parsed["size"]) @@ -44,10 +45,14 @@ func Test_createEventsQuery_withTransactionHash(t *testing.T) { filters := map[string][]string{ "transactionHash": {"abc123"}, } - query := createEventsQuery(filters, 0, 10) + f := entities.Filters{ + Include: filters, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err) q := parsed["query"].(map[string]any) @@ -63,10 +68,14 @@ func Test_createEventsQuery_withTickNumber(t *testing.T) { filters := map[string][]string{ "tickNumber": {"42"}, } - query := createEventsQuery(filters, 0, 10) + f := entities.Filters{ + Include: filters, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err) q := parsed["query"].(map[string]any) @@ -82,10 +91,14 @@ func Test_createEventsQuery_withEventType(t *testing.T) { filters := map[string][]string{ "logType": {"1"}, } - query := createEventsQuery(filters, 0, 10) + f := entities.Filters{ + Include: filters, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err) q := parsed["query"].(map[string]any) @@ -104,10 +117,14 @@ func Test_createEventsQuery_withMultipleFilters(t *testing.T) { "tickNumber": {"42"}, "logType": {"2"}, } - query := createEventsQuery(filters, 0, 10) + f := entities.Filters{ + Include: filters, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err) q := 
parsed["query"].(map[string]any) @@ -117,12 +134,153 @@ func Test_createEventsQuery_withMultipleFilters(t *testing.T) { } func Test_createEventsQuery_withPagination(t *testing.T) { - query := createEventsQuery(nil, 20, 50) + query, err := createEventsQuery(entities.Filters{}, 20, 50) + require.NoError(t, err) var parsed map[string]any - err := json.Unmarshal([]byte(query), &parsed) + err = json.Unmarshal([]byte(query), &parsed) require.NoError(t, err) assert.Equal(t, float64(20), parsed["from"]) assert.Equal(t, float64(50), parsed["size"]) } + +func Test_createEventsQuery_withExcludeFilter(t *testing.T) { + f := entities.Filters{ + Include: map[string][]string{ + "source": {"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"}, + }, + Exclude: map[string][]string{ + "destination": {"BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"}, + }, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) + + var parsed map[string]any + err = json.Unmarshal([]byte(query), &parsed) + require.NoError(t, err) + + q := parsed["query"].(map[string]any) + boolQuery := q["bool"].(map[string]any) + + // Verify include filter + filterArr := boolQuery["filter"].([]any) + require.Len(t, filterArr, 1) + termFilter := filterArr[0].(map[string]any)["term"].(map[string]any) + assert.Equal(t, "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", termFilter["source"]) + + // Verify exclude filter + mustNotArr := boolQuery["must_not"].([]any) + require.Len(t, mustNotArr, 1) + mustNotTerm := mustNotArr[0].(map[string]any)["term"].(map[string]any) + assert.Equal(t, "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB", mustNotTerm["destination"]) +} + +func Test_createEventsQuery_withOnlyExcludeFilter(t *testing.T) { + filters := map[string][]string{ + "destination": {"CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC"}, + } + f := entities.Filters{ + Exclude: filters, + } + query, err := createEventsQuery(f, 0, 10) + 
require.NoError(t, err) + + var parsed map[string]any + err = json.Unmarshal([]byte(query), &parsed) + require.NoError(t, err) + + q := parsed["query"].(map[string]any) + boolQuery := q["bool"].(map[string]any) + + // Verify no include filters + _, hasFilter := boolQuery["filter"] + assert.False(t, hasFilter, "should not have filter clause") + + // Verify exclude filter + mustNotArr := boolQuery["must_not"].([]any) + require.Len(t, mustNotArr, 1) + mustNotTerm := mustNotArr[0].(map[string]any)["term"].(map[string]any) + assert.Equal(t, "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC", mustNotTerm["destination"]) +} + +func Test_createEventsQuery_withRangeFilter(t *testing.T) { + ranges := map[string][]entities.Range{ + "amount": { + {Operation: "gte", Value: "100"}, + {Operation: "lte", Value: "1000"}, + }, + "tickNumber": { + {Operation: "gt", Value: "123"}, + }, + } + query, err := createEventsQuery(entities.Filters{Ranges: ranges}, 0, 10) + require.NoError(t, err) + + var parsed map[string]any + err = json.Unmarshal([]byte(query), &parsed) + require.NoError(t, err) + + q := parsed["query"].(map[string]any) + boolQuery := q["bool"].(map[string]any) + filterArr := boolQuery["filter"].([]any) + require.Len(t, filterArr, 2) + + rangeFilter := filterArr[0].(map[string]any)["range"].(map[string]any) + amountRange := rangeFilter["amount"].(map[string]any) + assert.Equal(t, "100", amountRange["gte"]) + assert.Equal(t, "1000", amountRange["lte"]) + + rangeFilter = filterArr[1].(map[string]any)["range"].(map[string]any) + tickNumberRange := rangeFilter["tickNumber"].(map[string]any) + assert.Equal(t, "123", tickNumberRange["gt"]) +} + +func Test_createEventsQuery_withTwoShouldFilters(t *testing.T) { + shouldFilters := []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "source": {"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"}, + "destination": {"BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"}, + }, + }, + { + Terms: 
map[string][]string{ + "destination": {"BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"}, + }, + }, + } + f := entities.Filters{ + Should: shouldFilters, + } + query, err := createEventsQuery(f, 0, 10) + require.NoError(t, err) + + var parsed map[string]any + err = json.Unmarshal([]byte(query), &parsed) + require.NoError(t, err) + + q := parsed["query"].(map[string]any) + boolQuery := q["bool"].(map[string]any) + filterArr := boolQuery["filter"].([]any) + require.Len(t, filterArr, 2) + + // Verify first should filter + firstShouldBool := filterArr[0].(map[string]any)["bool"].(map[string]any) + firstShouldArr := firstShouldBool["should"].([]any) + require.Len(t, firstShouldArr, 2) + assert.Equal(t, float64(1), firstShouldBool["minimum_should_match"]) + destinationTerm := firstShouldArr[0].(map[string]any)["term"].(map[string]any) // ordered alphabetically + assert.Equal(t, "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB", destinationTerm["destination"]) + sourceTerm := firstShouldArr[1].(map[string]any)["term"].(map[string]any) + assert.Equal(t, "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", sourceTerm["source"]) + + // Verify second should filter + secondShouldBool := filterArr[1].(map[string]any)["bool"].(map[string]any) + secondShouldArr := secondShouldBool["should"].([]any) + require.Len(t, secondShouldArr, 1) + assert.Equal(t, float64(1), secondShouldBool["minimum_should_match"]) + secondTermFilter := secondShouldArr[0].(map[string]any)["term"].(map[string]any) + assert.Equal(t, "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB", secondTermFilter["destination"]) +} diff --git a/v2/domain/repository/elastic/execute_query.go b/v2/domain/repository/elastic/execute_query.go new file mode 100644 index 0000000..d827f12 --- /dev/null +++ b/v2/domain/repository/elastic/execute_query.go @@ -0,0 +1,33 @@ +package elastic + +import ( + "context" + "encoding/json" + "fmt" + "io" + "log" + + 
"github.com/elastic/go-elasticsearch/v8" +) + +func performElasticSearch(ctx context.Context, esClient *elasticsearch.Client, index string, query io.Reader, result any) error { + res, err := esClient.Search( + esClient.Search.WithContext(ctx), + esClient.Search.WithIndex(index), + esClient.Search.WithBody(query), + ) + if err != nil { + log.Printf("[DEBUG] calling es client search with query: %s", query) + return fmt.Errorf("performing search: %w", err) + } + defer res.Body.Close() + if res.IsError() { + return fmt.Errorf("error response from data store: %s", res.String()) + } + + if err = json.NewDecoder(res.Body).Decode(&result); err != nil { + return fmt.Errorf("decoding response: %w", err) + } + + return nil +} diff --git a/v2/domain/repository/elastic/filter_helpers.go b/v2/domain/repository/elastic/filter_helpers.go new file mode 100644 index 0000000..bd0687c --- /dev/null +++ b/v2/domain/repository/elastic/filter_helpers.go @@ -0,0 +1,92 @@ +package elastic + +import ( + "fmt" + "log" + "sort" + "strings" + + "github.com/qubic/archive-query-service/v2/entities" +) + +func getFilterStrings(filters map[string][]string) []string { + keys := getSortedKeys(filters) // sort for a deterministic filter order + + filterStrings := make([]string, 0, len(filters)) + for _, k := range keys { + esField := k + if k == "logType" { + esField = "type" + } + if len(filters[k]) > 1 { + filterStrings = append(filterStrings, fmt.Sprintf(`{"terms":{"%s":["%s"]}}`, esField, strings.Join(filters[k], `","`))) + } else if len(filters[k]) == 1 { + filterStrings = append(filterStrings, fmt.Sprintf(`{"term":{"%s":"%s"}}`, esField, filters[k][0])) + } + } + return filterStrings +} + +func getRangeFilterStrings(ranges map[string][]entities.Range) ([]string, error) { + filterStrings := make([]string, 0, len(ranges)) + keys := getSortedKeys(ranges) // sort for a deterministic filter order + for _, k := range keys { + esField := k + if k == "logType" { + esField = "type" + } + rangeString, 
err := createRangeFilter(esField, ranges[k]) + if err != nil { + log.Printf("error computing range filter [%s]: %v", k, ranges[k]) + return nil, fmt.Errorf("creating range filter: %w", err) + } + filterStrings = append(filterStrings, rangeString) + } + return filterStrings, nil +} + +func getShouldFilterStrings(shouldFilters []entities.ShouldFilter) ([]string, error) { + filterStrings := make([]string, 0, len(shouldFilters)*2) + + for _, should := range shouldFilters { + + termFilters := getFilterStrings(should.Terms) + rangeFilters, err := getRangeFilterStrings(should.Ranges) + if err != nil { + return nil, fmt.Errorf("getting range filters: %w", err) + } + + if len(termFilters) > 0 || len(rangeFilters) > 0 { + /* + { "bool": { "should": [ ... terms and ranges ... ], "minimum_should_match": 1 } }, + */ + filterStrings = append(filterStrings, + fmt.Sprintf(`{"bool":{"should":[%s], "minimum_should_match": 1}}`, + strings.Join(append(termFilters, rangeFilters...), ","))) + } + + } + + return filterStrings, nil +} + +func createRangeFilter(property string, r []entities.Range) (string, error) { + var rangeStrings []string + for _, v := range r { + rangeStrings = append(rangeStrings, fmt.Sprintf(`"%s":"%s"`, v.Operation, v.Value)) + } + if len(rangeStrings) > 0 { + return fmt.Sprintf(`{"range":{"%s":{%s}}}`, property, strings.Join(rangeStrings, ",")), nil + } + + return "", fmt.Errorf("computing range for [%s]", property) +} + +func getSortedKeys[T any](m map[string]T) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/v2/domain/repository/elastic/main_archive.go b/v2/domain/repository/elastic/main_archive.go new file mode 100644 index 0000000..9242242 --- /dev/null +++ b/v2/domain/repository/elastic/main_archive.go @@ -0,0 +1,25 @@ +package elastic + +import ( + "sync/atomic" + + "github.com/elastic/go-elasticsearch/v8" +) + +type ArchiveRepository struct { + esClient 
*elasticsearch.Client + ConsecutiveElasticErrorCount atomic.Int32 + TotalElasticErrorCount atomic.Int32 + txIndex string + tickDataIndex string + clIndex string +} + +func NewArchiveRepository(txIndex, tickDataIndex, clIndex string, esClient *elasticsearch.Client) *ArchiveRepository { + return &ArchiveRepository{ + txIndex: txIndex, + tickDataIndex: tickDataIndex, + esClient: esClient, + clIndex: clIndex, + } +} diff --git a/v2/domain/repository/elastic/transaction.go b/v2/domain/repository/elastic/transaction.go index 9c9556f..72392a6 100644 --- a/v2/domain/repository/elastic/transaction.go +++ b/v2/domain/repository/elastic/transaction.go @@ -5,8 +5,6 @@ import ( "context" "encoding/json" "fmt" - "log" - "sort" "strconv" "strings" @@ -78,21 +76,21 @@ func (r *ArchiveRepository) GetTransactionByHash(_ context.Context, hash string) return transactionToAPITransaction(result.Source), nil } -func (r *ArchiveRepository) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) { +func (r *ArchiveRepository) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) { query, err := createTickTransactionsQuery(tickNumber, filters, ranges) if err != nil { - return nil, fmt.Errorf("creating query: %w", err) + return nil, fmt.Errorf("creating transactions for tick query: %w", err) } var result transactionsSearchResponse - err = r.performElasticSearch(ctx, r.txIndex, &query, &result) + err = performElasticSearch(ctx, r.esClient, r.txIndex, &query, &result) if err != nil { return nil, fmt.Errorf("performing elastic search: %w", err) } return transactionHitsToAPITransactions(result.Hits.Hits), nil } -func createTickTransactionsQuery(tick uint32, filters map[string][]string, ranges map[string][]*entities.Range) (bytes.Buffer, error) { +func createTickTransactionsQuery(tick 
uint32, filters map[string][]string, ranges map[string][]entities.Range) (bytes.Buffer, error) { // Always include tick number as the first filter filterStrings := []string{fmt.Sprintf(`{"term":{"tickNumber":%d}}`, tick)} @@ -125,32 +123,18 @@ func createTickTransactionsQuery(tick uint32, filters map[string][]string, range return buf, nil } -func (r *ArchiveRepository) GetTransactionsForIdentity(ctx context.Context, identity string, maxTick uint32, filters map[string][]string, ranges map[string][]*entities.Range, +func (r *ArchiveRepository) GetTransactionsForIdentity(ctx context.Context, identity string, maxTick uint32, filters entities.Filters, from, size uint32) ([]*api.Transaction, *entities.Hits, error) { - query, err := createIdentitiesQuery(identity, filters, ranges, from, size, maxTick) + query, err := createIdentitiesQuery(identity, filters, from, size, maxTick) if err != nil { - return nil, nil, fmt.Errorf("creating query: %w", err) - } - - res, err := r.esClient.Search( - r.esClient.Search.WithContext(ctx), - r.esClient.Search.WithIndex(r.txIndex), - r.esClient.Search.WithBody(strings.NewReader(query)), - ) - if err != nil { - log.Printf("calling es client search with query: %v", query) - return nil, nil, fmt.Errorf("performing search: %w", err) - } - defer res.Body.Close() - - if res.IsError() { - return nil, nil, fmt.Errorf("error response from data store: %s", res.String()) + return nil, nil, fmt.Errorf("creating transactions for identity query: %w", err) } var result transactionsSearchResponse - if err = json.NewDecoder(res.Body).Decode(&result); err != nil { - return nil, nil, fmt.Errorf("decoding response: %w", err) + err = performElasticSearch(ctx, r.esClient, r.txIndex, strings.NewReader(query), &result) + if err != nil { + return nil, nil, fmt.Errorf("performing elastic search: %w", err) } hits := &entities.Hits{ @@ -161,34 +145,32 @@ func (r *ArchiveRepository) GetTransactionsForIdentity(ctx context.Context, iden return 
transactionHitsToAPITransactions(result.Hits.Hits), hits, nil } -func createIdentitiesQuery(identity string, filters map[string][]string, ranges map[string][]*entities.Range, from, size, maxTick uint32) (string, error) { +func createIdentitiesQuery(identity string, filters entities.Filters, from, size, maxTick uint32) (string, error) { var query string - includeFilters, excludeFilters := splitFilters(filters) - // Check if there's an upper bound tickNumber range filter (lt/lte) and adjust if needed - hasUpperBoundTickFilter, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) + hasUpperBoundTickFilter, err := modifyUpperBoundTickNumberFilterIfNecessary(filters.Ranges, maxTick) if err != nil { return "", err } - filterStrings := make([]string, 0, len(includeFilters)+len(ranges)+1) + filterStrings := make([]string, 0, len(filters.Include)+len(filters.Ranges)+1) // restrict to max tick only if no upper bound tickNumber filter is present if !hasUpperBoundTickFilter { filterStrings = append(filterStrings, fmt.Sprintf(`{"range":{"tickNumber":{"lte":"%d"}}}`, maxTick)) } // normal filters - filterStrings = append(filterStrings, getFilterStrings(includeFilters)...) + filterStrings = append(filterStrings, getFilterStrings(filters.Include)...) // append range filters - rangeFilterStrings, err := getRangeFilterStrings(ranges) + rangeFilterStrings, err := getRangeFilterStrings(filters.Ranges) if err != nil { return "", err } filterStrings = append(filterStrings, rangeFilterStrings...) 
// filters for excluding results - excludeFilterStrings := getFilterStrings(excludeFilters) + excludeFilterStrings := getFilterStrings(filters.Exclude) // filters are always present because we need to restrict to max tick filterQueryString := strings.Join(filterStrings, ",") @@ -223,10 +205,10 @@ func createIdentitiesQuery(identity string, filters map[string][]string, ranges return query, nil } -func modifyUpperBoundTickNumberFilterIfNecessary(ranges map[string][]*entities.Range, maxTick uint32) (bool, error) { +func modifyUpperBoundTickNumberFilterIfNecessary(ranges map[string][]entities.Range, maxTick uint32) (bool, error) { hasUpperBoundTickFilter := false if tickRanges, ok := ranges["tickNumber"]; ok { - for _, r := range tickRanges { + for k, r := range tickRanges { if r.Operation == "lt" || r.Operation == "lte" { hasUpperBoundTickFilter = true // Parse the value and compare with maxTick @@ -235,8 +217,11 @@ func modifyUpperBoundTickNumberFilterIfNecessary(ranges map[string][]*entities.R return false, fmt.Errorf("parsing tickNumber range value: %w", err) } maxTickValue := If(r.Operation == "lte", maxTick, maxTick+1) - if uint32(tickValue) > maxTickValue { - r.Value = fmt.Sprintf("%d", maxTickValue) + if uint32(tickValue) > maxTickValue { // replace + tickRanges[k] = entities.Range{ + Operation: r.Operation, + Value: fmt.Sprintf("%d", maxTickValue), + } } } } @@ -244,73 +229,9 @@ func modifyUpperBoundTickNumberFilterIfNecessary(ranges map[string][]*entities.R return hasUpperBoundTickFilter, nil } -func If[T any](cond bool, vtrue, vfalse T) T { +func If[T any](cond bool, vTrue, vFalse T) T { if cond { - return vtrue - } - return vfalse -} - -const excludeSuffix = "-exclude" - -func splitFilters(filters map[string][]string) (map[string][]string, map[string][]string) { - includeFilters := make(map[string][]string) - excludeFilters := make(map[string][]string) - for k, v := range filters { - if strings.HasSuffix(k, excludeSuffix) { - 
excludeFilters[strings.TrimSuffix(k, excludeSuffix)] = v - } else { - includeFilters[k] = v - } - } - return includeFilters, excludeFilters -} - -func getFilterStrings(filters map[string][]string) []string { - keys := getSortedKeys(filters) // sort for a deterministic filter order - - filterStrings := make([]string, 0, len(filters)) - for _, k := range keys { - if len(filters[k]) > 1 { - filterStrings = append(filterStrings, fmt.Sprintf(`{"terms":{"%s":["%s"]}}`, k, strings.Join(filters[k], `","`))) - } else if len(filters[k]) == 1 { - filterStrings = append(filterStrings, fmt.Sprintf(`{"term":{"%s":"%s"}}`, k, filters[k][0])) - } - } - return filterStrings -} - -func getRangeFilterStrings(ranges map[string][]*entities.Range) ([]string, error) { - keys := getSortedKeys(ranges) // sort for a deterministic filter order - filterStrings := make([]string, 0, len(ranges)) - for _, k := range keys { - rangeString, err := createRangeFilter(k, ranges[k]) - if err != nil { - log.Printf("error computing range filter [%s]: %v", k, ranges[k]) - return nil, fmt.Errorf("creating range filter: %w", err) - } - filterStrings = append(filterStrings, rangeString) - } - return filterStrings, nil -} - -func createRangeFilter(property string, r []*entities.Range) (string, error) { - var rangeStrings []string - for _, v := range r { - rangeStrings = append(rangeStrings, fmt.Sprintf(`"%s":"%s"`, v.Operation, v.Value)) - } - if len(rangeStrings) > 0 { - return fmt.Sprintf(`{"range":{"%s":{%s}}}`, property, strings.Join(rangeStrings, ",")), nil - } - - return "", fmt.Errorf("computing range for [%s]", property) -} - -func getSortedKeys[T any](m map[string]T) []string { - keys := make([]string, 0, len(m)) - for k := range m { - keys = append(keys, k) + return vTrue } - sort.Strings(keys) - return keys + return vFalse } diff --git a/v2/domain/repository/elastic/transaction_filters_test.go b/v2/domain/repository/elastic/transaction_filters_test.go index ea932a7..16e328a 100644 --- 
a/v2/domain/repository/elastic/transaction_filters_test.go +++ b/v2/domain/repository/elastic/transaction_filters_test.go @@ -7,71 +7,6 @@ import ( "github.com/stretchr/testify/require" ) -func Test_splitFilters(t *testing.T) { - tests := []struct { - name string - filters map[string][]string - wantInclude map[string][]string - wantExclude map[string][]string - }{ - { - name: "empty filters", - filters: map[string][]string{}, - wantInclude: map[string][]string{}, - wantExclude: map[string][]string{}, - }, - { - name: "only include filters", - filters: map[string][]string{ - "source": {"identity1"}, - "destination": {"identity2", "identity3"}, - }, - wantInclude: map[string][]string{ - "source": {"identity1"}, - "destination": {"identity2", "identity3"}, - }, - wantExclude: map[string][]string{}, - }, - { - name: "only exclude filters", - filters: map[string][]string{ - "source-exclude": {"identity1"}, - "destination-exclude": {"identity2", "identity3"}, - }, - wantInclude: map[string][]string{}, - wantExclude: map[string][]string{ - "source": {"identity1"}, - "destination": {"identity2", "identity3"}, - }, - }, - { - name: "mixed filters", - filters: map[string][]string{ - "source": {"identity1"}, - "source-exclude": {"identity2"}, - "amount": {"100"}, - "inputType-exclude": {"1"}, - }, - wantInclude: map[string][]string{ - "source": {"identity1"}, - "amount": {"100"}, - }, - wantExclude: map[string][]string{ - "source": {"identity2"}, - "inputType": {"1"}, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotInclude, gotExclude := splitFilters(tt.filters) - require.Equal(t, tt.wantInclude, gotInclude) - require.Equal(t, tt.wantExclude, gotExclude) - }) - } -} - func Test_getFilterStrings(t *testing.T) { tests := []struct { name string @@ -132,18 +67,18 @@ func Test_getFilterStrings(t *testing.T) { func Test_getRangeFilterStrings(t *testing.T) { tests := []struct { name string - ranges map[string][]*entities.Range + ranges 
map[string][]entities.Range want []string wantErr bool }{ { name: "empty ranges", - ranges: map[string][]*entities.Range{}, + ranges: map[string][]entities.Range{}, want: []string{}, }, { name: "single range with single operation", - ranges: map[string][]*entities.Range{ + ranges: map[string][]entities.Range{ "amount": { {Operation: "gte", Value: "1000"}, }, @@ -154,7 +89,7 @@ func Test_getRangeFilterStrings(t *testing.T) { }, { name: "single range with multiple operations", - ranges: map[string][]*entities.Range{ + ranges: map[string][]entities.Range{ "tickNumber": { {Operation: "gte", Value: "100"}, {Operation: "lte", Value: "200"}, @@ -166,7 +101,7 @@ func Test_getRangeFilterStrings(t *testing.T) { }, { name: "multiple ranges", - ranges: map[string][]*entities.Range{ + ranges: map[string][]entities.Range{ "amount": { {Operation: "gt", Value: "0"}, }, @@ -181,7 +116,7 @@ func Test_getRangeFilterStrings(t *testing.T) { }, { name: "empty range slice returns error", - ranges: map[string][]*entities.Range{ + ranges: map[string][]entities.Range{ "amount": {}, }, wantErr: true, diff --git a/v2/domain/repository/elastic/transaction_identities_query_test.go b/v2/domain/repository/elastic/transaction_identities_query_test.go index 60f36b2..90ec072 100644 --- a/v2/domain/repository/elastic/transaction_identities_query_test.go +++ b/v2/domain/repository/elastic/transaction_identities_query_test.go @@ -28,7 +28,7 @@ func Test_createIdentitiesQuery_returnQuery(t *testing.T) { "track_total_hits": 10000 }` - query, err := createIdentitiesQuery(testIdentity, nil, nil, 0, 10, 12345) + query, err := createIdentitiesQuery(testIdentity, entities.Filters{}, 0, 10, 12345) require.NoError(t, err) require.NotEmpty(t, query) @@ -58,7 +58,10 @@ func Test_createIdentitiesQuery_givenFilters_returnQueryWithFilters(t *testing.T }` filters := map[string][]string{"some-value": {"42"}, "another-value": {"foo"}} - query, err := createIdentitiesQuery(testIdentity, filters, nil, 0, 5, 1000000) + f 
:= entities.Filters{ + Include: filters, + } + query, err := createIdentitiesQuery(testIdentity, f, 0, 5, 1000000) require.NoError(t, err) require.NotEmpty(t, query) @@ -89,8 +92,11 @@ func Test_createIdentitiesQuery_givenExcludeFilters_returnQueryWithExcludeFilter "track_total_hits": 10000 }` - filters := map[string][]string{"some-value-exclude": {"42"}, "another-value-exclude": {"foo"}} - query, err := createIdentitiesQuery(testIdentity, filters, nil, 0, 5, 1000000) + filters := map[string][]string{"some-value": {"42"}, "another-value": {"foo"}} + f := entities.Filters{ + Exclude: filters, + } + query, err := createIdentitiesQuery(testIdentity, f, 0, 5, 1000000) require.NoError(t, err) require.NotEmpty(t, query) @@ -120,11 +126,11 @@ func Test_createIdentitiesQuery_givenRanges_returnQueryWithFilters(t *testing.T) "track_total_hits": 10000 }` - range1 := []*entities.Range{{Operation: "lt", Value: "42"}} - range2 := []*entities.Range{{Operation: "gte", Value: "12"}, {Operation: "lte", Value: "43"}} - range3 := []*entities.Range{{Operation: "gt", Value: "44"}} - ranges := map[string][]*entities.Range{"some-value": range1, "another-value": range2, "third-value": range3} - query, err := createIdentitiesQuery(testIdentity, nil, ranges, 0, 5, 1000000) + range1 := []entities.Range{{Operation: "lt", Value: "42"}} + range2 := []entities.Range{{Operation: "gte", Value: "12"}, {Operation: "lte", Value: "43"}} + range3 := []entities.Range{{Operation: "gt", Value: "44"}} + ranges := map[string][]entities.Range{"some-value": range1, "another-value": range2, "third-value": range3} + query, err := createIdentitiesQuery(testIdentity, entities.Filters{Ranges: ranges}, 0, 5, 1000000) require.NoError(t, err) require.NotEmpty(t, query) @@ -158,10 +164,14 @@ func Test_createIdentitiesQuery_givenRangesAndFilters_returnQueryWithAllFilters( "track_total_hits": 10000 }` - range1 := []*entities.Range{{Operation: "lte", Value: "42"}, {Operation: "gt", Value: "0"}} - ranges := 
map[string][]*entities.Range{"range-value": range1} - filters := map[string][]string{"some-value": {"42"}, "another-value": {"foo"}, "other-value-exclude": {"exclude-me", "exclude-me-too"}} - query, err := createIdentitiesQuery(testIdentity, filters, ranges, 200, 100, 1000000) + range1 := []entities.Range{{Operation: "lte", Value: "42"}, {Operation: "gt", Value: "0"}} + ranges := map[string][]entities.Range{"range-value": range1} + filters := entities.Filters{ + Include: map[string][]string{"some-value": {"42"}, "another-value": {"foo"}}, + Exclude: map[string][]string{"other-value": {"exclude-me", "exclude-me-too"}}, + Ranges: ranges, + } + query, err := createIdentitiesQuery(testIdentity, filters, 200, 100, 1000000) require.NoError(t, err) require.NotEmpty(t, query) @@ -170,7 +180,7 @@ func Test_createIdentitiesQuery_givenRangesAndFilters_returnQueryWithAllFilters( } func Test_modifyUpperBoundTickNumberFilterIfNecessary_noRangeFilter(t *testing.T) { - ranges := map[string][]*entities.Range{} + ranges := map[string][]entities.Range{} maxTick := uint32(1000) hasUpperBound, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) @@ -179,7 +189,7 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_noRangeFilter(t *testing.T } func Test_modifyUpperBoundTickNumberFilterIfNecessary_upperBoundReplacedWithMaxTick(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lte", Value: "5000"}, }, @@ -189,11 +199,12 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_upperBoundReplacedWithMaxT hasUpperBound, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) require.NoError(t, err) require.True(t, hasUpperBound) + require.Equal(t, "lte", ranges["tickNumber"][0].Operation) require.Equal(t, "1000", ranges["tickNumber"][0].Value) } func Test_modifyUpperBoundTickNumberFilterIfNecessary_upperBoundNotReplaced(t *testing.T) { - ranges := map[string][]*entities.Range{ + 
ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lte", Value: "999"}, }, @@ -203,11 +214,12 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_upperBoundNotReplaced(t *t hasUpperBound, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) require.NoError(t, err) require.True(t, hasUpperBound) + require.Equal(t, "lte", ranges["tickNumber"][0].Operation) require.Equal(t, "999", ranges["tickNumber"][0].Value) } func Test_modifyUpperBoundTickNumberFilterIfNecessary_onlyLowerBound(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "gte", Value: "100"}, }, @@ -217,11 +229,12 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_onlyLowerBound(t *testing. hasUpperBound, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) require.NoError(t, err) require.False(t, hasUpperBound) + require.Equal(t, "gte", ranges["tickNumber"][0].Operation) require.Equal(t, "100", ranges["tickNumber"][0].Value) } func Test_modifyUpperBoundTickNumberFilterIfNecessary_ltOperatorNotReplacedWithMaxTick(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lt", Value: "1001"}, }, @@ -231,11 +244,12 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_ltOperatorNotReplacedWithM hasUpperBound, err := modifyUpperBoundTickNumberFilterIfNecessary(ranges, maxTick) require.NoError(t, err) require.True(t, hasUpperBound) + require.Equal(t, "lt", ranges["tickNumber"][0].Operation) require.Equal(t, "1001", ranges["tickNumber"][0].Value) } func Test_modifyUpperBoundTickNumberFilterIfNecessary_ltOperatorReplacedWithMaxTick(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lt", Value: "1002"}, }, @@ -249,7 +263,7 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_ltOperatorReplacedWithMaxT } func 
Test_modifyUpperBoundTickNumberFilterIfNecessary_lteOperatorReplacedWithMaxTick(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lte", Value: "5000"}, }, @@ -263,7 +277,7 @@ func Test_modifyUpperBoundTickNumberFilterIfNecessary_lteOperatorReplacedWithMax } func Test_modifyUpperBoundTickNumberFilterIfNecessary_invalidValueReturnsError(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": { {Operation: "lte", Value: "not-a-number"}, }, @@ -296,10 +310,10 @@ func Test_createIdentitiesQuery_withTickNumberUpperBound_omitsMaxTickFilter(t *t "track_total_hits": 10000 }` - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": {{Operation: "lte", Value: "500"}}, } - query, err := createIdentitiesQuery(testIdentity, nil, ranges, 0, 10, 1000) + query, err := createIdentitiesQuery(testIdentity, entities.Filters{Ranges: ranges}, 0, 10, 1000) require.NoError(t, err) require.NotEmpty(t, query) @@ -326,10 +340,10 @@ func Test_createIdentitiesQuery_withTickNumberUpperBoundExceedingMaxTick_replace "track_total_hits": 10000 }` - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": {{Operation: "lt", Value: "5000"}}, } - query, err := createIdentitiesQuery(testIdentity, nil, ranges, 0, 10, 1000) + query, err := createIdentitiesQuery(testIdentity, entities.Filters{Ranges: ranges}, 0, 10, 1000) require.NoError(t, err) require.NotEmpty(t, query) @@ -357,10 +371,10 @@ func Test_createIdentitiesQuery_withTickNumberLowerBoundOnly_includesMaxTickFilt "track_total_hits": 10000 }` - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "tickNumber": {{Operation: "gte", Value: "100"}}, } - query, err := createIdentitiesQuery(testIdentity, nil, ranges, 0, 10, 1000) + query, err := createIdentitiesQuery(testIdentity, entities.Filters{Ranges: ranges}, 0, 
10, 1000) require.NoError(t, err) require.NotEmpty(t, query) diff --git a/v2/domain/repository/elastic/transaction_integration_test.go b/v2/domain/repository/elastic/transaction_integration_test.go index 3a0183c..8781d09 100644 --- a/v2/domain/repository/elastic/transaction_integration_test.go +++ b/v2/domain/repository/elastic/transaction_integration_test.go @@ -232,12 +232,12 @@ func (t *transactionsSuite) Test_GetTransactionByHash() { } func (t *transactionsSuite) Test_GetIdentityTransactions() { - txs, hits, err := t.repo.GetTransactionsForIdentity(t.ctx, - "KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA", - 200, - map[string][]string{"destination": {"KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA"}}, // excludes tx 3 - map[string][]*entities.Range{"tickNumber": {{Operation: "lt", Value: "100"}}}, // does not match tx 4 - 0, 10, + filters := entities.Filters{ + Include: map[string][]string{"destination": {"KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA"}}, // excludes tx 3 + Ranges: map[string][]entities.Range{"tickNumber": {{Operation: "lt", Value: "100"}}}, // does not match tx 4 + } + txs, hits, err := t.repo.GetTransactionsForIdentity(t.ctx, "KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA", + 200, filters, 0, 10, ) require.NoError(t.T(), err, "getting transactions for identity") require.Len(t.T(), txs, 2) @@ -254,14 +254,20 @@ func (t *transactionsSuite) Test_GetIdentityTransactions() { } func (t *transactionsSuite) Test_GetIdentityTransactions_GivenExcludeFilters() { + filters := entities.Filters{ + Include: map[string][]string{ + "destination": {"KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA"}, // excludes tx 3 + + }, + Exclude: map[string][]string{ + "source": {"ENYTRGQOXEUCDFYZUSJTKTKJIZJABAHZQQANAQCPDBKJRDAZQIFMGIRDWGPO"}, // excludes tx 1 + }, + } + txs, hits, err := t.repo.GetTransactionsForIdentity(t.ctx, "KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA", 200, - 
map[string][]string{ - "destination": {"KDPFLKJDPLRPZGLWNGPYBPSOXONATJZEIQZQPMWLTDWTGAFOKGNTZMFAMSAA"}, // excludes tx 3 - "source-exclude": {"ENYTRGQOXEUCDFYZUSJTKTKJIZJABAHZQQANAQCPDBKJRDAZQIFMGIRDWGPO"}, // excludes tx 1 - }, - map[string][]*entities.Range{}, + filters, 0, 10, ) require.NoError(t.T(), err, "getting transactions for identity with exclude filters") diff --git a/v2/domain/repository/elastic/transaction_range_filter_test.go b/v2/domain/repository/elastic/transaction_range_filter_test.go index 3108fea..f788625 100644 --- a/v2/domain/repository/elastic/transaction_range_filter_test.go +++ b/v2/domain/repository/elastic/transaction_range_filter_test.go @@ -11,13 +11,13 @@ func Test_createRangeFilter(t *testing.T) { tests := []struct { name string property string - ranges []*entities.Range + ranges []entities.Range want string }{ { name: "valid single range", property: "tick", - ranges: []*entities.Range{ + ranges: []entities.Range{ {Operation: "gte", Value: "100"}, }, want: `{"range":{"tick":{"gte":"100"}}}`, @@ -25,7 +25,7 @@ func Test_createRangeFilter(t *testing.T) { { name: "valid dual range", property: "tick", - ranges: []*entities.Range{ + ranges: []entities.Range{ {Operation: "gte", Value: "100"}, {Operation: "lte", Value: "200"}, }, diff --git a/v2/domain/repository/elastic/transaction_tick_query_test.go b/v2/domain/repository/elastic/transaction_tick_query_test.go index 2e7831f..7f20369 100644 --- a/v2/domain/repository/elastic/transaction_tick_query_test.go +++ b/v2/domain/repository/elastic/transaction_tick_query_test.go @@ -58,7 +58,7 @@ func Test_createTickTransactionsQuery_withFilters(t *testing.T) { } func Test_createTickTransactionsQuery_withRanges(t *testing.T) { - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "amount": { {Operation: "gte", Value: "1000"}, {Operation: "lte", Value: "10000"}, @@ -87,7 +87,7 @@ func Test_createTickTransactionsQuery_withFiltersAndRanges(t *testing.T) { filters := 
map[string][]string{ "destination": {"SOMEDESTIDENTITY123456789012345678901234567890123456"}, } - ranges := map[string][]*entities.Range{ + ranges := map[string][]entities.Range{ "amount": { {Operation: "gte", Value: "100"}, }, diff --git a/v2/domain/transaction.go b/v2/domain/transaction.go index 35fd1dd..259b76d 100644 --- a/v2/domain/transaction.go +++ b/v2/domain/transaction.go @@ -12,13 +12,12 @@ import ( //go:generate go tool go.uber.org/mock/mockgen -destination=mock/transactions.mock.go -package=mock -source transaction.go type TransactionRepository interface { GetTransactionByHash(ctx context.Context, hash string) (*api.Transaction, error) - GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) + GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) GetTransactionsForIdentity( ctx context.Context, identity string, maxTick uint32, - filters map[string][]string, - ranges map[string][]*entities.Range, + filters entities.Filters, from, size uint32, ) ([]*api.Transaction, *entities.Hits, error) } @@ -45,18 +44,17 @@ func (s *TransactionService) GetTransactionByHash(ctx context.Context, hash stri return tx, err } -func (s *TransactionService) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) { +func (s *TransactionService) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) { return s.repo.GetTransactionsForTickNumber(ctx, tickNumber, filters, ranges) } -func (s *TransactionService) GetTransactionsForIdentity(ctx context.Context, identity string, filters map[string][]string, - ranges map[string][]*entities.Range, from, size 
uint32) (*entities.TransactionsResult, error) { +func (s *TransactionService) GetTransactionsForIdentity(ctx context.Context, identity string, filters entities.Filters, from, size uint32) (*entities.TransactionsResult, error) { status, err := s.statusFetcher(ctx) if err != nil || status == nil || status.LastProcessedTick < 1 { return nil, err } - txs, hits, err := s.repo.GetTransactionsForIdentity(ctx, identity, status.LastProcessedTick, filters, ranges, from, size) + txs, hits, err := s.repo.GetTransactionsForIdentity(ctx, identity, status.LastProcessedTick, filters, from, size) return &entities.TransactionsResult{LastProcessedTick: status.LastProcessedTick, Hits: hits, Transactions: txs}, err } diff --git a/v2/domain/transaction_test.go b/v2/domain/transaction_test.go index 9aa547e..dae90c8 100644 --- a/v2/domain/transaction_test.go +++ b/v2/domain/transaction_test.go @@ -47,9 +47,9 @@ func TestTransactionService_GetTransactionByIdentity(t *testing.T) { apiTransactions := []*api.Transaction{{Hash: "test-hash-1"}, {Hash: "test-hash-2"}} entityHits := &entities.Hits{Total: 42, Relation: "eq"} ctx := context.Background() - repo.EXPECT().GetTransactionsForIdentity(ctx, "test-identity", uint32(10), nil, nil, uint32(0), uint32(2)).Return(apiTransactions, entityHits, nil) + repo.EXPECT().GetTransactionsForIdentity(ctx, "test-identity", uint32(10), entities.Filters{}, uint32(0), uint32(2)).Return(apiTransactions, entityHits, nil) - result, err := service.GetTransactionsForIdentity(ctx, "test-identity", nil, nil, 0, 2) + result, err := service.GetTransactionsForIdentity(ctx, "test-identity", entities.Filters{}, 0, 2) require.NoError(t, err) require.Len(t, result.GetTransactions(), 2) diff --git a/v2/entities/filter.go b/v2/entities/filter.go new file mode 100644 index 0000000..8ce7d9b --- /dev/null +++ b/v2/entities/filter.go @@ -0,0 +1,18 @@ +package entities + +type Filters struct { + Include map[string][]string + Exclude map[string][]string + Should []ShouldFilter + 
Ranges map[string][]Range +} + +type Range struct { + Operation string + Value string +} + +type ShouldFilter struct { + Terms map[string][]string + Ranges map[string][]Range +} diff --git a/v2/entities/range.go b/v2/entities/range.go deleted file mode 100644 index 4053406..0000000 --- a/v2/entities/range.go +++ /dev/null @@ -1,6 +0,0 @@ -package entities - -type Range struct { - Operation string - Value string -} diff --git a/v2/go.mod b/v2/go.mod index 93156fe..557e0ac 100644 --- a/v2/go.mod +++ b/v2/go.mod @@ -1,26 +1,26 @@ module github.com/qubic/archive-query-service/v2 -go 1.25.0 +go 1.26 require ( github.com/ardanlabs/conf v1.5.0 - github.com/elastic/go-elasticsearch/v8 v8.19.1 + github.com/elastic/go-elasticsearch/v8 v8.19.3 github.com/google/gnostic v0.7.1 github.com/google/go-cmp v0.7.0 github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 - github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.4 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 github.com/jellydator/ttlcache/v3 v3.4.0 github.com/prometheus/client_golang v1.23.2 github.com/qubic/go-data-publisher/status-service v1.3.3 - github.com/qubic/go-node-connector v0.16.0 - github.com/redis/go-redis/v9 v9.17.2 + github.com/qubic/go-node-connector v0.17.0 + github.com/redis/go-redis/v9 v9.18.0 github.com/stretchr/testify v1.11.1 github.com/testcontainers/testcontainers-go v0.40.0 github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.40.0 go.uber.org/mock v0.6.0 golang.org/x/sync v0.19.0 - google.golang.org/genproto/googleapis/api v0.0.0-20260114163908-3f89685c29c3 - google.golang.org/grpc v1.78.0 + google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171 + google.golang.org/grpc v1.79.2 google.golang.org/protobuf v1.36.11 ) @@ -32,7 +32,7 @@ require ( github.com/bits-and-blooms/bitset v1.24.4 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/cloudflare/circl v1.6.2 // indirect + 
github.com/cloudflare/circl v1.6.3 // indirect github.com/consensys/gnark-crypto v0.19.2 // indirect github.com/containerd/errdefs v1.0.0 // indirect github.com/containerd/errdefs/pkg v0.3.0 // indirect @@ -45,8 +45,8 @@ require ( github.com/docker/docker v28.5.2+incompatible // indirect github.com/docker/go-connections v0.6.0 // indirect github.com/docker/go-units v0.5.0 // indirect - github.com/ebitengine/purego v0.9.1 // indirect - github.com/elastic/elastic-transport-go/v8 v8.8.0 // indirect + github.com/ebitengine/purego v0.10.0 // indirect + github.com/elastic/elastic-transport-go/v8 v8.9.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect @@ -54,9 +54,9 @@ require ( github.com/google/gnostic-models v0.7.1 // indirect github.com/google/uuid v1.6.0 // indirect github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect - github.com/klauspost/compress v1.18.3 // indirect + github.com/klauspost/compress v1.18.4 // indirect github.com/linckode/circl v1.3.71 // indirect - github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 // indirect + github.com/lufia/plan9stats v0.0.0-20260216142805-b3301c5f2a88 // indirect github.com/magiconair/properties v1.8.10 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/go-archive v0.2.0 // indirect @@ -74,27 +74,28 @@ require ( github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.67.5 // indirect - github.com/prometheus/procfs v0.19.2 // indirect + github.com/prometheus/procfs v0.20.1 // indirect github.com/qubic/go-schnorrq v1.0.1 // indirect - github.com/shirou/gopsutil/v4 v4.25.12 // indirect + github.com/shirou/gopsutil/v4 v4.26.2 // indirect github.com/sirupsen/logrus v1.9.4 // indirect github.com/tklauser/go-sysconf v0.3.16 // indirect github.com/tklauser/numcpus v0.11.0 // 
indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0 // indirect - go.opentelemetry.io/otel v1.39.0 // indirect - go.opentelemetry.io/otel/metric v1.39.0 // indirect - go.opentelemetry.io/otel/trace v1.39.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.66.0 // indirect + go.opentelemetry.io/otel v1.41.0 // indirect + go.opentelemetry.io/otel/metric v1.41.0 // indirect + go.opentelemetry.io/otel/trace v1.41.0 // indirect + go.uber.org/atomic v1.11.0 // indirect go.yaml.in/yaml/v2 v2.4.3 // indirect go.yaml.in/yaml/v3 v3.0.4 // indirect - golang.org/x/crypto v0.47.0 // indirect - golang.org/x/mod v0.32.0 // indirect - golang.org/x/net v0.49.0 // indirect - golang.org/x/sys v0.40.0 // indirect - golang.org/x/text v0.33.0 // indirect - golang.org/x/tools v0.41.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20260114163908-3f89685c29c3 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/mod v0.33.0 // indirect + golang.org/x/net v0.51.0 // indirect + golang.org/x/sys v0.41.0 // indirect + golang.org/x/text v0.34.0 // indirect + golang.org/x/tools v0.42.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/v2/go.sum b/v2/go.sum index 37abde2..51a7a65 100644 --- a/v2/go.sum +++ b/v2/go.sum @@ -20,8 +20,8 @@ github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK3 github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ= -github.com/cloudflare/circl v1.6.2/go.mod 
h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= github.com/cloudflare/fourq v0.0.0-20241014204117-d1fc726fa289 h1:crjs3GYOywGqn+F4q1O2H43u8BpuECGcd/2UojpanDw= github.com/cloudflare/fourq v0.0.0-20241014204117-d1fc726fa289/go.mod h1:13nQglQo5cpucnNY80duyW/6HK+WQ9+dHZ70UzAy6Jw= github.com/consensys/gnark-crypto v0.19.2 h1:qrEAIXq3T4egxqiliFFoNrepkIWVEeIYwt3UL0fvS80= @@ -50,12 +50,12 @@ github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pM github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A= -github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= -github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo= -github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk= -github.com/elastic/go-elasticsearch/v8 v8.19.1 h1:0iEGt5/Ds9MNVxEp3hqLsXdbe6SjleaVHONg/FuR09Q= -github.com/elastic/go-elasticsearch/v8 v8.19.1/go.mod h1:tHJQdInFa6abmDbDCEH2LJja07l/SIpaGpJcm13nt7s= +github.com/ebitengine/purego v0.10.0 h1:QIw4xfpWT6GWTzaW5XEKy3HXoqrJGx1ijYHzTF0/ISU= +github.com/ebitengine/purego v0.10.0/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/elastic/elastic-transport-go/v8 v8.9.0 h1:KeT/2P54F0xS0S8Y3Pf+tFDg4HmBgReQMB+BMz8dDAs= +github.com/elastic/elastic-transport-go/v8 v8.9.0/go.mod h1:ssMTvNS2hwf7CaiGsRRsx4gQHFZ/jS/DkLcISxekWzc= +github.com/elastic/go-elasticsearch/v8 v8.19.3 h1:5LDg0hfGJXBa9Y+2QlUgRTsNJ/7rm7oNidydtFAq0LI= 
+github.com/elastic/go-elasticsearch/v8 v8.19.3/go.mod h1:tHJQdInFa6abmDbDCEH2LJja07l/SIpaGpJcm13nt7s= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= @@ -81,12 +81,14 @@ github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 h1:QGLs github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0/go.mod h1:hM2alZsMUni80N33RBe6J0e423LB+odMj7d3EMP9l20= github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 h1:B+8ClL/kCQkRiU82d9xajRPKYMrB7E0MbtzWVi1K4ns= github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3/go.mod h1:NbCUVmiS4foBGBHOYlCT25+YmGpJ32dZPi75pGEUpj4= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.4 h1:kEISI/Gx67NzH3nJxAmY/dGac80kKZgZt134u7Y/k1s= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.4/go.mod h1:6Nz966r3vQYCqIzWsuEl9d7cf7mRhtDmm++sOxlnfxI= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c= github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY= github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4= -github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw= -github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/compress v1.18.4 h1:RPhnKRAQ4Fh8zU2FY/6ZFDwTVTxgJ/EMydqSTzE9a2c= +github.com/klauspost/compress v1.18.4/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/kr/pretty v0.3.1 
h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= @@ -97,8 +99,8 @@ github.com/leanovate/gopter v0.2.11 h1:vRjThO1EKPb/1NsDXuDrzldR28RLkBflWYcU9CvzW github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f7Xvpt0S9c= github.com/linckode/circl v1.3.71 h1:/TQQSpJ6PWtUb9G45trTvM/OtEEzchBN5j7/+KqjR4o= github.com/linckode/circl v1.3.71/go.mod h1:dLQ5MZBjeiL72xd7hsKV+MmYrI0m07e/ZFzGB18L4yg= -github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 h1:PwQumkgq4/acIiZhtifTV5OUqqiP82UAl0h87xj/l9k= -github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= +github.com/lufia/plan9stats v0.0.0-20260216142805-b3301c5f2a88 h1:PTw+yKnXcOFCR6+8hHTyWBeQ/P4Nb7dd4/0ohEcWQuM= +github.com/lufia/plan9stats v0.0.0-20260216142805-b3301c5f2a88/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= @@ -137,20 +139,20 @@ github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNw github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.67.5 h1:pIgK94WWlQt1WLwAC5j2ynLaBRDiinoAb86HZHTUGI4= github.com/prometheus/common v0.67.5/go.mod h1:SjE/0MzDEEAyrdr5Gqc6G+sXI67maCxzaT3A2+HqjUw= -github.com/prometheus/procfs v0.19.2 h1:zUMhqEW66Ex7OXIiDkll3tl9a1ZdilUOd/F6ZXw4Vws= -github.com/prometheus/procfs v0.19.2/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw= +github.com/prometheus/procfs v0.20.1 h1:XwbrGOIplXW/AU3YhIhLODXMJYyC1isLFfYCsTEycfc= +github.com/prometheus/procfs v0.20.1/go.mod 
h1:o9EMBZGRyvDrSPH1RqdxhojkuXstoe4UlK79eF5TGGo= github.com/qubic/go-data-publisher/status-service v1.3.3 h1:6amPGI+v+517GH3+o4979DHayX5eQZZZtumUsOzhFSg= github.com/qubic/go-data-publisher/status-service v1.3.3/go.mod h1:gpy6MD9ujVOxIjLUsjzj7Uupf6ZmNZo/aVDS4nsok28= -github.com/qubic/go-node-connector v0.16.0 h1:gvY5m0CAFvLSSJVg7ahrylSyeofyZWbh2elNp189Gnk= -github.com/qubic/go-node-connector v0.16.0/go.mod h1:GOQGJ6IKhm3CU62bfLJJbBerjYmt6NNhDKzxC1i7HhU= +github.com/qubic/go-node-connector v0.17.0 h1:ifsqODfO3vw16av0GGQBcq8qgcQXfYD1Y4/xIvlasx8= +github.com/qubic/go-node-connector v0.17.0/go.mod h1:GOQGJ6IKhm3CU62bfLJJbBerjYmt6NNhDKzxC1i7HhU= github.com/qubic/go-schnorrq v1.0.1 h1:F0R/BQVf+O7Bp57NGJmc3uXlqsaIzerg/1bmU4jMLLE= github.com/qubic/go-schnorrq v1.0.1/go.mod h1:j2qw/zHiyjH9GAScAAETWpZk6iELbjYnzIg7CQwc5wM= -github.com/redis/go-redis/v9 v9.17.2 h1:P2EGsA4qVIM3Pp+aPocCJ7DguDHhqrXNhVcEp4ViluI= -github.com/redis/go-redis/v9 v9.17.2/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370= +github.com/redis/go-redis/v9 v9.18.0 h1:pMkxYPkEbMPwRdenAzUNyFNrDgHx9U+DrBabWNfSRQs= +github.com/redis/go-redis/v9 v9.18.0/go.mod h1:k3ufPphLU5YXwNTUcCRXGxUoF1fqxnhFQmscfkCoDA0= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= -github.com/shirou/gopsutil/v4 v4.25.12 h1:e7PvW/0RmJ8p8vPGJH4jvNkOyLmbkXgXW4m6ZPic6CY= -github.com/shirou/gopsutil/v4 v4.25.12/go.mod h1:EivAfP5x2EhLp2ovdpKSozecVXn1TmuG7SMzs/Wh4PU= +github.com/shirou/gopsutil/v4 v4.26.2 h1:X8i6sicvUFih4BmYIGT1m2wwgw2VG9YgrDTi7cIRGUI= +github.com/shirou/gopsutil/v4 v4.26.2/go.mod h1:LZ6ewCSkBqUpvSOf+LsTGnRinC6iaNUNMGBtDkJBaLQ= github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w= github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= @@ -167,26 +169,30 @@ 
github.com/tklauser/numcpus v0.11.0 h1:nSTwhKH5e1dMNsCdVBukSZrURJRoHbSEQjdEbY+9R github.com/tklauser/numcpus v0.11.0/go.mod h1:z+LwcLq54uWZTX0u/bGobaV34u6V7KNlTZejzM6/3MQ= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= +github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0 h1:ssfIgGNANqpVFCndZvcuyKbl0g+UAVcbBcqGkG28H0Y= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0/go.mod h1:GQ/474YrbE4Jx8gZ4q5I4hrhUzM6UPzyrqJYV2AqPoQ= -go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48= -go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.66.0 h1:PnV4kVnw0zOmwwFkAzCN5O07fw1YOIQor120zrh0AVo= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.66.0/go.mod h1:ofAwF4uinaf8SXdVzzbL4OsxJ3VfeEg3f/F6CeF49/Y= +go.opentelemetry.io/otel v1.41.0 h1:YlEwVsGAlCvczDILpUXpIpPSL/VPugt7zHThEMLce1c= +go.opentelemetry.io/otel v1.41.0/go.mod h1:Yt4UwgEKeT05QbLwbyHXEwhnjxNO6D8L5PQP51/46dE= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= 
-go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0= -go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs= -go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18= -go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE= -go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8= -go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew= -go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI= -go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA= +go.opentelemetry.io/otel/metric v1.41.0 h1:rFnDcs4gRzBcsO9tS8LCpgR0dxg4aaxWlJxCno7JlTQ= +go.opentelemetry.io/otel/metric v1.41.0/go.mod h1:xPvCwd9pU0VN8tPZYzDZV/BMj9CM9vs00GuBjeKhJps= +go.opentelemetry.io/otel/sdk v1.41.0 h1:YPIEXKmiAwkGl3Gu1huk1aYWwtpRLeskpV+wPisxBp8= +go.opentelemetry.io/otel/sdk v1.41.0/go.mod h1:ahFdU0G5y8IxglBf0QBJXgSe7agzjE4GiTJ6HT9ud90= +go.opentelemetry.io/otel/sdk/metric v1.41.0 h1:siZQIYBAUd1rlIWQT2uCxWJxcCO7q3TriaMlf08rXw8= +go.opentelemetry.io/otel/sdk/metric v1.41.0/go.mod h1:HNBuSvT7ROaGtGI50ArdRLUnvRTRGniSUZbxiWxSO8Y= +go.opentelemetry.io/otel/trace v1.41.0 h1:Vbk2co6bhj8L59ZJ6/xFTskY+tGAbOnCtQGVVa9TIN0= +go.opentelemetry.io/otel/trace v1.41.0/go.mod h1:U1NU4ULCoxeDKc09yCWdWe+3QoyweJcISEVa1RBzOis= go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= +go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= +go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.6.0 
h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= @@ -195,36 +201,36 @@ go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= -golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= -golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= -golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= -golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= -golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= -golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8= +golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w= +golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo= +golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= -golang.org/x/sys v0.40.0/go.mod 
h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= -golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= -golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= -golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= +golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= +golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= +golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 h1:vVKdlvoWBphwdxWKrFZEuM0kGgGLxUOYcY4U/2Vjg44= golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= -golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= +golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k= +golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/genproto/googleapis/api v0.0.0-20260114163908-3f89685c29c3 h1:X9z6obt+cWRX8XjDVOn+SZWhWe5kZHm46TThU9j+jss= -google.golang.org/genproto/googleapis/api v0.0.0-20260114163908-3f89685c29c3/go.mod h1:dd646eSK+Dk9kxVBl1nChEOhJPtMXriCcVb4x3o6J+E= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260114163908-3f89685c29c3 h1:C4WAdL+FbjnGlpp2S+HMVhBeCq2Lcib4xZqfPNF6OoQ= -google.golang.org/genproto/googleapis/rpc v0.0.0-20260114163908-3f89685c29c3/go.mod 
h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= -google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= -google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= +google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171 h1:tu/dtnW1o3wfaxCOjSLn5IRX4YDcJrtlpzYkhHhGaC4= +google.golang.org/genproto/googleapis/api v0.0.0-20260226221140-a57be14db171/go.mod h1:M5krXqk4GhBKvB596udGL3UyjL4I1+cTbK0orROM9ng= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 h1:ggcbiqK8WWh6l1dnltU4BgWGIGo+EVYxCaAPih/zQXQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= +google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU= +google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/v2/grpc/filters/create.go b/v2/grpc/filters/create.go new file mode 100644 index 0000000..ab65200 --- /dev/null +++ b/v2/grpc/filters/create.go @@ -0,0 +1,153 @@ +package filters + +import ( + "fmt" + "strconv" + "strings" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/entities" +) + +func CreateFilters(value string, maxValues, maxLength int) ([]string, error) { + // check max length to avoid further more costly processing + if maxLength > 0 && len(value) > maxLength { + return nil, fmt.Errorf("exceeds maximum length") + } + + // count commas first to avoid input with many strings before splitting + valCount := strings.Count(value, ",") + if valCount >= maxValues { + return nil, fmt.Errorf("more than [%d] values", 
maxValues) + } + + var err error + var val []string + + if maxValues > 1 { // split + + val, err = splitFilterValue(value) + if err != nil { + return nil, fmt.Errorf("splitting: %w", err) + } + + } else { // trim only + + val, err = trimFilterValue(value) + if err != nil { + return nil, fmt.Errorf("trimming: %w", err) + } + + } + return val, nil +} + +func CreateNumericRange(r *api.Range, bitSize int) ([]entities.Range, error) { + var ranges []entities.Range + var err error + var lowerBound uint64 + var upperBound uint64 + switch r.GetLowerBound().(type) { + case *api.Range_Gt: + lowerBound, err = stringToNumericValue(r.GetGt(), bitSize) + lowerBound++ + if err != nil { + return nil, fmt.Errorf("invalid [gt] value: %w", err) + } + ranges = append(ranges, entities.Range{ + Operation: "gt", + Value: r.GetGt(), + }) + case *api.Range_Gte: + lowerBound, err = stringToNumericValue(r.GetGte(), bitSize) + if err != nil { + return nil, fmt.Errorf("invalid [gte] value: %w", err) + } + ranges = append(ranges, entities.Range{ + Operation: "gte", + Value: r.GetGte(), + }) + } + + switch r.GetUpperBound().(type) { + case *api.Range_Lt: + upperBound, err = stringToNumericValue(r.GetLt(), bitSize) + upperBound-- + if err != nil { + return nil, fmt.Errorf("invalid [lt] value: %w", err) + } + ranges = append(ranges, entities.Range{ + Operation: "lt", + Value: r.GetLt(), + }) + case *api.Range_Lte: + upperBound, err = stringToNumericValue(r.GetLte(), bitSize) + if err != nil { + return nil, fmt.Errorf("invalid [lte] value: %w", err) + } + ranges = append(ranges, entities.Range{ + Operation: "lte", + Value: r.GetLte(), + }) + } + + if len(ranges) == 0 { + return nil, fmt.Errorf("invalid range: no bounds") + } + + if lowerBound > 0 && upperBound > 0 && lowerBound >= upperBound { + return nil, fmt.Errorf("invalid range: [%d:%d]", lowerBound, upperBound) + } + + return ranges, nil +} + +const excludeSuffix = "-exclude" + +// SplitDeprecatedIncludeExcludeFilters method for backwards 
compatibility that divides filters into normal and exclude filters (can have same name after splitting). +func SplitDeprecatedIncludeExcludeFilters(filters map[string]string) (map[string]string, map[string]string) { + includeFilters := make(map[string]string) + excludeFilters := make(map[string]string) + for k, v := range filters { + if strings.HasSuffix(k, excludeSuffix) { + excludeFilters[strings.TrimSuffix(k, excludeSuffix)] = v + } else { + includeFilters[k] = v + } + } + return includeFilters, excludeFilters +} + +func stringToNumericValue(val string, bitSize int) (uint64, error) { + number, err := strconv.ParseUint(val, 10, bitSize) + if err != nil { + return 0, err + } + return number, nil +} + +func splitFilterValue(value string) ([]string, error) { + split := strings.Split(value, ",") + values := make([]string, 0, len(split)) + seen := make(map[string]bool) + for _, s := range split { + trimmed := strings.TrimSpace(s) + if trimmed == "" { + return nil, fmt.Errorf("contains empty value") + } + if seen[trimmed] { + return nil, fmt.Errorf("contains duplicate value [%s]", trimmed) + } + seen[trimmed] = true + values = append(values, trimmed) + } + return values, nil +} + +func trimFilterValue(value string) ([]string, error) { + trimmed := strings.TrimSpace(value) + if len(trimmed) == 0 { + return nil, fmt.Errorf("empty value") + } + return []string{trimmed}, nil +} diff --git a/v2/grpc/filters/create_test.go b/v2/grpc/filters/create_test.go new file mode 100644 index 0000000..75a859f --- /dev/null +++ b/v2/grpc/filters/create_test.go @@ -0,0 +1,412 @@ +package filters + +import ( + "reflect" + "testing" + + "github.com/stretchr/testify/require" +) + +// Shared test constants +const validId = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" +const validId2 = "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" +const validId3 = "EAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVWRF" +const invalidId = 
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + +func TestCreateFilters(t *testing.T) { + tests := []struct { + name string + value string + maxValues int + maxLength int + want []string + wantErr bool + errMessage string + }{ + { + name: "single value", + value: "value1", + maxValues: 1, + maxLength: 0, + want: []string{"value1"}, + wantErr: false, + }, + { + name: "single value with spaces", + value: " value1 ", + maxValues: 1, + maxLength: 0, + want: []string{"value1"}, + wantErr: false, + }, + { + name: "multiple values", + value: "value1,value2,value3", + maxValues: 5, + maxLength: 0, + want: []string{"value1", "value2", "value3"}, + wantErr: false, + }, + { + name: "multiple values with spaces", + value: " value1 , value2, value3 ", + maxValues: 5, + maxLength: 0, + want: []string{"value1", "value2", "value3"}, + wantErr: false, + }, + { + name: "exceeds maximum length", + value: "1234567890", + maxValues: 1, + maxLength: 5, + want: nil, + wantErr: true, + errMessage: "exceeds maximum length", + }, + { + name: "empty value when splitting", + value: "value1,,value2", + maxValues: 5, + maxLength: 0, + want: nil, + wantErr: true, + errMessage: "splitting values", + }, + { + name: "too many values", + value: "v1,v2,v3,v4,v5,v6", + maxValues: 5, + maxLength: 0, + want: nil, + wantErr: true, + errMessage: "splitting values", + }, + { + name: "duplicate values", + value: "value1,value2,value1", + maxValues: 5, + maxLength: 0, + want: nil, + wantErr: true, + errMessage: "splitting values", + }, + { + name: "empty trimmed value", + value: " ", + maxValues: 1, + maxLength: 0, + want: nil, + wantErr: true, + errMessage: "trimming value", + }, + { + name: "at maximum length", + value: "12345", + maxValues: 1, + maxLength: 5, + want: []string{"12345"}, + wantErr: false, + }, + { + name: "no max length check", + value: "very long value that would exceed limits", + maxValues: 1, + maxLength: 0, + want: []string{"very long value that would exceed limits"}, + 
wantErr: false, + }, + { + name: "max values minus one", + value: "v1,v2,v3,v4", + maxValues: 5, + want: []string{"v1", "v2", "v3", "v4"}, + wantErr: false, + }, + { + name: "many values - exactly at limit", + value: "v1,v2,v3,v4,v5", + maxValues: 5, + want: []string{"v1", "v2", "v3", "v4", "v5"}, + wantErr: false, + errMessage: "has more than [5] values", + }, + { + name: "too many values - exceeds limit", + value: "v1,v2,v3,v4,v5,v6", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "has more than [5] values", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := CreateFilters(tt.value, tt.maxValues, tt.maxLength) + if (err != nil) != tt.wantErr { + t.Errorf("CreateFilters() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("CreateFilters() got = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_splitFilterValue(t *testing.T) { + tests := []struct { + name string + value string + maxValues int + want []string + wantErr bool + errMessage string + }{ + { + name: "single value", + value: "value1", + maxValues: 5, + want: []string{"value1"}, + wantErr: false, + }, + { + name: "multiple values", + value: "value1,value2,value3", + maxValues: 5, + want: []string{"value1", "value2", "value3"}, + wantErr: false, + }, + { + name: "values with spaces", + value: " value1 , value2, value3 ", + maxValues: 5, + want: []string{"value1", "value2", "value3"}, + wantErr: false, + }, + { + name: "empty value after split", + value: "value1,,value2", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains empty value", + }, + { + name: "empty value at start", + value: ",value1,value2", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains empty value", + }, + { + name: "empty value at end", + value: "value1,value2,", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains empty value", + }, + { + name: "duplicate values", + value: 
"value1,value2,value1", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains duplicate value [value1]", + }, + { + name: "duplicate values with different spacing", + value: "value1, value2 ,value1", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains duplicate value [value1]", + }, + { + name: "all empty values", + value: ",,", + maxValues: 5, + want: nil, + wantErr: true, + errMessage: "contains empty value", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := splitFilterValue(tt.value) + if (err != nil) != tt.wantErr { + t.Errorf("splitFilterValue() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("splitFilterValue() got = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_trimFilterValue(t *testing.T) { + tests := []struct { + name string + value string + want []string + wantErr bool + errMessage string + }{ + { + name: "simple value", + value: "value1", + want: []string{"value1"}, + wantErr: false, + }, + { + name: "value with leading spaces", + value: " value1", + want: []string{"value1"}, + wantErr: false, + }, + { + name: "value with trailing spaces", + value: "value1 ", + want: []string{"value1"}, + wantErr: false, + }, + { + name: "value with both leading and trailing spaces", + value: " value1 ", + want: []string{"value1"}, + wantErr: false, + }, + { + name: "value with internal spaces", + value: "value 1", + want: []string{"value 1"}, + wantErr: false, + }, + { + name: "value with commas (not split)", + value: "value1,value2,value3", + want: []string{"value1,value2,value3"}, + wantErr: false, + }, + { + name: "empty string", + value: "", + want: nil, + wantErr: true, + errMessage: "empty value", + }, + { + name: "only spaces", + value: " ", + want: nil, + wantErr: true, + errMessage: "empty value", + }, + { + name: "only tabs", + value: "\t\t", + want: nil, + wantErr: true, + errMessage: "empty value", + }, + { + name: 
"mixed whitespace", + value: " \t \n ", + want: nil, + wantErr: true, + errMessage: "empty value", + }, + { + name: "single character", + value: "a", + want: []string{"a"}, + wantErr: false, + }, + { + name: "numeric value", + value: "12345", + want: []string{"12345"}, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := trimFilterValue(tt.value) + if (err != nil) != tt.wantErr { + t.Errorf("trimFilterValue() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("trimFilterValue() got = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_splitIncludeExcludeFilters(t *testing.T) { + tests := []struct { + name string + filters map[string]string + wantInclude map[string]string + wantExclude map[string]string + }{ + { + name: "empty filters", + filters: map[string]string{}, + wantInclude: map[string]string{}, + wantExclude: map[string]string{}, + }, + { + name: "only include filters", + filters: map[string]string{ + "x": "a", + "y": "b", + }, + wantInclude: map[string]string{ + "x": "a", + "y": "b", + }, + wantExclude: map[string]string{}, + }, + { + name: "only exclude filters", + filters: map[string]string{ + "x-exclude": "a", + "y-exclude": "b", + }, + wantInclude: map[string]string{}, + wantExclude: map[string]string{ + "x": "a", + "y": "b", + }, + }, + { + name: "mixed filters", + filters: map[string]string{ + "x": "a", + "x-exclude": "b", + "y": "c", + "z-exclude": "d", + }, + wantInclude: map[string]string{ + "x": "a", + "y": "c", + }, + wantExclude: map[string]string{ + "x": "b", + "z": "d", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotInclude, gotExclude := SplitDeprecatedIncludeExcludeFilters(tt.filters) + require.Equal(t, tt.wantInclude, gotInclude) + require.Equal(t, tt.wantExclude, gotExclude) + }) + } +} diff --git a/v2/grpc/filters/events.go b/v2/grpc/filters/events.go new file mode 100644 index 
0000000..d5cf37a --- /dev/null +++ b/v2/grpc/filters/events.go @@ -0,0 +1,210 @@ +package filters + +import ( + "fmt" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/entities" + "github.com/qubic/archive-query-service/v2/grpc/utils" +) + +const ( + EventFilterSource = "source" + EventFilterDestination = "destination" + EventFilterTransactionHash = "transactionHash" + EventFilterTickNumber = "tickNumber" + EventFilterLogType = "logType" + EventFilterEpoch = "epoch" + EventFilterAmount = "amount" + EventFilterNumberOfShares = "numberOfShares" + EventRangeTimestamp = "timestamp" +) + +const maxValuesPerEventFilter = 5 +const maxValueLengthPerEventFilter = 5*60 + 5 + 4 // 5 IDs + comma + optional spaces + +var AllowedEventIncludeFilters = map[string]bool{ + EventFilterSource: true, + EventFilterDestination: true, + EventFilterTransactionHash: true, + EventFilterTickNumber: true, + EventFilterEpoch: true, + EventFilterAmount: true, + EventFilterNumberOfShares: true, + EventFilterLogType: true, +} + +var AllowedEventExcludeFilters = map[string]bool{ + EventFilterSource: true, + EventFilterDestination: true, +} + +var AllowedEventShouldFilters = map[string]bool{ + EventFilterSource: true, + EventFilterDestination: true, + EventFilterAmount: true, + EventFilterNumberOfShares: true, +} + +var AllowedEventRanges = map[string]bool{ + EventFilterTickNumber: true, + EventFilterEpoch: true, + EventFilterAmount: true, + EventFilterNumberOfShares: true, + EventRangeTimestamp: true, +} + +var AllowedEventShouldRanges = map[string]bool{ + EventFilterAmount: true, + EventFilterNumberOfShares: true, +} + +func CreateEventFilters(filterMap map[string]string, allowedKeys map[string]bool) (map[string][]string, error) { + + res := make(map[string][]string) + for k, v := range filterMap { + + shouldSplit := k == EventFilterSource || k == EventFilterDestination + + maxValues := utils.If(shouldSplit, 
maxValuesPerEventFilter, 1) + maxLength := utils.If(shouldSplit, maxValueLengthPerEventFilter, 60) + + vs, err := CreateFilters(v, maxValues, maxLength) + if err != nil { + return nil, fmt.Errorf("handling filter [%s]: %w", k, err) + } + res[k] = vs + } + + err := validateEventsFilters(res, allowedKeys) + if err != nil { + return nil, fmt.Errorf("validating filter: %w", err) + } + + return res, nil +} + +func validateEventsFilters(filterMap map[string][]string, allowedKeys map[string]bool) error { + if len(filterMap) == 0 { + return nil + } + + if len(filterMap) > len(allowedKeys) { + return fmt.Errorf("too many filters (%d)", len(filterMap)) + } + + for key, values := range filterMap { + + if _, ok := allowedKeys[key]; !ok { + return fmt.Errorf("unsupported filter [%s]", key) + } + + switch key { + case EventFilterSource, EventFilterDestination: + + err := ValidateIdentityFilterValues(values, maxValuesPerEventFilter) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + + case EventFilterTransactionHash: + + err := ValidateTransactionHashFilterValues(values, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + + case EventFilterTickNumber, EventFilterEpoch: + + err := ValidateUnsignedNumericFilterValues(values, 32, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + + case EventFilterAmount, EventFilterNumberOfShares: + + err := ValidateUnsignedNumericFilterValues(values, 64, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + + case EventFilterLogType: + + err := ValidateUnsignedNumericFilterValues(values, 8, 1) // up to 255 + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + + default: + return fmt.Errorf("unhandled filter: [%s]", key) + } + } + return nil +} + +func CreateEventRanges(ranges map[string]*api.Range, allowedKeys map[string]bool) (map[string][]entities.Range, error) { + convertedRanges := 
map[string][]entities.Range{} + if len(ranges) == 0 { + return nil, nil + } + if len(ranges) > len(allowedKeys) { + return nil, fmt.Errorf("too many ranges (%d)", len(ranges)) + } + + for key, value := range ranges { + + if _, ok := allowedKeys[key]; !ok { + return nil, fmt.Errorf("unsupported filter [%s]", key) + } + + switch key { + case EventFilterAmount, EventFilterNumberOfShares, EventRangeTimestamp: + r, err := CreateNumericRange(value, 64) + if err != nil { + return nil, fmt.Errorf("invalid [%s] range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + case EventFilterTickNumber, EventFilterEpoch: + r, err := CreateNumericRange(value, 32) + if err != nil { + return nil, fmt.Errorf("invalid [%s] range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + default: + return nil, fmt.Errorf("unhandled range: [%s]", key) + } + } + + return convertedRanges, nil +} + +const maxNumberOfShouldFilters = 2 + +func CreateShouldFilters(should []*api.ShouldFilter, allowedFilters, allowedRanges map[string]bool) ([]entities.ShouldFilter, error) { + if len(should) > maxNumberOfShouldFilters { + return nil, fmt.Errorf("too many should filters (%d)", len(should)) + } + var shouldFilters = make([]entities.ShouldFilter, 0, len(should)) + for _, shouldFilter := range should { + shouldFilterTerms, err := CreateEventFilters(shouldFilter.GetTerms(), allowedFilters) + if err != nil { + return nil, fmt.Errorf("creating filters: %w", err) + } + shouldFilterRanges, err := CreateEventRanges(shouldFilter.GetRanges(), allowedRanges) + if err != nil { + return nil, fmt.Errorf("creating ranges: %w", err) + } + if len(shouldFilterTerms)+len(shouldFilterRanges) < 2 { + return nil, fmt.Errorf("needs at least two filters") + } + shouldFilters = append(shouldFilters, entities.ShouldFilter{ + Terms: shouldFilterTerms, + Ranges: shouldFilterRanges, + }) + } + return shouldFilters, nil +} diff --git a/v2/grpc/filters/events_test.go b/v2/grpc/filters/events_test.go 
new file mode 100644 index 0000000..ed5844b --- /dev/null +++ b/v2/grpc/filters/events_test.go @@ -0,0 +1,411 @@ +package filters + +import ( + "fmt" + "testing" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/entities" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const validTransactionHash = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaafxib" +const validId4 = "FAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYWJB" +const validId5 = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQGNM" + +func TestCreateEventsFilters_ValidFilters(t *testing.T) { + filters := map[string]string{ + "transactionHash": validTransactionHash, + "tickNumber": "42", + "logType": "1", + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, map[string][]string{ + "transactionHash": {validTransactionHash}, + "tickNumber": {"42"}, + "logType": {"1"}, + }, result) +} + +func TestCreateEventsFilters_EmptyValue(t *testing.T) { + filters := map[string]string{ + "transactionHash": "", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "empty value") +} + +func TestCreateEventsFilters_MultipleValues(t *testing.T) { + filters := map[string]string{ + "source": fmt.Sprintf(" %s, %s ,%s ", validId, validId2, validId3), + } + created, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Len(t, created["source"], 3) + assert.Equal(t, created["source"], []string{validId, validId2, validId3}) +} + +func TestCreateEventsFilters_InvalidIdentity_Error(t *testing.T) { + filters := map[string]string{ + "source": fmt.Sprintf("%s, %s", validId, invalidId), + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.ErrorContains(t, err, 
"invalid identity") +} + +func TestValidateEventsFilters_ValidTransactionHash(t *testing.T) { + filters := map[string][]string{"transactionHash": {validTransactionHash}} + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) +} + +func TestValidateEventsFilters_ValidTickNumber(t *testing.T) { + filters := map[string][]string{"tickNumber": {"42"}} + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) +} + +func TestValidateEventsFilters_ValidEventType(t *testing.T) { + for _, et := range []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "255"} { + t.Run("eventType_"+et, func(t *testing.T) { + filters := map[string][]string{"logType": {et}} + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + }) + } +} + +func TestValidateEventsFilters_InvalidEventType(t *testing.T) { + for _, et := range []string{"-1", "256", "abc"} { + t.Run("eventType_"+et, func(t *testing.T) { + filters := map[string][]string{"logType": {et}} + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [logType] filter") + }) + } +} + +func TestValidateEventsFilters_InvalidTickNumber(t *testing.T) { + filters := map[string][]string{"tickNumber": {"not-a-number"}} + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [tickNumber] filter") +} + +func TestValidateEventsFilters_UnsupportedFilter(t *testing.T) { + filters := map[string][]string{"unknownFilter": {"value"}} + err := validateEventsFilters(filters, map[string]bool{"foo": true}) + require.Error(t, err) + assert.Contains(t, err.Error(), "unsupported filter") +} + +func TestValidateEventsFilters_TooManyFilters(t *testing.T) { + filters := map[string][]string{ + "transactionHash": {"abc"}, + "tickNumber": {"42"}, + "logType": {"1"}, + } + 
err := validateEventsFilters(filters, map[string]bool{"foo": true}) + require.Error(t, err) + assert.Contains(t, err.Error(), "too many filters") +} + +func TestValidateEventsFilters_CombinedFilters(t *testing.T) { + filters := map[string][]string{ + "transactionHash": {validTransactionHash}, + "tickNumber": {"42"}, + "logType": {"0"}, + } + err := validateEventsFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) +} + +func TestValidateEventsFilters_EmptyFilters(t *testing.T) { + err := validateEventsFilters(nil, map[string]bool{}) + require.NoError(t, err) + + err = validateEventsFilters(map[string][]string{}, map[string]bool{}) + require.NoError(t, err) +} + +// tests for source, destination, epoch, and amount filters + +func TestCreateEventsFilters_IdentityFilters_SingleValue(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: validId, + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{validId}, result[filterName]) + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_MultipleValues(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: fmt.Sprintf("%s,%s,%s", validId, validId2, validId3), + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{validId, validId2, validId3}, result[filterName]) + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_MaxValues(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: fmt.Sprintf("%s,%s,%s,%s", validId, validId2, 
validId3, validId4), + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{validId, validId2, validId3, validId4}, result[filterName]) + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_TooManyValues(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: fmt.Sprintf("%s,%s,%s,%s,%s,%s", validId, validId2, validId3, validId4, validId5, validId), + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "exceeds maximum length") + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_EmptyValue(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: "", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "empty value") + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_DuplicateValues(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: fmt.Sprintf("%s,%s,%s", validId, validId2, validId), + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate value") + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_InvalidIdentity(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: invalidId, + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, 
err) + assert.Contains(t, err.Error(), "invalid identity") + }) + } +} + +func TestCreateEventsFilters_IdentityFilters_EmptyInList(t *testing.T) { + filterNames := []string{"source", "destination"} + for _, filterName := range filterNames { + t.Run(filterName, func(t *testing.T) { + filters := map[string]string{ + filterName: fmt.Sprintf("%s,,%s", validId, validId2), + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "empty value") + }) + } +} + +func TestCreateEventsFilters_Epoch_ValidValue(t *testing.T) { + filters := map[string]string{ + "epoch": "100", + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{"100"}, result["epoch"]) +} + +func TestCreateEventsFilters_Epoch_ZeroValue(t *testing.T) { + filters := map[string]string{ + "epoch": "0", + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{"0"}, result["epoch"]) +} + +func TestCreateEventsFilters_Epoch_InvalidNegative(t *testing.T) { + filters := map[string]string{ + "epoch": "-1", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [epoch] filter") +} + +func TestCreateEventsFilters_Epoch_InvalidString(t *testing.T) { + filters := map[string]string{ + "epoch": "abc", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [epoch] filter") +} + +func TestCreateEventsFilters_Amount_ValidValue(t *testing.T) { + filters := map[string]string{ + "amount": "1000", + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{"1000"}, result["amount"]) +} + +func TestCreateEventsFilters_Amount_InvalidNegative(t *testing.T) { + filters := 
map[string]string{ + "amount": "-100", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [amount] filter") +} + +func TestCreateEventsFilters_Amount_InvalidString(t *testing.T) { + filters := map[string]string{ + "amount": "not-a-number", + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid [amount] filter") +} + +func TestCreateEventsFilters_NumberOfShares_ValidValue(t *testing.T) { + filters := map[string]string{ + "numberOfShares": "1000", + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{"1000"}, result["numberOfShares"]) +} + +func TestCreateEventsFilters_CombinedSourceAndDestination(t *testing.T) { + filters := map[string]string{ + "source": fmt.Sprintf("%s,%s", validId, validId2), + "destination": validId3, + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Equal(t, []string{validId, validId2}, result["source"]) + assert.Equal(t, []string{validId3}, result["destination"]) +} + +func TestCreateEventsFilters_MaxLengthForIdentityFilters(t *testing.T) { + // 5 IDs (60 chars each) + 4 commas + 4 spaces = 309 chars total + longValue := fmt.Sprintf("%s,%s,%s,%s", validId, validId2, validId3, validId4) + filters := map[string]string{ + "source": longValue, + } + result, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + require.NoError(t, err) + assert.Len(t, result["source"], 4) +} + +func TestCreateEventsFilters_ExceedsMaxLengthForIdentityFilters(t *testing.T) { + // Create a string that exceeds 309 characters + longValue := fmt.Sprintf("%s,%s,%s,%s,%s,%s", validId, validId2, validId3, validId4, validId5, validId) + filters := map[string]string{ + "source": longValue, + } + _, err := CreateEventFilters(filters, AllowedEventIncludeFilters) + 
require.Error(t, err) + assert.Contains(t, err.Error(), "exceeds maximum length") +} + +func TestCheckForConflictingFilters(t *testing.T) { + includeFilters := map[string][]string{ + "source": {"value1"}, + } + excludeFilters := map[string][]string{ + "source": {"value2"}, + } + conflictingFilters := entities.Filters{Include: includeFilters, Exclude: excludeFilters} + err := VerifyNoConflictingFilters(conflictingFilters) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate [source] filter") + + // no conflict + nonConflictingFilters := entities.Filters{Include: includeFilters, Exclude: map[string][]string{"foo": {"bar"}}} + err = VerifyNoConflictingFilters(nonConflictingFilters) + require.NoError(t, err) +} + +func TestCreateEventQueryRanges_ValidRange(t *testing.T) { + ranges := map[string]*api.Range{ + "amount": { + LowerBound: &api.Range_Gte{Gte: "100"}, + UpperBound: &api.Range_Lte{Lte: "1000"}, + }, + } + result, err := CreateEventRanges(ranges, AllowedEventRanges) + require.NoError(t, err) + require.Len(t, result, 1) + require.Contains(t, result, "amount") + assert.Len(t, result["amount"], 2) + assert.Equal(t, "gte", result["amount"][0].Operation) + assert.Equal(t, "100", result["amount"][0].Value) + assert.Equal(t, "lte", result["amount"][1].Operation) + assert.Equal(t, "1000", result["amount"][1].Value) +} + +func TestCreateEventQueryRanges_UnsupportedRangeType(t *testing.T) { + ranges := map[string]*api.Range{ + "logType": { + LowerBound: &api.Range_Gt{Gt: "1"}, + UpperBound: &api.Range_Lte{Lte: "6"}, + }, + } + _, err := CreateEventRanges(ranges, AllowedEventRanges) + require.ErrorContains(t, err, "unsupported filter [logType]") +} + +func TestCreateEventQueryRanges_InvalidRangeBounds(t *testing.T) { + ranges := map[string]*api.Range{ + "numberOfShares": { + LowerBound: &api.Range_Gte{Gte: "100"}, + UpperBound: &api.Range_Lte{Lte: "20"}, + }, + } + _, err := CreateEventRanges(ranges, AllowedEventRanges) + require.ErrorContains(t, err, 
"invalid [numberOfShares] range") +} diff --git a/v2/grpc/filters/identity_transactions.go b/v2/grpc/filters/identity_transactions.go new file mode 100644 index 0000000..0adaf7f --- /dev/null +++ b/v2/grpc/filters/identity_transactions.go @@ -0,0 +1,125 @@ +package filters + +import ( + "fmt" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/entities" + "github.com/qubic/archive-query-service/v2/grpc/utils" +) + +const ( + IdentityFilterSource = "source" + IdentityFilterDestination = "destination" + IdentityFilterAmount = "amount" + IdentityFilterInputType = "inputType" + IdentityFilterTickNumber = "tickNumber" + IdentityFilterTimestamp = "timestamp" +) + +const maxValuesPerIdentityFilter = 5 +const maxValueLengthPerIdentityFilter = 5*60 + 5 + 4 // 5 IDs + comma + optional spaces +const maxNumberOfPerIdentityFilters = 5 + +func CreateIdentityTransactionFilters(filterMap map[string]string) (map[string][]string, error) { + res := make(map[string][]string) + for k, v := range filterMap { + shouldSplit := k == IdentityFilterSource || k == IdentityFilterDestination + + maxValues := utils.If(shouldSplit, maxValuesPerIdentityFilter, 1) + maxLength := utils.If(shouldSplit, maxValueLengthPerIdentityFilter, 20) + + vs, err := CreateFilters(v, maxValues, maxLength) + if err != nil { + return nil, fmt.Errorf("handling filter [%s]: %w", k, err) + } + res[k] = vs + + } + + err := validateIdentityTransactionQueryFilters(res) + if err != nil { + return nil, fmt.Errorf("validating filters: %w", err) + } + + return res, nil +} + +func ValidateExcludeFilterKeys(excludeFilters map[string][]string) error { + for k := range excludeFilters { + if k != EventFilterSource && k != EventFilterDestination { + return fmt.Errorf("unsupported exclude filter [%s]", k) + } + } + return nil +} + +func validateIdentityTransactionQueryFilters(filterMap map[string][]string) error { + if len(filterMap) == 0 { + return nil + } + 
+ if len(filterMap) > maxNumberOfPerIdentityFilters { + return fmt.Errorf("too many filters (%d)", len(filterMap)) + } + + for key, values := range filterMap { + switch key { + case IdentityFilterSource, IdentityFilterDestination: + err := ValidateIdentityFilterValues(values, maxValuesPerIdentityFilter) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + case IdentityFilterAmount: + err := ValidateUnsignedNumericFilterValues(values, 64, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + case IdentityFilterTickNumber, IdentityFilterInputType: + err := ValidateUnsignedNumericFilterValues(values, 32, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + default: + return fmt.Errorf("unsupported filter: [%s]", key) + } + } + return nil +} + +const allowedNumberOfPerIdentityQueryRanges = 4 + +func CreateIdentityTransactionQueryRanges(ranges map[string]*api.Range) (map[string][]entities.Range, error) { + convertedRanges := map[string][]entities.Range{} + if len(ranges) == 0 { + return nil, nil + } + if len(ranges) > allowedNumberOfPerIdentityQueryRanges { + return nil, fmt.Errorf("too many ranges (%d)", len(ranges)) + } + + for key, value := range ranges { + switch key { + case IdentityFilterAmount, IdentityFilterTimestamp: + r, err := CreateNumericRange(value, 64) + if err != nil { + return nil, fmt.Errorf("invalid %s range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + case IdentityFilterTickNumber, IdentityFilterInputType: + r, err := CreateNumericRange(value, 32) + if err != nil { + return nil, fmt.Errorf("invalid %s range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + default: + return nil, fmt.Errorf("unsupported range: [%s]", key) + } + } + + return convertedRanges, nil +} diff --git a/v2/grpc/filters/identity_transactions_test.go b/v2/grpc/filters/identity_transactions_test.go new file mode 100644 index 0000000..bfc772e --- 
/dev/null +++ b/v2/grpc/filters/identity_transactions_test.go @@ -0,0 +1,319 @@ +package filters + +import ( + "fmt" + "reflect" + "testing" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/stretchr/testify/require" +) + +func Test_createIdentityTransactionFilters(t *testing.T) { + tests := []struct { + name string + filters map[string]string + want map[string][]string + wantErr bool + }{ + { + name: "single value for source (trim)", + filters: map[string]string{ + "source": validId + " ", + }, + want: map[string][]string{ + "source": {validId}, + }, + wantErr: false, + }, + { + name: "multiple values for source (split)", + filters: map[string]string{ + "source": fmt.Sprintf("%s,%s ,%s", validId, validId2, validId3), + }, + want: map[string][]string{ + "source": {validId, validId2, validId3}, + }, + wantErr: false, + }, + { + name: "duplicate value", + filters: map[string]string{ + "source": fmt.Sprintf("%s,%s,%s", validId, validId2, validId), + }, + want: nil, + wantErr: true, + }, + { + name: "multiple values for other (error because of comma)", + filters: map[string]string{ + "other": " value1, value2 ", + }, + want: nil, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := CreateIdentityTransactionFilters(tt.filters) + if (err != nil) != tt.wantErr { + t.Errorf("error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("got = %v, want %v", got, tt.want) + } + }) + } + +} + +// validations do no duplicate checking. 
creating filters does both crating and validating + +func Test_validateIdentityTransactionFilters_givenAllValid_thenNoError(t *testing.T) { + filters := map[string][]string{ + "source": {validId}, + "destination": {validId}, + "amount": {"100"}, + "inputType": {"42"}, + "tickNumber": {"43"}, + } + err := validateIdentityTransactionQueryFilters(filters) + require.NoError(t, err) +} + +func Test_validateIdentityTransactionFilters_givenMultipleValidValues_thenNoError(t *testing.T) { + filters := map[string][]string{ + "source": {validId, validId}, + "destination": {validId, validId}, + "amount": {"100"}, + "inputType": {"42"}, + } + err := validateIdentityTransactionQueryFilters(filters) + require.NoError(t, err) +} + +func Test_validateIdentityTransactionFilters_givenConflictingSourceFilter_thenError(t *testing.T) { + filters := map[string][]string{ + "source": {validId, validId}, + "source-exclude": {validId}, + } + err := validateIdentityTransactionQueryFilters(filters) + require.Error(t, err) +} + +func Test_validateIdentityTransactionFilters_givenConflictingDestinationFilter_thenError(t *testing.T) { + filters := map[string][]string{ + "destination": {validId}, + "destination-exclude": {validId, validId}, + } + err := validateIdentityTransactionQueryFilters(filters) + require.Error(t, err) +} + +func Test_validateIdentityTransactionFilters_givenUnsupported_thenError(t *testing.T) { + filters := map[string][]string{"timestamp": {"42"}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "unsupported filter: [timestamp]") +} + +func Test_validateIdentityTransactionFilters_givenInvalidAmount(t *testing.T) { + filters := map[string][]string{"amount": {"-1"}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid numeric value") +} + +func Test_validateIdentityTransactionFilters_givenMultipleAmounts(t *testing.T) { + filters := map[string][]string{"amount": {"1", "4"}} + err := 
validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenEmptyAmounts(t *testing.T) { + filters := map[string][]string{"amount": {}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenMultipleInputTypes(t *testing.T) { + filters := map[string][]string{"inputType": {"1", "2"}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenEmptyInputType(t *testing.T) { + filters := map[string][]string{"inputType": {}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenMultipleTickNumbers(t *testing.T) { + filters := map[string][]string{"tickNumber": {"1", "2"}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenEmptyTickNumber(t *testing.T) { + filters := map[string][]string{"tickNumber": {}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid number of values") +} + +func Test_validateIdentityTransactionFilters_givenInvalidSource(t *testing.T) { + filters := map[string][]string{"source": {invalidId}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid [source] filter") +} + +func Test_validateIdentityTransactionFilters_givenInvalidDestination(t *testing.T) { + filters := map[string][]string{"destination": {invalidId}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid [destination] filter") +} + +func 
Test_validateIdentityTransactionFilters_givenMultipleIdValuesIncludingInvalid_thenError(t *testing.T) { + filters := map[string][]string{"source": {validId, invalidId}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid [source] filter") +} + +func Test_validateIdentityTransactionFilters_givenInvalidInputType(t *testing.T) { + filters := map[string][]string{"inputType": {"foo"}} + err := validateIdentityTransactionQueryFilters(filters) + require.ErrorContains(t, err, "invalid [inputType] filter") +} + +func Test_validateIdentityTransactionFilters_givenEmpty(t *testing.T) { + filters := map[string][]string{} + err := validateIdentityTransactionQueryFilters(filters) + require.NoError(t, err) + err = validateIdentityTransactionQueryFilters(nil) + require.NoError(t, err) +} + +// ranges + +func Test_createIdentityTransactionRanges_givenAllValid(t *testing.T) { + _, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterAmount: { + LowerBound: &api.Range_Gte{ + Gte: "1000", + }, + UpperBound: &api.Range_Lte{ + Lte: "10000", + }, + }, + TickFilterTickNumber: { + LowerBound: &api.Range_Gte{ + Gte: "1", + }, + UpperBound: &api.Range_Lt{ + Lt: "999999", + }, + }, + TickFilterInputType: { + LowerBound: &api.Range_Gt{ + Gt: "0", + }, + }, + TickFilterTimestamp: { + LowerBound: &api.Range_Gt{ + Gt: "1000000", + }, + }, + }) + require.NoError(t, err) +} + +func Test_createIdentityTransactionRanges(t *testing.T) { + _, err := CreateIdentityTransactionQueryRanges(nil) + require.NoError(t, err) + _, err = CreateIdentityTransactionQueryRanges(map[string]*api.Range{}) + require.NoError(t, err) +} + +func Test_createIdentityTransactionRanges_givenUnsupported_thenError(t *testing.T) { + _, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + "foo": {}, + }) + require.ErrorContains(t, err, "unsupported range: [foo]") +} + +func Test_createIdentityTransactionRanges_EmptyRange_thenError(t *testing.T) 
{ + _, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterAmount: {}, + }) + require.ErrorContains(t, err, "invalid range: no bounds") +} + +func Test_createIdentityTransactionRanges_givenInvalidRange_thenError(t *testing.T) { + _, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterAmount: { + LowerBound: &api.Range_Gte{ + Gte: "42", + }, + UpperBound: &api.Range_Lte{ + Lte: "42", + }, + }, + }) + require.ErrorContains(t, err, "invalid range: [42:42]") +} + +func Test_createIdentityTransactionRanges_givenInvalidRangeValue_thenError(t *testing.T) { + _, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterAmount: { + LowerBound: &api.Range_Gte{ + Gte: "foo", + }, + }, + }) + require.ErrorContains(t, err, "invalid amount range: invalid [gte] value") + _, err = CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterTickNumber: { + LowerBound: &api.Range_Gt{ + Gt: "foo", + }, + }, + }) + require.ErrorContains(t, err, "invalid tickNumber range: invalid [gt] value") + _, err = CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterTimestamp: { + UpperBound: &api.Range_Lte{ + Lte: "foo", + }, + }, + }) + require.ErrorContains(t, err, "invalid timestamp range: invalid [lte] value") + _, err = CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterInputType: { + UpperBound: &api.Range_Lt{ + Lt: "foo", + }, + }, + }) + require.ErrorContains(t, err, "invalid inputType range: invalid [lt] value") +} + +func Test_createIdentityTransactionRanges_tickNumberWithUpperAndLowerRange(t *testing.T) { + result, err := CreateIdentityTransactionQueryRanges(map[string]*api.Range{ + TickFilterTickNumber: { + LowerBound: &api.Range_Gte{ + Gte: "100", + }, + UpperBound: &api.Range_Lte{ + Lte: "200", + }, + }, + }) + require.NoError(t, err) + require.NotNil(t, result) + require.Contains(t, result, TickFilterTickNumber) + + tickRange := 
result[TickFilterTickNumber] + require.Len(t, tickRange, 2) + require.Equal(t, "gte", tickRange[0].Operation) + require.Equal(t, "100", tickRange[0].Value) + require.Equal(t, "lte", tickRange[1].Operation) + require.Equal(t, "200", tickRange[1].Value) +} diff --git a/v2/grpc/filters/tick_transactions.go b/v2/grpc/filters/tick_transactions.go new file mode 100644 index 0000000..b7108a7 --- /dev/null +++ b/v2/grpc/filters/tick_transactions.go @@ -0,0 +1,114 @@ +package filters + +import ( + "errors" + "fmt" + + api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/entities" +) + +const ( + TickFilterSource = "source" + TickFilterDestination = "destination" + TickFilterAmount = "amount" + TickFilterInputType = "inputType" + TickFilterTickNumber = "tickNumber" + TickFilterTimestamp = "timestamp" +) + +var allowedTickTermFilters = [4]string{TickFilterSource, TickFilterDestination, TickFilterAmount, TickFilterInputType} + +func CreateTickTransactionsFilters(filterMap map[string]string) (map[string][]string, error) { + res := make(map[string][]string) + for k, v := range filterMap { + f, err := CreateFilters(v, 1, 60) // 60 character identity + if err != nil { + return nil, fmt.Errorf("creating tick transactions filter [%s]: %w", k, err) + } + res[k] = f + } + + err := validateTickTransactionQueryFilters(res) + if err != nil { + return nil, fmt.Errorf("validating filter: %w", err) + } + + return res, nil +} + +func validateTickTransactionQueryFilters(filterMap map[string][]string) error { + if len(filterMap) == 0 { + return nil + } + + if len(filterMap) > len(allowedTickTermFilters) { + return errors.New("too many filters") + } + + for key, values := range filterMap { + switch key { + case TickFilterSource, TickFilterDestination: + err := ValidateIdentityFilterValues(values, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + case TickFilterAmount: + err := 
ValidateUnsignedNumericFilterValues(values, 64, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + case TickFilterInputType: + err := ValidateUnsignedNumericFilterValues(values, 32, 1) + if err != nil { + return fmt.Errorf("invalid [%s] filter: %w", key, err) + } + default: + return fmt.Errorf("unsupported filter: [%s]", key) + } + } + return nil +} + +const allowedNumberOfTickQueryRanges = 2 + +func ValidateTickTransactionQueryRanges(filterMap map[string][]string, ranges map[string]*api.Range) (map[string][]entities.Range, error) { + convertedRanges := map[string][]entities.Range{} + if len(ranges) == 0 { + return nil, nil + } + if len(ranges) > allowedNumberOfTickQueryRanges { + return nil, fmt.Errorf("too many ranges (%d)", len(ranges)) + } + + for k := range ranges { + if _, found := filterMap[k]; found { + return nil, fmt.Errorf("duplicate [%s] filter", k) + } + } + + for key, value := range ranges { + switch key { + case TickFilterAmount: + r, err := CreateNumericRange(value, 64) + if err != nil { + return nil, fmt.Errorf("invalid [%s] range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + case TickFilterInputType: + r, err := CreateNumericRange(value, 32) + if err != nil { + return nil, fmt.Errorf("invalid [%s] range: %w", key, err) + } + if len(r) > 0 { + convertedRanges[key] = r + } + default: + return nil, fmt.Errorf("unsupported range: [%s]", key) + } + } + + return convertedRanges, nil +} diff --git a/v2/grpc/validations_tick_test.go b/v2/grpc/filters/tick_transactions_test.go similarity index 53% rename from v2/grpc/validations_tick_test.go rename to v2/grpc/filters/tick_transactions_test.go index 3ef0c44..e026c04 100644 --- a/v2/grpc/validations_tick_test.go +++ b/v2/grpc/filters/tick_transactions_test.go @@ -1,4 +1,4 @@ -package grpc +package filters import ( "reflect" @@ -32,24 +32,20 @@ func Test_createTickFilters(t *testing.T) { wantErr: false, }, { - name: "source filter with spaces trimmed", 
+ name: "too many characters", filters: map[string]string{ "source": " " + validId + " ", }, - want: map[string][]string{ - "source": {validId}, - }, - wantErr: false, + want: nil, + wantErr: true, }, { - name: "comma-separated values NOT split", + name: "invalid identity", filters: map[string]string{ - "source": "value1,value2", + "source": " value1 and value2 ", }, - want: map[string][]string{ - "source": {"value1,value2"}, - }, - wantErr: false, + want: nil, + wantErr: true, }, { name: "multiple different filters", @@ -86,19 +82,19 @@ func Test_createTickFilters(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := createTickFilters(tt.filters) + got, err := CreateTickTransactionsFilters(tt.filters) if (err != nil) != tt.wantErr { - t.Errorf("createTickFilters() error = %v, wantErr %v", err, tt.wantErr) + t.Errorf("error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { - t.Errorf("createTickFilters() got = %v, want %v", got, tt.want) + t.Errorf("got = %v, want %v", got, tt.want) } }) } } -func TestValidations_validateTickTransactionQueryFilters_givenAllValid_thenNoError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenAllValid_thenNoError(t *testing.T) { filters := map[string][]string{ "source": {validId}, "destination": {validId}, @@ -109,74 +105,74 @@ func TestValidations_validateTickTransactionQueryFilters_givenAllValid_thenNoErr require.NoError(t, err) } -func TestValidations_validateTickTransactionQueryFilters_givenEmpty_thenNoError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenEmpty_thenNoError(t *testing.T) { err := validateTickTransactionQueryFilters(map[string][]string{}) require.NoError(t, err) err = validateTickTransactionQueryFilters(nil) require.NoError(t, err) } -func TestValidations_validateTickTransactionQueryFilters_givenTickNumber_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenTickNumber_thenError(t 
*testing.T) { filters := map[string][]string{"tickNumber": {"42"}} err := validateTickTransactionQueryFilters(filters) require.ErrorContains(t, err, "unsupported filter: [tickNumber]") } -func TestValidations_validateTickTransactionQueryFilters_givenSourceExclude_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenSourceExclude_thenError(t *testing.T) { filters := map[string][]string{"source-exclude": {validId}} err := validateTickTransactionQueryFilters(filters) require.ErrorContains(t, err, "unsupported filter: [source-exclude]") } -func TestValidations_validateTickTransactionQueryFilters_givenDestinationExclude_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenDestinationExclude_thenError(t *testing.T) { filters := map[string][]string{"destination-exclude": {validId}} err := validateTickTransactionQueryFilters(filters) require.ErrorContains(t, err, "unsupported filter: [destination-exclude]") } -func TestValidations_validateTickTransactionQueryFilters_givenTimestamp_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenTimestamp_thenError(t *testing.T) { filters := map[string][]string{"timestamp": {"1234567890"}} err := validateTickTransactionQueryFilters(filters) require.ErrorContains(t, err, "unsupported filter: [timestamp]") } -func TestValidations_validateTickTransactionQueryFilters_givenMultipleSourceValues_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenMultipleSourceValues_thenError(t *testing.T) { filters := map[string][]string{"source": {validId, validId}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "must have exactly one value") + require.ErrorContains(t, err, "invalid number of values") } -func TestValidations_validateTickTransactionQueryFilters_givenMultipleDestinationValues_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenMultipleDestinationValues_thenError(t *testing.T) { 
filters := map[string][]string{"destination": {validId, validId}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "must have exactly one value") + require.ErrorContains(t, err, "invalid number of values") } -func TestValidations_validateTickTransactionQueryFilters_givenInvalidSource_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenInvalidSource_thenError(t *testing.T) { filters := map[string][]string{"source": {invalidId}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid source filter") + require.ErrorContains(t, err, "invalid [source] filter") } -func TestValidations_validateTickTransactionQueryFilters_givenInvalidDestination_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenInvalidDestination_thenError(t *testing.T) { filters := map[string][]string{"destination": {invalidId}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid destination filter") + require.ErrorContains(t, err, "invalid [destination] filter") } -func TestValidations_validateTickTransactionQueryFilters_givenInvalidAmount_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenInvalidAmount_thenError(t *testing.T) { filters := map[string][]string{"amount": {"-1"}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid amount filter") + require.ErrorContains(t, err, "invalid [amount] filter") } -func TestValidations_validateTickTransactionQueryFilters_givenInvalidInputType_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenInvalidInputType_thenError(t *testing.T) { filters := map[string][]string{"inputType": {"foo"}} err := validateTickTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid inputType filter") + require.ErrorContains(t, err, "invalid [inputType] filter") } -func 
TestValidations_validateTickTransactionQueryFilters_givenTooManyFilters_thenError(t *testing.T) { +func Test_validateTickTransactionQueryFilters_givenTooManyFilters_thenError(t *testing.T) { filters := map[string][]string{ "source": {validId}, "destination": {validId}, @@ -188,9 +184,9 @@ func TestValidations_validateTickTransactionQueryFilters_givenTooManyFilters_the require.ErrorContains(t, err, "too many filters") } -func TestValidations_validateTickTransactionQueryRanges_givenAllValid(t *testing.T) { - ranges, err := validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: { +func Test_validateTickTransactionQueryRanges_givenAllValid(t *testing.T) { + ranges, err := ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ + TickFilterAmount: { LowerBound: &api.Range_Gte{ Gte: "1000", }, @@ -198,7 +194,7 @@ func TestValidations_validateTickTransactionQueryRanges_givenAllValid(t *testing Lte: "10000", }, }, - FilterInputType: { + TickFilterInputType: { LowerBound: &api.Range_Gt{ Gt: "0", }, @@ -208,18 +204,18 @@ func TestValidations_validateTickTransactionQueryRanges_givenAllValid(t *testing require.Len(t, ranges, 2) } -func TestValidations_validateTickTransactionQueryRanges_givenEmpty(t *testing.T) { - _, err := validateTickTransactionQueryRanges(map[string][]string{}, nil) +func Test_validateTickTransactionQueryRanges_givenEmpty(t *testing.T) { + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, nil) require.NoError(t, err) - _, err = validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{}) + _, err = ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{}) require.NoError(t, err) - _, err = validateTickTransactionQueryRanges(nil, nil) + _, err = ValidateTickTransactionQueryRanges(nil, nil) require.NoError(t, err) } -func TestValidations_validateTickTransactionQueryRanges_givenTickNumber_thenError(t *testing.T) { - _, err := 
validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterTickNumber: { +func Test_validateTickTransactionQueryRanges_givenTickNumber_thenError(t *testing.T) { + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ + TickFilterTickNumber: { LowerBound: &api.Range_Gte{ Gte: "1", }, @@ -228,9 +224,9 @@ func TestValidations_validateTickTransactionQueryRanges_givenTickNumber_thenErro require.ErrorContains(t, err, "unsupported range: [tickNumber]") } -func TestValidations_validateTickTransactionQueryRanges_givenTimestamp_thenError(t *testing.T) { - _, err := validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterTimestamp: { +func Test_validateTickTransactionQueryRanges_givenTimestamp_thenError(t *testing.T) { + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ + TickFilterTimestamp: { LowerBound: &api.Range_Gt{ Gt: "1000000", }, @@ -239,41 +235,41 @@ func TestValidations_validateTickTransactionQueryRanges_givenTimestamp_thenError require.ErrorContains(t, err, "unsupported range: [timestamp]") } -func TestValidations_validateTickTransactionQueryRanges_givenDuplicateFilter_thenError(t *testing.T) { - filters := map[string][]string{FilterAmount: {"100"}} - ranges := map[string]*api.Range{FilterAmount: nil} - _, err := validateTickTransactionQueryRanges(filters, ranges) - require.ErrorContains(t, err, "already declared as filter") +func Test_validateTickTransactionQueryRanges_givenDuplicateFilter_thenError(t *testing.T) { + filters := map[string][]string{TickFilterAmount: {"100"}} + ranges := map[string]*api.Range{TickFilterAmount: nil} + _, err := ValidateTickTransactionQueryRanges(filters, ranges) + require.ErrorContains(t, err, "duplicate [amount] filter") } -func TestValidations_validateTickTransactionQueryRanges_givenTooManyRanges_thenError(t *testing.T) { +func 
Test_validateTickTransactionQueryRanges_givenTooManyRanges_thenError(t *testing.T) { ranges := map[string]*api.Range{ "amount": {}, "inputType": {}, "extra": {}, } - _, err := validateTickTransactionQueryRanges(map[string][]string{}, ranges) + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, ranges) require.ErrorContains(t, err, "too many ranges") } -func TestValidations_validateTickTransactionQueryRanges_givenInvalidAmountRange_thenError(t *testing.T) { - _, err := validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: { +func Test_validateTickTransactionQueryRanges_givenInvalidAmountRange_thenError(t *testing.T) { + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ + TickFilterAmount: { LowerBound: &api.Range_Gte{ Gte: "foo", }, }, }) - require.ErrorContains(t, err, "invalid amount range") + require.ErrorContains(t, err, "invalid [amount] range") } -func TestValidations_validateTickTransactionQueryRanges_givenInvalidInputTypeRange_thenError(t *testing.T) { - _, err := validateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterInputType: { +func Test_validateTickTransactionQueryRanges_givenInvalidInputTypeRange_thenError(t *testing.T) { + _, err := ValidateTickTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ + TickFilterInputType: { LowerBound: &api.Range_Gt{ Gt: "foo", }, }, }) - require.ErrorContains(t, err, "invalid inputType range") + require.ErrorContains(t, err, "invalid [inputType] range") } diff --git a/v2/grpc/filters/validate.go b/v2/grpc/filters/validate.go new file mode 100644 index 0000000..ff6d453 --- /dev/null +++ b/v2/grpc/filters/validate.go @@ -0,0 +1,94 @@ +package filters + +import ( + "fmt" + + "github.com/qubic/archive-query-service/v2/entities" + "github.com/qubic/archive-query-service/v2/grpc/utils" +) + +func ValidateUnsignedNumericFilterValues(values []string, bitSize, maxNumberOfValues 
int) error { + err := checkQuantity(values, maxNumberOfValues) + if err != nil { + return err + } + for _, val := range values { + _, err := stringToNumericValue(val, bitSize) + if err != nil { + return fmt.Errorf("invalid numeric value: %w", err) + } + } + return nil +} + +func ValidateIdentityFilterValues(values []string, maxValues int) error { + return validateDigest(values, maxValues, false) +} + +func ValidateTransactionHashFilterValues(values []string, maxValues int) error { + return validateDigest(values, maxValues, true) +} + +func validateDigest(values []string, maxValues int, lowercase bool) error { + err := checkQuantity(values, maxValues) + if err != nil { + return err + } + for _, val := range values { + err := utils.ValidateDigest(val, lowercase) + if err != nil { + return fmt.Errorf("invalid transaction hash: %w", err) + } + } + return nil +} + +func VerifyNoConflictingFilters(queryFilters entities.Filters) error { + keys := make(map[string]bool, 10) + err := checkForConflictingKeys(keys, queryFilters.Include, true) + if err != nil { + return err + } + err = checkForConflictingKeys(keys, queryFilters.Ranges, true) + if err != nil { + return err + } + + // we do not check the exclude filters against the should filters + // allow excluding values that are returned by applying the should filters + err = checkForConflictingKeys(keys, queryFilters.Exclude, false) // do not modify + if err != nil { + return err + } + + for _, should := range queryFilters.Should { + err = checkForConflictingKeys(keys, should.Ranges, true) + if err != nil { + return err + } + err = checkForConflictingKeys(keys, should.Terms, true) + if err != nil { + return err + } + } + return nil +} + +func checkForConflictingKeys[F any](known map[string]bool, checked map[string]F, add bool) error { + for k := range checked { + if _, found := known[k]; found { + return fmt.Errorf("duplicate [%s] filter", k) + } + if add { + known[k] = true + } + } + return nil +} + +func 
checkQuantity(values []string, maxValues int) error { + if len(values) == 0 || len(values) > maxValues { + return fmt.Errorf("invalid number of values: [%d]", maxValues) + } + return nil +} diff --git a/v2/grpc/filters/validate_test.go b/v2/grpc/filters/validate_test.go new file mode 100644 index 0000000..61bcda9 --- /dev/null +++ b/v2/grpc/filters/validate_test.go @@ -0,0 +1,308 @@ +package filters + +import ( + "testing" + + "github.com/qubic/archive-query-service/v2/entities" + "github.com/stretchr/testify/require" +) + +func TestVerifyNoConflictingFilters(t *testing.T) { + tests := []struct { + name string + filters entities.Filters + wantErr bool + errMessage string + }{ + { + name: "no conflicts - empty filters", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{}, + }, + wantErr: false, + }, + { + name: "no conflicts - only include", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + "field2": {"value2"}, + }, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{}, + }, + wantErr: false, + }, + { + name: "no conflicts - only exclude", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{ + "field1": {"value1"}, + "field2": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{}, + }, + wantErr: false, + }, + { + name: "no conflicts - only ranges", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{}, + }, + wantErr: false, + }, + { + name: "no conflicts - different fields in include and exclude", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: 
map[string][]string{ + "field2": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{}, + }, + wantErr: false, + }, + { + name: "conflict - same field in include and exclude", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: map[string][]string{ + "field1": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{}, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "conflict - same field in include and ranges", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{}, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "conflict - same field in exclude and ranges", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{ + "field1": {"value1"}, + }, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{}, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "conflict - same field in include and should terms", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field1": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + }, + }, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "allowed - same field in exclude and should ranges", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{ + "field1": {"value1"}, + }, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{ + { + 
Terms: map[string][]string{}, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + }, + }, + }, + wantErr: false, + }, + { + name: "conflict - same field in ranges and should terms", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field1": {"value1"}, + }, + Ranges: map[string][]entities.Range{}, + }, + }, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "no conflicts - multiple should filters with different fields", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field2": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + }, + { + Terms: map[string][]string{ + "field3": {"value3"}, + }, + Ranges: map[string][]entities.Range{}, + }, + }, + }, + wantErr: false, + }, + { + name: "conflict - multiple should filters with conflicting field", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field1": {"value1"}, + }, + Ranges: map[string][]entities.Range{}, + }, + { + Terms: map[string][]string{ + "field1": {"value2"}, + }, + Ranges: map[string][]entities.Range{}, + }, + }, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "conflict - should terms and should ranges same field", + filters: entities.Filters{ + Include: map[string][]string{}, + Exclude: map[string][]string{}, + Ranges: map[string][]entities.Range{}, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field1": {"value1"}, + }, 
+ Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + }, + }, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + { + name: "complex - no conflicts with multiple filters", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + "field2": {"value2"}, + }, + Exclude: map[string][]string{ + "field3": {"value3"}, + }, + Ranges: map[string][]entities.Range{ + "field4": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{ + { + Terms: map[string][]string{ + "field5": {"value5"}, + }, + Ranges: map[string][]entities.Range{ + "field6": {{Operation: "lte", Value: "100"}}, + }, + }, + }, + }, + wantErr: false, + }, + { + name: "complex - conflict across all filter types", + filters: entities.Filters{ + Include: map[string][]string{ + "field1": {"value1"}, + }, + Exclude: map[string][]string{ + "field2": {"value2"}, + }, + Ranges: map[string][]entities.Range{ + "field1": {{Operation: "gte", Value: "10"}}, + }, + Should: []entities.ShouldFilter{}, + }, + wantErr: true, + errMessage: "duplicate [field1] filter", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := VerifyNoConflictingFilters(tt.filters) + if tt.wantErr { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errMessage) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/v2/grpc/middleware.go b/v2/grpc/middleware.go index 9138fb8..4cbef79 100644 --- a/v2/grpc/middleware.go +++ b/v2/grpc/middleware.go @@ -11,6 +11,7 @@ import ( "time" "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" + "github.com/qubic/archive-query-service/v2/grpc/utils" "github.com/redis/go-redis/v9" "golang.org/x/sync/singleflight" "google.golang.org/grpc" @@ -77,7 +78,7 @@ func (i *IdentitiesValidatorInterceptor) GetInterceptor(ctx context.Context, req } func (i *IdentitiesValidatorInterceptor) checkFormat(idStr string, isLowercase bool) error { - return 
validateDigest(idStr, isLowercase) + return utils.ValidateDigest(idStr, isLowercase) } func (twb *TickWithinBoundsInterceptor) checkTickWithinArchiverIntervals(ctx context.Context, tickNumber uint32) error { @@ -197,7 +198,7 @@ func (rci *RedisCacheInterceptor) GetInterceptor(ctx context.Context, req any, i if !ok { return handler(ctx, req) } - log.Printf("RedisCacheInterceptor: Request %s is cachable, proceed to check TTL and key\n", info.FullMethod) + log.Printf("RedisCacheInterceptor: Request %s is cacheable, proceed to check TTL and key\n", info.FullMethod) // if TTL from the map is zero or key does not exist, then caching is disabled ttl, exists := rci.ttlMap[info.FullMethod] @@ -242,7 +243,7 @@ func (rci *RedisCacheInterceptor) GetInterceptor(ctx context.Context, req any, i info.FullMethod, int(ttl.Seconds()), ) - // then proceed to cache the response and even if caching fails for multiple reasons like redis cluster unavailable + // then proceed to cache the response even if caching fails for multiple reasons like redis cluster unavailable // we still return the response sfErr = cacheResponse(ctx, rci.redisClient, key, response, ttl) if sfErr != nil { diff --git a/v2/grpc/mock/services.mock.go b/v2/grpc/mock/services.mock.go index effc67d..20ded9d 100644 --- a/v2/grpc/mock/services.mock.go +++ b/v2/grpc/mock/services.mock.go @@ -59,22 +59,22 @@ func (mr *MockTransactionsServiceMockRecorder) GetTransactionByHash(ctx, hash an } // GetTransactionsForIdentity mocks base method. 
-func (m *MockTransactionsService) GetTransactionsForIdentity(ctx context.Context, identity string, filters map[string][]string, ranges map[string][]*entities.Range, from, size uint32) (*entities.TransactionsResult, error) { +func (m *MockTransactionsService) GetTransactionsForIdentity(ctx context.Context, identity string, queryFilters entities.Filters, from, size uint32) (*entities.TransactionsResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransactionsForIdentity", ctx, identity, filters, ranges, from, size) + ret := m.ctrl.Call(m, "GetTransactionsForIdentity", ctx, identity, queryFilters, from, size) ret0, _ := ret[0].(*entities.TransactionsResult) ret1, _ := ret[1].(error) return ret0, ret1 } // GetTransactionsForIdentity indicates an expected call of GetTransactionsForIdentity. -func (mr *MockTransactionsServiceMockRecorder) GetTransactionsForIdentity(ctx, identity, filters, ranges, from, size any) *gomock.Call { +func (mr *MockTransactionsServiceMockRecorder) GetTransactionsForIdentity(ctx, identity, queryFilters, from, size any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionsForIdentity", reflect.TypeOf((*MockTransactionsService)(nil).GetTransactionsForIdentity), ctx, identity, filters, ranges, from, size) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransactionsForIdentity", reflect.TypeOf((*MockTransactionsService)(nil).GetTransactionsForIdentity), ctx, identity, queryFilters, from, size) } // GetTransactionsForTickNumber mocks base method. 
-func (m *MockTransactionsService) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) { +func (m *MockTransactionsService) GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetTransactionsForTickNumber", ctx, tickNumber, filters, ranges) ret0, _ := ret[0].([]*api.Transaction) @@ -245,16 +245,16 @@ func (m *MockEventsService) EXPECT() *MockEventsServiceMockRecorder { } // GetEvents mocks base method. -func (m *MockEventsService) GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) (*entities.EventsResult, error) { +func (m *MockEventsService) GetEvents(ctx context.Context, queryFilters entities.Filters, from, size uint32) (*entities.EventsResult, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetEvents", ctx, filters, from, size) + ret := m.ctrl.Call(m, "GetEvents", ctx, queryFilters, from, size) ret0, _ := ret[0].(*entities.EventsResult) ret1, _ := ret[1].(error) return ret0, ret1 } // GetEvents indicates an expected call of GetEvents. 
-func (mr *MockEventsServiceMockRecorder) GetEvents(ctx, filters, from, size any) *gomock.Call { +func (mr *MockEventsServiceMockRecorder) GetEvents(ctx, queryFilters, from, size any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEvents", reflect.TypeOf((*MockEventsService)(nil).GetEvents), ctx, filters, from, size) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEvents", reflect.TypeOf((*MockEventsService)(nil).GetEvents), ctx, queryFilters, from, size) } diff --git a/v2/grpc/service.go b/v2/grpc/service.go index 43fd269..7de10e9 100644 --- a/v2/grpc/service.go +++ b/v2/grpc/service.go @@ -8,6 +8,8 @@ import ( "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" "github.com/qubic/archive-query-service/v2/entities" + "github.com/qubic/archive-query-service/v2/grpc/filters" + "github.com/qubic/archive-query-service/v2/grpc/utils" statusPb "github.com/qubic/go-data-publisher/status-service/protobuf" "google.golang.org/grpc" "google.golang.org/grpc/codes" @@ -21,12 +23,11 @@ var _ api.ArchiveQueryServiceServer = &ArchiveQueryService{} type TransactionsService interface { GetTransactionByHash(ctx context.Context, hash string) (*api.Transaction, error) - GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) + GetTransactionsForTickNumber(ctx context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]entities.Range) ([]*api.Transaction, error) GetTransactionsForIdentity( ctx context.Context, identity string, - filters map[string][]string, - ranges map[string][]*entities.Range, + queryFilters entities.Filters, from, size uint32, ) (*entities.TransactionsResult, error) } @@ -45,7 +46,7 @@ type ComputorsListService interface { } type EventsService interface { - GetEvents(ctx context.Context, filters map[string][]string, from, size uint32) 
(*entities.EventsResult, error) + GetEvents(ctx context.Context, queryFilters entities.Filters, from, size uint32) (*entities.EventsResult, error) } type ArchiveQueryService struct { @@ -86,22 +87,17 @@ func (s *ArchiveQueryService) GetTransactionByHash(ctx context.Context, req *api } func (s *ArchiveQueryService) GetTransactionsForTick(ctx context.Context, req *api.GetTransactionsForTickRequest) (*api.GetTransactionsForTickResponse, error) { - filters, err := createTickFilters(req.GetFilters()) + filterMap, err := filters.CreateTickTransactionsFilters(req.GetFilters()) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "invalid filters: %v", err) } - err = validateTickTransactionQueryFilters(filters) - if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid filter: %v", err) - } - - ranges, err := validateTickTransactionQueryRanges(filters, req.GetRanges()) + ranges, err := filters.ValidateTickTransactionQueryRanges(filterMap, req.GetRanges()) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "invalid range: %v", err) } - txs, err := s.txService.GetTransactionsForTickNumber(ctx, req.TickNumber, filters, ranges) + txs, err := s.txService.GetTransactionsForTickNumber(ctx, req.TickNumber, filterMap, ranges) if err != nil { return nil, createInternalError(fmt.Sprintf("failed to get transactions for tick [%d]", req.GetTickNumber()), err) } @@ -120,26 +116,45 @@ func (s *ArchiveQueryService) GetTickData(ctx context.Context, req *api.GetTickD } func (s *ArchiveQueryService) GetTransactionsForIdentity(ctx context.Context, request *api.GetTransactionsForIdentityRequest) (*api.GetTransactionsForIdentityResponse, error) { - err := validateIdentity(request.GetIdentity()) + err := utils.ValidateIdentity(request.GetIdentity()) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "invalid identity: %v", err) } - filters, err := createFilters(request.GetFilters()) + // we need to stay backwards compatible here. 
exclude filters are postfixed with -exclude. + includes, excludes := filters.SplitDeprecatedIncludeExcludeFilters(request.GetFilters()) + if len(excludes) > 0 && len(request.GetExclude()) > 0 { // old and new api mismatch + return nil, status.Errorf(codes.InvalidArgument, "cannot use both -exclude filters postfix and exclude filters together") + } else if len(excludes) == 0 { // use new exclude filters + excludes = request.GetExclude() + } + + includeFilters, err := filters.CreateIdentityTransactionFilters(includes) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid filters: %v", err) + return nil, status.Errorf(codes.InvalidArgument, "creating include filters: %v", err) } - err = validateIdentityTransactionQueryFilters(filters) + excludeFilters, err := filters.CreateIdentityTransactionFilters(excludes) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid filter: %v", err) + return nil, status.Errorf(codes.InvalidArgument, "creating exclude filters: %v", err) } - ranges, err := validateIdentityTransactionQueryRanges(filters, request.GetRanges()) + err = filters.ValidateExcludeFilterKeys(excludeFilters) + if err != nil { + return nil, status.Error(codes.InvalidArgument, err.Error()) + } + + filterRanges, err := filters.CreateIdentityTransactionQueryRanges(request.GetRanges()) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "invalid range: %v", err) } + queryFilters := entities.Filters{Include: includeFilters, Exclude: excludeFilters, Ranges: filterRanges} + err = filters.VerifyNoConflictingFilters(queryFilters) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "conflicting filters: %v", err) + } + from, size, err := s.pageSizeLimits.ValidatePagination(request.GetPagination()) if err != nil { // debug log temporarily. we need to find out how many users use strange pagination parameters. 
@@ -147,7 +162,7 @@ func (s *ArchiveQueryService) GetTransactionsForIdentity(ctx context.Context, re return nil, status.Errorf(codes.InvalidArgument, "invalid pagination: %v", err) } - result, err := s.txService.GetTransactionsForIdentity(ctx, request.Identity, filters, ranges, from, size) + result, err := s.txService.GetTransactionsForIdentity(ctx, request.Identity, queryFilters, from, size) if err != nil { return nil, createInternalError(fmt.Sprintf("failed to get transactions for identity [%s]", request.GetIdentity()), err) } @@ -209,14 +224,30 @@ func (s *ArchiveQueryService) GetComputorsListsForEpoch(ctx context.Context, req } func (s *ArchiveQueryService) GetEvents(ctx context.Context, req *api.GetEventsRequest) (*api.GetEventsResponse, error) { - filters, err := createEventsFilters(req.GetFilters()) + includeFilters, err := filters.CreateEventFilters(req.GetFilters(), filters.AllowedEventIncludeFilters) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "creating include filters: %v", err) + } + + excludeFilters, err := filters.CreateEventFilters(req.GetExclude(), filters.AllowedEventExcludeFilters) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "creating exclude filters: %v", err) + } + + queryRanges, err := filters.CreateEventRanges(req.GetRanges(), filters.AllowedEventRanges) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "creating range filters: %v", err) + } + + shouldFilters, err := filters.CreateShouldFilters(req.GetShould(), filters.AllowedEventShouldFilters, filters.AllowedEventShouldRanges) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "creating filters: %v", err) + return nil, status.Errorf(codes.InvalidArgument, "creating should filters: %v", err) } - err = validateEventsFilters(filters) + queryFilters := entities.Filters{Include: includeFilters, Exclude: excludeFilters, Ranges: queryRanges, Should: shouldFilters} + err = 
filters.VerifyNoConflictingFilters(queryFilters) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "validating filters: %v", err) + return nil, status.Errorf(codes.InvalidArgument, "conflicting filters: %v", err) } from, size, err := s.pageSizeLimits.ValidatePagination(req.GetPagination()) @@ -224,7 +255,7 @@ func (s *ArchiveQueryService) GetEvents(ctx context.Context, req *api.GetEventsR return nil, status.Errorf(codes.InvalidArgument, "invalid pagination: %v", err) } - result, err := s.evService.GetEvents(ctx, filters, from, size) + result, err := s.evService.GetEvents(ctx, queryFilters, from, size) if err != nil { return nil, createInternalError("failed to get events", err) } diff --git a/v2/grpc/service_events_test.go b/v2/grpc/service_events_test.go index 7ff80e3..98480bf 100644 --- a/v2/grpc/service_events_test.go +++ b/v2/grpc/service_events_test.go @@ -15,12 +15,19 @@ import ( ) type EventsServiceStub struct { - events []*api.Event - hits *entities.Hits - err error + events []*api.Event + hits *entities.Hits + err error + ReceivedFilters entities.Filters } -func (s *EventsServiceStub) GetEvents(_ context.Context, _ map[string][]string, _, _ uint32) (*entities.EventsResult, error) { +const validId1 = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" +const validId2 = "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" +const validTransactionHash1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaafxib" +const validTransactionHash2 = "baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaarmid" + +func (s *EventsServiceStub) GetEvents(_ context.Context, queryFilters entities.Filters, _, _ uint32) (*entities.EventsResult, error) { + s.ReceivedFilters = queryFilters if s.err != nil { return nil, s.err } @@ -30,10 +37,10 @@ func (s *EventsServiceStub) GetEvents(_ context.Context, _ map[string][]string, func TestArchiveQueryService_GetEvents_Success(t *testing.T) { evService := &EventsServiceStub{ events: 
[]*api.Event{ - {TickNumber: 100, TransactionHash: test.ToStringPointer("hash1"), LogType: 0, EventData: &api.Event_QuTransfer{ + {TickNumber: 100, TransactionHash: test.ToStringPointer(validTransactionHash1), LogType: 0, EventData: &api.Event_QuTransfer{ QuTransfer: &api.QuTransferData{Source: "SRC", Destination: "DST", Amount: 1000}, }}, - {TickNumber: 101, TransactionHash: test.ToStringPointer("hash2"), LogType: 1, EventData: &api.Event_AssetIssuance{ + {TickNumber: 101, TransactionHash: test.ToStringPointer(validTransactionHash2), LogType: 1, EventData: &api.Event_AssetIssuance{ AssetIssuance: &api.AssetIssuanceData{AssetIssuer: "ISSUER", AssetName: "QX"}, }}, }, @@ -42,7 +49,7 @@ func TestArchiveQueryService_GetEvents_Success(t *testing.T) { service := NewArchiveQueryService(nil, nil, nil, nil, evService, NewPageSizeLimits(1000, 10)) response, err := service.GetEvents(context.Background(), &api.GetEventsRequest{ - Filters: map[string]string{"transactionHash": "hash1"}, + Filters: map[string]string{"transactionHash": validTransactionHash1}, Pagination: &api.Pagination{Offset: 0, Size: 10}, }) require.NoError(t, err) @@ -121,3 +128,83 @@ func TestArchiveQueryService_GetEvents_EmptyResult(t *testing.T) { assert.Empty(t, response.Events) assert.Equal(t, uint32(0), response.Hits.Total) } + +func TestArchiveQueryService_GetEvents_GivenInvalidExcludeFilter_ThenError(t *testing.T) { + service := NewArchiveQueryService(nil, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + _, err := service.GetEvents(context.Background(), &api.GetEventsRequest{ + Exclude: map[string]string{"tickNumber": "123"}, + }) + require.ErrorContains(t, err, "creating exclude filter") + require.ErrorContains(t, err, "unsupported filter") +} + +func TestArchiveQueryService_GetEvents_WithRanges(t *testing.T) { + evService := &EventsServiceStub{ + events: []*api.Event{{}}, // single dummy event + hits: &entities.Hits{Total: 1, Relation: "eq"}, + } + service := NewArchiveQueryService(nil, nil, nil, 
nil, evService, NewPageSizeLimits(1000, 10)) + + response, err := service.GetEvents(context.Background(), &api.GetEventsRequest{ + Ranges: map[string]*api.Range{ + "amount": { + LowerBound: &api.Range_Gte{Gte: "1000"}, + UpperBound: &api.Range_Lte{Lte: "2000"}, + }, + }, + Pagination: &api.Pagination{Offset: 0, Size: 10}, + }) + require.NoError(t, err) + require.NotNil(t, response) + assert.Len(t, response.Events, 1) + assert.Equal(t, uint32(1), response.Hits.Total) + + ranges := evService.ReceivedFilters.Ranges + assert.Len(t, ranges, 1) + assert.Len(t, ranges["amount"], 2) + assert.Contains(t, ranges["amount"], entities.Range{Operation: "gte", Value: "1000"}) + assert.Contains(t, ranges["amount"], entities.Range{Operation: "lte", Value: "2000"}) +} + +func TestArchiveQueryService_GetEvents_WithShouldFilters(t *testing.T) { + evService := &EventsServiceStub{ + events: []*api.Event{{}}, // single dummy event + hits: &entities.Hits{Total: 1, Relation: "eq"}, + } + service := NewArchiveQueryService(nil, nil, nil, nil, evService, NewPageSizeLimits(1000, 10)) + + response, err := service.GetEvents(context.Background(), &api.GetEventsRequest{ + Should: []*api.ShouldFilter{ + {Terms: map[string]string{"destination": validId1 + " , " + validId2, "source": validId1}}, + {Ranges: map[string]*api.Range{ + "amount": {LowerBound: &api.Range_Gte{Gte: "1000000"}, UpperBound: &api.Range_Lte{Lte: "2000000"}}, + "numberOfShares": {LowerBound: &api.Range_Gt{Gt: "0"}, UpperBound: &api.Range_Lt{Lt: "100"}}, + }}, + }, + }) + require.NoError(t, err) + require.NotNil(t, response) + assert.Len(t, response.Events, 1) + assert.Equal(t, uint32(1), response.Hits.Total) + + should := evService.ReceivedFilters.Should + assert.Len(t, should, 2) + assert.Len(t, should[0].Terms, 2) + assert.Len(t, should[1].Ranges, 2) + assert.Contains(t, should[0].Terms["destination"], validId1, validId2) + assert.Contains(t, should[0].Terms["source"], validId1) + assert.Contains(t, should[1].Ranges["amount"], 
entities.Range{Operation: "gte", Value: "1000000"}, entities.Range{Operation: "lte", Value: "2000000"}) + assert.Contains(t, should[1].Ranges["numberOfShares"], entities.Range{Operation: "gt", Value: "0"}, entities.Range{Operation: "lt", Value: "10"}) +} + +func TestArchiveQueryService_GetEvents_WithShouldFilterWithOnlyOneValue_ThenError(t *testing.T) { + service := NewArchiveQueryService(nil, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + + _, err := service.GetEvents(context.Background(), &api.GetEventsRequest{ + Should: []*api.ShouldFilter{ + {Terms: map[string]string{"destination": validId1 + " , " + validId2}}, + }, + }) + require.ErrorContains(t, err, "at least two") + +} diff --git a/v2/grpc/service_transactions_test.go b/v2/grpc/service_transactions_test.go index 2288d84..0f953a2 100644 --- a/v2/grpc/service_transactions_test.go +++ b/v2/grpc/service_transactions_test.go @@ -16,7 +16,7 @@ type TransactionServiceStub struct { ctx context.Context identity string filters map[string][]string - ranges map[string][]*entities.Range + newFilters entities.Filters transactions []*api.Transaction hits *entities.Hits } @@ -30,7 +30,7 @@ func (t *TransactionServiceStub) GetTransactionByHash(_ context.Context, hash st return nil, nil } -func (t *TransactionServiceStub) GetTransactionsForTickNumber(_ context.Context, tickNumber uint32, filters map[string][]string, ranges map[string][]*entities.Range) ([]*api.Transaction, error) { +func (t *TransactionServiceStub) GetTransactionsForTickNumber(_ context.Context, tickNumber uint32, _ map[string][]string, _ map[string][]entities.Range) ([]*api.Transaction, error) { transactions := make([]*api.Transaction, 0) for _, tx := range t.transactions { if tx.TickNumber == tickNumber { @@ -43,14 +43,12 @@ func (t *TransactionServiceStub) GetTransactionsForTickNumber(_ context.Context, func (t *TransactionServiceStub) GetTransactionsForIdentity( ctx context.Context, identity string, - filters map[string][]string, - ranges 
map[string][]*entities.Range, + filters entities.Filters, _, _ uint32, ) (*entities.TransactionsResult, error) { t.ctx = ctx t.identity = identity - t.filters = filters - t.ranges = ranges + t.newFilters = filters // this is not 100% correct as it doesn't use the exclude filters return &entities.TransactionsResult{LastProcessedTick: 42, Hits: t.hits, Transactions: t.transactions}, nil } @@ -140,9 +138,85 @@ func TestArchiveQueryService_GetTransactionsForIdentity(t *testing.T) { // verify tx service call assert.Equal(t, ctx, txService.ctx) assert.Equal(t, "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", txService.identity) - assert.Equal(t, map[string][]string{"inputType": {"1"}}, txService.filters) - assert.Equal(t, map[string][]*entities.Range{"amount": { - &entities.Range{Operation: "gte", Value: "1"}, - &entities.Range{Operation: "lt", Value: "10000"}, - }}, txService.ranges) + assert.Equal(t, map[string][]string{"inputType": {"1"}}, txService.newFilters.Include) + assert.Equal(t, map[string][]entities.Range{"amount": { + entities.Range{Operation: "gte", Value: "1"}, + entities.Range{Operation: "lt", Value: "10000"}, + }}, txService.newFilters.Ranges) +} + +func TestArchiveQueryService_GetTransactionsForIdentity_WithDeprecatedExcludeFilter(t *testing.T) { + txService := &TransactionServiceStub{ + transactions: []*api.Transaction{{Hash: "tx-hash-1"}}, + hits: &entities.Hits{Total: 1, Relation: "eq"}, + } + + service := NewArchiveQueryService(txService, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + + ctx := context.Background() + request := &api.GetTransactionsForIdentityRequest{ + Identity: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", + Filters: map[string]string{"destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", "source-exclude": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + } + + response, err := service.GetTransactionsForIdentity(ctx, request) + require.NoError(t, err) + 
require.NotNil(t, response) + + require.Equal(t, txService.newFilters.Exclude, map[string][]string{"source": {"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}}) + require.Equal(t, txService.newFilters.Include, map[string][]string{"destination": {"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB"}}) +} + +func TestArchiveQueryService_GetTransactionsForIdentity_WithExcludeMap(t *testing.T) { + txService := &TransactionServiceStub{ + transactions: []*api.Transaction{{Hash: "tx-hash-1"}}, + hits: &entities.Hits{Total: 1, Relation: "eq"}, + } + + service := NewArchiveQueryService(txService, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + + ctx := context.Background() + request := &api.GetTransactionsForIdentityRequest{ + Identity: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", + Filters: map[string]string{"destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB"}, + Exclude: map[string]string{"source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + } + + response, err := service.GetTransactionsForIdentity(ctx, request) + require.NoError(t, err) + require.NotNil(t, response) + + require.Equal(t, txService.newFilters.Exclude, map[string][]string{"source": {"BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}}) + require.Equal(t, txService.newFilters.Include, map[string][]string{"destination": {"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB"}}) +} + +func TestArchiveQueryService_GetTransactionsForIdentity_DeprecatedApiMismatchErrors(t *testing.T) { + txService := &TransactionServiceStub{ + transactions: []*api.Transaction{{Hash: "tx-hash-1"}}, + hits: &entities.Hits{Total: 1, Relation: "eq"}, + } + + service := NewArchiveQueryService(txService, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + + ctx := context.Background() + request := &api.GetTransactionsForIdentityRequest{ + Identity: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", + Filters: 
map[string]string{"destination": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", "source-exclude": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + Exclude: map[string]string{"source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID"}, + } + + _, err := service.GetTransactionsForIdentity(ctx, request) + require.ErrorContains(t, err, "cannot use both") +} + +func TestArchiveQueryService_GetTransactionsForIdentity_GivenInvalidExcludeFilter_ThenErrors(t *testing.T) { + service := NewArchiveQueryService(nil, nil, nil, nil, nil, NewPageSizeLimits(1000, 10)) + + request := &api.GetTransactionsForIdentityRequest{ + Identity: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", + Exclude: map[string]string{"amount": "123"}, + } + + _, err := service.GetTransactionsForIdentity(nil, request) + require.ErrorContains(t, err, "unsupported exclude filter") } diff --git a/v2/grpc/utils/identity.go b/v2/grpc/utils/identity.go new file mode 100644 index 0000000..c94ae82 --- /dev/null +++ b/v2/grpc/utils/identity.go @@ -0,0 +1,31 @@ +package utils + +import ( + "fmt" + + "github.com/qubic/go-node-connector/types" +) + +func ValidateIdentity(identity string) error { + return ValidateDigest(identity, false) +} + +func ValidateDigest(digest string, isLowerCase bool) error { + id := types.Identity(digest) + pubKey, err := id.ToPubKey(isLowerCase) + if err != nil { + return fmt.Errorf("converting id to pubkey: %w", err) + } + + var pubkeyFixed [32]byte + copy(pubkeyFixed[:], pubKey[:32]) + id, err = id.FromPubKey(pubkeyFixed, isLowerCase) + if err != nil { + return fmt.Errorf("converting pubkey back to id: %w", err) + } + + if id.String() != digest { + return fmt.Errorf("invalid %s [%s]", If(isLowerCase, "hash", "identity"), digest) + } + return nil +} diff --git a/v2/grpc/utils/if.go b/v2/grpc/utils/if.go new file mode 100644 index 0000000..63ba58f --- /dev/null +++ b/v2/grpc/utils/if.go @@ -0,0 +1,9 @@ +package utils + +func 
If[T any](condition bool, trueValue, falseValue T) T { + if condition { + return trueValue + } + + return falseValue +} diff --git a/v2/grpc/validations.go b/v2/grpc/validations.go deleted file mode 100644 index ab52082..0000000 --- a/v2/grpc/validations.go +++ /dev/null @@ -1,364 +0,0 @@ -package grpc - -import ( - "errors" - "fmt" - "strconv" - "strings" - - api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" - "github.com/qubic/archive-query-service/v2/entities" - "github.com/qubic/go-node-connector/types" -) - -const ( - FilterSource = "source" - FilterSourceExclude = "source-exclude" - FilterDestination = "destination" - FilterDestinationExclude = "destination-exclude" - FilterAmount = "amount" - FilterInputType = "inputType" - FilterTickNumber = "tickNumber" - FilterTimestamp = "timestamp" -) - -var allowedTermFilters = [7]string{FilterSource, FilterSourceExclude, FilterDestination, FilterDestinationExclude, FilterAmount, FilterInputType, FilterTickNumber} -var allowedTickTermFilters = [4]string{FilterSource, FilterDestination, FilterAmount, FilterInputType} - -const maxValuesPerIdentityFilter = 5 -const maxValueLengthPerIdentityFilter = 5*60 + 5 + 4 // 5 IDs + comma + optional spaces - -func createFilters(filters map[string]string) (map[string][]string, error) { - res := make(map[string][]string) - for k, v := range filters { - shouldSplit := k == FilterSource || k == FilterDestination || k == FilterSourceExclude || k == FilterDestinationExclude - - if shouldSplit { - if len(v) > maxValueLengthPerIdentityFilter { - return nil, fmt.Errorf("filter %s exceeds maximum length", k) - } - - // count commas first to avoid input with many strings before splitting - valCount := strings.Count(v, ",") - if valCount >= maxValuesPerIdentityFilter { - return nil, fmt.Errorf("filter %s has more than 5 values", k) - } - - split := strings.Split(v, ",") - values := make([]string, 0, len(split)) - seen := make(map[string]bool) - for _, s := range split 
{ - trimmed := strings.TrimSpace(s) - if trimmed == "" { - return nil, fmt.Errorf("filter %s contains an empty value", k) - } - if seen[trimmed] { - return nil, fmt.Errorf("filter %s contains duplicate value: %s", k, trimmed) - } - seen[trimmed] = true - values = append(values, trimmed) - } - - res[k] = values - } else { - trimmed := strings.TrimSpace(v) - if trimmed == "" { - return nil, fmt.Errorf("filter %s contains an empty value", k) - } - res[k] = []string{trimmed} - continue - } - } - return res, nil -} - -func validateIdentityTransactionQueryFilters(filters map[string][]string) error { - if len(filters) == 0 { - return nil - } - - if len(filters) > len(allowedTermFilters) { - return errors.New("too many filters") - } - - // it's not allowed to use a match-filter and a corresponding exclude-filter at the same time - if (filters[FilterSource] != nil && filters[FilterSourceExclude] != nil) || - (filters[FilterDestination] != nil && filters[FilterDestinationExclude] != nil) { - return fmt.Errorf("conflicting filters") - } - - for key, values := range filters { - switch key { - case FilterSource, FilterDestination, FilterSourceExclude, FilterDestinationExclude: - for _, val := range values { - err := validateIdentity(val) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - } - case FilterAmount: - if len(values) != 1 { - return fmt.Errorf("filter %s contains an invalid number of values: %d", key, len(values)) - } - for _, val := range values { - _, err := strconv.ParseUint(val, 10, 64) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - } - case FilterTickNumber, FilterInputType: - if len(values) != 1 { - return fmt.Errorf("filter %s contains an invalid number of values: %d", key, len(values)) - } - for _, val := range values { - _, err := strconv.ParseUint(val, 10, 32) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - } - default: - return fmt.Errorf("unsupported filter: [%s]", 
key) - } - } - return nil -} - -var allowedRanges = [4]string{FilterAmount, FilterTickNumber, FilterInputType, FilterTimestamp} -var allowedTickRanges = [2]string{FilterAmount, FilterInputType} - -func validateIdentityTransactionQueryRanges(filters map[string][]string, ranges map[string]*api.Range) (map[string][]*entities.Range, error) { - convertedRanges := map[string][]*entities.Range{} - if len(ranges) == 0 { - return nil, nil - } - if len(ranges) > len(allowedRanges) { - return nil, errors.New("too many ranges") - } - - if filters != nil { - // check for ranges that are already declared as filter - for key := range ranges { - _, ok := filters[key] - if ok { - return nil, fmt.Errorf("range [%s] is already declared as filter", key) - } - } - } - - for key, value := range ranges { - switch key { - case FilterAmount, FilterTimestamp: - r, err := validateRange(value, 64) - if err != nil { - return nil, fmt.Errorf("invalid %s range: %w", key, err) - } - if len(r) > 0 { - convertedRanges[key] = r - } - case FilterTickNumber, FilterInputType: - r, err := validateRange(value, 32) - if err != nil { - return nil, fmt.Errorf("invalid %s range: %w", key, err) - } - if len(r) > 0 { - convertedRanges[key] = r - } - default: - return nil, fmt.Errorf("unsupported range: [%s]", key) - } - } - - return convertedRanges, nil -} - -func validateIdentity(identity string) error { - return validateDigest(identity, false) -} - -// createTickFilters creates filters from a map without splitting on commas (single values only). -// This is used for GetTransactionsForTick which only accepts single values per filter. 
-func createTickFilters(filters map[string]string) (map[string][]string, error) { - res := make(map[string][]string) - for k, v := range filters { - trimmed := strings.TrimSpace(v) - if trimmed == "" { - return nil, fmt.Errorf("filter %s contains an empty value", k) - } - res[k] = []string{trimmed} - } - return res, nil -} - -func validateTickTransactionQueryFilters(filters map[string][]string) error { - if len(filters) == 0 { - return nil - } - - if len(filters) > len(allowedTickTermFilters) { - return errors.New("too many filters") - } - - for key, values := range filters { - switch key { - case FilterSource, FilterDestination: - if len(values) != 1 { - return fmt.Errorf("filter %s must have exactly one value", key) - } - err := validateIdentity(values[0]) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - case FilterAmount: - if len(values) != 1 { - return fmt.Errorf("filter %s contains an invalid number of values: %d", key, len(values)) - } - _, err := strconv.ParseUint(values[0], 10, 64) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - case FilterInputType: - if len(values) != 1 { - return fmt.Errorf("filter %s contains an invalid number of values: %d", key, len(values)) - } - _, err := strconv.ParseUint(values[0], 10, 32) - if err != nil { - return fmt.Errorf("invalid %s filter: %w", key, err) - } - default: - return fmt.Errorf("unsupported filter: [%s]", key) - } - } - return nil -} - -func validateTickTransactionQueryRanges(filters map[string][]string, ranges map[string]*api.Range) (map[string][]*entities.Range, error) { - convertedRanges := map[string][]*entities.Range{} - if len(ranges) == 0 { - return nil, nil - } - if len(ranges) > len(allowedTickRanges) { - return nil, errors.New("too many ranges") - } - - if filters != nil { - for key := range ranges { - _, ok := filters[key] - if ok { - return nil, fmt.Errorf("range [%s] is already declared as filter", key) - } - } - } - - for key, value := 
range ranges { - switch key { - case FilterAmount: - r, err := validateRange(value, 64) - if err != nil { - return nil, fmt.Errorf("invalid %s range: %w", key, err) - } - if len(r) > 0 { - convertedRanges[key] = r - } - case FilterInputType: - r, err := validateRange(value, 32) - if err != nil { - return nil, fmt.Errorf("invalid %s range: %w", key, err) - } - if len(r) > 0 { - convertedRanges[key] = r - } - default: - return nil, fmt.Errorf("unsupported range: [%s]", key) - } - } - - return convertedRanges, nil -} - -func validateDigest(digest string, isLowerCase bool) error { - id := types.Identity(digest) - pubKey, err := id.ToPubKey(isLowerCase) - if err != nil { - return fmt.Errorf("converting id to pubkey: %w", err) - } - - var pubkeyFixed [32]byte - copy(pubkeyFixed[:], pubKey[:32]) - id, err = id.FromPubKey(pubkeyFixed, isLowerCase) - if err != nil { - return fmt.Errorf("converting pubkey back to id: %w", err) - } - - if id.String() != digest { - return fmt.Errorf("invalid %s [%s]", If(isLowerCase, "hash", "identity"), digest) - } - return nil -} - -func validateRange(r *api.Range, bitSize int) ([]*entities.Range, error) { - var ranges []*entities.Range - var err error - var lowerBound uint64 - var upperBound uint64 - switch r.GetLowerBound().(type) { - case *api.Range_Gt: - lowerBound, err = strconv.ParseUint(r.GetGt(), 10, bitSize) - lowerBound++ - if err != nil { - return nil, fmt.Errorf("invalid [gt] value: %w", err) - } - ranges = append(ranges, &entities.Range{ - Operation: "gt", - Value: r.GetGt(), - }) - case *api.Range_Gte: - lowerBound, err = strconv.ParseUint(r.GetGte(), 10, bitSize) - if err != nil { - return nil, fmt.Errorf("invalid [gte] value: %w", err) - } - ranges = append(ranges, &entities.Range{ - Operation: "gte", - Value: r.GetGte(), - }) - } - - switch r.GetUpperBound().(type) { - case *api.Range_Lt: - upperBound, err = strconv.ParseUint(r.GetLt(), 10, bitSize) - upperBound-- - if err != nil { - return nil, fmt.Errorf("invalid [lt] 
value: %w", err) - } - ranges = append(ranges, &entities.Range{ - Operation: "lt", - Value: r.GetLt(), - }) - case *api.Range_Lte: - upperBound, err = strconv.ParseUint(r.GetLte(), 10, bitSize) - if err != nil { - return nil, fmt.Errorf("invalid [lte] value: %w", err) - } - ranges = append(ranges, &entities.Range{ - Operation: "lte", - Value: r.GetLte(), - }) - } - - if len(ranges) == 0 { - return nil, errors.New("invalid range: no bounds") - } - - if lowerBound > 0 && upperBound > 0 && lowerBound >= upperBound { - return nil, fmt.Errorf("invalid range: [%d:%d]", lowerBound, upperBound) - } - - return ranges, nil -} - -func If[T any](condition bool, trueValue, falseValue T) T { - if condition { - return trueValue - } - - return falseValue -} diff --git a/v2/grpc/validations_create_filters_test.go b/v2/grpc/validations_create_filters_test.go deleted file mode 100644 index 7b0e071..0000000 --- a/v2/grpc/validations_create_filters_test.go +++ /dev/null @@ -1,154 +0,0 @@ -package grpc - -import ( - "reflect" - "testing" -) - -func Test_createFilters(t *testing.T) { - tests := []struct { - name string - filters map[string]string - want map[string][]string - wantErr bool - }{ - { - name: "single value for source", - filters: map[string]string{ - "source": "value1", - }, - want: map[string][]string{ - "source": {"value1"}, - }, - wantErr: false, - }, - { - name: "multiple values for source", - filters: map[string]string{ - "source": "value1,value2,value3", - }, - want: map[string][]string{ - "source": {"value1", "value2", "value3"}, - }, - wantErr: false, - }, - { - name: "multiple values with spaces for destination", - filters: map[string]string{ - "destination": " value1 , value2, value3 ", - }, - want: map[string][]string{ - "destination": {"value1", "value2", "value3"}, - }, - wantErr: false, - }, - { - name: "single value for other keys", - filters: map[string]string{ - "other": " 123 ", - }, - want: map[string][]string{ - "other": {"123"}, - }, - wantErr: false, - 
}, - { - name: "no splitting for other keys", - filters: map[string]string{ - "other": "value1,value2,value3", - }, - want: map[string][]string{ - "other": {"value1,value2,value3"}, - }, - wantErr: false, - }, - { - name: "no splitting for other keys with spaces", - filters: map[string]string{ - "another": " value1, value2 3 ", - }, - want: map[string][]string{ - "another": {"value1, value2 3"}, - }, - wantErr: false, - }, - { - name: "empty values for source-exclude", - filters: map[string]string{ - "source-exclude": "value1,,value2", - }, - want: nil, - wantErr: true, - }, - { - name: "too many values for destination-exclude", - filters: map[string]string{ - "destination-exclude": "1,2,3,4,5,6", - }, - want: nil, - wantErr: true, - }, - { - name: "duplicate values for source", - filters: map[string]string{ - "source": "value1,value2,value1", - }, - want: nil, - wantErr: true, - }, - { - name: "duplicate values with spaces for source", - filters: map[string]string{ - "source": "value1,value2, value1 ", - }, - want: nil, - wantErr: true, - }, - { - name: "empty source filter", - filters: map[string]string{ - "source": "", - }, - want: nil, - wantErr: true, - }, - { - name: "empty value for other key", - filters: map[string]string{ - "other": " ", - }, - want: nil, - wantErr: true, - }, - { - name: "max value length for source", - filters: map[string]string{ - "source": "12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789", // 309 chars - }, - want: map[string][]string{ - "source": 
{"12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789"}, - }, - wantErr: false, - }, - { - name: "too long value for source", - filters: map[string]string{ - "source": "1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890", // 310 chars - }, - want: nil, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := createFilters(tt.filters) - if (err != nil) != tt.wantErr { - t.Errorf("createFilters() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("createFilters() got = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/v2/grpc/validations_events.go b/v2/grpc/validations_events.go deleted file mode 100644 index 5deac59..0000000 --- a/v2/grpc/validations_events.go +++ /dev/null @@ -1,64 +0,0 @@ -package grpc - -import ( - "fmt" - "strconv" - "strings" -) - -var allowedEventFilters = [3]string{"transactionHash", "tickNumber", "logType"} - -func createEventsFilters(filters map[string]string) (map[string][]string, error) { - res := make(map[string][]string) - for k, v := range filters { - trimmed := strings.TrimSpace(v) - if trimmed == "" { - return nil, fmt.Errorf("filter %s contains an empty value", k) - } - res[k] = []string{trimmed} - } - return res, nil -} - -func validateEventsFilters(filters map[string][]string) error { - if len(filters) == 0 { - return nil - } - - if len(filters) > len(allowedEventFilters) { - return fmt.Errorf("too many filters") - } - - 
for key, values := range filters { - switch key { - case "transactionHash": - if len(values) != 1 { - return fmt.Errorf("filter [%s] must have exactly one value", key) - } - case "tickNumber": - if len(values) != 1 { - return fmt.Errorf("filter [%s] must have exactly one value", key) - } - _, err := strconv.ParseUint(values[0], 10, 32) - if err != nil { - return fmt.Errorf("invalid [%s] filter: must be a valid number but was [%s]", key, values[0]) - } - case "logType": - if len(values) != 1 { - return fmt.Errorf("filter [%s] must have exactly one value", key) - } - - uVal, err := strconv.ParseUint(values[0], 10, 32) - if err != nil { - return fmt.Errorf("invalid [%s] filter: must be a valid number but was [%s]", key, values[0]) - } - if uVal > 14 && uVal != 255 { - return fmt.Errorf("invalid [%s] filter: must be 0-13 or 255 but was [%d]", key, uVal) - } - - default: - return fmt.Errorf("unsupported filter: [%s]", key) - } - } - return nil -} diff --git a/v2/grpc/validations_events_test.go b/v2/grpc/validations_events_test.go deleted file mode 100644 index 0ec80e6..0000000 --- a/v2/grpc/validations_events_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package grpc - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestCreateEventsFilters_ValidFilters(t *testing.T) { - filters := map[string]string{ - "transactionHash": "abc123", - "tickNumber": "42", - "logType": "1", - } - result, err := createEventsFilters(filters) - require.NoError(t, err) - assert.Equal(t, map[string][]string{ - "transactionHash": {"abc123"}, - "tickNumber": {"42"}, - "logType": {"1"}, - }, result) -} - -func TestCreateEventsFilters_EmptyValue(t *testing.T) { - filters := map[string]string{ - "transactionHash": "", - } - _, err := createEventsFilters(filters) - require.Error(t, err) - assert.Contains(t, err.Error(), "empty value") -} - -func TestValidateEventsFilters_ValidTransactionHash(t *testing.T) { - filters := 
map[string][]string{"transactionHash": {"abc123"}} - err := validateEventsFilters(filters) - require.NoError(t, err) -} - -func TestValidateEventsFilters_ValidTickNumber(t *testing.T) { - filters := map[string][]string{"tickNumber": {"42"}} - err := validateEventsFilters(filters) - require.NoError(t, err) -} - -func TestValidateEventsFilters_ValidEventType(t *testing.T) { - for _, et := range []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "255"} { - t.Run("eventType_"+et, func(t *testing.T) { - filters := map[string][]string{"logType": {et}} - err := validateEventsFilters(filters) - require.NoError(t, err) - }) - } -} - -func TestValidateEventsFilters_InvalidEventType(t *testing.T) { - for _, et := range []string{"-1", "15", "256", "abc"} { - t.Run("eventType_"+et, func(t *testing.T) { - filters := map[string][]string{"logType": {et}} - err := validateEventsFilters(filters) - require.Error(t, err) - assert.Contains(t, err.Error(), "invalid [logType] filter") - }) - } -} - -func TestValidateEventsFilters_InvalidTickNumber(t *testing.T) { - filters := map[string][]string{"tickNumber": {"not-a-number"}} - err := validateEventsFilters(filters) - require.Error(t, err) - assert.Contains(t, err.Error(), "invalid [tickNumber] filter") -} - -func TestValidateEventsFilters_UnsupportedFilter(t *testing.T) { - filters := map[string][]string{"unknownFilter": {"value"}} - err := validateEventsFilters(filters) - require.Error(t, err) - assert.Contains(t, err.Error(), "unsupported filter") -} - -func TestValidateEventsFilters_TooManyFilters(t *testing.T) { - filters := map[string][]string{ - "transactionHash": {"abc"}, - "tickNumber": {"42"}, - "logType": {"1"}, - "extra": {"value"}, - } - err := validateEventsFilters(filters) - require.Error(t, err) - assert.Contains(t, err.Error(), "too many filters") -} - -func TestValidateEventsFilters_CombinedFilters(t *testing.T) { - filters := map[string][]string{ - "transactionHash": {"abc123"}, - 
"tickNumber": {"42"}, - "logType": {"0"}, - } - err := validateEventsFilters(filters) - require.NoError(t, err) -} - -func TestValidateEventsFilters_EmptyFilters(t *testing.T) { - err := validateEventsFilters(nil) - require.NoError(t, err) - - err = validateEventsFilters(map[string][]string{}) - require.NoError(t, err) -} diff --git a/v2/grpc/validations_filters_test.go b/v2/grpc/validations_filters_test.go deleted file mode 100644 index d8c312a..0000000 --- a/v2/grpc/validations_filters_test.go +++ /dev/null @@ -1,143 +0,0 @@ -package grpc - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -const validId = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB" -const invalidId = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - -func TestValidations_validateFilters_givenAllValid_thenNoError(t *testing.T) { - filters := map[string][]string{ - "source": {validId}, - "destination-exclude": {validId}, - "amount": {"100"}, - "inputType": {"42"}, - "tickNumber": {"43"}, - } - err := validateIdentityTransactionQueryFilters(filters) - require.NoError(t, err) -} - -func TestValidations_validateFilters_givenMultipleValidValues_thenNoError(t *testing.T) { - filters := map[string][]string{ - "source-exclude": {validId, validId}, - "destination": {validId, validId}, - "amount": {"100"}, - "inputType": {"42"}, - } - err := validateIdentityTransactionQueryFilters(filters) - require.NoError(t, err) -} - -func TestValidations_validateFilters_givenConflictingSourceFilter_thenError(t *testing.T) { - filters := map[string][]string{ - "source": {validId, validId}, - "source-exclude": {validId}, - } - err := validateIdentityTransactionQueryFilters(filters) - require.Error(t, err) -} - -func TestValidations_validateFilters_givenConflictingDestinationFilter_thenError(t *testing.T) { - filters := map[string][]string{ - "destination": {validId}, - "destination-exclude": {validId, validId}, - } - err := 
validateIdentityTransactionQueryFilters(filters) - require.Error(t, err) -} - -func TestValidations_validateFilters_givenUnsupported_thenError(t *testing.T) { - filters := map[string][]string{"timestamp": {"42"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "unsupported filter: [timestamp]") -} - -func TestValidations_validateFilters_givenInvalidAmount(t *testing.T) { - filters := map[string][]string{"amount": {"-1"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid amount filter") -} - -func TestValidations_validateFilters_givenMultipleAmounts(t *testing.T) { - filters := map[string][]string{"amount": {"1", "4"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenEmptyAmounts(t *testing.T) { - filters := map[string][]string{"amount": {}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenMultipleInputTypes(t *testing.T) { - filters := map[string][]string{"inputType": {"1", "2"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenEmptyInputType(t *testing.T) { - filters := map[string][]string{"inputType": {}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenMultipleTickNumbers(t *testing.T) { - filters := map[string][]string{"tickNumber": {"1", "2"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenEmptyTickNumber(t *testing.T) { - filters := map[string][]string{"tickNumber": {}} - err := 
validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid number of values") -} - -func TestValidations_validateFilters_givenInvalidSource(t *testing.T) { - filters := map[string][]string{"source": {invalidId}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid source filter") -} - -func TestValidations_validateFilters_givenInvalidDestination(t *testing.T) { - filters := map[string][]string{"destination": {invalidId}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid destination filter") -} - -func TestValidations_validateFilters_givenInvalidSourceExclude(t *testing.T) { - filters := map[string][]string{"source-exclude": {invalidId}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid source-exclude filter") -} - -func TestValidations_validateFilters_givenInvalidDestinationExclude(t *testing.T) { - filters := map[string][]string{"destination-exclude": {invalidId}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid destination-exclude filter") -} - -func TestValidations_validateFilters_givenMultipleIdValuesIncludingInvalid_thenError(t *testing.T) { - filters := map[string][]string{"source": {validId, invalidId}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid source filter") -} - -func TestValidations_validateFilters_givenInvalidInputType(t *testing.T) { - filters := map[string][]string{"inputType": {"foo"}} - err := validateIdentityTransactionQueryFilters(filters) - require.ErrorContains(t, err, "invalid inputType filter") -} - -func TestValidations_validateFilters_givenEmpty(t *testing.T) { - filters := map[string][]string{} - err := validateIdentityTransactionQueryFilters(filters) - require.NoError(t, err) - err = validateIdentityTransactionQueryFilters(nil) - require.NoError(t, err) -} diff 
--git a/v2/grpc/validations_ranges_test.go b/v2/grpc/validations_ranges_test.go deleted file mode 100644 index 3d72f72..0000000 --- a/v2/grpc/validations_ranges_test.go +++ /dev/null @@ -1,149 +0,0 @@ -package grpc - -import ( - "testing" - - api "github.com/qubic/archive-query-service/v2/api/archive-query-service/v2" - "github.com/stretchr/testify/require" -) - -func TestValidations_validateRanges_givenAllValid(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: { - LowerBound: &api.Range_Gte{ - Gte: "1000", - }, - UpperBound: &api.Range_Lte{ - Lte: "10000", - }, - }, - FilterTickNumber: { - LowerBound: &api.Range_Gte{ - Gte: "1", - }, - UpperBound: &api.Range_Lt{ - Lt: "999999", - }, - }, - FilterInputType: { - LowerBound: &api.Range_Gt{ - Gt: "0", - }, - }, - FilterTimestamp: { - LowerBound: &api.Range_Gt{ - Gt: "1000000", - }, - }, - }) - require.NoError(t, err) -} - -func TestValidations_validateRanges(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, nil) - require.NoError(t, err) -} - -func TestValidations_validateRanges_givenUnsupported_thenError(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - "foo": {}, - }) - require.ErrorContains(t, err, "unsupported range: [foo]") -} - -func TestValidations_validateRanges_EmptyRange_thenError(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: {}, - }) - require.ErrorContains(t, err, "invalid range: no bounds") -} - -func TestValidations_validateRanges_givenInvalidRange_thenError(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: { - LowerBound: &api.Range_Gte{ - Gte: "42", - }, - UpperBound: &api.Range_Lte{ - Lte: "42", - }, - }, - }) - require.ErrorContains(t, err, "invalid range: 
[42:42]") -} - -func TestValidations_validateRanges_givenEmpty(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, nil) - require.NoError(t, err) - _, err = validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{}) - require.NoError(t, err) - _, err = validateIdentityTransactionQueryRanges(nil, map[string]*api.Range{}) - require.NoError(t, err) - _, err = validateIdentityTransactionQueryRanges(nil, nil) - require.NoError(t, err) -} - -func TestValidations_validateRanges_givenInvalidRangeValue_thenError(t *testing.T) { - _, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterAmount: { - LowerBound: &api.Range_Gte{ - Gte: "foo", - }, - }, - }) - require.ErrorContains(t, err, "invalid amount range: invalid [gte] value") - _, err = validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterTickNumber: { - LowerBound: &api.Range_Gt{ - Gt: "foo", - }, - }, - }) - require.ErrorContains(t, err, "invalid tickNumber range: invalid [gt] value") - _, err = validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterTimestamp: { - UpperBound: &api.Range_Lte{ - Lte: "foo", - }, - }, - }) - require.ErrorContains(t, err, "invalid timestamp range: invalid [lte] value") - _, err = validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterInputType: { - UpperBound: &api.Range_Lt{ - Lt: "foo", - }, - }, - }) - require.ErrorContains(t, err, "invalid inputType range: invalid [lt] value") -} - -func TestValidations_validateRanges_givenDuplicateFilter_thenError(t *testing.T) { - filters := map[string][]string{FilterAmount: {"foo"}} - ranges := map[string]*api.Range{FilterAmount: nil} - _, err := validateIdentityTransactionQueryRanges(filters, ranges) - require.ErrorContains(t, err, "already declared as filter") -} - -func 
TestValidations_validateRanges_tickNumberWithUpperAndLowerRange(t *testing.T) { - result, err := validateIdentityTransactionQueryRanges(map[string][]string{}, map[string]*api.Range{ - FilterTickNumber: { - LowerBound: &api.Range_Gte{ - Gte: "100", - }, - UpperBound: &api.Range_Lte{ - Lte: "200", - }, - }, - }) - require.NoError(t, err) - require.NotNil(t, result) - require.Contains(t, result, FilterTickNumber) - - tickRange := result[FilterTickNumber] - require.Len(t, tickRange, 2) - require.Equal(t, "gte", tickRange[0].Operation) - require.Equal(t, "100", tickRange[0].Value) - require.Equal(t, "lte", tickRange[1].Operation) - require.Equal(t, "200", tickRange[1].Value) -} diff --git a/v2/http-client.env.json b/v2/http-client.env.json index 3a4bdb5..95535b5 100644 --- a/v2/http-client.env.json +++ b/v2/http-client.env.json @@ -2,6 +2,9 @@ "local": { "host": "localhost:8080" }, + "dev":{ + "host": "https://dev02.qubic.org/query/v1" + }, "production":{ "host": "https://rpc.qubic.org/query/v1" } diff --git a/v2/test/grpc_server_test.go b/v2/test/grpc_server_test.go index fba19d1..f56c3e8 100644 --- a/v2/test/grpc_server_test.go +++ b/v2/test/grpc_server_test.go @@ -26,6 +26,9 @@ func TestServer(t *testing.T) { suite.Run(t, new(ServerTestSuite)) } +const validTransactionHash = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaafxib" +const validTransactionHash2 = "baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaarmid" + type ServerTestSuite struct { suite.Suite client api.ArchiveQueryServiceClient @@ -149,17 +152,17 @@ func (s *ServerTestSuite) TestGetEvents_Success() { Return(&entities.EventsResult{ Hits: &entities.Hits{Total: 2, Relation: "eq"}, Events: []*api.Event{ - {TickNumber: 100, TransactionHash: ToStringPointer("hash1"), LogType: 0, EventData: &api.Event_QuTransfer{ + {TickNumber: 100, TransactionHash: ToStringPointer(validTransactionHash), LogType: 0, EventData: &api.Event_QuTransfer{ QuTransfer: &api.QuTransferData{Source: "SRC", Destination: 
"DST", Amount: 1000}, }}, - {TickNumber: 101, TransactionHash: ToStringPointer("hash2"), LogType: 1, EventData: &api.Event_AssetIssuance{ + {TickNumber: 101, TransactionHash: ToStringPointer(validTransactionHash2), LogType: 1, EventData: &api.Event_AssetIssuance{ AssetIssuance: &api.AssetIssuanceData{AssetIssuer: "ISSUER", AssetName: "QX"}, }}, }, }, nil) resp, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"transactionHash": "hash1"}, + Filters: map[string]string{"transactionHash": validTransactionHash}, Pagination: &api.Pagination{Offset: 0, Size: 10}, }) require.NoError(t, err) @@ -176,76 +179,6 @@ func (s *ServerTestSuite) TestGetEvents_Success() { assert.Equal(t, "ISSUER", resp.Events[1].GetAssetIssuance().GetAssetIssuer()) } -func (s *ServerTestSuite) TestGetEvents_FilterByTransactionHash() { - t := s.T() - expectedFilters := map[string][]string{"transactionHash": {"txhash1"}} - s.mockEvService.EXPECT().GetEvents(gomock.Any(), expectedFilters, uint32(0), uint32(10)). - Return(&entities.EventsResult{ - Hits: &entities.Hits{Total: 1, Relation: "eq"}, - Events: []*api.Event{{TickNumber: 100, TransactionHash: ToStringPointer("txhash1"), LogType: 0}}, - }, nil) - - resp, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"transactionHash": "txhash1"}, - }) - require.NoError(t, err) - assert.Len(t, resp.Events, 1) - assert.Equal(t, "txhash1", *resp.Events[0].TransactionHash) -} - -func (s *ServerTestSuite) TestGetEvents_FilterByTickNumber() { - t := s.T() - expectedFilters := map[string][]string{"tickNumber": {"15001"}} - s.mockEvService.EXPECT().GetEvents(gomock.Any(), expectedFilters, uint32(0), uint32(10)). 
- Return(&entities.EventsResult{ - Hits: &entities.Hits{Total: 1, Relation: "eq"}, - Events: []*api.Event{{TickNumber: 15001, LogType: 0}}, - }, nil) - - resp, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"tickNumber": "15001"}, - }) - require.NoError(t, err) - assert.Len(t, resp.Events, 1) - assert.Equal(t, uint32(15001), resp.Events[0].TickNumber) -} - -func (s *ServerTestSuite) TestGetEvents_FilterByEventType() { - t := s.T() - expectedFilters := map[string][]string{"logType": {"8"}} - s.mockEvService.EXPECT().GetEvents(gomock.Any(), expectedFilters, uint32(0), uint32(10)). - Return(&entities.EventsResult{ - Hits: &entities.Hits{Total: 1, Relation: "eq"}, - Events: []*api.Event{{TickNumber: 200, LogType: 8}}, - }, nil) - - resp, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"logType": "8"}, - }) - require.NoError(t, err) - assert.Len(t, resp.Events, 1) - assert.Equal(t, uint32(8), resp.Events[0].LogType) -} - -func (s *ServerTestSuite) TestGetEvents_CombinedFilters() { - t := s.T() - expectedFilters := map[string][]string{ - "transactionHash": {"txhash1"}, - "logType": {"0"}, - } - s.mockEvService.EXPECT().GetEvents(gomock.Any(), expectedFilters, uint32(0), uint32(10)). 
- Return(&entities.EventsResult{ - Hits: &entities.Hits{Total: 1, Relation: "eq"}, - Events: []*api.Event{{TickNumber: 100, TransactionHash: ToStringPointer("txhash1"), LogType: 0}}, - }, nil) - - resp, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"transactionHash": "txhash1", "logType": "0"}, - }) - require.NoError(t, err) - assert.Len(t, resp.Events, 1) -} - func (s *ServerTestSuite) TestGetEvents_InvalidFilter() { t := s.T() _, err := s.client.GetEvents(t.Context(), &api.GetEventsRequest{ @@ -270,7 +203,7 @@ func (s *ServerTestSuite) TestGetEvents_InvalidEventType() { st, ok := status.FromError(err) require.True(t, ok) assert.Equal(t, codes.InvalidArgument, st.Code()) - assert.Contains(t, st.Message(), "validating filters") + assert.Contains(t, st.Message(), "creating include filters") assert.Contains(t, st.Message(), "invalid [logType] filter") } diff --git a/v2/test/integration/events_test.go b/v2/test/integration/events_test.go index 02dead4..ef4f754 100644 --- a/v2/test/integration/events_test.go +++ b/v2/test/integration/events_test.go @@ -17,6 +17,8 @@ import ( // gRPC Tests // ===================== +const validTransactionHash = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaafxib" + func (s *EventsE2ESuite) TestGRPC_GetEvents_NoFilters() { t := s.T() resp, err := s.grpcClient.GetEvents(t.Context(), &api.GetEventsRequest{}) @@ -102,7 +104,7 @@ func (s *EventsE2ESuite) TestGRPC_GetEvents_Pagination() { func (s *EventsE2ESuite) TestGRPC_GetEvents_EmptyResult() { t := s.T() resp, err := s.grpcClient.GetEvents(t.Context(), &api.GetEventsRequest{ - Filters: map[string]string{"transactionHash": "nonexistent"}, + Filters: map[string]string{"transactionHash": validTransactionHash}, }) require.NoError(t, err) require.Empty(t, resp.Events) @@ -338,6 +340,18 @@ func (s *EventsE2ESuite) TestHTTP_GetEvents_Type0_QuTransfer_FullData() { } } +func (s *EventsE2ESuite) 
TestHTTP_GetEvents_Type0_QuTransfer_ExcludeDestinationFilter() { + t := s.T() + result, statusCode := s.postGetEvents(`{ "filters":{"tickNumber":"15000"}, "exclude": {"destination": "AFZPUAIYVPNUYGJRQVLUKOPPVLHAZQTGLYAAUUNBXFTVTAMSBKQBLEIEPCVJ"} }`) + require.Equal(t, http.StatusOK, statusCode) + + events := result["events"].([]interface{}) + require.Len(t, events, 1) // only return one + ev := events[0].(map[string]interface{}) + + require.Equal(t, "1", ev["logId"]) +} + func (s *EventsE2ESuite) TestHTTP_GetEvents_Type1_AssetIssuance_FullData() { t := s.T() result, statusCode := s.postGetEvents(`{"filters":{"tickNumber":"15001"}}`) @@ -411,7 +425,7 @@ func (s *EventsE2ESuite) TestHTTP_GetEvents_Type13_ContractReserveDeduction_Full func (s *EventsE2ESuite) TestHTTP_GetEvents_EmptyResult() { t := s.T() - result, statusCode := s.postGetEvents(`{"filters":{"transactionHash":"nonexistent"}}`) + result, statusCode := s.postGetEvents(`{"filters":{"transactionHash":"` + validTransactionHash + `"}}`) require.Equal(t, http.StatusOK, statusCode) events := result["events"].([]interface{}) diff --git a/v2/v2-query-requests.http b/v2/v2-query-requests.http index e661fbb..504c571 100644 --- a/v2/v2-query-requests.http +++ b/v2/v2-query-requests.http @@ -1,3 +1,8 @@ +### Get processed tick intervals + +GET {{host}}/getProcessedTickIntervals +Accept: application/json + ### Get transactions for one identity POST {{host}}/getTransactionsForIdentity @@ -6,8 +11,10 @@ Accept: application/json { "identity": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFXIB", "filters": { - "inputType": "0", - "source-exclude": "AFZPUAIYVPNUYGJRQVLUKOPPVLHAZQTGLYAAUUNBXFTVTAMSBKQBLEIEPCVJ, HOPUKLUXCKGDDEBSYRRPJTSFYEKBTDYCJSVZJLEIZEBJCDLAVOCMKMTBTDLB" + "inputType": "0" + }, + "exclude": { + "source": "AFZPUAIYVPNUYGJRQVLUKOPPVLHAZQTGLYAAUUNBXFTVTAMSBKQBLEIEPCVJ, HOPUKLUXCKGDDEBSYRRPJTSFYEKBTDYCJSVZJLEIZEBJCDLAVOCMKMTBTDLB" }, "ranges": { "amount": { @@ -40,4 +47,22 @@ Accept: 
application/json "hash": "wnkujtaavborugjhgudeimborfpgyudnuekalkbsjeaxiejvvdgahdrerajo" } +### Get event logs + +POST {{host}}/getEvents +Accept: application/json + +{ + "filters": { "logType": "0" }, + "exclude": { "source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" }, + "should": [{ + "terms": { + "source": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID", + "destination": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARMID" + } + }], + "ranges": { "timestamp":{"gte":"1772975950000"}, "amount": { "gt": "1" } }, + "pagination": { "size": 10 } +} + ### \ No newline at end of file