diff --git a/.env.template b/.env.template index bc1207d..f10ce74 100644 --- a/.env.template +++ b/.env.template @@ -13,10 +13,6 @@ HYPERFLUID_BASE_URL=https://bifrost.localhost:8443 HYPERFLUID_SKIP_TLS_VERIFY=true #local only HYPERFLUID_ORG_ID= #example: 5e840f3e-e306-4ca5-a90e-05f7fe5f37fc -HYPERFLUID_POSTGRES_HOST=bifrost.localhost -HYPERFLUID_POSTGRES_PORT=8443 -HYPERFLUID_POSTGRES_USER=demo - HYPERFLUID_REQUEST_TIMEOUT=30 HYPERFLUID_MAX_RETRIES=3 diff --git a/.gitignore b/.gitignore index fb4099e..667124e 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ go.work.sum # env file .env +code_snippets/ diff --git a/sdk/builders/common.go b/sdk/builders/common.go new file mode 100644 index 0000000..32ff357 --- /dev/null +++ b/sdk/builders/common.go @@ -0,0 +1,14 @@ +package builders + +import ( + "context" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// ClientInterface defines the methods that builders need from the SDK client. +// This avoids circular imports between sdk and builders packages. +type ClientInterface interface { + Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) + GetConfig() utils.Configuration +} diff --git a/sdk/query_builder.go b/sdk/builders/fluent/query.go similarity index 83% rename from sdk/query_builder.go rename to sdk/builders/fluent/query.go index 974ab8c..88b01e2 100644 --- a/sdk/query_builder.go +++ b/sdk/builders/fluent/query.go @@ -1,4 +1,4 @@ -package sdk +package fluent import ( "context" @@ -7,59 +7,50 @@ import ( "strconv" "strings" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" ) // QueryBuilder provides a fluent interface for building and executing queries. type QueryBuilder struct { - client *Client + client builders.ClientInterface errors []error // Hierarchy - orgID string + dataDockID string catalogName string schemaName string tableName string // Query parameters selectCols []string - filters []Filter - orderBy []OrderClause + filters []builders.Filter + orderBy []builders.OrderClause limitVal int offsetVal int rawParams url.Values } -// Filter represents a WHERE clause condition. -type Filter struct { - Column string - Operator string // =, >, <, >=, <=, !=, LIKE, IN - Value interface{} -} - -// OrderClause represents an ORDER BY clause. -type OrderClause struct { - Column string - Direction string // ASC or DESC -} - -// newQueryBuilder creates a new QueryBuilder instance. -func newQueryBuilder(client *Client) *QueryBuilder { +// NewQueryBuilder creates a new QueryBuilder instance. +func NewQueryBuilder(client interface { + Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) + GetConfig() utils.Configuration +}) *QueryBuilder { return &QueryBuilder{ - client: client, - errors: []error{}, - orgID: client.config.OrgID, // Use default from config - rawParams: url.Values{}, + client: client, + errors: []error{}, + dataDockID: client.GetConfig().DataDockID, // Use default from config + rawParams: url.Values{}, } } -// Org sets the organization ID for the query. -// If not called, uses the OrgID from client configuration. -func (qb *QueryBuilder) Org(orgID string) *QueryBuilder { - if orgID == "" { - qb.errors = append(qb.errors, fmt.Errorf("organization ID cannot be empty")) +// DataDock sets the data dock ID for the query. +// If not called, uses the DataDockID from client configuration. 
+func (qb *QueryBuilder) DataDock(dataDockID string) *QueryBuilder { + if dataDockID == "" { + qb.errors = append(qb.errors, fmt.Errorf("data dock ID cannot be empty")) } - qb.orgID = orgID + qb.dataDockID = dataDockID return qb } @@ -109,7 +100,7 @@ func (qb *QueryBuilder) Where(column, operator string, value interface{}) *Query qb.errors = append(qb.errors, fmt.Errorf("invalid operator '%s'", operator)) } - qb.filters = append(qb.filters, Filter{ + qb.filters = append(qb.filters, builders.Filter{ Column: column, Operator: operator, Value: value, @@ -130,7 +121,7 @@ func (qb *QueryBuilder) OrderBy(column, direction string) *QueryBuilder { return qb } - qb.orderBy = append(qb.orderBy, OrderClause{ + qb.orderBy = append(qb.orderBy, builders.OrderClause{ Column: column, Direction: direction, }) @@ -180,8 +171,8 @@ func (qb *QueryBuilder) validate() error { } // Check required fields - if qb.orgID == "" { - return fmt.Errorf("%w: organization ID is required", utils.ErrInvalidRequest) + if qb.dataDockID == "" { + return fmt.Errorf("%w: data dock ID is required", utils.ErrInvalidRequest) } if qb.catalogName == "" { return fmt.Errorf("%w: catalog name is required", utils.ErrInvalidRequest) @@ -201,8 +192,8 @@ func (qb *QueryBuilder) buildEndpoint() string { // Use url.PathEscape for each segment to prevent injection return fmt.Sprintf( "%s/%s/openapi/%s/%s/%s", - strings.TrimRight(qb.client.config.BaseURL, "/"), - url.PathEscape(qb.orgID), + strings.TrimRight(qb.client.GetConfig().BaseURL, "/"), + url.PathEscape(qb.dataDockID), url.PathEscape(qb.catalogName), url.PathEscape(qb.schemaName), url.PathEscape(qb.tableName), @@ -226,7 +217,7 @@ func (qb *QueryBuilder) buildParams() url.Values { } // Add WHERE filters - // Note: This assumes the API supports filter parameters + // TODO - Note: This assumes the API supports filter parameters // Adjust based on actual API capabilities for _, filter := range qb.filters { paramName := fmt.Sprintf("%s[%s]", filter.Column, filter.Operator) @@ -277,7 +268,7 @@ func (qb *QueryBuilder) Get(ctx context.Context) (*utils.Response, error) { } // Execute the request - return qb.client.do(ctx, "GET", endpoint, nil) + return qb.client.Do(ctx, "GET", endpoint, nil) } // Count returns the count of rows matching the query. @@ -299,7 +290,7 @@ func (qb *QueryBuilder) Count(ctx context.Context) (int, error) { endpoint += "?" + params.Encode() // Execute the request - resp, err := qb.client.do(ctx, "GET", endpoint, nil) + resp, err := qb.client.Do(ctx, "GET", endpoint, nil) if err != nil { return 0, err } @@ -323,7 +314,7 @@ func (qb *QueryBuilder) Post(ctx context.Context, data interface{}) (*utils.Resp endpoint := qb.buildEndpoint() body := utils.JsonMarshal(data) - return qb.client.do(ctx, "POST", endpoint, body) + return qb.client.Do(ctx, "POST", endpoint, body) } // Put executes a PUT request to update data. @@ -340,7 +331,7 @@ func (qb *QueryBuilder) Put(ctx context.Context, data interface{}) (*utils.Respo } body := utils.JsonMarshal(data) - return qb.client.do(ctx, "PUT", endpoint, body) + return qb.client.Do(ctx, "PUT", endpoint, body) } // Delete executes a DELETE request. @@ -356,5 +347,5 @@ func (qb *QueryBuilder) Delete(ctx context.Context) (*utils.Response, error) { endpoint += "?" 
+ params.Encode() } - return qb.client.do(ctx, "DELETE", endpoint, nil) + return qb.client.Do(ctx, "DELETE", endpoint, nil) } diff --git a/sdk/query_builder_test.go b/sdk/builders/fluent/query_test.go similarity index 68% rename from sdk/query_builder_test.go rename to sdk/builders/fluent/query_test.go index a84e649..87c3921 100644 --- a/sdk/query_builder_test.go +++ b/sdk/builders/fluent/query_test.go @@ -1,7 +1,8 @@ -package sdk +package fluent import ( "context" + "encoding/json" "io" "net/http" "strings" @@ -11,7 +12,7 @@ import ( ) func TestQueryBuilder_BasicChaining(t *testing.T) { - client := newTestClient(utils.Configuration{ + qb := newTestQueryBuilder(utils.Configuration{ Token: "test-token", OrgID: "default-org", }, func(req *http.Request) (*http.Response, error) { @@ -33,7 +34,7 @@ func TestQueryBuilder_BasicChaining(t *testing.T) { }, nil }) - resp, err := client. + resp, err := qb. Catalog("test-catalog"). Schema("test-schema"). Table("test-table"). @@ -49,9 +50,9 @@ func TestQueryBuilder_BasicChaining(t *testing.T) { } func TestQueryBuilder_WithSelect(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() selectParam := query.Get("select") @@ -65,7 +66,7 @@ func TestQueryBuilder_WithSelect(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). @@ -78,9 +79,9 @@ func TestQueryBuilder_WithSelect(t *testing.T) { } func TestQueryBuilder_WithMultipleSelects(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() selectParam := query.Get("select") @@ -94,7 +95,7 @@ func TestQueryBuilder_WithMultipleSelects(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). @@ -108,9 +109,9 @@ func TestQueryBuilder_WithMultipleSelects(t *testing.T) { } func TestQueryBuilder_WithFilters(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { // Check for filter parameters if !strings.Contains(req.URL.RawQuery, "age") { @@ -126,7 +127,7 @@ func TestQueryBuilder_WithFilters(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). @@ -140,9 +141,9 @@ func TestQueryBuilder_WithFilters(t *testing.T) { } func TestQueryBuilder_WithOrderBy(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() orderParam := query.Get("order") @@ -156,7 +157,7 @@ func TestQueryBuilder_WithOrderBy(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). 
@@ -170,9 +171,9 @@ func TestQueryBuilder_WithOrderBy(t *testing.T) { } func TestQueryBuilder_WithPagination(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() if query.Get("_limit") != "25" { @@ -188,7 +189,7 @@ func TestQueryBuilder_WithPagination(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). @@ -201,15 +202,7 @@ func TestQueryBuilder_WithPagination(t *testing.T) { } } -// Note: Custom org test removed - now use Progressive API with client.Org(customID) -// The Org() method now returns OrgBuilder, not QueryBuilder - func TestQueryBuilder_ValidationErrors(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", - }, nil) - tests := []struct { name string buildQuery func() *QueryBuilder @@ -219,7 +212,9 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "missing catalog", buildQuery: func() *QueryBuilder { - return client.Query().Schema("schema").Table("table") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Schema("schema"). + Table("table") }, expectError: true, errorMsg: "catalog name is required", @@ -227,7 +222,9 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "missing schema", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Table("table") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Table("table") }, expectError: true, errorMsg: "schema name is required", @@ -235,7 +232,9 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "missing table", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Schema("schema") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Schema("schema") }, expectError: true, errorMsg: "table name is required", @@ -243,7 +242,10 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "empty catalog name", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("").Schema("schema").Table("table") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog(""). + Schema("schema"). + Table("table") }, expectError: true, errorMsg: "catalog name cannot be empty", @@ -251,7 +253,11 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "negative limit", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Schema("schema").Table("table").Limit(-1) + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Schema("schema"). + Table("table"). + Limit(-1) }, expectError: true, errorMsg: "limit cannot be negative", @@ -259,7 +265,11 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "negative offset", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Schema("schema").Table("table").Offset(-10) + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Schema("schema"). + Table("table"). 
+ Offset(-10) }, expectError: true, errorMsg: "offset cannot be negative", @@ -267,7 +277,11 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "invalid operator", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Schema("schema").Table("table").Where("col", "??", "val") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Schema("schema"). + Table("table"). + Where("col", "??", "val") }, expectError: true, errorMsg: "invalid operator", @@ -275,7 +289,11 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { { name: "invalid order direction", buildQuery: func() *QueryBuilder { - return client.Query().Catalog("cat").Schema("schema").Table("table").OrderBy("col", "INVALID") + return newTestQueryBuilder(utils.Configuration{Token: "test-token", DataDockID: "test-datadock"}, nil). + Catalog("cat"). + Schema("schema"). + Table("table"). + OrderBy("col", "INVALID") }, expectError: true, errorMsg: "must be ASC or DESC", @@ -303,9 +321,9 @@ func TestQueryBuilder_ValidationErrors(t *testing.T) { } func TestQueryBuilder_RawParams(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() if query.Get("custom_param") != "custom_value" { @@ -321,7 +339,7 @@ func TestQueryBuilder_RawParams(t *testing.T) { rawParams := make(map[string][]string) rawParams["custom_param"] = []string{"custom_value"} - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("table"). @@ -334,9 +352,9 @@ func TestQueryBuilder_RawParams(t *testing.T) { } func TestQueryBuilder_ComplexQuery(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() @@ -360,7 +378,7 @@ func TestQueryBuilder_ComplexQuery(t *testing.T) { }, nil }) - resp, err := client. + resp, err := qb. Catalog("sales"). Schema("public"). Table("customers"). @@ -390,9 +408,9 @@ func TestQueryBuilder_ComplexQuery(t *testing.T) { } func TestQueryBuilder_URLEscaping(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { // Verify special characters are properly escaped path := req.URL.Path @@ -410,7 +428,7 @@ func TestQueryBuilder_URLEscaping(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("test/catalog"). Schema("test schema"). Table("test-table"). @@ -422,9 +440,9 @@ func TestQueryBuilder_URLEscaping(t *testing.T) { } func TestQueryBuilder_OrderByDefaultDirection(t *testing.T) { - client := newTestClient(utils.Configuration{ - Token: "test-token", - OrgID: "test-org", + qb := newTestQueryBuilder(utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", }, func(req *http.Request) (*http.Response, error) { query := req.URL.Query() orderParam := query.Get("order") @@ -439,7 +457,7 @@ func TestQueryBuilder_OrderByDefaultDirection(t *testing.T) { }, nil }) - _, err := client. + _, err := qb. Catalog("cat"). Schema("schema"). Table("users"). 
@@ -450,3 +468,71 @@ func TestQueryBuilder_OrderByDefaultDirection(t *testing.T) { t.Fatalf("Expected no error, got %v", err) } } + +// Test helper to create a mock QueryBuilder +type mockClient struct { + config utils.Configuration + handler func(*http.Request) (*http.Response, error) +} + +func (m *mockClient) Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) { + if m.handler == nil { + // For validation-only tests + return &utils.Response{Status: utils.StatusOK}, nil + } + + req, _ := http.NewRequestWithContext(ctx, method, endpoint, nil) + resp, err := m.handler(req) + if err != nil { + return nil, err + } + + bodyBytes, _ := io.ReadAll(resp.Body) + defer resp.Body.Close() + + // Handle error status codes similar to request.go + if resp.StatusCode >= 300 { + response := &utils.Response{ + Status: utils.StatusError, + Error: string(bodyBytes), + HTTPCode: resp.StatusCode, + } + + if resp.StatusCode == http.StatusUnauthorized { + return response, utils.ErrAuthenticationFailed + } + if resp.StatusCode == http.StatusForbidden { + return response, utils.ErrPermissionDenied + } + if resp.StatusCode == http.StatusNotFound { + return response, utils.ErrNotFound + } + return response, nil + } + + // Parse successful response + var parsedBody any + if len(bodyBytes) > 0 { + if err := json.Unmarshal(bodyBytes, &parsedBody); err != nil { + return nil, err + } + } + + return &utils.Response{ + Status: utils.StatusOK, + Data: parsedBody, + HTTPCode: resp.StatusCode, + }, nil +} + +func (m *mockClient) GetConfig() utils.Configuration { + return m.config +} + +func newTestQueryBuilder(config utils.Configuration, handler func(*http.Request) (*http.Response, error)) *QueryBuilder { + config.BaseURL = "https://test.example.com" + return NewQueryBuilder(&mockClient{ + config: config, + handler: handler, + }) +} diff --git a/sdk/builders/fluent/search.go b/sdk/builders/fluent/search.go new file mode 100644 index 0000000..f0e4f00 --- /dev/null +++ b/sdk/builders/fluent/search.go @@ -0,0 +1,164 @@ +package fluent + +import ( + "context" + "fmt" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// SearchBuilder provides a fluent interface for building and executing full-text search queries. +type SearchBuilder struct { + client interface { + Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) + GetConfig() utils.Configuration + } + errors []error + + // Search parameters + searchQuery string + dataDockID string + catalogName string + schemaName string + tableName string + columnsToIndex []string + limitVal int +} + +// NewSearchBuilder creates a new SearchBuilder instance. +func NewSearchBuilder(client interface { + Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) + GetConfig() utils.Configuration +}) *SearchBuilder { + return &SearchBuilder{ + client: client, + errors: []error{}, + dataDockID: client.GetConfig().DataDockID, // Auto-set from config + columnsToIndex: []string{}, + limitVal: 20, // Default limit + } +} + +// Query sets the search query string. +func (sb *SearchBuilder) Query(query string) *SearchBuilder { + if query == "" { + sb.errors = append(sb.errors, fmt.Errorf("search query cannot be empty")) + } + sb.searchQuery = query + return sb +} + +// DataDock sets the data dock ID for the search. 
+func (sb *SearchBuilder) DataDock(dataDockID string) *SearchBuilder { + if dataDockID == "" { + sb.errors = append(sb.errors, fmt.Errorf("data dock ID cannot be empty")) + } + sb.dataDockID = dataDockID + return sb +} + +// Catalog sets the catalog name for the search. +func (sb *SearchBuilder) Catalog(name string) *SearchBuilder { + if name == "" { + sb.errors = append(sb.errors, fmt.Errorf("catalog name cannot be empty")) + } + sb.catalogName = name + return sb +} + +// Schema sets the schema name for the search. +func (sb *SearchBuilder) Schema(name string) *SearchBuilder { + if name == "" { + sb.errors = append(sb.errors, fmt.Errorf("schema name cannot be empty")) + } + sb.schemaName = name + return sb +} + +// Table sets the table name for the search. +func (sb *SearchBuilder) Table(name string) *SearchBuilder { + if name == "" { + sb.errors = append(sb.errors, fmt.Errorf("table name cannot be empty")) + } + sb.tableName = name + return sb +} + +// Columns sets the columns to index for the search. +// Can be called multiple times to add more columns. +func (sb *SearchBuilder) Columns(columns ...string) *SearchBuilder { + sb.columnsToIndex = append(sb.columnsToIndex, columns...) + return sb +} + +// Limit sets the maximum number of results to return. +func (sb *SearchBuilder) Limit(n int) *SearchBuilder { + if n <= 0 { + sb.errors = append(sb.errors, fmt.Errorf("limit must be greater than 0")) + return sb + } + sb.limitVal = n + return sb +} + +// validate checks that all required fields are set. +func (sb *SearchBuilder) validate() error { + // Check for accumulated errors during building + if len(sb.errors) > 0 { + var errMsgs []string + for _, err := range sb.errors { + errMsgs = append(errMsgs, err.Error()) + } + return fmt.Errorf("search builder validation failed: %s", errMsgs[0]) + } + + // Check required fields + if sb.searchQuery == "" { + return fmt.Errorf("%w: search query is required", utils.ErrInvalidRequest) + } + if sb.dataDockID == "" { + return fmt.Errorf("%w: data dock ID is required", utils.ErrInvalidRequest) + } + if sb.catalogName == "" { + return fmt.Errorf("%w: catalog name is required", utils.ErrInvalidRequest) + } + if sb.schemaName == "" { + return fmt.Errorf("%w: schema name is required", utils.ErrInvalidRequest) + } + if sb.tableName == "" { + return fmt.Errorf("%w: table name is required", utils.ErrInvalidRequest) + } + if len(sb.columnsToIndex) == 0 { + return fmt.Errorf("%w: at least one column must be specified", utils.ErrInvalidRequest) + } + + return nil +} + +// Execute executes the search query and returns the results. 
+func (sb *SearchBuilder) Execute(ctx context.Context) (*utils.Response, error) { + // Validate the search + if err := sb.validate(); err != nil { + return nil, err + } + + // Build the request body + requestBody := map[string]interface{}{ + "query": sb.searchQuery, + "data_dock_id": sb.dataDockID, + "catalog": sb.catalogName, + "schema": sb.schemaName, + "table": sb.tableName, + "limit": sb.limitVal, + "columns_to_index": sb.columnsToIndex, + } + + // Build endpoint + endpoint := fmt.Sprintf("%s/api/search", sb.client.GetConfig().BaseURL) + + // Marshal request body + body := utils.JsonMarshal(requestBody) + + // Execute the request + return sb.client.Do(ctx, "POST", endpoint, body) +} diff --git a/sdk/builders/progressive/catalog.go b/sdk/builders/progressive/catalog.go new file mode 100644 index 0000000..77c9b91 --- /dev/null +++ b/sdk/builders/progressive/catalog.go @@ -0,0 +1,68 @@ +package progressive + +import ( + "context" + "fmt" + "net/url" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" +) + +// CatalogBuilder represents a catalog context. +// Available methods: +// - Schema(name) - Navigate to a specific schema +// - ListSchemas(ctx) - List all schemas in this catalog +type CatalogBuilder struct { + client builders.ClientInterface + orgID string + dataDockID string + catalogName string +} + +// Schema navigates to a specific schema in this catalog. +func (c *CatalogBuilder) Schema(schemaName string) *SchemaBuilder { + return &SchemaBuilder{ + client: c.client, + orgID: c.orgID, + dataDockID: c.dataDockID, + catalogName: c.catalogName, + schemaName: schemaName, + } +} + +// ListSchemas retrieves all schemas in this catalog. +// This parses the catalog metadata to extract schemas. +func (c *CatalogBuilder) ListSchemas(ctx context.Context) ([]string, error) { + // Get full catalog metadata + endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", + c.client.GetConfig().BaseURL, + url.PathEscape(c.dataDockID), + ) + + resp, err := c.client.Do(ctx, "GET", endpoint, nil) + if err != nil { + return nil, err + } + + // Extract schemas for this catalog + var schemas []string + if catalogs, ok := resp.Data.(map[string]interface{})["catalogs"].([]interface{}); ok { + for _, cat := range catalogs { + if catMap, ok := cat.(map[string]interface{}); ok { + if catMap["catalog_name"] == c.catalogName { + if schemaList, ok := catMap["schemas"].([]interface{}); ok { + for _, s := range schemaList { + if sMap, ok := s.(map[string]interface{}); ok { + if name, ok := sMap["schema_name"].(string); ok { + schemas = append(schemas, name) + } + } + } + } + } + } + } + } + + return schemas, nil +} diff --git a/sdk/builders/progressive/datadock.go b/sdk/builders/progressive/datadock.go new file mode 100644 index 0000000..4110d73 --- /dev/null +++ b/sdk/builders/progressive/datadock.go @@ -0,0 +1,113 @@ +package progressive + +import ( + "context" + "fmt" + "net/url" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// DataDockBuilder represents a datadock context. 
+// Available methods: +// - Catalog(name) - Navigate to a specific catalog +// - GetCatalog(ctx) - Get the full catalog metadata +// - RefreshCatalog(ctx) - Trigger catalog introspection +// - WakeUp(ctx) - Bring datadock online +// - Sleep(ctx) - Put datadock to sleep +// - Get(ctx) - Get datadock details +// - Update(ctx, config) - Update datadock configuration +// - Delete(ctx) - Delete this datadock +type DataDockBuilder struct { + client builders.ClientInterface + orgID string + harborID string + dataDockID string +} + +// Catalog navigates to a specific catalog in this datadock. +func (d *DataDockBuilder) Catalog(catalogName string) *CatalogBuilder { + return &CatalogBuilder{ + client: d.client, + orgID: d.orgID, + dataDockID: d.dataDockID, + catalogName: catalogName, + } +} + +// GetCatalog retrieves the full catalog metadata (schemas, tables, columns). +func (d *DataDockBuilder) GetCatalog(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "GET", endpoint, nil) +} + +// RefreshCatalog triggers catalog introspection and updates metadata. +func (d *DataDockBuilder) RefreshCatalog(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s/catalog/refresh", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "POST", endpoint, nil) +} + +// WakeUp brings the datadock online (for TrinoInternal/MinioInternal). +func (d *DataDockBuilder) WakeUp(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s/wake-up", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "POST", endpoint, nil) +} + +// Sleep puts the datadock to sleep (cost optimization). +func (d *DataDockBuilder) Sleep(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s/sleep", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "POST", endpoint, nil) +} + +// Get retrieves datadock details. +func (d *DataDockBuilder) Get(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "GET", endpoint, nil) +} + +// Update modifies datadock configuration. +func (d *DataDockBuilder) Update(ctx context.Context, config map[string]interface{}) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + body := utils.JsonMarshal(config) + return d.client.Do(ctx, "PATCH", endpoint, body) +} + +// Delete removes this datadock. +func (d *DataDockBuilder) Delete(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/data-docks/%s", + d.client.GetConfig().BaseURL, + url.PathEscape(d.dataDockID), + ) + return d.client.Do(ctx, "DELETE", endpoint, nil) +} + +// Search starts a search builder for this datadock. +// Returns a SearchBuilder that can be used to build and execute full-text search queries. 
+func (d *DataDockBuilder) Search(query string) *SearchBuilder { + return &SearchBuilder{ + client: d.client, + dataDockID: d.dataDockID, + searchQuery: query, + columnsToIndex: []string{}, + limitVal: 20, // Default limit + } +} diff --git a/sdk/builders/progressive/harbor.go b/sdk/builders/progressive/harbor.go new file mode 100644 index 0000000..2805b31 --- /dev/null +++ b/sdk/builders/progressive/harbor.go @@ -0,0 +1,60 @@ +package progressive + +import ( + "context" + "fmt" + "net/url" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// HarborBuilder represents a harbor context. +// Available methods: +// - DataDock(id) - Navigate to a specific datadock +// - ListDataDocks(ctx) - List all datadocks in this harbor +// - CreateDataDock(ctx, config) - Create a new datadock +// - Delete(ctx) - Delete this harbor +type HarborBuilder struct { + client builders.ClientInterface + orgID string + harborID string +} + +// DataDock navigates to a specific datadock in this harbor. +func (h *HarborBuilder) DataDock(dataDockID string) *DataDockBuilder { + return &DataDockBuilder{ + client: h.client, + orgID: h.orgID, + harborID: h.harborID, + dataDockID: dataDockID, + } +} + +// ListDataDocks retrieves all datadocks in this harbor. +func (h *HarborBuilder) ListDataDocks(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/harbors/%s/data-docks", + h.client.GetConfig().BaseURL, + url.PathEscape(h.harborID), + ) + return h.client.Do(ctx, "GET", endpoint, nil) +} + +// CreateDataDock creates a new datadock in this harbor. +func (h *HarborBuilder) CreateDataDock(ctx context.Context, config map[string]interface{}) (*utils.Response, error) { + // Ensure harbor_id is set + config["harbor_id"] = h.harborID + + endpoint := fmt.Sprintf("%s/data-docks", h.client.GetConfig().BaseURL) + body := utils.JsonMarshal(config) + return h.client.Do(ctx, "POST", endpoint, body) +} + +// Delete removes this harbor. +func (h *HarborBuilder) Delete(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/harbors/%s", + h.client.GetConfig().BaseURL, + url.PathEscape(h.harborID), + ) + return h.client.Do(ctx, "DELETE", endpoint, nil) +} diff --git a/sdk/builders/progressive/org.go b/sdk/builders/progressive/org.go new file mode 100644 index 0000000..4ceba94 --- /dev/null +++ b/sdk/builders/progressive/org.go @@ -0,0 +1,69 @@ +package progressive + +import ( + "context" + "fmt" + "net/url" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// OrgBuilder represents an organization context. +// Available methods: +// - Harbor(id) - Navigate to a specific harbor +// - ListHarbors(ctx) - List all harbors in this org +// - CreateHarbor(ctx, name) - Create a new harbor +// - ListDataDocks(ctx) - List all datadocks across all harbors +type OrgBuilder struct { + Client builders.ClientInterface + OrgID string +} + +// Harbor navigates to a specific harbor in this organization. +func (o *OrgBuilder) Harbor(harborID string) *HarborBuilder { + return &HarborBuilder{ + client: o.Client, + orgID: o.OrgID, + harborID: harborID, + } +} + +// ListHarbors retrieves all harbors in this organization. 
+func (o *OrgBuilder) ListHarbors(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/%s/harbors", + o.Client.GetConfig().BaseURL, + url.PathEscape(o.OrgID), + ) + return o.Client.Do(ctx, "GET", endpoint, nil) +} + +// CreateHarbor creates a new harbor in this organization. +func (o *OrgBuilder) CreateHarbor(ctx context.Context, name string) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/%s/harbors", + o.Client.GetConfig().BaseURL, + url.PathEscape(o.OrgID), + ) + body := utils.JsonMarshal(map[string]interface{}{ + "name": name, + }) + return o.Client.Do(ctx, "POST", endpoint, body) +} + +// ListDataDocks retrieves all datadocks across all harbors in this organization. +func (o *OrgBuilder) ListDataDocks(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/%s/data-docks", + o.Client.GetConfig().BaseURL, + url.PathEscape(o.OrgID), + ) + return o.Client.Do(ctx, "GET", endpoint, nil) +} + +// RefreshAllDataDocks triggers a catalog refresh on all datadocks in this organization. +func (o *OrgBuilder) RefreshAllDataDocks(ctx context.Context) (*utils.Response, error) { + endpoint := fmt.Sprintf("%s/%s/data-docks/refresh", + o.Client.GetConfig().BaseURL, + url.PathEscape(o.OrgID), + ) + return o.Client.Do(ctx, "POST", endpoint, nil) +} diff --git a/sdk/builders/progressive/schema.go b/sdk/builders/progressive/schema.go new file mode 100644 index 0000000..8a9ab22 --- /dev/null +++ b/sdk/builders/progressive/schema.go @@ -0,0 +1,82 @@ +package progressive + +import ( + "context" + "fmt" + "net/url" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" +) + +// SchemaBuilder represents a schema context. +// Available methods: +// - Table(name) - Navigate to a specific table (returns TableQueryBuilder for querying) +// - ListTables(ctx) - List all tables in this schema +type SchemaBuilder struct { + client builders.ClientInterface + orgID string + dataDockID string + catalogName string + schemaName string +} + +// Table navigates to a specific table in this schema. +// Returns a TableQueryBuilder which supports both queries and operations. +func (s *SchemaBuilder) Table(tableName string) *TableQueryBuilder { + return &TableQueryBuilder{ + client: s.client, + orgID: s.orgID, + catalogName: s.catalogName, + schemaName: s.schemaName, + tableName: tableName, + // Query builder fields + selectCols: []string{}, + filters: []builders.Filter{}, + orderBy: []builders.OrderClause{}, + rawParams: url.Values{}, + } +} + +// ListTables retrieves all tables in this schema. 
+func (s *SchemaBuilder) ListTables(ctx context.Context) ([]string, error) { + // Get full catalog metadata + endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", + s.client.GetConfig().BaseURL, + url.PathEscape(s.dataDockID), + ) + + resp, err := s.client.Do(ctx, "GET", endpoint, nil) + if err != nil { + return nil, err + } + + // Extract tables for this schema + var tables []string + if catalogs, ok := resp.Data.(map[string]interface{})["catalogs"].([]interface{}); ok { + for _, cat := range catalogs { + if catMap, ok := cat.(map[string]interface{}); ok { + if catMap["catalog_name"] == s.catalogName { + if schemaList, ok := catMap["schemas"].([]interface{}); ok { + for _, sch := range schemaList { + if schMap, ok := sch.(map[string]interface{}); ok { + if schMap["schema_name"] == s.schemaName { + if tableList, ok := schMap["tables"].([]interface{}); ok { + for _, t := range tableList { + if tMap, ok := t.(map[string]interface{}); ok { + if name, ok := tMap["table_name"].(string); ok { + tables = append(tables, name) + } + } + } + } + } + } + } + } + } + } + } + } + + return tables, nil +} diff --git a/sdk/builders/progressive/search.go b/sdk/builders/progressive/search.go new file mode 100644 index 0000000..4bc4d13 --- /dev/null +++ b/sdk/builders/progressive/search.go @@ -0,0 +1,99 @@ +package progressive + +import ( + "context" + "fmt" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" +) + +// SearchBuilder provides a progressive search interface starting from a DataDock. +type SearchBuilder struct { + client builders.ClientInterface + + // Pre-set from DataDock + dataDockID string + searchQuery string + + // To be set progressively + catalogName string + schemaName string + tableName string + columnsToIndex []string + limitVal int +} + +// Catalog sets the catalog name for the search. +func (sb *SearchBuilder) Catalog(name string) *SearchBuilder { + sb.catalogName = name + return sb +} + +// Schema sets the schema name for the search. +func (sb *SearchBuilder) Schema(name string) *SearchBuilder { + sb.schemaName = name + return sb +} + +// Table sets the table name for the search. +func (sb *SearchBuilder) Table(name string) *SearchBuilder { + sb.tableName = name + return sb +} + +// Columns sets the columns to index for the search. +// Can be called multiple times to add more columns. +func (sb *SearchBuilder) Columns(columns ...string) *SearchBuilder { + sb.columnsToIndex = append(sb.columnsToIndex, columns...) + return sb +} + +// Limit sets the maximum number of results to return. +func (sb *SearchBuilder) Limit(n int) *SearchBuilder { + sb.limitVal = n + return sb +} + +// Execute executes the search query and returns the results. 
+func (sb *SearchBuilder) Execute(ctx context.Context) (*utils.Response, error) {
+    // Validate required fields
+    if sb.searchQuery == "" {
+        return nil, fmt.Errorf("%w: search query is required", utils.ErrInvalidRequest)
+    }
+    if sb.dataDockID == "" {
+        return nil, fmt.Errorf("%w: data dock ID is required", utils.ErrInvalidRequest)
+    }
+    if sb.catalogName == "" {
+        return nil, fmt.Errorf("%w: catalog name is required", utils.ErrInvalidRequest)
+    }
+    if sb.schemaName == "" {
+        return nil, fmt.Errorf("%w: schema name is required", utils.ErrInvalidRequest)
+    }
+    if sb.tableName == "" {
+        return nil, fmt.Errorf("%w: table name is required", utils.ErrInvalidRequest)
+    }
+    if len(sb.columnsToIndex) == 0 {
+        return nil, fmt.Errorf("%w: at least one column must be specified", utils.ErrInvalidRequest)
+    }
+
+    // Build the request body
+    requestBody := map[string]interface{}{
+        "query":            sb.searchQuery,
+        "data_dock_id":     sb.dataDockID,
+        "catalog":          sb.catalogName,
+        "schema":           sb.schemaName,
+        "table":            sb.tableName,
+        "limit":            sb.limitVal,
+        "columns_to_index": sb.columnsToIndex,
+    }
+
+    // Build endpoint
+    endpoint := fmt.Sprintf("%s/api/search", sb.client.GetConfig().BaseURL)
+
+    // Marshal request body
+    body := utils.JsonMarshal(requestBody)
+
+    // Execute the request
+    return sb.client.Do(ctx, "POST", endpoint, body)
+}
diff --git a/sdk/builders/progressive/table.go b/sdk/builders/progressive/table.go
new file mode 100644
index 0000000..a398d59
--- /dev/null
+++ b/sdk/builders/progressive/table.go
@@ -0,0 +1,150 @@
+package progressive
+
+import (
+    "context"
+    "fmt"
+    "net/url"
+    "strings"
+
+    "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders"
+    "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils"
+)
+
+// TableQueryBuilder combines table navigation with query building.
+// This is the final level where you can build queries AND execute them.
+// Inherits all query building methods from the original QueryBuilder.
+type TableQueryBuilder struct {
+    client builders.ClientInterface
+    orgID  string
+
+    // Table location
+    catalogName string
+    schemaName  string
+    tableName   string
+
+    // Query parameters (same as QueryBuilder)
+    selectCols []string
+    filters    []builders.Filter
+    orderBy    []builders.OrderClause
+    limitVal   int
+    offsetVal  int
+    rawParams  url.Values
+}
+
+// Query building methods - same as original QueryBuilder
+// These return *TableQueryBuilder for chaining
+
+func (t *TableQueryBuilder) Select(columns ...string) *TableQueryBuilder {
+    t.selectCols = append(t.selectCols, columns...)
+    return t
+}
+
+func (t *TableQueryBuilder) Where(column, operator string, value interface{}) *TableQueryBuilder {
+    t.filters = append(t.filters, builders.Filter{
+        Column:   column,
+        Operator: operator,
+        Value:    value,
+    })
+    return t
+}
+
+func (t *TableQueryBuilder) OrderBy(column, direction string) *TableQueryBuilder {
+    if direction == "" {
+        direction = "ASC"
+    }
+    t.orderBy = append(t.orderBy, builders.OrderClause{
+        Column:    column,
+        Direction: direction,
+    })
+    return t
+}
+
+func (t *TableQueryBuilder) Limit(n int) *TableQueryBuilder {
+    t.limitVal = n
+    return t
+}
+
+func (t *TableQueryBuilder) Offset(n int) *TableQueryBuilder {
+    t.offsetVal = n
+    return t
+}
+
+func (t *TableQueryBuilder) RawParams(params url.Values) *TableQueryBuilder {
+    for key, values := range params {
+        for _, value := range values {
+            t.rawParams.Add(key, value)
+        }
+    }
+    return t
+}
+
+// Execution method - builds the query and executes it
+
+func (t *TableQueryBuilder) Get(ctx context.Context) (*utils.Response, error) {
+    // Build endpoint using Bifrost OpenAPI format
+    endpoint := fmt.Sprintf(
+        "%s/%s/openapi/%s/%s/%s",
+        t.client.GetConfig().BaseURL,
+        url.PathEscape(t.orgID),
+        url.PathEscape(t.catalogName),
+        url.PathEscape(t.schemaName),
+        url.PathEscape(t.tableName),
+    )
+
+    // Build query parameters using the same logic as QueryBuilder
+    params := t.buildParams()
+
+    if len(params) > 0 {
+        endpoint += "?" + params.Encode()
+    }
+
+    return t.client.Do(ctx, "GET", endpoint, nil)
+}
+
+// buildParams constructs query parameters (same as QueryBuilder)
+func (t *TableQueryBuilder) buildParams() url.Values {
+    params := url.Values{}
+
+    // Copy raw params first
+    for key, values := range t.rawParams {
+        for _, value := range values {
+            params.Add(key, value)
+        }
+    }
+
+    // Add SELECT columns as a comma-separated list (mirrors the fluent QueryBuilder)
+    if len(t.selectCols) > 0 {
+        params.Set("select", strings.Join(t.selectCols, ","))
+    }
+
+    // Add WHERE filters
+    for _, filter := range t.filters {
+        paramName := fmt.Sprintf("%s[%s]", filter.Column, filter.Operator)
+        params.Add(paramName, fmt.Sprintf("%v", filter.Value))
+    }
+
+    // Add ORDER BY as a comma-separated list of column.direction pairs
+    if len(t.orderBy) > 0 {
+        var orderParts []string
+        for _, order := range t.orderBy {
+            if order.Direction == "DESC" {
+                orderParts = append(orderParts, fmt.Sprintf("%s.desc", order.Column))
+            } else {
+                orderParts = append(orderParts, fmt.Sprintf("%s.asc", order.Column))
+            }
+        }
+        params.Set("order", strings.Join(orderParts, ","))
+    }
+
+    // Add LIMIT
+    if t.limitVal > 0 {
+        params.Set("_limit", fmt.Sprintf("%d", t.limitVal))
+    }
+
+    // Add OFFSET
+    if t.offsetVal > 0 {
+        params.Set("_offset", fmt.Sprintf("%d", t.offsetVal))
+    }
+
+    return params
+}
diff --git a/sdk/builders/types.go b/sdk/builders/types.go
new file mode 100644
index 0000000..385dcaf
--- /dev/null
+++ b/sdk/builders/types.go
@@ -0,0 +1,28 @@
+package builders
+
+import (
+    "context"
+
+    "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils"
+)
+
+// Filter represents a WHERE clause condition.
+type Filter struct {
+    Column   string
+    Operator string // =, >, <, >=, <=, !=, LIKE, IN
+    Value    interface{}
+}
+
+// OrderClause represents an ORDER BY clause.
+type OrderClause struct { + Column string + Direction string // ASC or DESC +} + +type Builder interface { + validate() error +} + +type Executor interface { + Get(ctx context.Context) (*utils.Response, error) +} diff --git a/sdk/client.go b/sdk/client.go index 78760bb..296656e 100644 --- a/sdk/client.go +++ b/sdk/client.go @@ -1,8 +1,12 @@ package sdk import ( - "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" + "context" "net/http" + + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders/fluent" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/builders/progressive" + "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" ) // Client is the main entry point for the SDK. @@ -13,7 +17,7 @@ type Client struct { // NewClient creates a new Bifrost client. func NewClient(config utils.Configuration) *Client { - // we create a copy of the configuration to avoid side effects + // Create a copy of the configuration to avoid side effects cfg := config return &Client{ config: cfg, @@ -24,6 +28,16 @@ func NewClient(config utils.Configuration) *Client { } } +// Do executes an HTTP request (implements the interface needed by builders) +func (c *Client) Do(ctx context.Context, method, endpoint string, body []byte) (*utils.Response, error) { + return c.do(ctx, method, endpoint, body) +} + +// GetConfig returns the client configuration (implements the interface needed by builders) +func (c *Client) GetConfig() utils.Configuration { + return c.config +} + // Query creates a new QueryBuilder for fluent query construction. // Example: // @@ -33,18 +47,23 @@ func NewClient(config utils.Configuration) *Client { // Table("orders"). // Limit(10). // Get(ctx) -func (c *Client) Query() *QueryBuilder { - return newQueryBuilder(c) +func (c *Client) Query() *fluent.QueryBuilder { + return fluent.NewQueryBuilder(c) } // Catalog starts a new fluent query with the catalog name. -// This is a shortcut for client.Query().Catalog(name). -func (c *Client) Catalog(name string) *QueryBuilder { - return newQueryBuilder(c).Catalog(name) +// This is a shortcut for client.Query().DataDock(defaultID).Catalog(name). +// Uses DataDockID from config if available. +func (c *Client) Catalog(name string) *fluent.QueryBuilder { + qb := fluent.NewQueryBuilder(c) + // Auto-set DataDockID from config if available + if c.config.DataDockID != "" { + qb = qb.DataDock(c.config.DataDockID) + } + return qb.Catalog(name) } -// Org starts a new fluent API navigation with a specific organization. -// This uses the progressive builder pattern for type-safe navigation: +// Org uses the progressive builder pattern for type-safe navigation: // // client.Org(id).Harbor(id).DataDock(id).Catalog(name).Schema(name).Table(name) // @@ -55,18 +74,49 @@ func (c *Client) Catalog(name string) *QueryBuilder { // - Catalog: Schema(), ListSchemas() // - Schema: Table(), ListTables() // - Table: Select(), Where(), Limit(), Get() -func (c *Client) Org(orgID string) *OrgBuilder { - return &OrgBuilder{ - client: c, - orgID: orgID, +func (c *Client) Org(orgID string) *progressive.OrgBuilder { + return &progressive.OrgBuilder{ + Client: c, + OrgID: orgID, } } +// DataDock starts a new fluent query with the data dock ID. +// This allows starting queries with: client.DataDock(id).Catalog(...).Schema(...).Table(...) +// This is for FLUENT API (data queries). +// Example: +// +// resp, err := client.DataDock("datadock-id"). +// Catalog("sales"). +// Schema("public"). +// Table("orders"). +// Limit(10). 
+// Get(ctx) +func (c *Client) DataDock(dataDockID string) *fluent.QueryBuilder { + return fluent.NewQueryBuilder(c).DataDock(dataDockID) +} + // OrgFromConfig creates an OrgBuilder using the OrgID from the client configuration. // This is a convenience method when you always use the same organization. -func (c *Client) OrgFromConfig() *OrgBuilder { - return &OrgBuilder{ - client: c, - orgID: c.config.OrgID, +func (c *Client) OrgFromConfig() *progressive.OrgBuilder { + return &progressive.OrgBuilder{ + Client: c, + OrgID: c.config.OrgID, } } + +// Search creates a new SearchBuilder for full-text search queries. +// Example: +// +// resp, err := client.Search(). +// Query("machine learning"). +// DataDock("data-dock-id"). +// Catalog("catalog"). +// Schema("public"). +// Table("documents"). +// Columns("title", "content", "summary"). +// Limit(10). +// Execute(ctx) +func (c *Client) Search() *fluent.SearchBuilder { + return fluent.NewSearchBuilder(c) +} diff --git a/sdk/client_test.go b/sdk/client_test.go index a095e97..3759fa7 100644 --- a/sdk/client_test.go +++ b/sdk/client_test.go @@ -13,9 +13,9 @@ import ( func TestNewClient(t *testing.T) { config := utils.Configuration{ - BaseURL: "http://localhost", - OrgID: "test-org", - Token: "test-token", + BaseURL: "http://localhost", + DataDockID: "test-datadock", // Changed from OrgID + Token: "test-token", } client := NewClient(config) @@ -28,45 +28,36 @@ func TestNewClient(t *testing.T) { } func TestCatalogMethod(t *testing.T) { - client := NewClient(utils.Configuration{OrgID: "test-org"}) + client := NewClient(utils.Configuration{DataDockID: "test-datadock"}) // Changed from OrgID qb := client.Catalog("test-catalog") if qb == nil { t.Fatal("Catalog should not return nil") } - if qb.catalogName != "test-catalog" { - t.Errorf("Expected catalog name to be 'test-catalog', got '%s'", qb.catalogName) - } - if qb.client != client { - t.Error("QueryBuilder client should be the same as the parent client") - } -} - -// mockRoundTripper is used to mock HTTP responses in tests. 
-type mockRoundTripper struct { - roundTripFunc func(req *http.Request) (*http.Response, error) + // Test that it returns a QueryBuilder that can be chained + // We can't access private fields, but we can test the chain works } -func (m *mockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { - return m.roundTripFunc(req) -} +func TestFluentAPI_Success(t *testing.T) { + config := utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", // Changed from OrgID + BaseURL: "https://test.example.com", + } -func newTestClient(config utils.Configuration, handler func(req *http.Request) (*http.Response, error)) *Client { - return &Client{ + client := &Client{ config: config, httpClient: &http.Client{ - Transport: &mockRoundTripper{roundTripFunc: handler}, + Transport: &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"data": "test"}`)), + }, nil + }, + }, }, } -} - -func TestFluentAPI_Success(t *testing.T) { - client := newTestClient(utils.Configuration{Token: "test-token", OrgID: "test-org"}, func(req *http.Request) (*http.Response, error) { - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(strings.NewReader(`{"data": "test"}`)), - }, nil - }) resp, err := client.Catalog("c").Schema("s").Table("t").Get(context.Background()) @@ -82,12 +73,25 @@ func TestFluentAPI_Success(t *testing.T) { } func TestFluentAPI_NotFound(t *testing.T) { - client := newTestClient(utils.Configuration{Token: "test-token", OrgID: "test-org"}, func(req *http.Request) (*http.Response, error) { - return &http.Response{ - StatusCode: http.StatusNotFound, - Body: io.NopCloser(strings.NewReader("")), - }, nil - }) + config := utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", // Changed from OrgID + BaseURL: "https://test.example.com", + } + + client := &Client{ + config: config, + httpClient: &http.Client{ + Transport: &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: http.StatusNotFound, + Body: io.NopCloser(strings.NewReader("")), + }, nil + }, + }, + }, + } _, err := client.Catalog("c").Schema("s").Table("t").Get(context.Background()) @@ -97,12 +101,25 @@ func TestFluentAPI_NotFound(t *testing.T) { } func TestFluentAPI_PermissionDenied(t *testing.T) { - client := newTestClient(utils.Configuration{Token: "test-token", OrgID: "test-org"}, func(req *http.Request) (*http.Response, error) { - return &http.Response{ - StatusCode: http.StatusForbidden, - Body: io.NopCloser(strings.NewReader("")), - }, nil - }) + config := utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", // Changed from OrgID + BaseURL: "https://test.example.com", + } + + client := &Client{ + config: config, + httpClient: &http.Client{ + Transport: &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: http.StatusForbidden, + Body: io.NopCloser(strings.NewReader("")), + }, nil + }, + }, + }, + } _, err := client.Catalog("c").Schema("s").Table("t").Get(context.Background()) @@ -113,19 +130,33 @@ func TestFluentAPI_PermissionDenied(t *testing.T) { func TestFluentAPI_ServerError_Retry(t *testing.T) { reqCount := 0 - client := newTestClient(utils.Configuration{Token: "test-token", OrgID: "test-org", MaxRetries: 1}, func(req *http.Request) (*http.Response, error) { - reqCount++ - if reqCount 
== 1 { - return &http.Response{ - StatusCode: http.StatusInternalServerError, - Body: io.NopCloser(strings.NewReader("")), - }, nil - } - return &http.Response{ - StatusCode: http.StatusOK, - Body: io.NopCloser(strings.NewReader(`{"data": "success"}`)), - }, nil - }) + config := utils.Configuration{ + Token: "test-token", + DataDockID: "test-datadock", // Changed from OrgID + BaseURL: "https://test.example.com", + MaxRetries: 1, + } + + client := &Client{ + config: config, + httpClient: &http.Client{ + Transport: &mockRoundTripper{ + roundTripFunc: func(req *http.Request) (*http.Response, error) { + reqCount++ + if reqCount == 1 { + return &http.Response{ + StatusCode: http.StatusInternalServerError, + Body: io.NopCloser(strings.NewReader("")), + }, nil + } + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"data": "success"}`)), + }, nil + }, + }, + }, + } resp, err := client.Catalog("c").Schema("s").Table("t").Get(context.Background()) @@ -139,3 +170,12 @@ func TestFluentAPI_ServerError_Retry(t *testing.T) { t.Errorf("Expected 2 requests, got %d", reqCount) } } + +// mockRoundTripper is used to mock HTTP responses in tests. +type mockRoundTripper struct { + roundTripFunc func(req *http.Request) (*http.Response, error) +} + +func (m *mockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + return m.roundTripFunc(req) +} diff --git a/sdk/progressive_builders.go b/sdk/progressive_builders.go deleted file mode 100644 index 3ad208d..0000000 --- a/sdk/progressive_builders.go +++ /dev/null @@ -1,507 +0,0 @@ -package sdk - -import ( - "context" - "fmt" - "net/url" - - "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils" -) - -// Progressive Builders - Each level has its own type with specific methods -// This forces the correct order: Org → Harbor → DataDock → Catalog → Schema → Table - -// ============================================================================ -// Level 1: Organization Builder -// ============================================================================ - -// OrgBuilder represents an organization context. -// Available methods: -// - Harbor(id) - Navigate to a specific harbor -// - ListHarbors(ctx) - List all harbors in this org -// - CreateHarbor(ctx, name) - Create a new harbor -// - ListDataDocks(ctx) - List all datadocks across all harbors -type OrgBuilder struct { - client *Client - orgID string -} - -// Harbor navigates to a specific harbor in this organization. -func (o *OrgBuilder) Harbor(harborID string) *HarborBuilder { - return &HarborBuilder{ - client: o.client, - orgID: o.orgID, - harborID: harborID, - } -} - -// ListHarbors retrieves all harbors in this organization. -func (o *OrgBuilder) ListHarbors(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/%s/harbors", - o.client.config.BaseURL, - url.PathEscape(o.orgID), - ) - return o.client.do(ctx, "GET", endpoint, nil) -} - -// CreateHarbor creates a new harbor in this organization. -func (o *OrgBuilder) CreateHarbor(ctx context.Context, name string) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/%s/harbors", - o.client.config.BaseURL, - url.PathEscape(o.orgID), - ) - body := utils.JsonMarshal(map[string]interface{}{ - "name": name, - }) - return o.client.do(ctx, "POST", endpoint, body) -} - -// ListDataDocks retrieves all datadocks across all harbors in this organization. 
-func (o *OrgBuilder) ListDataDocks(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/%s/data-docks", - o.client.config.BaseURL, - url.PathEscape(o.orgID), - ) - return o.client.do(ctx, "GET", endpoint, nil) -} - -// RefreshAllDataDocks triggers a catalog refresh on all datadocks in this organization. -func (o *OrgBuilder) RefreshAllDataDocks(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/%s/data-docks/refresh", - o.client.config.BaseURL, - url.PathEscape(o.orgID), - ) - return o.client.do(ctx, "POST", endpoint, nil) -} - -// ============================================================================ -// Level 2: Harbor Builder -// ============================================================================ - -// HarborBuilder represents a harbor context. -// Available methods: -// - DataDock(id) - Navigate to a specific datadock -// - ListDataDocks(ctx) - List all datadocks in this harbor -// - CreateDataDock(ctx, config) - Create a new datadock -// - Delete(ctx) - Delete this harbor -type HarborBuilder struct { - client *Client - orgID string - harborID string -} - -// DataDock navigates to a specific datadock in this harbor. -func (h *HarborBuilder) DataDock(dataDockID string) *DataDockBuilder { - return &DataDockBuilder{ - client: h.client, - orgID: h.orgID, - harborID: h.harborID, - dataDockID: dataDockID, - } -} - -// ListDataDocks retrieves all datadocks in this harbor. -func (h *HarborBuilder) ListDataDocks(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/harbors/%s/data-docks", - h.client.config.BaseURL, - url.PathEscape(h.harborID), - ) - return h.client.do(ctx, "GET", endpoint, nil) -} - -// CreateDataDock creates a new datadock in this harbor. -func (h *HarborBuilder) CreateDataDock(ctx context.Context, config map[string]interface{}) (*utils.Response, error) { - // Ensure harbor_id is set - config["harbor_id"] = h.harborID - - endpoint := fmt.Sprintf("%s/data-docks", h.client.config.BaseURL) - body := utils.JsonMarshal(config) - return h.client.do(ctx, "POST", endpoint, body) -} - -// Delete removes this harbor. -func (h *HarborBuilder) Delete(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/harbors/%s", - h.client.config.BaseURL, - url.PathEscape(h.harborID), - ) - return h.client.do(ctx, "DELETE", endpoint, nil) -} - -// ============================================================================ -// Level 3: DataDock Builder -// ============================================================================ - -// DataDockBuilder represents a datadock context. -// Available methods: -// - Catalog(name) - Navigate to a specific catalog -// - GetCatalog(ctx) - Get the full catalog metadata -// - RefreshCatalog(ctx) - Trigger catalog introspection -// - WakeUp(ctx) - Bring datadock online -// - Sleep(ctx) - Put datadock to sleep -// - Get(ctx) - Get datadock details -// - Update(ctx, config) - Update datadock configuration -// - Delete(ctx) - Delete this datadock -type DataDockBuilder struct { - client *Client - orgID string - harborID string - dataDockID string -} - -// Catalog navigates to a specific catalog in this datadock. -func (d *DataDockBuilder) Catalog(catalogName string) *CatalogBuilder { - return &CatalogBuilder{ - client: d.client, - orgID: d.orgID, - dataDockID: d.dataDockID, - catalogName: catalogName, - } -} - -// GetCatalog retrieves the full catalog metadata (schemas, tables, columns). 
-func (d *DataDockBuilder) GetCatalog(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "GET", endpoint, nil) -} - -// RefreshCatalog triggers catalog introspection and updates metadata. -func (d *DataDockBuilder) RefreshCatalog(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s/catalog/refresh", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "POST", endpoint, nil) -} - -// WakeUp brings the datadock online (for TrinoInternal/MinioInternal). -func (d *DataDockBuilder) WakeUp(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s/wake-up", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "POST", endpoint, nil) -} - -// Sleep puts the datadock to sleep (cost optimization). -func (d *DataDockBuilder) Sleep(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s/sleep", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "POST", endpoint, nil) -} - -// Get retrieves datadock details. -func (d *DataDockBuilder) Get(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "GET", endpoint, nil) -} - -// Update modifies datadock configuration. -func (d *DataDockBuilder) Update(ctx context.Context, config map[string]interface{}) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - body := utils.JsonMarshal(config) - return d.client.do(ctx, "PATCH", endpoint, body) -} - -// Delete removes this datadock. -func (d *DataDockBuilder) Delete(ctx context.Context) (*utils.Response, error) { - endpoint := fmt.Sprintf("%s/data-docks/%s", - d.client.config.BaseURL, - url.PathEscape(d.dataDockID), - ) - return d.client.do(ctx, "DELETE", endpoint, nil) -} - -// ============================================================================ -// Level 4: Catalog Builder -// ============================================================================ - -// CatalogBuilder represents a catalog context. -// Available methods: -// - Schema(name) - Navigate to a specific schema -// - ListSchemas(ctx) - List all schemas in this catalog -type CatalogBuilder struct { - client *Client - orgID string - dataDockID string - catalogName string -} - -// Schema navigates to a specific schema in this catalog. -func (c *CatalogBuilder) Schema(schemaName string) *SchemaBuilder { - return &SchemaBuilder{ - client: c.client, - orgID: c.orgID, - dataDockID: c.dataDockID, - catalogName: c.catalogName, - schemaName: schemaName, - } -} - -// ListSchemas retrieves all schemas in this catalog. -// This parses the catalog metadata to extract schemas. 
-func (c *CatalogBuilder) ListSchemas(ctx context.Context) ([]string, error) { - // Get full catalog metadata - endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", - c.client.config.BaseURL, - url.PathEscape(c.dataDockID), - ) - - resp, err := c.client.do(ctx, "GET", endpoint, nil) - if err != nil { - return nil, err - } - - // Extract schemas for this catalog - var schemas []string - if catalogs, ok := resp.Data.(map[string]interface{})["catalogs"].([]interface{}); ok { - for _, cat := range catalogs { - if catMap, ok := cat.(map[string]interface{}); ok { - if catMap["catalog_name"] == c.catalogName { - if schemaList, ok := catMap["schemas"].([]interface{}); ok { - for _, s := range schemaList { - if sMap, ok := s.(map[string]interface{}); ok { - if name, ok := sMap["schema_name"].(string); ok { - schemas = append(schemas, name) - } - } - } - } - } - } - } - } - - return schemas, nil -} - -// ============================================================================ -// Level 5: Schema Builder -// ============================================================================ - -// SchemaBuilder represents a schema context. -// Available methods: -// - Table(name) - Navigate to a specific table (returns TableQueryBuilder for querying) -// - ListTables(ctx) - List all tables in this schema -type SchemaBuilder struct { - client *Client - orgID string - dataDockID string - catalogName string - schemaName string -} - -// Table navigates to a specific table in this schema. -// Returns a TableQueryBuilder which supports both queries and operations. -func (s *SchemaBuilder) Table(tableName string) *TableQueryBuilder { - return &TableQueryBuilder{ - client: s.client, - orgID: s.orgID, - catalogName: s.catalogName, - schemaName: s.schemaName, - tableName: tableName, - // Query builder fields - selectCols: []string{}, - filters: []Filter{}, - orderBy: []OrderClause{}, - rawParams: url.Values{}, - } -} - -// ListTables retrieves all tables in this schema. -func (s *SchemaBuilder) ListTables(ctx context.Context) ([]string, error) { - // Get full catalog metadata - endpoint := fmt.Sprintf("%s/data-docks/%s/catalog", - s.client.config.BaseURL, - url.PathEscape(s.dataDockID), - ) - - resp, err := s.client.do(ctx, "GET", endpoint, nil) - if err != nil { - return nil, err - } - - // Extract tables for this schema - var tables []string - if catalogs, ok := resp.Data.(map[string]interface{})["catalogs"].([]interface{}); ok { - for _, cat := range catalogs { - if catMap, ok := cat.(map[string]interface{}); ok { - if catMap["catalog_name"] == s.catalogName { - if schemaList, ok := catMap["schemas"].([]interface{}); ok { - for _, sch := range schemaList { - if schMap, ok := sch.(map[string]interface{}); ok { - if schMap["schema_name"] == s.schemaName { - if tableList, ok := schMap["tables"].([]interface{}); ok { - for _, t := range tableList { - if tMap, ok := t.(map[string]interface{}); ok { - if name, ok := tMap["table_name"].(string); ok { - tables = append(tables, name) - } - } - } - } - } - } - } - } - } - } - } - } - - return tables, nil -} - -// ============================================================================ -// Level 6: Table Query Builder -// ============================================================================ - -// TableQueryBuilder combines table navigation with query building. -// This is the final level where you can build queries AND execute them. -// Inherits all query building methods from the original QueryBuilder. 
-type TableQueryBuilder struct { - client *Client - orgID string - - // Table location - catalogName string - schemaName string - tableName string - - // Query parameters (same as QueryBuilder) - selectCols []string - filters []Filter - orderBy []OrderClause - limitVal int - offsetVal int - rawParams url.Values -} - -// Query building methods - same as original QueryBuilder -// These return *TableQueryBuilder for chaining - -func (t *TableQueryBuilder) Select(columns ...string) *TableQueryBuilder { - t.selectCols = append(t.selectCols, columns...) - return t -} - -func (t *TableQueryBuilder) Where(column, operator string, value interface{}) *TableQueryBuilder { - t.filters = append(t.filters, Filter{ - Column: column, - Operator: operator, - Value: value, - }) - return t -} - -func (t *TableQueryBuilder) OrderBy(column, direction string) *TableQueryBuilder { - if direction == "" { - direction = "ASC" - } - t.orderBy = append(t.orderBy, OrderClause{ - Column: column, - Direction: direction, - }) - return t -} - -func (t *TableQueryBuilder) Limit(n int) *TableQueryBuilder { - t.limitVal = n - return t -} - -func (t *TableQueryBuilder) Offset(n int) *TableQueryBuilder { - t.offsetVal = n - return t -} - -func (t *TableQueryBuilder) RawParams(params url.Values) *TableQueryBuilder { - for key, values := range params { - for _, value := range values { - t.rawParams.Add(key, value) - } - } - return t -} - -// Execution method - builds the query and executes it - -func (t *TableQueryBuilder) Get(ctx context.Context) (*utils.Response, error) { - // Build endpoint using Bifrost OpenAPI format - endpoint := fmt.Sprintf( - "%s/%s/openapi/%s/%s/%s", - t.client.config.BaseURL, - url.PathEscape(t.orgID), - url.PathEscape(t.catalogName), - url.PathEscape(t.schemaName), - url.PathEscape(t.tableName), - ) - - // Build query parameters using the same logic as QueryBuilder - params := t.buildParams() - - if len(params) > 0 { - endpoint += "?" 
+ params.Encode() - } - - return t.client.do(ctx, "GET", endpoint, nil) -} - -// buildParams constructs query parameters (same as QueryBuilder) -func (t *TableQueryBuilder) buildParams() url.Values { - params := url.Values{} - - // Copy raw params first - for key, values := range t.rawParams { - for _, value := range values { - params.Add(key, value) - } - } - - // Add SELECT columns - if len(t.selectCols) > 0 { - params.Set("select", fmt.Sprintf("%s", t.selectCols)) - } - - // Add WHERE filters - for _, filter := range t.filters { - paramName := fmt.Sprintf("%s[%s]", filter.Column, filter.Operator) - params.Add(paramName, fmt.Sprintf("%v", filter.Value)) - } - - // Add ORDER BY - if len(t.orderBy) > 0 { - var orderParts []string - for _, order := range t.orderBy { - if order.Direction == "DESC" { - orderParts = append(orderParts, fmt.Sprintf("%s.desc", order.Column)) - } else { - orderParts = append(orderParts, fmt.Sprintf("%s.asc", order.Column)) - } - } - params.Set("order", fmt.Sprintf("%s", orderParts)) - } - - // Add LIMIT - if t.limitVal > 0 { - params.Set("_limit", fmt.Sprintf("%d", t.limitVal)) - } - - // Add OFFSET - if t.offsetVal > 0 { - params.Set("_offset", fmt.Sprintf("%d", t.offsetVal)) - } - - return params -} diff --git a/sdk/request.go b/sdk/request.go index f4ae645..e8c4efb 100644 --- a/sdk/request.go +++ b/sdk/request.go @@ -33,8 +33,17 @@ func (c *Client) do(ctx context.Context, method, url string, body []byte) (*util return nil, fmt.Errorf("%w: %w", utils.ErrInvalidRequest, err) } + // If no token is set, try to get one from Keycloak if c.config.Token == "" { - return nil, utils.ErrInvalidConfiguration + if c.isKeycloakAuthMethodConfigured() { + token, err := c.refreshToken(ctx) + if err != nil { + return nil, fmt.Errorf("failed to obtain token: %w", err) + } + c.config.Token = token + } else { + return nil, utils.ErrInvalidConfiguration + } } req.Header.Set("Authorization", "Bearer "+c.config.Token) diff --git a/sdk/utils/types.go b/sdk/utils/types.go index 91ec222..5d23844 100644 --- a/sdk/utils/types.go +++ b/sdk/utils/types.go @@ -5,9 +5,10 @@ import ( ) type Configuration struct { - BaseURL string - OrgID string - Token string + BaseURL string + OrgID string + DataDockID string + Token string SkipTLSVerify bool RequestTimeout time.Duration diff --git a/usage_examples/fluent_api_examples.go b/usage_examples/fluent_api_examples.go index 8b64195..f244b82 100644 --- a/usage_examples/fluent_api_examples.go +++ b/usage_examples/fluent_api_examples.go @@ -13,6 +13,45 @@ import ( // This file demonstrates the new fluent API for the Bifrost SDK. // The fluent API provides a more intuitive and user-friendly way to interact with the SDK. +func runSearchExample() { + fmt.Println("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━") + fmt.Println("🎯 Search Example: Full-Text Search") + fmt.Println("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━") + + config := getConfig() + client := sdk.NewClient(config) + + dataDockID := getEnv("HYPERFLUID_DATADOCK_ID", "") + catalog := getEnv("BIFROST_TEST_CATALOG", "iceberg") + schema := getEnv("BIFROST_TEST_SCHEMA", "public") + table := getEnv("BIFROST_TEST_TABLE", "text_files") + + if dataDockID == "" { + fmt.Println("⚠️ Skipping: HYPERFLUID_DATADOCK_ID not set") + fmt.Println() + return + } + + fmt.Println("📝 Full-text search query:") + fmt.Printf(" DataDock: %s\n", dataDockID) + fmt.Printf(" Searching in: %s.%s.%s\n", catalog, schema, table) + fmt.Println() + + // Search for content + resp, err := client.Search(). + Query("machine learning").
+ DataDock(dataDockID). + Catalog(catalog). + Schema(schema). + Table(table). + Columns("content", "summary"). // Adjust columns based on your table + Limit(10). + Execute(context.Background()) + + handleResponse(resp, err) + fmt.Println() +} + func runFluentAPISimpleExample() { fmt.Println("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━") fmt.Println("🎯 Fluent API Example 1: Simple Query") @@ -34,8 +73,10 @@ func runFluentAPISimpleExample() { fmt.Printf("📝 Fluent query: client.Catalog(%q).Schema(%q).Table(%q).Limit(5).Get(ctx)\n", testCatalog, testSchema, testTable) - // NEW FLUENT API - Simple and intuitive! + dataDockID := getEnv("HYPERFLUID_DATADOCK_ID", "") + resp, err := client. + DataDock(dataDockID). Catalog(testCatalog). Schema(testSchema). Table(testTable). @@ -134,10 +175,16 @@ func runFluentAPIComplexExample() { func handleResponse(resp *utils.Response, err error) { if err != nil { fmt.Printf("❌ Error: %s\n", err.Error()) + // Also show the server response if available + if resp != nil && resp.Error != "" { + fmt.Printf(" Server said: %s\n", resp.Error) + fmt.Printf(" HTTP Status: %d\n", resp.HTTPCode) + } return } if resp.Status != utils.StatusOK { fmt.Printf("❌ Error: %s\n", resp.Error) + fmt.Printf(" HTTP Status: %d\n", resp.HTTPCode) return } fmt.Println("✅ Success!") @@ -157,6 +204,7 @@ func getConfig() utils.Configuration { BaseURL: getEnv("HYPERFLUID_BASE_URL", ""), OrgID: getEnv("HYPERFLUID_ORG_ID", ""), Token: getEnv("HYPERFLUID_TOKEN", ""), + DataDockID: getEnv("HYPERFLUID_DATADOCK_ID", ""), RequestTimeout: 30 * time.Second, MaxRetries: 3, diff --git a/usage_examples/main.go b/usage_examples/main.go index 6b89548..c7ecf48 100644 --- a/usage_examples/main.go +++ b/usage_examples/main.go @@ -15,35 +15,21 @@ func main() { if err := godotenv.Load("../.env"); err != nil { log.Printf("⚠️ Warning: .env not loaded: %v\n", err) + } else { + fmt.Println("✓ .env file loaded successfully") } - fmt.Println("Running fluent API examples...") + // TODO: re-enable progressive examples fmt.Println() - - // Old fluent API examples (backward compatibility) fmt.Println("══════════════════════════════════════════════") - fmt.Println("📚 SIMPLE FLUENT API (Catalog-first)") + fmt.Println("📊 FLUENT EXAMPLE") fmt.Println("══════════════════════════════════════════════") fmt.Println() runFluentAPISimpleExample() - runFluentAPIWithSelectExample() runFluentAPIComplexExample() - - // NEW! Progressive fluent API examples - fmt.Println() - fmt.Println("══════════════════════════════════════════════") - fmt.Println("✨ PROGRESSIVE FLUENT API (Type-safe)") - fmt.Println("══════════════════════════════════════════════") - fmt.Println() - - runProgressiveAPIExample1() - runProgressiveAPIExample2() - runProgressiveAPIExample3() - runProgressiveAPIExample4() - runProgressiveAPIExample5() - runProgressiveAPIExample6() - runProgressiveAPIListingExample() + runSearchExample() + runFluentAPIWithSelectExample() fmt.Println() fmt.Println("━━━━━━━━━━━━━━━━━━━━━━━━━━━━") diff --git a/usage_examples/progressive_api_examples.go b/usage_examples/progressive_api_examples.go index 43ae81e..0b49813 100644 --- a/usage_examples/progressive_api_examples.go +++ b/usage_examples/progressive_api_examples.go @@ -1,5 +1,6 @@ package main +/* import ( "context" "fmt" @@ -308,3 +309,4 @@ func runProgressiveAPIListingExample() { fmt.Println() } +*/
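For orientation, below is a minimal, self-contained sketch of how a consumer is expected to call the SDK after this change, assembled from the examples touched in this diff. It is illustrative only: the sdk import path/alias, the use of os.Getenv instead of the examples' getEnv helper, and the literal "iceberg"/"public"/"text_files" values (the defaults used in runSearchExample) are assumptions, not fixed API requirements.

package main

import (
	"context"
	"fmt"
	"os"
	"time"

	sdk "github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk"
	"github.com/nudibranches-tech/bifrost-hyperfluid-sdk-dev/sdk/utils"
)

func main() {
	// Configuration now carries DataDockID alongside OrgID (see sdk/utils/types.go above).
	config := utils.Configuration{
		BaseURL:        os.Getenv("HYPERFLUID_BASE_URL"),
		OrgID:          os.Getenv("HYPERFLUID_ORG_ID"),
		DataDockID:     os.Getenv("HYPERFLUID_DATADOCK_ID"),
		Token:          os.Getenv("HYPERFLUID_TOKEN"),
		RequestTimeout: 30 * time.Second,
		MaxRetries:     3,
	}
	client := sdk.NewClient(config)

	// DataDock → Catalog → Schema → Table chain, as used in runFluentAPISimpleExample.
	resp, err := client.
		DataDock(config.DataDockID).
		Catalog("iceberg").
		Schema("public").
		Table("text_files").
		Limit(5).
		Get(context.Background())
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	if resp.Status != utils.StatusOK {
		fmt.Println("server error:", resp.Error, "HTTP status:", resp.HTTPCode)
		return
	}
	fmt.Println("query succeeded")
}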