Skip to content

Commit

Permalink
Merge branch 'main' into feat#6101
Browse files Browse the repository at this point in the history
  • Loading branch information
en-henciso authored Oct 16, 2023
2 parents 20abc19 + 681989d commit 6998dc1
Show file tree
Hide file tree
Showing 118 changed files with 1,064 additions and 1,953 deletions.
37 changes: 37 additions & 0 deletions .github/workflows/grafana-dashboards-check.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: check-grafana-dashboards

# Run on every pull request that targets a mainline or release branch.
on:
  pull_request:
    branches:
      - main
      - release-*

jobs:
  check-grafana-dashboards:
    name: check grafana dashboards
    # NOTE(review): GitHub has been retiring the ubuntu-20.04 hosted runner
    # label — confirm availability and consider ubuntu-22.04 or later.
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout repository
        # checkout@v2 runs on the end-of-life Node 12 runtime; v4 is the
        # currently supported release and is a drop-in replacement here.
        uses: actions/checkout@v4
      - name: Check grafana dashboards whether using mysql uid
        # Fail the job if any dashboard still hard-codes the "mysql"
        # datasource type; grep's match also prints the offending lines.
        run: |
          if grep '"type": "mysql"' grafana/dashboards/*; then
            echo "There are dashboards which use mysql uid as datasource"
            exit 1
          fi
Original file line number Diff line number Diff line change
Expand Up @@ -74,14 +74,17 @@ func (script *normalizeBpSettings) Up(basicRes context.BasicRes) errors.Error {
}
db := basicRes.GetDal()
bp := &blueprint20230829{}
cursor := errors.Must1(db.Cursor(dal.From("_devlake_blueprints")))
cursor := errors.Must1(db.Cursor(dal.From(bp)))
defer cursor.Close()

for cursor.Next() {
// load row
errors.Must(db.Fetch(cursor, bp))
// decrypt and unmarshal settings
settingsJson := errors.Must1(plugin.Decrypt(encKey, bp.Settings))
if settingsJson == "" {
continue
}
settings := &blueprintSettings20230829{}
errors.Must(json.Unmarshal([]byte(settingsJson), settings))
// update bp fields
Expand Down
2 changes: 1 addition & 1 deletion backend/helpers/migrationhelper/migrationhelper.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ func AutoMigrateTables(basicRes context.BasicRes, dst ...interface{}) errors.Err
if err != nil {
return err
}
_ = db.All(entity)
_ = db.First(entity)
}
return nil
}
Expand Down
28 changes: 15 additions & 13 deletions backend/helpers/pluginhelper/api/api_collector_with_state.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ type ApiCollectorStateManager struct {
RawDataSubTaskArgs
// *ApiCollector
// *GraphqlCollector
subtasks []plugin.SubTask
newState models.CollectorLatestState
IsIncreamtal bool
Since *time.Time
subtasks []plugin.SubTask
newState models.CollectorLatestState
IsIncremental bool
Since *time.Time
Before *time.Time
}

// NewStatefulApiCollector create a new ApiCollectorStateManager
Expand Down Expand Up @@ -69,30 +70,30 @@ func NewStatefulApiCollector(args RawDataSubTaskArgs) (*ApiCollectorStateManager

// Calculate incremental and since based on syncPolicy and old state
syncPolicy := args.Ctx.TaskContext().SyncPolicy()
var isIncreamtal bool
var isIncremental bool
var since *time.Time

if syncPolicy == nil {
// 1. If no syncPolicy, incremental and since is oldState.LatestSuccessStart
isIncreamtal = true
isIncremental = true
since = oldLatestSuccessStart
} else if oldLatestSuccessStart == nil {
// 2. If no oldState.LatestSuccessStart, not incremental and since is syncPolicy.TimeAfter
isIncreamtal = false
isIncremental = false
since = syncPolicy.TimeAfter
} else if syncPolicy.FullSync {
// 3. If fullSync true, not incremental and since is syncPolicy.TimeAfter
isIncreamtal = false
isIncremental = false
since = syncPolicy.TimeAfter
} else if syncPolicy.TimeAfter != nil {
// 4. If syncPolicy.TimeAfter not nil
if oldTimeAfter != nil && syncPolicy.TimeAfter.Before(*oldTimeAfter) {
// 4.1 If oldTimeAfter not nil and syncPolicy.TimeAfter before oldTimeAfter, incremental is false and since is syncPolicy.TimeAfter
isIncreamtal = false
isIncremental = false
since = syncPolicy.TimeAfter
} else {
// 4.2 If oldTimeAfter nil or syncPolicy.TimeAfter after oldTimeAfter, incremental is true and since is oldState.LatestSuccessStart
isIncreamtal = true
isIncremental = true
since = oldLatestSuccessStart
}
}
Expand All @@ -104,16 +105,17 @@ func NewStatefulApiCollector(args RawDataSubTaskArgs) (*ApiCollectorStateManager
return &ApiCollectorStateManager{
RawDataSubTaskArgs: args,
newState: oldState,
IsIncreamtal: isIncreamtal,
IsIncremental: isIncremental,
Since: since,
Before: &currentTime,
}, nil

}

// InitCollector init the embedded collector
func (m *ApiCollectorStateManager) InitCollector(args ApiCollectorArgs) errors.Error {
args.RawDataSubTaskArgs = m.RawDataSubTaskArgs
args.Incremental = m.IsIncreamtal
args.Incremental = args.Incremental || m.IsIncremental
apiCollector, err := NewApiCollector(args)
if err != nil {
return err
Expand Down Expand Up @@ -180,7 +182,7 @@ func NewStatefulApiCollectorForFinalizableEntity(args FinalizableApiCollectorArg
}

createdAfter := manager.Since
isIncremental := manager.IsIncreamtal
isIncremental := manager.IsIncremental
// step 1: create a collector to collect newly added records
err = manager.InitCollector(ApiCollectorArgs{
ApiClient: args.ApiClient,
Expand Down
7 changes: 4 additions & 3 deletions backend/helpers/pluginhelper/api/mapstructure.go
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,10 @@ func DecodeHook(f reflect.Type, t reflect.Type, data interface{}) (interface{},
func DecodeMapStruct(input map[string]interface{}, result interface{}, zeroFields bool) errors.Error {
result = models.UnwrapObject(result)
decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
ZeroFields: zeroFields,
DecodeHook: mapstructure.ComposeDecodeHookFunc(DecodeHook),
Result: result,
ZeroFields: zeroFields,
DecodeHook: mapstructure.ComposeDecodeHookFunc(DecodeHook),
Result: result,
WeaklyTypedInput: true,
})
if err != nil {
return errors.Convert(err)
Expand Down
23 changes: 3 additions & 20 deletions backend/helpers/pluginhelper/api/scope_generic_helper.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ limitations under the License.
package api

import (
"encoding/json"
"fmt"
"reflect"
"strconv"
Expand All @@ -36,7 +35,6 @@ import (
"github.com/apache/incubator-devlake/helpers/dbhelper"
serviceHelper "github.com/apache/incubator-devlake/helpers/pluginhelper/services"
"github.com/go-playground/validator/v10"
"github.com/mitchellh/mapstructure"
)

type NoScopeConfig struct{}
Expand Down Expand Up @@ -64,8 +62,9 @@ type (
// Alias, for swagger purposes
ScopeRefDoc = serviceHelper.BlueprintProjectPairs
ScopeRes[Scope plugin.ToolLayerScope, ScopeConfig any] struct {
Scope Scope `mapstructure:",squash"` // ideally we need this field to be embedded in the struct
ScopeResDoc[ScopeConfig] `mapstructure:",squash"` // however, only this type of embeding is supported as of golang 1.20
Scope Scope `mapstructure:"scope,omitempty" json:"scope,omitempty"`
ScopeConfig *ScopeConfig `mapstructure:"scopeConfig,omitempty" json:"scopeConfig,omitempty"`
Blueprints []*models.Blueprint `mapstructure:"blueprints,omitempty" json:"blueprints,omitempty"`
}
ScopeListRes[Scope plugin.ToolLayerScope, ScopeConfig any] struct {
Scopes []*ScopeRes[Scope, ScopeConfig] `mapstructure:"scopes" json:"scopes"`
Expand Down Expand Up @@ -601,22 +600,6 @@ func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) transactionalDelete(t
return nil
}

// Implement MarshalJSON method to flatten all fields
func (sr *ScopeRes[T, Y]) MarshalJSON() ([]byte, error) {
var flatMap map[string]interface{}
err := mapstructure.Decode(sr, &flatMap)
if err != nil {
return nil, err
}
// Encode the flattened map to JSON
result, err := json.Marshal(flatMap)
if err != nil {
return nil, err
}

return result, nil
}

func (gs *GenericScopeApiHelper[Conn, Scope, ScopeConfig]) getAffectedTables(pluginName string) ([]string, errors.Error) {
var tables []string
meta, err := plugin.GetPlugin(pluginName)
Expand Down
6 changes: 3 additions & 3 deletions backend/plugins/bitbucket/tasks/api_common.go
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ func GetPullRequestsIterator(taskCtx plugin.SubTaskContext, collectorWithState *
data.Options.FullName, data.Options.ConnectionId,
),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(clauses, dal.Where("bitbucket_updated_at > ?", *collectorWithState.Since))
}

Expand All @@ -199,7 +199,7 @@ func GetIssuesIterator(taskCtx plugin.SubTaskContext, collectorWithState *api.Ap
data.Options.FullName, data.Options.ConnectionId,
),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(clauses, dal.Where("bitbucket_updated_at > ?", *collectorWithState.Since))
}
// construct the input iterator
Expand All @@ -222,7 +222,7 @@ func GetPipelinesIterator(taskCtx plugin.SubTaskContext, collectorWithState *api
data.Options.FullName, data.Options.ConnectionId,
),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(clauses, dal.Where("bitbucket_complete_on > ?", *collectorWithState.Since))
}
// construct the input iterator
Expand Down
16 changes: 15 additions & 1 deletion backend/plugins/gitextractor/parser/clone.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,13 @@ import (
"encoding/base64"
"fmt"
"github.com/apache/incubator-devlake/core/errors"
"github.com/go-git/go-git/v5/plumbing/protocol/packp/capability"
"github.com/go-git/go-git/v5/plumbing/transport"
"github.com/go-git/go-git/v5/plumbing/transport/client"
"net"
"net/http"
"os"
"strings"

gogit "github.com/go-git/go-git/v5"
githttp "github.com/go-git/go-git/v5/plumbing/transport/http"
Expand Down Expand Up @@ -89,7 +92,14 @@ func (l *GitRepoCreator) CloneOverHTTP(ctx context.Context, repoId, url, user, p
Password: password,
}
}
//fmt.Printf("CloneOverHTTP clone opt: %+v\ndir: %v, repo: %v, id: %v, user: %v, passwd: %v, proxy: %v\n", cloneOptions, dir, url, repoId, user, password, proxy)
// fmt.Printf("CloneOverHTTP clone opt: %+v\ndir: %v, repo: %v, id: %v, user: %v, passwd: %v, proxy: %v\n", cloneOptions, dir, url, repoId, user, password, proxy)
if isAzureRepo(ctx, url) {
// https://github.com/go-git/go-git/issues/64
// https://github.com/go-git/go-git/blob/master/_examples/azure_devops/main.go#L34
transport.UnsupportedCapabilities = []capability.Capability{
capability.ThinPack,
}
}
_, err := gogit.PlainCloneContext(ctx, dir, true, cloneOptions)
if err != nil {
l.logger.Error(err, "PlainCloneContext")
Expand Down Expand Up @@ -133,3 +143,7 @@ func withTempDirectory(f func(tempDir string) (*GitRepo, error)) (*GitRepo, erro
repo.cleanup = cleanup
return repo, errors.Convert(err)
}

func isAzureRepo(ctx context.Context, repoUrl string) bool {
return strings.Contains(repoUrl, "dev.azure.com")
}
2 changes: 1 addition & 1 deletion backend/plugins/github/tasks/cicd_job_collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ func CollectJobs(taskCtx plugin.SubTaskContext) errors.Error {
data.Options.GithubId, data.Options.ConnectionId,
),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(clauses, dal.Where("github_updated_at > ?", collectorWithState.Since))
}
cursor, err := db.Cursor(clauses...)
Expand Down
2 changes: 1 addition & 1 deletion backend/plugins/github/tasks/cicd_job_convertor.go
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ func ConvertJobs(taskCtx plugin.SubTaskContext) (err errors.Error) {
Result: devops.GetResult(&devops.ResultRule{
Failed: []string{"failure"},
Success: []string{"success"},
Skipped: []string{"skipped"},
Skipped: []string{"skipped", "NEUTRAL"},
}, line.Conclusion),
Status: devops.GetStatus(&devops.StatusRule[string]{
Done: []string{"completed", "COMPLETED"},
Expand Down
36 changes: 1 addition & 35 deletions backend/plugins/github/tasks/cicd_run_collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,18 +20,14 @@ package tasks
import (
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"reflect"
"time"

"github.com/apache/incubator-devlake/core/dal"
"github.com/apache/incubator-devlake/core/errors"
"github.com/apache/incubator-devlake/core/models/common"
"github.com/apache/incubator-devlake/core/plugin"
helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
"github.com/apache/incubator-devlake/plugins/github/models"
)

func init() {
Expand Down Expand Up @@ -67,7 +63,6 @@ var CollectRunsMeta = plugin.SubTaskMeta{

func CollectRuns(taskCtx plugin.SubTaskContext) errors.Error {
data := taskCtx.GetData().(*GithubTaskData)
db := taskCtx.GetDal()
collector, err := helper.NewStatefulApiCollectorForFinalizableEntity(helper.FinalizableApiCollectorArgs{
RawDataSubTaskArgs: helper.RawDataSubTaskArgs{
Ctx: taskCtx,
Expand All @@ -85,6 +80,7 @@ func CollectRuns(taskCtx plugin.SubTaskContext) errors.Error {
UrlTemplate: "repos/{{ .Params.Name }}/actions/runs",
Query: func(reqData *helper.RequestData, createdAfter *time.Time) (url.Values, errors.Error) {
query := url.Values{}
query.Set("status", "completed")
query.Set("page", fmt.Sprintf("%v", reqData.Pager.Page))
query.Set("per_page", fmt.Sprintf("%v", reqData.Pager.Size))
return query, nil
Expand All @@ -110,36 +106,6 @@ func CollectRuns(taskCtx plugin.SubTaskContext) errors.Error {
return pj.CreatedAt.ToTime(), nil
},
},
CollectUnfinishedDetails: &helper.FinalizableApiCollectorDetailArgs{
BuildInputIterator: func() (helper.Iterator, errors.Error) {
// load unfinished runs from the database
cursor, err := db.Cursor(
dal.Select("id"),
dal.From(&models.GithubRun{}),
dal.Where(
"repo_id = ? AND connection_id = ? AND status IN ('ACTION_REQUIRED', 'STALE', 'IN_PROGRESS', 'QUEUED', 'REQUESTED', 'WAITING', 'PENDING')",
data.Options.GithubId, data.Options.ConnectionId,
),
)
if err != nil {
return nil, err
}
return helper.NewDalCursorIterator(db, cursor, reflect.TypeOf(SimpleGithubApiJob{}))
},

FinalizableApiCollectorCommonArgs: helper.FinalizableApiCollectorCommonArgs{
UrlTemplate: "repos/{{ .Params.Name }}/actions/runs/{{ .Input.ID }}",
ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, errors.Convert(err)
}
res.Body.Close()
return []json.RawMessage{body}, nil
},
AfterResponse: ignoreHTTPStatus404,
},
},
})

if err != nil {
Expand Down
2 changes: 1 addition & 1 deletion backend/plugins/github/tasks/pr_commit_collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ func CollectApiPullRequestCommits(taskCtx plugin.SubTaskContext) errors.Error {
dal.From(models.GithubPullRequest{}.TableName()),
dal.Where("repo_id = ? and connection_id=?", data.Options.GithubId, data.Options.ConnectionId),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(
clauses,
dal.Where("github_updated_at > ?", collectorWithState.Since),
Expand Down
2 changes: 1 addition & 1 deletion backend/plugins/github/tasks/pr_review_collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ func CollectApiPullRequestReviews(taskCtx plugin.SubTaskContext) errors.Error {
dal.From(models.GithubPullRequest{}.TableName()),
dal.Where("repo_id = ? and connection_id=?", data.Options.GithubId, data.Options.ConnectionId),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(
clauses,
dal.Where("github_updated_at > ?", collectorWithState.Since),
Expand Down
2 changes: 1 addition & 1 deletion backend/plugins/github_graphql/tasks/job_collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ func CollectGraphqlJobs(taskCtx plugin.SubTaskContext) errors.Error {
dal.Where("repo_id = ? and connection_id=?", data.Options.GithubId, data.Options.ConnectionId),
dal.Orderby("github_updated_at DESC"),
}
if collectorWithState.IsIncreamtal && collectorWithState.Since != nil {
if collectorWithState.IsIncremental && collectorWithState.Since != nil {
clauses = append(clauses, dal.Where("github_updated_at > ?", *collectorWithState.Since))
}

Expand Down
Loading

0 comments on commit 6998dc1

Please sign in to comment.