From 1bf1fdf8fb0c1f2f5c335465780734a74bfb19a8 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 01:05:32 +0900 Subject: [PATCH 01/26] Refactor using default page values Add return int values for paging Signed-off-by: DongYoung Kim --- .../authentication/api/utils/project_utils.go | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/chaoscenter/authentication/api/utils/project_utils.go b/chaoscenter/authentication/api/utils/project_utils.go index 9d77f3a2704..f1fbf4eaf9b 100644 --- a/chaoscenter/authentication/api/utils/project_utils.go +++ b/chaoscenter/authentication/api/utils/project_utils.go @@ -184,25 +184,28 @@ func CreateSortStage(sort *entities.SortInput) bson.D { } } -func CreatePaginationStage(pagination *entities.Pagination) []bson.D { +func CreatePaginationStage(pagination *entities.Pagination) ([]bson.D, int, int) { var stages []bson.D + skip := 0 + limit := 10 + if pagination != nil { page := pagination.Page - limit := pagination.Limit + limit = pagination.Limit + // upper limit of 50 to prevent exceeding max limit 16mb - if pagination.Limit > 50 { + if limit > 50 { limit = 50 } - stages = append(stages, bson.D{ - {"$skip", page * limit}, - }) - stages = append(stages, bson.D{ - {"$limit", limit}, - }) - } else { - stages = append(stages, bson.D{ - {"$limit", 10}, - }) + skip = page * limit } - return stages + + stages = append(stages, bson.D{ + {"$skip", skip}, + }) + stages = append(stages, bson.D{{ + "$limit", limit}, + }) + + return stages, skip, limit } From 6f6141fa2cbbd4b598f99c4923aa0cc6ef24b5d2 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 01:45:41 +0900 Subject: [PATCH 02/26] Feat replace mongodb operation $facet for support aws documentDB $facet to $group and $project Change the result type. 
return type has changed Signed-off-by: DongYoung Kim --- .../authentication/pkg/project/repository.go | 42 +++++++++++-------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/chaoscenter/authentication/pkg/project/repository.go b/chaoscenter/authentication/pkg/project/repository.go index 8f170b8967f..75f3b0c4a20 100644 --- a/chaoscenter/authentication/pkg/project/repository.go +++ b/chaoscenter/authentication/pkg/project/repository.go @@ -92,19 +92,29 @@ func (r repository) GetProjectsByUserID(request *entities.ListProjectRequest) (* pipeline = append(pipeline, sortStage) } - // Pagination stages - paginationStages := project_utils.CreatePaginationStage(request.Pagination) - - // Facet stage to count total projects and paginate results - facetStage := bson.D{ - {"$facet", bson.D{ - {"totalCount", bson.A{ - bson.D{{"$count", "totalNumberOfProjects"}}, - }}, - {"projects", append(mongo.Pipeline{}, paginationStages...)}, + // Pagination stage + _, skip, limit := project_utils.CreatePaginationStage(request.Pagination) + + // Count total project and get top-level document to array + countStage := bson.D{ + {"$group", bson.D{ + {"_id", nil}, + {"totalNumberOfProjects", bson.D{{"$sum", 1}}}, + {"projects", bson.D{{"$push", "$$ROOT"}}}, }}, } - pipeline = append(pipeline, facetStage) + + // Paging results + pagingStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"totalNumberOfProjects", 1}, + {"projects", bson.D{ + {"$slice", bson.A{"$projects", skip, limit}}, + }}, + }}} + + pipeline = append(pipeline, countStage, pagingStage) // Execute the aggregate pipeline cursor, err := r.Collection.Aggregate(ctx, pipeline) @@ -115,10 +125,8 @@ func (r repository) GetProjectsByUserID(request *entities.ListProjectRequest) (* // Extract results var result struct { - TotalCount []struct { - TotalNumberOfProjects int64 `bson:"totalNumberOfProjects"` - } `bson:"totalCount"` - Projects []*entities.Project `bson:"projects"` + TotalNumberOfProjects int64 
`bson:"totalNumberOfProjects"` + Projects []*entities.Project `bson:"projects"` } if cursor.Next(ctx) { @@ -128,8 +136,8 @@ func (r repository) GetProjectsByUserID(request *entities.ListProjectRequest) (* } var totalNumberOfProjects int64 - if len(result.TotalCount) > 0 { - totalNumberOfProjects = result.TotalCount[0].TotalNumberOfProjects + if result.TotalNumberOfProjects > 0 { + totalNumberOfProjects = result.TotalNumberOfProjects } else { zero := int64(0) return &entities.ListProjectResponse{ From f1ea9a83440fa06680609b8e22ace2d0e0bf9650 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 01:50:51 +0900 Subject: [PATCH 03/26] Refactor extract common pagination feature Signed-off-by: DongYoung Kim --- .../pkg/database/mongodb/common/operations.go | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 chaoscenter/graphql/server/pkg/database/mongodb/common/operations.go diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/common/operations.go b/chaoscenter/graphql/server/pkg/database/mongodb/common/operations.go new file mode 100644 index 00000000000..ac6e4ac96b3 --- /dev/null +++ b/chaoscenter/graphql/server/pkg/database/mongodb/common/operations.go @@ -0,0 +1,28 @@ +package common + +import ( + "github.com/litmuschaos/litmus/chaoscenter/graphql/server/graph/model" + "go.mongodb.org/mongo-driver/bson" +) + +func CreatePaginationStage(pagination *model.Pagination) ([]bson.D, int, int) { + var stages []bson.D + skip := 0 + limit := 15 + + if pagination != nil { + page := pagination.Page + limit = pagination.Limit + + skip = page * limit + } + + stages = append(stages, bson.D{ + {"$skip", skip}, + }) + stages = append(stages, bson.D{{ + "$limit", limit}, + }) + + return stages, skip, limit +} From 93d6a6c384e000dfd2af265fa535df889ea35479 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:20:21 +0900 Subject: [PATCH 04/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet 
to $group and $project Change the result type. return type has changed Extract common pagination feature Signed-off-by: DongYoung Kim --- .../database/mongodb/environments/schema.go | 4 +- .../server/pkg/environment/handler/handler.go | 54 +++++++++---------- 2 files changed, 27 insertions(+), 31 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/environments/schema.go b/chaoscenter/graphql/server/pkg/database/mongodb/environments/schema.go index 0d3c7de0f6e..0ee3ce5c70d 100644 --- a/chaoscenter/graphql/server/pkg/database/mongodb/environments/schema.go +++ b/chaoscenter/graphql/server/pkg/database/mongodb/environments/schema.go @@ -23,6 +23,6 @@ type TotalFilteredData struct { } type AggregatedEnvironments struct { - TotalFilteredEnvironments []TotalFilteredData `bson:"total_filtered_environments"` - Environments []Environment `bson:"environments"` + TotalFilteredEnvironments int `bson:"total_filtered_environments"` + Environments []Environment `bson:"environments"` } diff --git a/chaoscenter/graphql/server/pkg/environment/handler/handler.go b/chaoscenter/graphql/server/pkg/environment/handler/handler.go index 8cd437ac419..7458fb97054 100644 --- a/chaoscenter/graphql/server/pkg/environment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/environment/handler/handler.go @@ -8,6 +8,7 @@ import ( "github.com/litmuschaos/litmus/chaoscenter/graphql/server/graph/model" "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb" + "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/common" "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/environments" dbOperationsEnvironment "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/environments" "go.mongodb.org/mongo-driver/bson" @@ -319,38 +320,32 @@ func (e *EnvironmentService) ListEnvironments(projectID string, request *model.L } } - // Pagination or adding a default limit of 15 if pagination 
not provided - paginatedExperiments := bson.A{ - sortStage, - } - - if request.Pagination != nil { - paginationSkipStage := bson.D{ - {"$skip", request.Pagination.Page * request.Pagination.Limit}, - } - paginationLimitStage := bson.D{ - {"$limit", request.Pagination.Limit}, - } + pipeline = append(pipeline, sortStage) - paginatedExperiments = append(paginatedExperiments, paginationSkipStage, paginationLimitStage) - } else { - limitStage := bson.D{ - {"$limit", 15}, - } - - paginatedExperiments = append(paginatedExperiments, limitStage) + // Pagination or adding a default limit of 15 if pagination not provided + _, skip, limit := common.CreatePaginationStage(request.Pagination) + + // Count total project and get top-level document to array + countStage := bson.D{ + {"$group", bson.D{ + {"_id", nil}, + {"total_filtered_environments", bson.D{{"$sum", 1}}}, + {"environments", bson.D{{"$push", "$$ROOT"}}}, + }}, } - // Add two stages where we first count the number of filtered workflow and then paginate the results - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_filtered_environments", bson.A{ - bson.D{{"$count", "count"}}, + // Paging results + pagingStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"total_filtered_environments", 1}, + {"environments", bson.D{ + {"$slice", bson.A{"$environments", skip, limit}}, }}, - {"environments", paginatedExperiments}, }}, } - pipeline = append(pipeline, facetStage) + + pipeline = append(pipeline, countStage, pagingStage) cursor, err := e.EnvironmentOperator.GetAggregateEnvironments(pipeline) if err != nil { @@ -362,12 +357,13 @@ func (e *EnvironmentService) ListEnvironments(projectID string, request *model.L aggregatedEnvironments []environments.AggregatedEnvironments ) - if err = cursor.All(context.Background(), &aggregatedEnvironments); err != nil || len(aggregatedEnvironments) == 0 { + if err = cursor.All(context.Background(), &aggregatedEnvironments); err != nil { return &model.ListEnvironmentResponse{ 
TotalNoOfEnvironments: 0, Environments: envs, }, errors.New("error decoding environment cursor: " + err.Error()) } + if len(aggregatedEnvironments) == 0 { return &model.ListEnvironmentResponse{ TotalNoOfEnvironments: 0, @@ -393,8 +389,8 @@ func (e *EnvironmentService) ListEnvironments(projectID string, request *model.L } totalFilteredEnvironmentsCounter := 0 - if len(envs) > 0 && len(aggregatedEnvironments[0].TotalFilteredEnvironments) > 0 { - totalFilteredEnvironmentsCounter = aggregatedEnvironments[0].TotalFilteredEnvironments[0].Count + if len(envs) > 0 && aggregatedEnvironments[0].TotalFilteredEnvironments > 0 { + totalFilteredEnvironmentsCounter = aggregatedEnvironments[0].TotalFilteredEnvironments } output := model.ListEnvironmentResponse{ From 342d0ad70d4b7dfa7260614c7af7bf8b78bebd87 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:22:42 +0900 Subject: [PATCH 05/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $count Change the result type. 
return type has changed Signed-off-by: DongYoung Kim --- .../graphql/server/pkg/chaoshub/service.go | 16 ++++------------ .../pkg/database/mongodb/chaos_hub/schema.go | 2 +- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaoshub/service.go b/chaoscenter/graphql/server/pkg/chaoshub/service.go index 53f46cc975f..3e5b0879d36 100644 --- a/chaoscenter/graphql/server/pkg/chaoshub/service.go +++ b/chaoscenter/graphql/server/pkg/chaoshub/service.go @@ -931,16 +931,8 @@ func (c *chaosHubService) GetChaosHubStats(ctx context.Context, projectID string }}, } - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_chaos_hubs", bson.A{ - matchIdentifierStage, - bson.D{{"$count", "count"}}, - }}, - }}, - } - - pipeline = append(pipeline, facetStage) + countStage := bson.D{{"$count", "total_chaos_hubs"}} + pipeline = append(pipeline, matchIdentifierStage, countStage) // Call aggregation on pipeline hubCursor, err := c.chaosHubOperator.GetAggregateChaosHubs(ctx, pipeline) if err != nil { @@ -948,14 +940,14 @@ func (c *chaosHubService) GetChaosHubStats(ctx context.Context, projectID string } var res []dbSchemaChaosHub.AggregatedChaosHubStats - if err = hubCursor.All(ctx, &res); err != nil || len(res) == 0 || len(res[0].TotalChaosHubs) == 0 { + if err = hubCursor.All(ctx, &res); err != nil || len(res) == 0 || res[0].TotalChaosHubs == 0 { return &model.GetChaosHubStatsResponse{ TotalChaosHubs: 1, }, err } return &model.GetChaosHubStatsResponse{ - TotalChaosHubs: res[0].TotalChaosHubs[0].Count + 1, + TotalChaosHubs: res[0].TotalChaosHubs + 1, }, nil } diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_hub/schema.go b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_hub/schema.go index ed387474dcd..77a3c40f52a 100644 --- a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_hub/schema.go +++ b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_hub/schema.go @@ -59,5 +59,5 @@ type TotalCount struct { } type 
AggregatedChaosHubStats struct { - TotalChaosHubs []TotalCount `bson:"total_chaos_hubs"` + TotalChaosHubs int `bson:"total_chaos_hubs"` } From 4bdaad18c5c17715f8b1330cd3c3f6c74f102254 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:27:13 +0900 Subject: [PATCH 06/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group and $project Change the result type. return type has changed Extract common pagination feature Signed-off-by: DongYoung Kim --- .../chaos_experiment_run/handler/handler.go | 52 ++++++++----------- .../mongodb/chaos_experiment/schema.go | 4 +- 2 files changed, 25 insertions(+), 31 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go index eddb54e4fb3..51b0d119ac1 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go @@ -12,6 +12,7 @@ import ( "time" "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/authorization" + "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/common" probeUtils "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/probe/utils" @@ -505,37 +506,29 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request pipeline = append(pipeline, fetchKubernetesInfraDetailsStage) // Pagination or adding a default limit of 15 if pagination not provided - paginatedExperiments := bson.A{ - sortStage, - } + _, skip, limit := common.CreatePaginationStage(request.Pagination) - if request.Pagination != nil { - paginationSkipStage := bson.D{ - {"$skip", request.Pagination.Page * request.Pagination.Limit}, - } - paginationLimitStage := bson.D{ - {"$limit", request.Pagination.Limit}, - } - - paginatedExperiments = append(paginatedExperiments, paginationSkipStage, paginationLimitStage) - } else { - 
limitStage := bson.D{ - {"$limit", 15}, - } + pipeline = append(pipeline, sortStage) - paginatedExperiments = append(paginatedExperiments, limitStage) + // Count total project and get top-level document to array + countStage := bson.D{ + {"$group", bson.D{ + {"_id", nil}, + {"total_filtered_experiment_runs", bson.D{{"$sum", 1}}}, + {"flattened_experiment_runs", bson.D{{"$push", "$$ROOT"}}}, + }}, } - // Add two stages where we first count the number of filtered workflow and then paginate the results - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_filtered_experiment_runs", bson.A{ - bson.D{{"$count", "count"}}, + // Paging results + pagingStage := bson.D{ + {"$project", bson.D{ + {"total_filtered_experiment_runs", 1}, + {"flattened_experiment_runs", bson.D{ + {"$slice", bson.A{"$flattened_experiment_runs", skip, limit}}, }}, - {"flattened_experiment_runs", paginatedExperiments}, - }}, - } - pipeline = append(pipeline, facetStage) + }}} + + pipeline = append(pipeline, countStage, pagingStage) // Call aggregation on pipeline workflowsCursor, err := c.chaosExperimentRunOperator.GetAggregateExperimentRuns(pipeline) @@ -548,12 +541,13 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request workflows []dbChaosExperiment.AggregatedExperimentRuns ) - if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 { + if err = workflowsCursor.All(context.Background(), &workflows); err != nil { return &model.ListExperimentRunResponse{ TotalNoOfExperimentRuns: 0, ExperimentRuns: result, }, errors.New("error decoding experiment runs cursor: " + err.Error()) } + if len(workflows) == 0 { return &model.ListExperimentRunResponse{ TotalNoOfExperimentRuns: 0, @@ -637,8 +631,8 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request } totalFilteredExperimentRunsCounter := 0 - if len(workflows) > 0 && len(workflows[0].TotalFilteredExperimentRuns) > 0 { - totalFilteredExperimentRunsCounter = 
workflows[0].TotalFilteredExperimentRuns[0].Count + if len(workflows) > 0 && workflows[0].TotalFilteredExperimentRuns > 0 { + totalFilteredExperimentRunsCounter = workflows[0].TotalFilteredExperimentRuns } output := model.ListExperimentRunResponse{ diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_experiment/schema.go b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_experiment/schema.go index f79637995da..f5ee42bbc02 100644 --- a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_experiment/schema.go +++ b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_experiment/schema.go @@ -109,7 +109,7 @@ type FaultEventMetadata struct { } type AggregatedExperimentRuns struct { - TotalFilteredExperimentRuns []TotalFilteredData `bson:"total_filtered_experiment_runs"` + TotalFilteredExperimentRuns int `bson:"total_filtered_experiment_runs"` FlattenedExperimentRuns []FlattenedExperimentRun `bson:"flattened_experiment_runs"` } @@ -151,7 +151,7 @@ type ExperimentDetails struct { } type AggregatedExperiments struct { - TotalFilteredExperiments []TotalFilteredData `bson:"total_filtered_experiments"` + TotalFilteredExperiments int `bson:"total_filtered_experiments"` ScheduledExperiments []ChaosExperimentsWithRunDetails `bson:"scheduled_experiments"` } From 05bb505d699083a7d5850c6b2b749a7113d66ba2 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:34:14 +0900 Subject: [PATCH 07/26] Feat replace mongodb operation $facet for support aws documentDB Signed-off-by: DongYoung Kim --- .../pkg/chaos_infrastructure/service.go | 86 ++++++++----------- 1 file changed, 34 insertions(+), 52 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go index 8c9663d241f..f623e310956 100644 --- a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go +++ b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go @@ -814,43 +814,43 @@ func (in 
*infraService) GetInfraStats(ctx context.Context, projectID string) (*m
 		}},
 	}
 
-	// Group by infra status and count their total number by each group
-	groupByInfraStatusStage := bson.D{
-		{
-			"$group", bson.D{
-				{"_id", "$is_active"},
-				{"count", bson.D{
-					{"$sum", 1},
+	// Group by infra status, confirmed stage and count their total number by each group
+	groupByStage := bson.D{
+		{"$group", bson.D{
+			{"_id", nil},
+
+			// Count for active status
+			{"total_active_infras", bson.D{
+				{"$sum", bson.D{
+					{"$cond", bson.A{
+						bson.D{{"$eq", bson.A{"$is_active", true}}}, 1, 0}},
 				}},
-			},
-		},
-	}
-
-	// Group by infra confirmed stage and count their total number by each group
-	groupByInfraConfirmedStage := bson.D{
-		{
-			"$group", bson.D{
-				{"_id", "$is_infra_confirmed"},
-				{"count", bson.D{
-					{"$sum", 1},
+			}},
+			{"total_not_active_infras", bson.D{
+				{"$sum", bson.D{
+					{"$cond", bson.A{
+						bson.D{{"$eq", bson.A{"$is_active", false}}}, 1, 0}},
 				}},
-			},
-		},
-	}
-
-	facetStage := bson.D{
-		{"$facet", bson.D{
+			}},
 
-			{"total_active_infras", bson.A{
-				matchIdentifierStage, groupByInfraStatusStage,
+			// Count for confirmed status
+			{"total_confirmed_infras", bson.D{
+				{"$sum", bson.D{
+					{"$cond", bson.A{
+						bson.D{{"$eq", bson.A{"$is_infra_confirmed", true}}}, 1, 0}},
+				}},
 			}},
-			{"total_confirmed_infras", bson.A{
-				matchIdentifierStage, groupByInfraConfirmedStage,
+
+			{"total_not_confirmed_infras", bson.D{
+				{"$sum", bson.D{
+					{"$cond", bson.A{
+						bson.D{{"$eq", bson.A{"$is_infra_confirmed", false}}}, 1, 0}},
+				}},
 			}},
 		}},
 	}
 
-	pipeline = append(pipeline, facetStage)
+	pipeline = append(pipeline, matchIdentifierStage, groupByStage)
 
 	// Call aggregation on pipeline
 	infraCursor, err := in.infraOperator.GetAggregateInfras(pipeline)
@@ -863,30 +863,12 @@ func (in *infraService) GetInfraStats(ctx context.Context, projectID string) (*m
 		return nil, err
 	}
 
-	stateMap := map[bool]int{
-		false: 0,
-		true:  0,
-	}
-
-	infraConfirmedMap := map[bool]int{
-		false: 0,
-		true:  0,
-	}
-
-	for _, data
:= range res[0].TotalConfirmedInfrastructures { - infraConfirmedMap[data.Id] = data.Count - } - - for _, data := range res[0].TotalActiveInfrastructure { - stateMap[data.Id] = data.Count - } - return &model.GetInfraStatsResponse{ - TotalInfrastructures: infraConfirmedMap[true] + infraConfirmedMap[false], - TotalActiveInfrastructure: stateMap[true], - TotalInactiveInfrastructures: stateMap[false], - TotalConfirmedInfrastructure: infraConfirmedMap[true], - TotalNonConfirmedInfrastructures: infraConfirmedMap[false], + TotalInfrastructures: res[0].TotalActiveInfrastructure + res[0].TotalNotActiveInfrastructure, + TotalActiveInfrastructure: res[0].TotalActiveInfrastructure, + TotalInactiveInfrastructures: res[0].TotalNotActiveInfrastructure, + TotalConfirmedInfrastructure: res[0].TotalConfirmedInfrastructures, + TotalNonConfirmedInfrastructures: res[0].TotalNotConfirmedInfrastructures, }, nil } From 82e28c7c0b5aafca68218a98fe0ba6c9efc358ff Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:40:07 +0900 Subject: [PATCH 08/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group and $project Change the result type. 
return type has changed Extract common pagination feature Signed-off-by: DongYoung Kim --- .../pkg/chaos_infrastructure/service.go | 48 +++++++++---------- .../mongodb/chaos_infrastructure/schema.go | 2 +- 2 files changed, 23 insertions(+), 27 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go index f623e310956..bac648a6af6 100644 --- a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go +++ b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go @@ -14,6 +14,7 @@ import ( "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/authorization" store "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/data-store" + "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/common" "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/config" dbEnvironments "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/environments" "github.com/sirupsen/logrus" @@ -642,34 +643,29 @@ func (in *infraService) ListInfras(projectID string, request *model.ListInfraReq pipeline = append(pipeline, fetchExperimentDetailsStage) - //Pagination - paginatedInfras := bson.A{ - fetchExperimentDetailsStage, - } - - if request != nil { - if request.Pagination != nil { - paginationSkipStage := bson.D{ - {"$skip", request.Pagination.Page * request.Pagination.Limit}, - } - paginationLimitStage := bson.D{ - {"$limit", request.Pagination.Limit}, - } + // Pagination or adding a default limit of 15 if pagination not provided + _, skip, limit := common.CreatePaginationStage(request.Pagination) - paginatedInfras = append(paginatedInfras, paginationSkipStage, paginationLimitStage) - } + // Count total project and get top-level document to array + countStage := bson.D{ + {"$group", bson.D{ + {"_id", nil}, + {"total_filtered_infras", bson.D{{"$sum", 1}}}, + {"infras", bson.D{{"$push", "$$ROOT"}}}, + 
}}, } - // Add two stages where we first count the number of filtered workflow and then paginate the results - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_filtered_infras", bson.A{ - bson.D{{"$count", "count"}}, + // Paging results + pagingStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"total_filtered_infras", 1}, + {"infras", bson.D{ + {"$slice", bson.A{"$infras", skip, limit}}, }}, - {"infras", paginatedInfras}, - }}, - } - pipeline = append(pipeline, facetStage) + }}} + + pipeline = append(pipeline, countStage, pagingStage) // Call aggregation on pipeline infraCursor, err := in.infraOperator.GetAggregateInfras(pipeline) @@ -776,8 +772,8 @@ func (in *infraService) ListInfras(projectID string, request *model.ListInfraReq } totalFilteredInfrasCounter := 0 - if len(infras) > 0 && len(infras[0].TotalFilteredInfras) > 0 { - totalFilteredInfrasCounter = infras[0].TotalFilteredInfras[0].Count + if len(infras) > 0 && infras[0].TotalFilteredInfras > 0 { + totalFilteredInfrasCounter = infras[0].TotalFilteredInfras } output := model.ListInfraResponse{ diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go index a8413841de4..94c498cd76d 100644 --- a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go +++ b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go @@ -67,7 +67,7 @@ type AggregatedGetInfras struct { } type AggregatedInfras struct { - TotalFilteredInfras []TotalFilteredData `bson:"total_filtered_infras"` + TotalFilteredInfras int `bson:"total_filtered_infras"` Infras []ChaosInfraDetails `bson:"infras"` } From 890f9de3ad91f37183fd5d22c23b81fdb28446b6 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:42:51 +0900 Subject: [PATCH 09/26] Fix change reposne struect type Signed-off-by: DongYoung Kim --- .../pkg/database/mongodb/chaos_infrastructure/schema.go | 6 
++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go index 94c498cd76d..eea8c5c7a13 100644 --- a/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go +++ b/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure/schema.go @@ -98,6 +98,8 @@ type TotalCount struct { } type AggregatedInfraStats struct { - TotalActiveInfrastructure []TotalCount `bson:"total_active_infras"` - TotalConfirmedInfrastructures []TotalCount `bson:"total_confirmed_infras"` + TotalActiveInfrastructure int `bson:"total_active_infras"` + TotalNotActiveInfrastructure int `bson:"total_not_active_infras"` + TotalConfirmedInfrastructures int `bson:"total_confirmed_infras"` + TotalNotConfirmedInfrastructures int `bson:"total_not_confirmed_infras"` } From b91f7d5c4ff6540ff851d76f32c931b765e02e2d Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 02:46:09 +0900 Subject: [PATCH 10/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group and $project Change the result type. 
return type has changed Extract common pagination feature Signed-off-by: DongYoung Kim --- .../pkg/chaos_experiment/handler/handler.go | 52 ++++++++----------- 1 file changed, 23 insertions(+), 29 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index ee51be6d05f..f09c10686b0 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -15,6 +15,7 @@ import ( chaosTypes "github.com/litmuschaos/chaos-operator/api/litmuschaos/v1alpha1" "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/chaos_experiment/ops" dbChaosInfra "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/chaos_infrastructure" + "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/common" dbSchemaProbe "github.com/litmuschaos/litmus/chaoscenter/graphql/server/pkg/database/mongodb/probe" @@ -707,37 +708,30 @@ func (c *ChaosExperimentHandler) ListExperiment(projectID string, request model. 
} // Pagination or adding a default limit of 15 if pagination not provided - paginatedExperiments := bson.A{ - sortStage, - } - - if request.Pagination != nil { - paginationSkipStage := bson.D{ - {"$skip", request.Pagination.Page * request.Pagination.Limit}, - } - paginationLimitStage := bson.D{ - {"$limit", request.Pagination.Limit}, - } + _, skip, limit := common.CreatePaginationStage(request.Pagination) - paginatedExperiments = append(paginatedExperiments, paginationSkipStage, paginationLimitStage) - } else { - limitStage := bson.D{ - {"$limit", 15}, - } + pipeline = append(pipeline, sortStage) - paginatedExperiments = append(paginatedExperiments, limitStage) + // Count total project and get top-level document to array + countStage := bson.D{ + {"$group", bson.D{ + {"_id", nil}, + {"total_filtered_experiments", bson.D{{"$sum", 1}}}, + {"scheduled_experiments", bson.D{{"$push", "$$ROOT"}}}, + }}, } - // Add two stages where we first count the number of filtered workflow and then paginate the results - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_filtered_experiments", bson.A{ - bson.D{{"$count", "count"}}, + // Paging results + pagingStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"total_filtered_experiments", 1}, + {"scheduled_experiments", bson.D{ + {"$slice", bson.A{"$scheduled_experiments", skip, limit}}, }}, - {"scheduled_experiments", paginatedExperiments}, - }}, - } - pipeline = append(pipeline, facetStage) + }}} + + pipeline = append(pipeline, countStage, pagingStage) // Call aggregation on pipeline workflowsCursor, err := c.chaosExperimentOperator.GetAggregateExperiments(pipeline) @@ -750,7 +744,7 @@ func (c *ChaosExperimentHandler) ListExperiment(projectID string, request model. 
workflows []dbChaosExperiment.AggregatedExperiments ) - if err = workflowsCursor.All(context.Background(), &workflows); err != nil || len(workflows) == 0 { + if err = workflowsCursor.All(context.Background(), &workflows); err != nil { return &model.ListExperimentResponse{ TotalNoOfExperiments: 0, Experiments: result, @@ -847,8 +841,8 @@ func (c *ChaosExperimentHandler) ListExperiment(projectID string, request model. } totalFilteredExperimentsCounter := 0 - if len(workflows) > 0 && len(workflows[0].TotalFilteredExperiments) > 0 { - totalFilteredExperimentsCounter = workflows[0].TotalFilteredExperiments[0].Count + if len(workflows) > 0 && workflows[0].TotalFilteredExperiments > 0 { + totalFilteredExperimentsCounter = workflows[0].TotalFilteredExperiments } output := model.ListExperimentResponse{ From 858f03c7275f9752639af6845de89c3cac3bfbb4 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Wed, 28 Aug 2024 16:41:50 +0900 Subject: [PATCH 11/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group and $project Signed-off-by: DongYoung Kim --- .../pkg/chaos_experiment/handler/handler.go | 30 +++++++++++++------ 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index f09c10686b0..ad6652836aa 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -1096,16 +1096,28 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project } // This stage buckets the number of experiments by avg resiliency score in the ranges of 0-39, 40-79, 80-100 - bucketByResScoreStage := bson.D{ - {"$bucket", bson.D{ - {"groupBy", "$avg_resiliency_score.avg"}, - {"boundaries", bson.A{0, 40, 80, 101}}, - {"default", 101}, - {"output", bson.D{ - {"count", bson.D{ - {"$sum", 1}, + 
groupByResScoreStage := bson.D{ + {"$group", bson.D{ + {"_id", bson.D{ + {"$switch", bson.D{ + {"branches", bson.A{ + bson.D{ + {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 40}}}}, + {"then", "0"}, + }, + bson.D{ + {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 80}}}}, + {"then", "40"}, + }, + bson.D{ + {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 101}}}}, + {"then", "80"}, + }, + }}, + {"default", "101"}, }}, }}, + {"count", bson.D{{"$sum", 1}}}, }}, } @@ -1137,7 +1149,7 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project projectstage, fetchRunDetailsStage, unwindStage, - bucketByResScoreStage, + groupByResScoreStage, }}, }}, } From 289b6bcd5a1acc933750505a2dce1388644f15d8 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sat, 31 Aug 2024 21:42:22 +0900 Subject: [PATCH 12/26] Fix return type String to int in groupByResScoreStage Signed-off-by: DongYoung Kim --- .../server/pkg/chaos_experiment/handler/handler.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index ad6652836aa..3962af0b955 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -1103,18 +1103,18 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project {"branches", bson.A{ bson.D{ {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 40}}}}, - {"then", "0"}, + {"then", 0}, }, bson.D{ {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 80}}}}, - {"then", "40"}, + {"then", 40}, }, bson.D{ {"case", bson.D{{"$lt", bson.A{"$avg_resiliency_score.avg", 101}}}}, - {"then", "80"}, + {"then", 80}, }, }}, - {"default", "101"}, + {"default", 101}, }}, }}, {"count", bson.D{{"$sum", 1}}}, From ee27c4a29d63f370d31e229f6c1588d8362ebd5a Mon Sep 
17 00:00:00 2001 From: DongYoung Kim Date: Sat, 31 Aug 2024 21:43:53 +0900 Subject: [PATCH 13/26] Fix ListExperimentRun test code expect array to int Signed-off-by: DongYoung Kim --- .../handler/handler_test.go | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler_test.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler_test.go index 6f08fa1f6cf..ec9b1f9c591 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler_test.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler_test.go @@ -262,11 +262,7 @@ func TestChaosExperimentRunHandler_ListExperimentRun(t *testing.T) { }, given: func() { findResult := []interface{}{bson.D{ - {Key: "total_filtered_experiment_runs", Value: []dbOperationsChaosExpRun.TotalFilteredData{ - { - Count: 1, - }, - }}, + {Key: "total_filtered_experiment_runs", Value: 1}, {Key: "flattened_experiment_runs", Value: []dbOperationsChaosExpRun.FlattenedExperimentRun{ { ExperimentDetails: []dbOperationsChaosExpRun.ExperimentDetails{ @@ -308,11 +304,7 @@ func TestChaosExperimentRunHandler_ListExperimentRun(t *testing.T) { }, given: func() { findResult := []interface{}{bson.D{ - {Key: "total_filtered_experiment_runs", Value: []dbOperationsChaosExpRun.TotalFilteredData{ - { - Count: 1, - }, - }}, + {Key: "total_filtered_experiment_runs", Value: 1}, {Key: "flattened_experiment_runs", Value: []dbOperationsChaosExpRun.FlattenedExperimentRun{ { ExperimentDetails: []dbOperationsChaosExpRun.ExperimentDetails{ @@ -354,11 +346,7 @@ func TestChaosExperimentRunHandler_ListExperimentRun(t *testing.T) { }, given: func() { findResult := []interface{}{bson.D{ - {Key: "total_filtered_experiment_runs", Value: []dbOperationsChaosExpRun.TotalFilteredData{ - { - Count: 1, - }, - }}, + {Key: "total_filtered_experiment_runs", Value: 1}, {Key: "flattened_experiment_runs", Value: 
[]dbOperationsChaosExpRun.FlattenedExperimentRun{ { ExperimentDetails: []dbOperationsChaosExpRun.ExperimentDetails{ From 15df6b29c173dae0d1ac2699cba8817900f7e72e Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 1 Sep 2024 01:14:54 +0900 Subject: [PATCH 14/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group and $project Signed-off-by: DongYoung Kim --- .../pkg/chaos_experiment/handler/handler.go | 85 ++++++++----------- 1 file changed, 37 insertions(+), 48 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index 3962af0b955..30d494806ef 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -892,66 +892,55 @@ func (c *ChaosExperimentHandler) getWfRunDetails(workflowIDs []string) (map[stri } pipeline = append(pipeline, sortStage) - var workflowRunPipeline mongo.Pipeline - // Get details of the latest wf run - wfRunDetails := bson.D{ + // Get details of the latest wf run and total_experiment_runs and avg_resiliency_score + groupStage := bson.D{ {"$group", bson.D{ {"_id", "$experiment_id"}, - - // Fetch the latest workflowRun details - {"experiment_run_details", bson.D{ - { - "$first", "$$ROOT", - }, - }}, - }}, - } - workflowRunPipeline = append(workflowRunPipeline, wfRunDetails) - - var resScorePipeline mongo.Pipeline - // Filtering out running workflow runs to calculate average resiliency score - filterRunningWfRuns := bson.D{ - { - "$match", - bson.D{{ - "$and", bson.A{ - bson.D{{ - "experiment_id", bson.D{{"$in", workflowIDs}}, - }}, - bson.D{{"phase", bson.D{ - {"$ne", "Running"}, - }}}, - }, + {"latest_experiment_run", bson.D{ + {"$first", bson.D{ + {"_id", "$experiment_id"}, + {"experiment_run_details", "$$ROOT"}, + }}, }}, - }, - } - resScorePipeline = append(resScorePipeline, filterRunningWfRuns) - //// 
Calculating average resiliency score - avgResiliencyScore := bson.D{ - {"$group", bson.D{ - {"_id", "$experiment_id"}, - - // Count all workflowRuns in a workflow {"total_experiment_runs", bson.D{ - {"$sum", 1}, + {"$sum", bson.D{ + {"$cond", bson.A{ + bson.D{{"$ne", bson.A{"$phase", "Running"}}}, 1, 0}}, + }}, }}, - - // Calculate average {"avg_resiliency_score", bson.D{ - {"$avg", "$resiliency_score"}, + {"$avg", bson.D{ + {"$cond", bson.A{ + bson.D{{"$ne", bson.A{"$phase", "Running"}}}, "$resiliency_score", nil}}, + }}, }}, }}, } - resScorePipeline = append(resScorePipeline, avgResiliencyScore) - // Add two stages where we first calculate the avg resiliency score of filtered workflow runs and then fetch details of the latest workflow run - facetStage := bson.D{ - {"$facet", bson.D{ - {"avg_resiliency_score", resScorePipeline}, - {"latest_experiment_run", workflowRunPipeline}, + pipeline = append(pipeline, groupStage) + + // the latest workflow run is wrapped in array and avg_resiliency_score is formatted as object + finalProjectStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"latest_experiment_run", bson.D{ + {"$cond", bson.A{ + bson.M{"$isArray": "$latest_experiment_run"}, + "$latest_experiment_run", + bson.A{"$latest_experiment_run"}, + }}, + }}, + {"avg_resiliency_score", bson.A{ + bson.D{ + {"_id", "$_id"}, + {"total_experiment_runs", "$total_experiment_runs"}, + {"avg_resiliency_score", "$avg_resiliency_score"}, + }, + }}, }}, } - pipeline = append(pipeline, facetStage) + + pipeline = append(pipeline, finalProjectStage) // Call aggregation on pipeline workflowsRunDetailCursor, err := c.chaosExperimentRunOperator.GetAggregateExperimentRuns(pipeline) if err != nil { From 73607c9b3a0594da5c35c92c3c961b1c08ecaf75 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 1 Sep 2024 04:23:27 +0900 Subject: [PATCH 15/26] Feat replace mongodb operation $facet for support aws documentDB Replace $facet to $group, $project and $unwind Signed-off-by: DongYoung 
Kim --- .../pkg/chaos_experiment/handler/handler.go | 87 +++++++++++-------- 1 file changed, 49 insertions(+), 38 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index 30d494806ef..ea6c67cf71d 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -1038,18 +1038,37 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project {"is_removed", false}, }}, } - // Project experiment ID - projectstage := bson.D{ - {"$project", bson.D{ - {"experiment_id", 1}, - }}, + + pipeline = append(pipeline, matchIdentifierStage) + + // Groups to count total number of experiments and get experiments to array + groupByTotalCount := bson.D{ + { + "$group", bson.D{ + {"_id", nil}, + {"total_experiments", bson.D{ + {"$sum", 1}, + }}, + {"experiments", bson.D{ + {"$push", "$$ROOT"}, + }}, + }, + }, + } + + pipeline = append(pipeline, groupByTotalCount) + + unwindStage := bson.D{ + {"$unwind", "$experiments"}, } + pipeline = append(pipeline, unwindStage) + // fetchRunDetailsStage fetches experiment runs and calculates their avg resiliency score which have completed fetchRunDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperimentRuns"}, - {"let", bson.D{{"expID", "$experiment_id"}}}, + {"let", bson.D{{"expID", "$experiments.experiment_id"}}}, {"pipeline", bson.A{ bson.D{ {"$match", bson.D{ @@ -1078,14 +1097,16 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project }}, } - unwindStage := bson.D{ - {"$unwind", bson.D{ - {"path", "$avg_resiliency_score"}, - }}, + pipeline = append(pipeline, fetchRunDetailsStage) + + unwindResiliencySocreStage := bson.D{ + {"$unwind", "$avg_resiliency_score"}, } + pipeline = append(pipeline, unwindResiliencySocreStage) + // This stage buckets the number of experiments by avg resiliency 
score in the ranges of 0-39, 40-79, 80-100 - groupByResScoreStage := bson.D{ + groupByResiliencyScore := bson.D{ {"$group", bson.D{ {"_id", bson.D{ {"$switch", bson.D{ @@ -1106,43 +1127,33 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project {"default", 101}, }}, }}, + {"total_experiments", bson.D{ + {"$first", "$total_experiments"}, + }}, {"count", bson.D{{"$sum", 1}}}, }}, } - // Groups to count total number of experiments - groupByTotalCount := bson.D{ - { - "$group", bson.D{ - {"_id", nil}, - {"count", bson.D{ - {"$sum", 1}, - }}, - }, - }, - } + pipeline = append(pipeline, groupByResiliencyScore) - facetStage := bson.D{ - {"$facet", bson.D{ - {"total_experiments", bson.A{ - matchIdentifierStage, - groupByTotalCount, + projectStage := bson.D{ + {"$project", bson.D{ + {"_id", 0}, + {"by_resiliency_score", bson.A{ bson.D{ - {"$project", bson.D{ - {"_id", 0}, - }}, + {"_id", "$_id"}, + {"count", "$count"}, }, }}, - {"categorized_by_resiliency_score", bson.A{ - matchIdentifierStage, - projectstage, - fetchRunDetailsStage, - unwindStage, - groupByResScoreStage, + {"total_experiments", bson.A{ + bson.D{ + {"count", "$total_experiments"}, + }, }}, }}, } - pipeline = append(pipeline, facetStage) + + pipeline = append(pipeline, projectStage) // Call aggregation on pipeline experimentCursor, err := c.chaosExperimentOperator.GetAggregateExperiments(pipeline) @@ -1152,7 +1163,7 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project var res []dbChaosExperiment.AggregatedExperimentStats - if err = experimentCursor.All(context.Background(), &res); err != nil || len(res) == 0 { + if err = experimentCursor.All(context.Background(), &res); err != nil { return nil, errors.New("error decoding experiment details cursor: " + err.Error()) } From d23954a7d896bbedec1568791aae6fb4a0a79f29 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Thu, 5 Sep 2024 17:40:56 +0900 Subject: [PATCH 16/26] Fix null pointer error Add a 
condition to check for null pointer error if the res value is no present Signed-off-by: DongYoung Kim --- .../graphql/server/pkg/chaos_experiment/handler/handler.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index ea6c67cf71d..394b9c6c1c2 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -1172,6 +1172,10 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project TotalExpCategorizedByResiliencyScore: nil, } + if len(res) == 0 { + return result, nil + } + if len(res[0].TotalExperiments) > 0 { result.TotalExperiments = res[0].TotalExperiments[0].Count } From 731e22b70a8d2d7c96a7b3e02218662572f396e2 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sat, 14 Sep 2024 23:22:58 +0900 Subject: [PATCH 17/26] Feat separte pipeline in $lookup Signed-off-by: DongYoung Kim --- .../server/pkg/probe/handler/handler.go | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/probe/handler/handler.go b/chaoscenter/graphql/server/pkg/probe/handler/handler.go index fa9d45cf46b..a9cf1b33a0e 100644 --- a/chaoscenter/graphql/server/pkg/probe/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/probe/handler/handler.go @@ -532,36 +532,36 @@ func (p *probe) GetProbeReference(ctx context.Context, probeName, projectID stri pipeline = append(pipeline, matchIdentifiersStage) experimentWithSelectedProbeName := bson.D{ - { - "$lookup", - bson.D{ - {"from", "chaosExperimentRuns"}, - { - "pipeline", bson.A{ - bson.D{{"$match", bson.D{ - {"probes.probe_names", bson.D{ - {"$eq", probeName}, - }}, - }}}, - bson.D{ - {"$project", bson.D{ - {"experiment_name", 1}, - {"probes.fault_name", 1}, - {"probes.probe_names", 1}, - {"phase", 1}, - {"updated_at", 1}, - 
{"updated_by", 1}, - {"execution_data", 1}, - {"experiment_id", 1}, - }}, - }, - }, - }, - {"as", "execution_history"}, - }, - }, + {"$lookup", bson.D{ + {"from", "chaosExperimentRuns"}, + {"localField", "name"}, + {"foreignField", "probes.probe_names"}, + {"as", "execution_history"}, + }}, + } + + projectStage := bson.D{ + {"$project", bson.D{ + {"execution_history.completed", 0}, + {"execution_history.created_at", 0}, + {"execution_history.created_by", 0}, + {"execution_history.experiment_run_id", 0}, + {"execution_history.faults_awaited", 0}, + {"execution_history.faults_failed", 0}, + {"execution_history.faults_na", 0}, + {"execution_history.faults_passed", 0}, + {"execution_history.faults_stopped", 0}, + {"execution_history.infra_id", 0}, + {"execution_history.is_removed", 0}, + {"execution_history.notify_id", 0}, + {"execution_history.project_id", 0}, + {"execution_history.resiliency_score", 0}, + {"execution_history.revision_id", 0}, + {"execution_history.run_sequence", 0}, + {"execution_history.total_faults", 0}, + }}, } - pipeline = append(pipeline, experimentWithSelectedProbeName) + pipeline = append(pipeline, experimentWithSelectedProbeName, projectStage) // Call aggregation on pipeline probeCursor, err := dbSchemaProbe.GetAggregateProbes(ctx, pipeline) From 42894dea477605f4f307a582a017d8f35da9fa01 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 15 Sep 2024 13:28:47 +0900 Subject: [PATCH 18/26] Feat separte pipeline to match and addField in $lookup Signed-off-by: DongYoung Kim --- .../pkg/chaos_experiment/handler/handler.go | 91 +++++++++---------- 1 file changed, 45 insertions(+), 46 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index 394b9c6c1c2..e8ab17296cd 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -318,35 +318,35 @@ 
func (c *ChaosExperimentHandler) GetExperiment(ctx context.Context, projectID st fetchRunDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperimentRuns"}, - {"let", bson.D{{"expID", "$experiment_id"}}}, - {"pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$and", bson.A{ - bson.D{ - {"$eq", bson.A{"$experiment_id", "$$expID"}}, - }, - bson.D{ - {"$eq", bson.A{"$completed", true}}, - }, - }}, - }}, - }}, - }, + {"localField", "experiment_id"}, + {"foreignField", "experiment_id"}, + {"as", "avg_resiliency_score"}, + }}, + } + + pipeline = append(pipeline, fetchRunDetailsStage) + + matchStage := bson.D{ + {"$match", bson.D{ + {"avg_resiliency_score.completed", true}, + }}, + } + + pipeline = append(pipeline, matchStage) + + addAvgResiliencyScoreFieldStage := bson.D{ + {"$addFields", bson.D{ + {"avg_resiliency_score", bson.A{ bson.D{ - {"$group", bson.D{ - {"_id", nil}, - {"avg", bson.D{ - {"$avg", "$resiliency_score"}, - }}, + {"avg", bson.D{ + {"$avg", "$avg_resiliency_score.resiliency_score"}, }}, }, }}, - {"as", "avg_resiliency_score"}, }}, } - pipeline = append(pipeline, fetchRunDetailsStage) + + pipeline = append(pipeline, addAvgResiliencyScoreFieldStage) // fetchKubernetesInfraDetailsStage adds infra details of corresponding experiment_id to each document fetchKubernetesInfraDetailsStage := bson.D{ @@ -1068,36 +1068,35 @@ func (c *ChaosExperimentHandler) GetExperimentStats(ctx context.Context, project fetchRunDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperimentRuns"}, - {"let", bson.D{{"expID", "$experiments.experiment_id"}}}, - {"pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$and", bson.A{ - bson.D{ - {"$eq", bson.A{"$experiment_id", "$$expID"}}, - }, - bson.D{ - {"$eq", bson.A{"$completed", true}}, - }, - }}, - }}, - }}, - }, + {"localField", "experiments.experiment_id"}, + {"foreignField", "experiment_id"}, + {"as", "avg_resiliency_score"}, + }}, + } + + pipeline = 
append(pipeline, fetchRunDetailsStage) + + matchStage := bson.D{ + {"$match", bson.D{ + {"avg_resiliency_score.completed", true}, + }}, + } + + pipeline = append(pipeline, matchStage) + + addAvgResiliencyScoreFieldStage := bson.D{ + {"$addFields", bson.D{ + {"avg_resiliency_score", bson.A{ bson.D{ - {"$group", bson.D{ - {"_id", nil}, - {"avg", bson.D{ - {"$avg", "$resiliency_score"}, - }}, + {"avg", bson.D{ + {"$avg", "$avg_resiliency_score.resiliency_score"}, }}, }, }}, - {"as", "avg_resiliency_score"}, }}, } - pipeline = append(pipeline, fetchRunDetailsStage) + pipeline = append(pipeline, addAvgResiliencyScoreFieldStage) unwindResiliencySocreStage := bson.D{ {"$unwind", "$avg_resiliency_score"}, From 7639c87a7f6c6c9770285d8f909e26ed9d8b3dbf Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 15 Sep 2024 13:32:31 +0900 Subject: [PATCH 19/26] Feat separte pipeline to match and project in $lookup Signed-off-by: DongYoung Kim --- .../chaos_experiment_run/handler/handler.go | 207 +++++++++++++----- 1 file changed, 150 insertions(+), 57 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go index 51b0d119ac1..b2f739337d2 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go @@ -113,37 +113,84 @@ func (c *ChaosExperimentRunHandler) GetExperimentRun(ctx context.Context, projec // Adds details of experiment addExperimentDetails := bson.D{ - {"$lookup", - bson.D{ - {"from", "chaosExperiments"}, - {"let", bson.D{{"experimentID", "$experiment_id"}, {"revID", "$revision_id"}}}, - { - "pipeline", bson.A{ - bson.D{{"$match", bson.D{{"$expr", bson.D{{"$eq", bson.A{"$experiment_id", "$$experimentID"}}}}}}}, - bson.D{ - {"$project", bson.D{ - {"name", 1}, - {"is_custom_experiment", 1}, - {"experiment_type", 1}, - {"revision", bson.D{{ - "$filter", bson.D{ - 
{"input", "$revision"}, - {"as", "revs"}, - {"cond", bson.D{{ - "$eq", bson.A{"$$revs.revision_id", "$$revID"}, - }}}, - }, - }}}, - }}, - }, - }, - }, - {"as", "experiment"}, - }, - }, + {"$lookup", bson.D{ + {"from", "chaosExperiments"}, + {"localField", "experiment_id"}, + {"foreignField", "experiment_id"}, + {"as", "experiment"}, + }}, } + pipeline = append(pipeline, addExperimentDetails) + unwindStage := bson.D{ + {"$unwind", "$experiment"}, + } + + pipeline = append(pipeline, unwindStage) + + projectRunDetailsWithFilteredRevisionStage := bson.D{ + {"$project", bson.D{ + {"_id", 1}, + {"completed", 1}, + {"created_at", 1}, + {"created_by", 1}, + {"experiment_id", 1}, + {"experiment_name", 1}, + {"experiment_run_id", 1}, + {"faults_awaited", 1}, + {"faults_failed", 1}, + {"faults_na", 1}, + {"faults_passed", 1}, + {"faults_stopped", 1}, + {"infra_id", 1}, + {"is_removed", 1}, + {"notify_id", 1}, + {"phase", 1}, + {"probes", 1}, + {"project_id", 1}, + {"resiliency_score", 1}, + {"revision_id", 1}, + {"run_sequence", 1}, + {"total_faults", 1}, + {"updated_at", 1}, + {"updated_by", 1}, + {"experiment", bson.D{ + {"$map", bson.D{ + {"input", bson.D{ + {"$filter", bson.D{ + {"input", "$experiment.revision"}, + {"as", "revs"}, + {"cond", bson.D{ + {"$eq", bson.A{ + "$$revs.revision_id", + "$revision_id", + }}, + }}, + }}, + }}, + {"as", "filtered_revs"}, + {"in", bson.D{ + {"_id", "$experiment._id"}, + {"experiment_type", "$experiment.experiment_type"}, + {"is_custom_experiment", "$experiment.is_custom_experiment"}, + {"name", "$experiment.name"}, + {"revision", bson.A{ + bson.D{ + {"revision_id", "$$filtered_revs.revision_id"}, + {"probes", "$$filtered_revs.probes"}, + {"updated_at", "$$filtered_revs.updated_at"}, + {"weightages", "$$filtered_revs.weightages"}, + }, + }}, + }}, + }}, + }}, + }}, + } + + pipeline = append(pipeline, projectRunDetailsWithFilteredRevisionStage) + // fetchKubernetesInfraDetailsStage adds kubernetes infra details of corresponding 
experiment_id to each document fetchKubernetesInfraDetailsStage := bson.D{ {"$lookup", bson.D{ @@ -323,38 +370,84 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request pipeline = append(pipeline, matchExpIsRemovedStage) addExperimentDetails := bson.D{ - { - "$lookup", - bson.D{ - {"from", "chaosExperiments"}, - {"let", bson.D{{"experimentID", "$experiment_id"}, {"revID", "$revision_id"}}}, - { - "pipeline", bson.A{ - bson.D{{"$match", bson.D{{"$expr", bson.D{{"$eq", bson.A{"$experiment_id", "$$experimentID"}}}}}}}, - bson.D{ - {"$project", bson.D{ - {"name", 1}, - {"experiment_type", 1}, - {"is_custom_experiment", 1}, - {"revision", bson.D{{ - "$filter", bson.D{ - {"input", "$revision"}, - {"as", "revs"}, - {"cond", bson.D{{ - "$eq", bson.A{"$$revs.revision_id", "$$revID"}, - }}}, - }, - }}}, - }}, - }, - }, - }, - {"as", "experiment"}, - }, - }, + {"$lookup", bson.D{ + {"from", "chaosExperiments"}, + {"localField", "experiment_id"}, + {"foreignField", "experiment_id"}, + {"as", "experiment"}, + }}, } + pipeline = append(pipeline, addExperimentDetails) + unwindStage := bson.D{ + {"$unwind", "$experiment"}, + } + + pipeline = append(pipeline, unwindStage) + + projectRunDetailsWithFilteredRevisionStage := bson.D{ + {"$project", bson.D{ + {"_id", 1}, + {"completed", 1}, + {"created_at", 1}, + {"created_by", 1}, + {"experiment_id", 1}, + {"experiment_name", 1}, + {"experiment_run_id", 1}, + {"faults_awaited", 1}, + {"faults_failed", 1}, + {"faults_na", 1}, + {"faults_passed", 1}, + {"faults_stopped", 1}, + {"infra_id", 1}, + {"is_removed", 1}, + {"notify_id", 1}, + {"phase", 1}, + {"probes", 1}, + {"project_id", 1}, + {"resiliency_score", 1}, + {"revision_id", 1}, + {"run_sequence", 1}, + {"total_faults", 1}, + {"updated_at", 1}, + {"updated_by", 1}, + {"experiment", bson.D{ + {"$map", bson.D{ + {"input", bson.D{ + {"$filter", bson.D{ + {"input", "$experiment.revision"}, + {"as", "revs"}, + {"cond", bson.D{ + {"$eq", bson.A{ + 
"$$revs.revision_id", + "$revision_id", + }}, + }}, + }}, + }}, + {"as", "filtered_revs"}, + {"in", bson.D{ + {"_id", "$experiment._id"}, + {"experiment_type", "$experiment.experiment_type"}, + {"is_custom_experiment", "$experiment.is_custom_experiment"}, + {"name", "$experiment.name"}, + {"revision", bson.A{ + bson.D{ + {"revision_id", "$$filtered_revs.revision_id"}, + {"probes", "$$filtered_revs.probes"}, + {"updated_at", "$$filtered_revs.updated_at"}, + {"weightages", "$$filtered_revs.weightages"}, + }, + }}, + }}, + }}, + }}, + }}, + } + + pipeline = append(pipeline, projectRunDetailsWithFilteredRevisionStage) + // Filtering based on multiple parameters if request.Filter != nil { From 19d9dfdc9a9835dd4c1b3e1d3a662d1bc778b058 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 15 Sep 2024 13:35:51 +0900 Subject: [PATCH 20/26] Feat separte pipeline to project in $lookup Signed-off-by: DongYoung Kim --- .../pkg/chaos_experiment/handler/handler.go | 67 ++++++++----------- .../chaos_experiment_run/handler/handler.go | 66 +++++++----------- 2 files changed, 53 insertions(+), 80 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go index e8ab17296cd..7409b4b89ec 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment/handler/handler.go @@ -352,31 +352,25 @@ func (c *ChaosExperimentHandler) GetExperiment(ctx context.Context, projectID st fetchKubernetesInfraDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosInfrastructures"}, - {"let", bson.M{"infraID": "$infra_id"}}, - { - "pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, - }}, - }, - bson.D{ - {"$project", bson.D{ - {"token", 0}, - {"infra_ns_exists", 0}, - {"infra_sa_exists", 0}, - {"access_key", 0}, - }}, - }, - }, - }, + {"localField", "infra_id"}, 
+ {"foreignField", "infra_id"}, {"as", "kubernetesInfraDetails"}, }}, } + pipeline = append(pipeline, fetchKubernetesInfraDetailsStage) + projectStage := bson.D{ + {"$project", bson.D{ + {"kubernetesInfraDetails.token", 0}, + {"kubernetesInfraDetails.infra_ns_exists", 0}, + {"kubernetesInfraDetails.infra_sa_exists", 0}, + {"kubernetesInfraDetails.access_key", 0}, + }}, + } + + pipeline = append(pipeline, projectStage) + // Call aggregation on pipeline expCursor, err := c.chaosExperimentOperator.GetAggregateExperiments(pipeline) if err != nil { @@ -612,32 +606,25 @@ func (c *ChaosExperimentHandler) ListExperiment(projectID string, request model. fetchKubernetesInfraDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosInfrastructures"}, - {"let", bson.M{"infraID": "$infra_id"}}, - { - "pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, - }}, - }, - bson.D{ - {"$project", bson.D{ - {"token", 0}, - {"infra_ns_exists", 0}, - {"infra_sa_exists", 0}, - {"access_key", 0}, - }}, - }, - }, - }, + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, {"as", "kubernetesInfraDetails"}, }}, } pipeline = append(pipeline, fetchKubernetesInfraDetailsStage) + projectStage := bson.D{ + {"$project", bson.D{ + {"kubernetesInfraDetails.token", 0}, + {"kubernetesInfraDetails.infra_ns_exists", 0}, + {"kubernetesInfraDetails.infra_sa_exists", 0}, + {"kubernetesInfraDetails.access_key", 0}, + }}, + } + + pipeline = append(pipeline, projectStage) + if request.Filter != nil && request.Filter.InfraActive != nil { filterInfraStatusStage := bson.D{ {"$match", bson.D{ diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go index b2f739337d2..aa27bbe5ace 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go @@ 
-195,32 +195,25 @@ func (c *ChaosExperimentRunHandler) GetExperimentRun(ctx context.Context, projec fetchKubernetesInfraDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosInfrastructures"}, - {"let", bson.M{"infraID": "$infra_id"}}, - { - "pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, - }}, - }, - bson.D{ - {"$project", bson.D{ - {"token", 0}, - {"infra_ns_exists", 0}, - {"infra_sa_exists", 0}, - {"access_key", 0}, - }}, - }, - }, - }, + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, {"as", "kubernetesInfraDetails"}, }}, } pipeline = append(pipeline, fetchKubernetesInfraDetailsStage) + projectStage := bson.D{ + {"$project", bson.D{ + {"kubernetesInfraDetails.token", 0}, + {"kubernetesInfraDetails.infra_ns_exists", 0}, + {"kubernetesInfraDetails.infra_sa_exists", 0}, + {"kubernetesInfraDetails.access_key", 0}, + }}, + } + + pipeline = append(pipeline, projectStage) + // Call aggregation on pipeline expRunCursor, err := c.chaosExperimentRunOperator.GetAggregateExperimentRuns(pipeline) if err != nil { @@ -572,32 +565,25 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request fetchKubernetesInfraDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosInfrastructures"}, - {"let", bson.M{"infraID": "$infra_id"}}, - { - "pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, - }}, - }, - bson.D{ - {"$project", bson.D{ - {"token", 0}, - {"infra_ns_exists", 0}, - {"infra_sa_exists", 0}, - {"access_key", 0}, - }}, - }, - }, - }, + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, {"as", "kubernetesInfraDetails"}, }}, } pipeline = append(pipeline, fetchKubernetesInfraDetailsStage) + projectStage := bson.D{ + {"$project", bson.D{ + {"kubernetesInfraDetails.token", 0}, + {"kubernetesInfraDetails.infra_ns_exists", 0}, + {"kubernetesInfraDetails.infra_sa_exists", 0}, + 
{"kubernetesInfraDetails.access_key", 0}, + }}, + } + + pipeline = append(pipeline, projectStage) + // Pagination or adding a default limit of 15 if pagination not provided _, skip, limit := common.CreatePaginationStage(request.Pagination) From d2321594c82fbeb27c6ffb1de6282f6c6345bcd6 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 15 Sep 2024 13:38:13 +0900 Subject: [PATCH 21/26] Feat separte pipeline to addField in $lookup Signed-off-by: DongYoung Kim --- .../pkg/chaos_infrastructure/service.go | 120 +++++++++--------- 1 file changed, 57 insertions(+), 63 deletions(-) diff --git a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go index bac648a6af6..836229e3a41 100644 --- a/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go +++ b/chaoscenter/graphql/server/pkg/chaos_infrastructure/service.go @@ -328,42 +328,41 @@ func (in *infraService) GetInfra(ctx context.Context, projectID string, infraID fetchExperimentDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperiments"}, - {"let", bson.D{{"infraID", "$infra_id"}}}, - {"pipeline", bson.A{ + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, + {"as", "experimentDetails"}, + }}, + } + + pipeline = append(pipeline, fetchExperimentDetailsStage) + + matchStage := bson.D{ + {"$match", bson.D{ + {"experimentDetails.is_removed", false}, + }}, + } + + pipeline = append(pipeline, matchStage) + + addExpDetailsFieldsStage := bson.D{ + {"$addFields", bson.D{ + {"experimentDetails", bson.A{ bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$and", bson.A{ - bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }, - bson.D{ - {"$eq", bson.A{"$is_removed", false}}, - }, - }}, - }}, + {"exp_run_count", bson.D{ + {"$sum", "$experimentDetails.total_experiment_runs"}, }}, - }, - bson.D{ - {"$group", bson.D{ - {"_id", nil}, - {"exp_run_count", bson.D{ - {"$sum", "$total_experiment_runs"}, - }}, - {"last_run_timestamp", 
bson.D{ - {"$max", "$updated_at"}, - }}, - {"experiments_count", bson.D{ - {"$sum", 1}, - }, - }, + {"last_run_timestamp", bson.D{ + {"$max", "$experimentDetails.updated_at"}, + }}, + {"experiments_count", bson.D{ + {"$sum", 1}, }}, }, }}, - {"as", "experimentDetails"}, }}, } - pipeline = append(pipeline, fetchExperimentDetailsStage) + + pipeline = append(pipeline, addExpDetailsFieldsStage) // Call aggregation on pipeline infraCursor, err := in.infraOperator.GetAggregateInfras(pipeline) @@ -588,60 +587,55 @@ func (in *infraService) ListInfras(projectID string, request *model.ListInfraReq fetchRunDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperimentRuns"}, - {"let", bson.D{{"infraID", "$infra_id"}}}, - {"pipeline", bson.A{ - bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, - }}, - }, + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, + {"as", "expRunDetails"}, + }}, + } + + pipeline = append(pipeline, fetchRunDetailsStage) + addExpRunDetailsFieldsStage := bson.D{ + {"$addFields", bson.D{ + {"expRunDetails", bson.A{ bson.D{ - {"$group", bson.D{ - {"_id", nil}, - {"exp_run_count", bson.D{ - {"$sum", 1}, - }}, - {"last_run_timestamp", bson.D{ - {"$last", "$updated_at"}, - }}, + {"exp_run_count", bson.D{ + {"$size", "$expRunDetails"}, }}, - }, - bson.D{ - {"$project", bson.D{ - {"_id", 0}, + {"last_run_timestamp", bson.D{ + {"$max", "$expRunDetails.updated_at"}, }}, }, }}, - {"as", "expRunDetails"}, }}, } - pipeline = append(pipeline, fetchRunDetailsStage) + pipeline = append(pipeline, addExpRunDetailsFieldsStage) fetchExperimentDetailsStage := bson.D{ {"$lookup", bson.D{ {"from", "chaosExperiments"}, - {"let", bson.D{{"infraID", "$infra_id"}}}, - {"pipeline", bson.A{ + {"localField", "infra_id"}, + {"foreignField", "infra_id"}, + {"as", "experimentDetails"}, + }}, + } + + pipeline = append(pipeline, fetchExperimentDetailsStage) + + addExpDetailsFieldsStage := bson.D{ + {"$addFields", bson.D{ 
+ {"experimentDetails", bson.A{ bson.D{ - {"$match", bson.D{ - {"$expr", bson.D{ - {"$eq", bson.A{"$infra_id", "$$infraID"}}, - }}, + {"experiments_count", bson.D{ + {"$size", "$experimentDetails"}, }}, }, - bson.D{ - {"$count", "experiments_count"}, - }, }}, - {"as", "experimentDetails"}, }}, } - pipeline = append(pipeline, fetchExperimentDetailsStage) + pipeline = append(pipeline, addExpDetailsFieldsStage) // Pagination or adding a default limit of 15 if pagination not provided _, skip, limit := common.CreatePaginationStage(request.Pagination) From 526cbf0b1add39304e3a3e4808e28dccbf82210e Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Sun, 15 Sep 2024 17:34:54 +0900 Subject: [PATCH 22/26] Add pre-commit check for aws documentDB Signed-off-by: DongYoung Kim --- chaoscenter/Makefile | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/chaoscenter/Makefile b/chaoscenter/Makefile index 267ff860bf2..80baff0fd39 100644 --- a/chaoscenter/Makefile +++ b/chaoscenter/Makefile @@ -61,6 +61,15 @@ backend-services-checks: && exit 1; \ fi @echo "------------------" + @echo "--> Check chaos-center graphql-server [aws documentDB support operator]" + @echo "------------------" + @curl -Os https://raw.githubusercontent.com/awslabs/amazon-documentdb-tools/master/compat-tool/compat.py + @result=$$(python3 compat.py --version 5.0 --directory graphql/server); \ + if echo "$${result}" | grep -qi 'unsupported operators were found'; then \ + echo "$${result}" | awk '/unsupported operators were found/ {f=1} f' \ + && exit 1; \ + fi + @echo "------------------" @echo "--> Check chaos-center authentication [go mod tidy]" @echo "------------------" @tidyRes=$$(cd authentication && go mod tidy); \ @@ -70,6 +79,15 @@ backend-services-checks: && exit 1; \ fi @echo "------------------" + @echo "--> Check chaos-center authentication [aws documentDB support operator]" + @echo "------------------" + @curl -Os 
https://raw.githubusercontent.com/awslabs/amazon-documentdb-tools/master/compat-tool/compat.py + @result=$$(python3 compat.py --version 5.0 --directory authentication); \ + if echo "$${result}" | grep -qi 'unsupported operators were found'; then \ + echo "$${result}" | awk '/unsupported operators were found/ {f=1} f' \ + && exit 1; \ + fi + @echo "------------------" @echo "--> Check chaos-center subscriber [go mod tidy]" @echo "------------------" @tidyRes=$$(cd subscriber && go mod tidy); \ From dc74d1b8c73e3b9b0851551ee30dbb5ab1b2c028 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Mon, 16 Sep 2024 14:33:08 +0900 Subject: [PATCH 23/26] Fix goimports Signed-off-by: DongYoung Kim --- .../upgrade-agents/control-plane/versions/v0.0.0/manager.go | 1 + 1 file changed, 1 insertion(+) diff --git a/chaoscenter/upgrade-agents/control-plane/versions/v0.0.0/manager.go b/chaoscenter/upgrade-agents/control-plane/versions/v0.0.0/manager.go index 5798d442ee0..27009c8ce27 100644 --- a/chaoscenter/upgrade-agents/control-plane/versions/v0.0.0/manager.go +++ b/chaoscenter/upgrade-agents/control-plane/versions/v0.0.0/manager.go @@ -2,6 +2,7 @@ package v0_0_0 import ( "context" + log "github.com/sirupsen/logrus" "go.mongodb.org/mongo-driver/mongo" ) From 2c0848c86e6f22a7625d84b70a396566288dc251 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Tue, 17 Sep 2024 03:27:42 +0900 Subject: [PATCH 24/26] Add pre-commit check $lookup multiple join Signed-off-by: DongYoung Kim --- chaoscenter/Makefile | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/chaoscenter/Makefile b/chaoscenter/Makefile index 80baff0fd39..e9396c314a4 100644 --- a/chaoscenter/Makefile +++ b/chaoscenter/Makefile @@ -6,6 +6,7 @@ # NOTE - These will be executed when any make target is invoked. # IS_DOCKER_INSTALLED = $(shell which docker >> /dev/null 2>&1; echo $$?) 
+SHELL := /bin/bash .PHONY: help help: @@ -69,6 +70,22 @@ backend-services-checks: echo "$${result}" | awk '/unsupported operators were found/ {f=1} f' \ && exit 1; \ fi + @lookupResults=""; \ + while read file; do \ + fileLookupResult=$$(awk 'BEGIN {found_lookup=0} \ + /\$$lookup/ {found_lookup=1} \ + found_lookup && NF==0 {found_lookup=0} \ + found_lookup && /"let"|"pipeline"/ { \ + print "Found let or pipeline in file " FILENAME " at line " NR; \ + }' "$${file}"); \ + if [ -n "$${fileLookupResult}" ]; then \ + lookupResults="$${lookupResults}$${fileLookupResult}\n"; \ + fi; \ + done < <(find graphql/server -name "*.go"); \ + if [ -n "$${lookupResults}" ]; then \ + echo -e "$${lookupResults}" \ + && exit 1; \ + fi @echo "------------------" @echo "--> Check chaos-center authentication [go mod tidy]" @echo "------------------" @@ -87,6 +104,22 @@ backend-services-checks: echo "$${result}" | awk '/unsupported operators were found/ {f=1} f' \ && exit 1; \ fi + @lookupResults=""; \ + while read file; do \ + fileLookupResult=$$(awk 'BEGIN {found_lookup=0} \ + /\$$lookup/ {found_lookup=1} \ + found_lookup && NF==0 {found_lookup=0} \ + found_lookup && /"let"|"pipeline"/ { \ + print "Found let or pipeline in file " FILENAME " at line " NR; \ + }' "$${file}"); \ + if [ -n "$${fileLookupResult}" ]; then \ + lookupResults="$${lookupResults}$${fileLookupResult}\n"; \ + fi; \ + done < <(find authentication -name "*.go"); \ + if [ -n "$${lookupResults}" ]; then \ + echo -e "$${lookupResults}" \ + && exit 1; \ + fi @echo "------------------" @echo "--> Check chaos-center subscriber [go mod tidy]" @echo "------------------" From 9cb3bc28b6ea877ca74689d62e99493a28a7bb4f Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Tue, 17 Sep 2024 15:16:22 +0900 Subject: [PATCH 25/26] Fix missing execution_data field added Signed-off-by: DongYoung Kim --- .../graphql/server/pkg/chaos_experiment_run/handler/handler.go | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go index aa27bbe5ace..6951bba3b46 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go @@ -144,6 +144,7 @@ func (c *ChaosExperimentRunHandler) GetExperimentRun(ctx context.Context, projec {"faults_passed", 1}, {"faults_stopped", 1}, {"infra_id", 1}, + {"execution_data", 1}, {"is_removed", 1}, {"notify_id", 1}, {"phase", 1}, @@ -394,6 +395,7 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request {"faults_passed", 1}, {"faults_stopped", 1}, {"infra_id", 1}, + {"execution_data", 1}, {"is_removed", 1}, {"notify_id", 1}, {"phase", 1}, From 839c3a3edeee539f6b278e174ba9b26c68664895 Mon Sep 17 00:00:00 2001 From: DongYoung Kim Date: Tue, 17 Sep 2024 17:22:02 +0900 Subject: [PATCH 26/26] Fix missing revision.execution_manifest field added Signed-off-by: DongYoung Kim --- .../graphql/server/pkg/chaos_experiment_run/handler/handler.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go index 6951bba3b46..dcb753325f6 100644 --- a/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go +++ b/chaoscenter/graphql/server/pkg/chaos_experiment_run/handler/handler.go @@ -180,6 +180,7 @@ func (c *ChaosExperimentRunHandler) GetExperimentRun(ctx context.Context, projec bson.D{ {"revision_id", "$$filtered_revs.revision_id"}, {"probes", "$$filtered_revs.probes"}, + {"experiment_manifest", "$$filtered_revs.experiment_manifest"}, {"updated_at", "$$filtered_revs.updated_at"}, {"weightages", "$$filtered_revs.weightages"}, }, @@ -431,6 +432,7 @@ func (c *ChaosExperimentRunHandler) ListExperimentRun(projectID string, request bson.D{ {"revision_id", 
"$$filtered_revs.revision_id"}, {"probes", "$$filtered_revs.probes"}, + {"experiment_manifest", "$$filtered_revs.experiment_manifest"}, {"updated_at", "$$filtered_revs.updated_at"}, {"weightages", "$$filtered_revs.weightages"}, },