Commit

Fix jscpd and golangci linters
lukasbudisky committed Mar 14, 2024
1 parent ef8f6aa commit 2af7af1
Showing 23 changed files with 131 additions and 110 deletions.
1 change: 1 addition & 0 deletions .github/dependabot.yml
@@ -1,3 +1,4 @@
---
# See GitHub's documentation for more information on this file:
# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
version: 2
4 changes: 4 additions & 0 deletions .github/linters/.jscpd.json
@@ -0,0 +1,4 @@
{
"threshold": 20.0,
"absolute": true
}
2 changes: 2 additions & 0 deletions .github/workflows/codeql.yml
@@ -9,6 +9,8 @@ on:
schedule:
- cron: '39 20 * * 3'

permissions: { }

jobs:
analyze:
name: Analyze
12 changes: 8 additions & 4 deletions .github/workflows/linters.yml
@@ -7,6 +7,12 @@ on:
branches:
- main

permissions:
actions: write
pull-requests: write
statuses: write


jobs:
verify:
runs-on: ubuntu-latest
@@ -16,10 +22,8 @@ jobs:
with:
fetch-depth: 0
- name: Lint Code Base
uses: super-linter/super-linter@v5.7.2
uses: super-linter/super-linter@v6.3.0
env:
VALIDATE_ALL_CODEBASE: true
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
JSCPD_CONFIG_FILE: .jscpd.json
VALIDATE_GO: false # temporary disabled because of bugs
VALIDATE_JSCPD: false # temporary disabled because of bugs
VALIDATE_GO: false
2 changes: 2 additions & 0 deletions .github/workflows/main_branch.yml
@@ -6,6 +6,8 @@ on:
branches:
- main

permissions: { }

jobs:
linters:
uses: ./.github/workflows/linters.yml
2 changes: 2 additions & 0 deletions .github/workflows/tests.yml
@@ -7,6 +7,8 @@ on:
branches:
- main

permissions: { }

jobs:
verify:
runs-on: ubuntu-latest
3 changes: 0 additions & 3 deletions .golangci.yml

This file was deleted.

3 changes: 0 additions & 3 deletions .jscpd.json

This file was deleted.

2 changes: 0 additions & 2 deletions .yamllint

This file was deleted.

24 changes: 20 additions & 4 deletions Makefile
@@ -11,6 +11,16 @@ BINARY="terraform-provider-${NAME}_${VERSION}"
OS_ARCH=linux_amd64
CGO_ENABLED=0

#########################
# Linters configuration #
#########################

WORKSPACE="$(shell pwd)"
GIT_BRANCH="$(shell git rev-parse --abbrev-ref HEAD)"
LOG_LEVEL="INFO"

#########################

default: install

build:
@@ -70,7 +80,13 @@ cbdown:
# Linters #
###########

linters:
find . -name "*.sh" | xargs shellcheck -s bash
yamllint -c .yamllint .
golint ./...
lint:
docker run --rm --platform=linux/amd64 \
-e LOG_LEVEL=${LOG_LEVEL} \
-e VALIDATE_ALL_CODEBASE=true \
-e RUN_LOCAL=true \
-e DEFAULT_BRANCH=${GIT_BRANCH} \
-e VALIDATE_GO=false \
-v ${WORKSPACE}:/tmp/lint \
ghcr.io/super-linter/super-linter:latest
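
The new lint target mirrors the CI setup: assuming Docker is available, running make lint from the repository root should pull the ghcr.io/super-linter/super-linter:latest image and lint the working tree with the same settings as the workflow (Go validation disabled). LOG_LEVEL and GIT_BRANCH default to the values defined above and can be overridden on the make command line, for example make lint LOG_LEVEL=DEBUG.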

2 changes: 1 addition & 1 deletion couchbase/provider.go
@@ -151,7 +151,7 @@ func certificateManagement(filePath string) (*x509.CertPool, diag.Diagnostics) {
return &tlsRootCAs, nil
}

func providerConfigure(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) {
func providerConfigure(_ context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) {
var (
tlsRootCAs *x509.CertPool
diags diag.Diagnostics
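
The signature change above repeats throughout this commit: parameters that a function accepts but never uses are renamed to _, which is the usual way to satisfy golangci-lint (for example revive's unused-parameter rule) while keeping the signature the SDK expects. A minimal, self-contained sketch of the pattern (the function below is hypothetical and not part of this repository):

package main

import (
	"context"
	"fmt"
)

// handler stands in for the callback signature a caller requires.
type handler func(ctx context.Context, name string) error

// readThing never uses its context; renaming the argument to _ silences the
// unused-parameter finding without changing the function type.
func readThing(_ context.Context, name string) error {
	fmt.Println("reading", name)
	return nil
}

func main() {
	var h handler = readThing // still matches the handler type
	_ = h(context.Background(), "example")
}
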
2 changes: 1 addition & 1 deletion couchbase/provider_test.go
@@ -26,7 +26,7 @@ func TestProvider(t *testing.T) {
}
}

func TestProvider_impl(t *testing.T) {
func TestProvider_impl(_ *testing.T) {
var _ *schema.Provider = Provider()
}

4 changes: 2 additions & 2 deletions couchbase/queryIndex.go
@@ -117,7 +117,7 @@ func (cc *Configuration) createPrimaryQueryIndex(indexName, bucketName string, d

// createQueryIndex custom functon which support query index creation with fields parameters and conditions, deferred state, number of replicas
func (cc *Configuration) createQueryIndex(indexName, bucketName string, fields []string, condition string, deferred bool, numReplica int) error {
if len(fields) <= 0 {
if len(fields) == 0 {
return fmt.Errorf("you must specify at least one field to index")
}

@@ -153,7 +153,7 @@ func parseID(id string) (string, int, error) {
}

// importQueryIndex custom terraform resource import function
func importQueryIndex(c context.Context, d *schema.ResourceData, m interface{}) ([]*schema.ResourceData, error) {
func importQueryIndex(_ context.Context, d *schema.ResourceData, _ interface{}) ([]*schema.ResourceData, error) {

id, replica, err := parseID(d.Id())
if err != nil {
28 changes: 14 additions & 14 deletions couchbase/resourceBucket.go
@@ -221,7 +221,7 @@ func createBucket(c context.Context, d *schema.ResourceData, m interface{}) diag
}

func readBucket(c context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {

var err error
bucketID := d.Id()

couchbase, diags := m.(*Connection).CouchbaseInitialization()
@@ -240,39 +240,39 @@ func readBucket(c context.Context, d *schema.ResourceData, m interface{}) diag.D
return diag.FromErr(err)
}

if err := d.Set(keyBucketName, bucket.Name); err != nil {
if err = d.Set(keyBucketName, bucket.Name); err != nil {
diags = append(diags, *diagForValueSet(keyBucketName, bucket.Name, err))
}

if err := d.Set(keyBucketFlushEnabled, bucket.FlushEnabled); err != nil {
if err = d.Set(keyBucketFlushEnabled, bucket.FlushEnabled); err != nil {
diags = append(diags, *diagForValueSet(keyBucketFlushEnabled, bucket.FlushEnabled, err))
}

if err := d.Set(keyBucketQuota, bucket.RAMQuotaMB); err != nil {
if err = d.Set(keyBucketQuota, bucket.RAMQuotaMB); err != nil {
diags = append(diags, *diagForValueSet(keyBucketQuota, bucket.RAMQuotaMB, err))
}

if err := d.Set(keyBucketIndexReplicas, bucket.ReplicaIndexDisabled); err != nil {
if err = d.Set(keyBucketIndexReplicas, bucket.ReplicaIndexDisabled); err != nil {
diags = append(diags, *diagForValueSet(keyBucketIndexReplicas, bucket.ReplicaIndexDisabled, err))
}

if err := d.Set(keyBucketMaxExpiry, int(time.Duration(bucket.MaxExpiry)/time.Second)); err != nil {
diags = append(diags, *diagForValueSet(keyBucketMaxExpiry, int(time.Duration(bucket.MaxExpiry)/time.Second), err))
if err = d.Set(keyBucketMaxExpiry, time.Duration(bucket.MaxExpiry)/time.Second); err != nil {
diags = append(diags, *diagForValueSet(keyBucketMaxExpiry, time.Duration(bucket.MaxExpiry)/time.Second, err))
}

if err := d.Set(keyBucketNumReplicas, bucket.NumReplicas); err != nil {
if err = d.Set(keyBucketNumReplicas, bucket.NumReplicas); err != nil {
diags = append(diags, *diagForValueSet(keyBucketNumReplicas, bucket.NumReplicas, err))
}

if err := d.Set(keyBucketBucketType, bucket.BucketType); err != nil {
if err = d.Set(keyBucketBucketType, bucket.BucketType); err != nil {
diags = append(diags, *diagForValueSet(keyBucketBucketType, bucket.BucketType, err))
}

if err := d.Set(keyBucketEvictionPolicyType, bucket.EvictionPolicy); err != nil {
if err = d.Set(keyBucketEvictionPolicyType, bucket.EvictionPolicy); err != nil {
diags = append(diags, *diagForValueSet(keyBucketEvictionPolicyType, bucket.EvictionPolicy, err))
}

if err := d.Set(keyBucketCompressionMode, bucket.CompressionMode); err != nil {
if err = d.Set(keyBucketCompressionMode, bucket.CompressionMode); err != nil {
diags = append(diags, *diagForValueSet(keyBucketCompressionMode, bucket.CompressionMode, err))
}

@@ -285,16 +285,16 @@ func readBucket(c context.Context, d *schema.ResourceData, m interface{}) diag.D
Detail: fmt.Sprintf("error details: %s\n", err),
})
} else {
if err := d.Set(keyBucketConflictResolutionType, crt); err != nil {
if err = d.Set(keyBucketConflictResolutionType, crt); err != nil {
diags = append(diags, *diagForValueSet(keyBucketConflictResolutionType, crt, err))
}
}

if err := d.Set(keyBucketDurabilityLevel, bucket.MinimumDurabilityLevel); err != nil {
if err = d.Set(keyBucketDurabilityLevel, bucket.MinimumDurabilityLevel); err != nil {
diags = append(diags, *diagForValueSet(keyBucketDurabilityLevel, bucket.MinimumDurabilityLevel, err))
}

if err := d.Set(keyBucketStorageBackend, bucket.StorageBackend); err != nil {
if err = d.Set(keyBucketStorageBackend, bucket.StorageBackend); err != nil {
diags = append(diags, *diagForValueSet(keyBucketStorageBackend, bucket.StorageBackend, err))
}

4 changes: 2 additions & 2 deletions couchbase/resourceCollection.go
@@ -94,7 +94,7 @@ func createCollection(c context.Context, d *schema.ResourceData, m interface{})
return readCollection(c, d, m)
}

func readCollection(c context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
func readCollection(_ context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
var diags diag.Diagnostics
names := strings.Split(d.Id(), "/")
if len(names) != 3 {
@@ -131,7 +131,7 @@ func readCollection(c context.Context, d *schema.ResourceData, m interface{}) di
return diags
}

func deleteCollection(c context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
func deleteCollection(_ context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
var diags diag.Diagnostics

couchbase, diags := m.(*Connection).CouchbaseInitialization()
4 changes: 2 additions & 2 deletions couchbase/resourceCollection_test.go
@@ -6,7 +6,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

const testAccCollection_basic = `
const testAccCollectionBasic = `
resource "couchbase_bucket_manager" "bucket" {
name = "testAccCollection_basic_bucket"
ram_quota_mb = 100
@@ -30,7 +30,7 @@ func TestAccCollection(t *testing.T) {
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccCollection_basic,
Config: testAccCollectionBasic,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("couchbase_bucket_collection.collection", "name", "testAccCollection_basic_bucket"),
resource.TestCheckResourceAttr("couchbase_bucket_collection.collection", "bucket", "testAccCollection_basic_bucket"),
2 changes: 1 addition & 1 deletion couchbase/resourcePrimaryQueryIndex.go
@@ -86,7 +86,7 @@ func createPrimaryQueryIndex(c context.Context, d *schema.ResourceData, m interf
return readPrimaryQueryIndex(c, d, m)
}

func readPrimaryQueryIndex(c context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
func readPrimaryQueryIndex(_ context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {

couchbase, diags := m.(*Connection).CouchbaseInitialization()
if diags != nil {
2 changes: 0 additions & 2 deletions couchbase/resourceQueryIndex_test.go
@@ -20,11 +20,9 @@ resource "couchbase_bucket_manager" "bucket" {
resource "couchbase_query_index" "query_index" {
name = "testAccQueryIndex_extended_query_index_name"
bucket = couchbase_bucket_manager.bucket.name
fields = [
"` + "`" + "action" + "`" + `"
]
num_replica = 0
condition = "(` + "`" + "type" + "`" + " " + `= \"http://example.com\")"
}
4 changes: 2 additions & 2 deletions couchbase/resourceScope_test.go
@@ -6,7 +6,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

const testAccScope_basic = `
const testAccScopeBasic = `
resource "couchbase_bucket_manager" "bucket" {
name = "testAccScope_basic_bucket"
ram_quota_mb = 100
@@ -24,7 +24,7 @@ func TestAccScope(t *testing.T) {
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccScope_basic,
Config: testAccScopeBasic,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("couchbase_bucket_scope.scope", "name", "testAccScope_basic_scope"),
resource.TestCheckResourceAttr("couchbase_bucket_scope.scope", "bucket", "testAccScope_basic_bucket"),
4 changes: 2 additions & 2 deletions couchbase/validateProvider.go
@@ -35,7 +35,7 @@ func getValidateAllowSaslMechanismDiagMessage(value string) *diag.Diagnostic {
// - SCRAM-SHA256
// - SCRAM-SHA512
func validateAllowSaslMechanism() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
return func(i interface{}, _ cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

rawSaslMechanism, ok := i.(string)
@@ -59,7 +59,7 @@ func validateAllowSaslMechanism() schema.SchemaValidateDiagFunc {

// validateTLSRootCert function validate TLS root certificate
func validateTLSRootCert() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
return func(i interface{}, _ cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

tlsRootCAs := *x509.NewCertPool()
52 changes: 26 additions & 26 deletions couchbase/validationBucket.go
@@ -14,7 +14,7 @@ import (
// - memcached
// - ephemeral
func validateBucketType() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
return func(i interface{}, _ cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

value, ok := i.(string)
@@ -49,7 +49,7 @@
// - nruEviction
// - noEviction
func validateEvictionPolicyType() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
return func(i interface{}, _ cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

value, ok := i.(string)
@@ -187,28 +187,28 @@ func validateDurabilityLevel() schema.SchemaValidateDiagFunc {
// - couchstore
// - magma
func validateStorageBackend() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

value, ok := i.(string)
if !ok {
return diag.Errorf("value error: storage backend")
}

switch gocb.StorageBackend(value) {
case gocb.StorageBackendCouchstore,
gocb.StorageBackendMagma:
break
default:
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("Storage backend doesn't exist %s\n", i),
Detail: fmt.Sprintf("Storage Backend must be:\n%s\n%s",
gocb.StorageBackendCouchstore,
gocb.StorageBackendMagma,
),
})
}
return diags
}
return func(i interface{}, c cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

value, ok := i.(string)
if !ok {
return diag.Errorf("value error: storage backend")
}

switch gocb.StorageBackend(value) {
case gocb.StorageBackendCouchstore,
gocb.StorageBackendMagma:
break
default:
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("Storage backend doesn't exist %s\n", i),
Detail: fmt.Sprintf("Storage Backend must be:\n%s\n%s",
gocb.StorageBackendCouchstore,
gocb.StorageBackendMagma,
),
})
}
return diags
}
}
2 changes: 1 addition & 1 deletion couchbase/validationRole.go
@@ -12,7 +12,7 @@ import (
// - scope
// - collection
func validateRoleParameter() schema.SchemaValidateDiagFunc {
return func(i interface{}, c cty.Path) diag.Diagnostics {
return func(i interface{}, _ cty.Path) diag.Diagnostics {
var diags diag.Diagnostics

value, ok := i.(string)