6 changes: 3 additions & 3 deletions .github/actions/bump-version/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -40,7 +40,7 @@ jobs:
PINECONE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
PINECONE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
- name: Run local integration tests
- run: go test -count=1 -v ./pinecone -run TestRunLocalIntegrationSuite -tags=localServer
+ run: go test -count=1 -v ./pinecone -run TestRunLocalIntegrationSuite -tags=localServer -timeout=20m
env:
PINECONE_INDEX_URL_POD: http://localhost:5082
PINECONE_INDEX_URL_SERVERLESS: http://localhost:5081
15 changes: 9 additions & 6 deletions pinecone/client.go
@@ -2196,13 +2196,15 @@ type EmbedParameters map[string]interface{}
// Fields:
// - Data: A list of [Embedding] objects containing the embeddings generated for the input.
// - Model: The model used to generate the embeddings.
// - VectorType: Indicates whether the embeddings are dense or sparse.
// - Usage: Usage statistics ([Total Tokens]) for the request.
//
// [Total Tokens]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#embed
type EmbedResponse struct {
- Data []Embedding `json:"data"`
- Model string `json:"model"`
- Usage struct {
+ Data []Embedding `json:"data"`
+ Model string `json:"model"`
+ VectorType string `json:"vector_type"`
+ Usage struct {
TotalTokens *int32 `json:"total_tokens,omitempty"`
} `json:"usage"`
}
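
For orientation, here is a minimal sketch of how the new VectorType field surfaces to callers. The response fields (Data, Model, VectorType, Usage.TotalTokens, DenseEmbedding.Values) match the types in this diff; the client setup, the Inference.Embed entry point, the EmbedRequest field names, and the model name are assumptions based on the existing client surface, not part of this change.

```go
// Sketch only: the EmbedRequest shape and Inference.Embed call are assumed from
// the existing client; the response fields come from the EmbedResponse above.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/pinecone-io/go-pinecone/v5/pinecone"
)

func main() {
	ctx := context.Background()

	pc, err := pinecone.NewClient(pinecone.NewClientParams{ApiKey: "YOUR_API_KEY"})
	if err != nil {
		log.Fatal(err)
	}

	res, err := pc.Inference.Embed(ctx, &pinecone.EmbedRequest{
		Model:      "multilingual-e5-large", // hypothetical model choice
		TextInputs: []string{"example passage"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// VectorType now reports whether the returned embeddings are dense or sparse.
	fmt.Println("model:", res.Model, "vector type:", res.VectorType)
	if res.Usage.TotalTokens != nil {
		fmt.Println("total tokens:", *res.Usage.TotalTokens)
	}
	for _, e := range res.Data {
		if e.DenseEmbedding != nil {
			fmt.Println("dense dims:", len(e.DenseEmbedding.Values))
		}
	}
}
```
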
@@ -2802,9 +2804,10 @@ func decodeEmbedResponse(resBody io.ReadCloser) (*EmbedResponse, error) {
}

return &EmbedResponse{
- Data: decodedEmbeddings,
- Model: rawEmbedResponse.Model,
- Usage: rawEmbedResponse.Usage,
+ Data: decodedEmbeddings,
+ Model: rawEmbedResponse.Model,
+ VectorType: rawEmbedResponse.VectorType,
+ Usage: rawEmbedResponse.Usage,
}, nil
}

103 changes: 103 additions & 0 deletions pinecone/client_test.go
@@ -1,7 +1,9 @@
package pinecone

import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"log"
@@ -16,6 +18,7 @@ import (
"github.com/google/uuid"
"github.com/pinecone-io/go-pinecone/v5/internal/gen"
"github.com/pinecone-io/go-pinecone/v5/internal/gen/db_control"
"github.com/pinecone-io/go-pinecone/v5/internal/gen/inference"
"github.com/pinecone-io/go-pinecone/v5/internal/provider"
"github.com/pinecone-io/go-pinecone/v5/internal/utils"

@@ -1794,6 +1797,106 @@ func TestToCollectionUnit(t *testing.T) {
}
}

func TestToBackupUnit(t *testing.T) {
t.Run("nil input", func(t *testing.T) {
require.Nil(t, toBackup(nil))
})

t.Run("maps all fields", func(t *testing.T) {
filterable := true
createdAt := "2024-01-01T00:00:00Z"
description := "test backup"
name := "backup-name"
dimension := int32(1536)
namespaceCount := 3
recordCount := 42
sizeBytes := 2048
metric := "cosine"
tags := db_control.IndexTags{"env": "dev"}

model := &db_control.BackupModel{
BackupId: "backup-1",
Cloud: "aws",
CreatedAt: &createdAt,
Description: &description,
Dimension: &dimension,
Metric: &metric,
Name: &name,
NamespaceCount: &namespaceCount,
RecordCount: &recordCount,
Region: "us-east-1",
Schema: &struct {
Fields map[string]struct {
Filterable *bool `json:"filterable,omitempty"`
} `json:"fields"`
}{
Fields: map[string]struct {
Filterable *bool `json:"filterable,omitempty"`
}{
"genre": {Filterable: &filterable},
},
},
SizeBytes: &sizeBytes,
SourceIndexId: "idx-id",
SourceIndexName: "idx-name",
Status: "Ready",
Tags: &tags,
}

result := toBackup(model)
require.NotNil(t, result)

require.Equal(t, "backup-1", result.BackupId)
require.Equal(t, "aws", result.Cloud)
require.Equal(t, &createdAt, result.CreatedAt)
require.Equal(t, &description, result.Description)
require.Equal(t, &dimension, result.Dimension)

require.NotNil(t, result.Metric)
require.Equal(t, IndexMetric(metric), *result.Metric)

require.Equal(t, &name, result.Name)
require.Equal(t, &namespaceCount, result.NamespaceCount)
require.Equal(t, &recordCount, result.RecordCount)
require.Equal(t, "us-east-1", result.Region)
require.Equal(t, &sizeBytes, result.SizeBytes)
require.Equal(t, "idx-id", result.SourceIndexId)
require.Equal(t, "idx-name", result.SourceIndexName)
require.Equal(t, "Ready", result.Status)
require.NotNil(t, result.Schema)
require.Equal(t, true, result.Schema.Fields["genre"].Filterable)
require.Equal(t, IndexTags(tags), *result.Tags)
})
}

func TestDecodeEmbedResponseUnit(t *testing.T) {
dense := inference.DenseEmbedding{
Values: []float32{0.1, 0.2},
}
var embedding inference.Embedding
require.NoError(t, embedding.FromDenseEmbedding(dense))

totalTokens := int32(10)
raw := inference.EmbeddingsList{
Data: []inference.Embedding{embedding},
Model: "test-model",
VectorType: "dense",
}
raw.Usage.TotalTokens = &totalTokens

var buf bytes.Buffer
require.NoError(t, json.NewEncoder(&buf).Encode(raw))

result, err := decodeEmbedResponse(io.NopCloser(bytes.NewReader(buf.Bytes())))
require.NoError(t, err)
require.Equal(t, "dense", result.VectorType)
require.Equal(t, "test-model", result.Model)
require.Equal(t, totalTokens, derefOrDefault(result.Usage.TotalTokens, int32(0)))
require.Len(t, result.Data, 1)
require.NotNil(t, result.Data[0].DenseEmbedding)
require.Equal(t, []float32{0.1, 0.2}, result.Data[0].DenseEmbedding.Values)
}

func TestDerefOrDefaultUnit(t *testing.T) {
tests := []struct {
name string
25 changes: 25 additions & 0 deletions pinecone/index_connection.go
@@ -1203,6 +1203,12 @@ func (idx *IndexConnection) SearchRecords(ctx context.Context, in *SearchRecords
}
}

var convertedInputs *db_data_rest.EmbedInputs
if in.Query.Inputs != nil {
inputMap := db_data_rest.EmbedInputs(*in.Query.Inputs)
convertedInputs = &inputMap
}

var matchTerms *db_data_rest.SearchMatchTerms
if in.Query.MatchTerms != nil {
strat := "all"
@@ -1227,6 +1233,7 @@
}{
Filter: in.Query.Filter,
Id: in.Query.Id,
Inputs: convertedInputs,
TopK: in.Query.TopK,
Vector: convertedVector,
MatchTerms: matchTerms,
@@ -1417,11 +1424,19 @@ func (idx *IndexConnection) DeleteAllVectorsInNamespace(ctx context.Context) err
// - Dimension: The dimension of the [Index].
// - IndexFullness: The fullness level of the [Index]. Note: only available on pods-based indexes.
// - TotalVectorCount: The total number of vectors in the [Index].
// - Metric: The similarity metric configured for the [Index], when available.
// - VectorType: The vector type configured for the [Index], when available.
// - MemoryFullness: Memory utilization for pod-based indexes (nil for serverless).
// - StorageFullness: Storage utilization for pod-based indexes (nil for serverless).
// - Namespaces: The namespace(s) in the [Index].
type DescribeIndexStatsResponse struct {
Dimension *uint32 `json:"dimension"`
IndexFullness float32 `json:"index_fullness"`
TotalVectorCount uint32 `json:"total_vector_count"`
Metric *IndexMetric `json:"metric,omitempty"`
VectorType *string `json:"vector_type,omitempty"`
MemoryFullness *float32 `json:"memory_fullness,omitempty"`
StorageFullness *float32 `json:"storage_fullness,omitempty"`
Namespaces map[string]*NamespaceSummary `json:"namespaces,omitempty"`
}
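
A hedged sketch of reading the new stats fields follows. The field names mirror the DescribeIndexStatsResponse struct above; the client construction, the Index connection setup, and the DescribeIndexStats method name are assumed from the existing client surface rather than introduced by this diff.

```go
// Sketch only: client and index-connection setup are assumed from the existing
// client; the fields read below match the DescribeIndexStatsResponse above.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/pinecone-io/go-pinecone/v5/pinecone"
)

func main() {
	ctx := context.Background()

	pc, err := pinecone.NewClient(pinecone.NewClientParams{ApiKey: "YOUR_API_KEY"})
	if err != nil {
		log.Fatal(err)
	}

	idx, err := pc.Index(pinecone.NewIndexConnParams{Host: "YOUR_INDEX_HOST"})
	if err != nil {
		log.Fatal(err)
	}

	stats, err := idx.DescribeIndexStats(ctx)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("total vectors:", stats.TotalVectorCount)
	// Metric and VectorType are optional and only set when the backend reports them.
	if stats.Metric != nil {
		fmt.Println("metric:", *stats.Metric)
	}
	if stats.VectorType != nil {
		fmt.Println("vector type:", *stats.VectorType)
	}
	// MemoryFullness and StorageFullness apply to pod-based indexes; they stay nil for serverless.
	if stats.MemoryFullness != nil && stats.StorageFullness != nil {
		fmt.Printf("memory %.0f%%, storage %.0f%%\n", *stats.MemoryFullness*100, *stats.StorageFullness*100)
	}
}
```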

@@ -1535,10 +1550,20 @@ func (idx *IndexConnection) DescribeIndexStatsFiltered(ctx context.Context, meta
}
}

var metric *IndexMetric
if res.Metric != nil {
m := IndexMetric(*res.Metric)
metric = &m
}

return &DescribeIndexStatsResponse{
Dimension: res.Dimension,
IndexFullness: res.IndexFullness,
TotalVectorCount: res.TotalVectorCount,
Metric: metric,
VectorType: res.VectorType,
MemoryFullness: res.MemoryFullness,
StorageFullness: res.StorageFullness,
Namespaces: namespaceSummaries,
}, nil
}
46 changes: 45 additions & 1 deletion pinecone/index_connection_test.go
@@ -971,11 +971,15 @@ func TestMarshalDescribeIndexStatsResponseUnit(t *testing.T) {
Dimension: uint32Pointer(3),
IndexFullness: 0.5,
TotalVectorCount: 100,
Metric: indexMetricPointer(Cosine),
VectorType: pointerOrNil("dense"),
MemoryFullness: float32Pointer(0.25),
StorageFullness: float32Pointer(0.75),
Namespaces: map[string]*NamespaceSummary{
"namespace-1": {VectorCount: 50},
},
},
- want: `{"dimension":3,"index_fullness":0.5,"total_vector_count":100,"namespaces":{"namespace-1":{"vector_count":50}}}`,
+ want: `{"dimension":3,"index_fullness":0.5,"total_vector_count":100,"metric":"cosine","vector_type":"dense","memory_fullness":0.25,"storage_fullness":0.75,"namespaces":{"namespace-1":{"vector_count":50}}}`,
},
{
name: "Fields omitted",
@@ -1718,6 +1722,38 @@ func Test_toMetadataSchemaGrpc_Unit(t *testing.T) {
}
}

func TestToNamespaceDescriptionUnit(t *testing.T) {
t.Run("nil input", func(t *testing.T) {
require.Nil(t, toNamespaceDescription(nil))
})

t.Run("maps schema and indexed fields", func(t *testing.T) {
ns := &db_data_grpc.NamespaceDescription{
Name: "namespace-1",
RecordCount: 42,
Schema: &db_data_grpc.MetadataSchema{
Fields: map[string]*db_data_grpc.MetadataFieldProperties{
"genre": {
Filterable: true,
},
},
},
IndexedFields: &db_data_grpc.IndexedFields{
Fields: []string{"genre"},
},
}

result := toNamespaceDescription(ns)
require.NotNil(t, result)
require.Equal(t, "namespace-1", result.Name)
require.EqualValues(t, 42, result.RecordCount)
require.NotNil(t, result.Schema)
require.True(t, result.Schema.Fields["genre"].Filterable)
require.NotNil(t, result.IndexedFields)
require.Equal(t, []string{"genre"}, result.IndexedFields.Fields)
})
}

// Helper funcs
func generateFloat32Array(n int) []float32 {
array := make([]float32, n)
@@ -1739,6 +1775,14 @@ func uint32Pointer(i uint32) *uint32 {
return &i
}

func float32Pointer(v float32) *float32 {
return &v
}

func indexMetricPointer(metric IndexMetric) *IndexMetric {
return &metric
}

func slicesEqual[T comparable](a, b []float32) bool {
if len(a) != len(b) {
return false
2 changes: 1 addition & 1 deletion pinecone/test_suite.go
@@ -381,7 +381,7 @@ func retryAssertions(t *testing.T, maxRetries int, delay time.Duration, fn func(
}

func retryAssertionsWithDefaults(t *testing.T, fn func() error) {
- retryAssertions(t, 30, 5*time.Second, fn)
+ retryAssertions(t, 30, 2*time.Second, fn)
}

func pollIndexForFreshness(ts *integrationTests, ctx context.Context, sampleId string) error {