@@ -194,6 +194,31 @@ def embed(
194194 # usage={'total_tokens': 6}
195195 # )
196196
197+ You can also use a single string input:
198+
199+ .. code-block:: python
200+
201+ from pinecone import Pinecone
202+
203+ pc = Pinecone()
204+ output = pc.inference.embed(
 205+            model="multilingual-e5-large",
206+ inputs="Hello, world!"
207+ )
208+
209+ Or use the EmbedModel enum:
210+
211+ .. code-block:: python
212+
213+ from pinecone import Pinecone
214+ from pinecone.inference import EmbedModel
215+
216+ pc = Pinecone()
217+ outputs = pc.inference.embed(
 218+            model=EmbedModel.Multilingual_E5_Large,
219+ inputs=["Document 1", "Document 2"]
220+ )
221+
197222 """
198223 request_body = InferenceRequestBuilder .embed_request (
199224 model = model , inputs = inputs , parameters = parameters
@@ -235,8 +260,7 @@ def rerank(
235260 relevance, with the first being the most relevant. The ``index`` field can be used to locate the document
236261 relative to the list of documents specified in the request. Each document contains a ``score`` key
237262 representing how close the document relates to the query.
238-
239- Example:
263+ :rtype: RerankResult
240264
241265 .. code-block:: python
242266
@@ -275,6 +299,38 @@ def rerank(
275299 # usage={'rerank_units': 1}
276300 # )
277301
302+ You can also use document dictionaries with custom fields:
303+
304+ .. code-block:: python
305+
306+ from pinecone import Pinecone
307+
308+ pc = Pinecone()
309+ result = pc.inference.rerank(
310+ model="pinecone-rerank-v0",
311+ query="What is machine learning?",
312+ documents=[
313+ {"text": "Machine learning is a subset of AI.", "category": "tech"},
314+ {"text": "Cooking recipes for pasta.", "category": "food"},
315+ ],
316+ rank_fields=["text"],
317+ top_n=1
318+ )
319+
320+ Or use the RerankModel enum:
321+
322+ .. code-block:: python
323+
324+ from pinecone import Pinecone
325+ from pinecone.inference import RerankModel
326+
327+ pc = Pinecone()
328+ result = pc.inference.rerank(
 329+            model=RerankModel.Pinecone_Rerank_V0,
330+ query="Your query here",
331+ documents=["doc1", "doc2", "doc3"]
332+ )
333+
278334 """
279335 rerank_request = InferenceRequestBuilder .rerank (
280336 model = model ,
@@ -302,8 +358,7 @@ def list_models(
302358 :type vector_type: str, optional
303359
304360 :return: A list of models.
305-
306- Example:
361+ :rtype: ModelInfoList
307362
308363 .. code-block:: python
309364
@@ -339,8 +394,6 @@ def get_model(self, model_name: str) -> "ModelInfo":
339394 :return: A ModelInfo object.
340395 :rtype: ModelInfo
341396
342- Example:
343-
344397 .. code-block:: python
345398
346399 from pinecone import Pinecone
@@ -371,5 +424,6 @@ def get_model(self, model_name: str) -> "ModelInfo":
371424 # "provider_name": "Pinecone",
372425 # "supported_metrics": []
373426 # }
427+
374428 """
375429 return self .model .get (model_name = model_name )
0 commit comments