From 8c0c4c63c12144212803f0eedf812b512fe4844c Mon Sep 17 00:00:00 2001
From: Willy Douhard
Date: Thu, 7 Nov 2024 11:57:13 +0100
Subject: [PATCH 1/2] fix: lc structured output json ser

---
 literalai/callback/langchain_callback.py | 9 ++++++++-
 literalai/version.py                     | 2 +-
 setup.py                                 | 2 +-
 3 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/literalai/callback/langchain_callback.py b/literalai/callback/langchain_callback.py
index 1049343..3b7269d 100644
--- a/literalai/callback/langchain_callback.py
+++ b/literalai/callback/langchain_callback.py
@@ -2,6 +2,8 @@
 from importlib.metadata import version
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict, Union, cast
 
+from pydantic import BaseModel
+
 from literalai.helper import ensure_values_serializable
 from literalai.observability.generation import (
     ChatGeneration,
@@ -148,6 +150,8 @@ def process_content(self, content: Any, root=True):
             return [self._convert_message(m) for m in content]
         elif self._is_message(content):
             return self._convert_message(content)
+        elif isinstance(content, BaseModel):
+            return content.model_dump()
         elif isinstance(content, dict):
             processed_dict = {}
             for key, value in content.items():
@@ -186,7 +190,9 @@ def _build_llm_settings(
         }
 
         # make sure there is no api key specification
-        settings = {k: v for k, v in merged.items() if not k.endswith("_api_key")}
+        settings = self.process_content(
+            {k: v for k, v in merged.items() if not k.endswith("_api_key")}
+        )
         model_keys = ["azure_deployment", "deployment_name", "model", "model_name"]
         model = next((settings[k] for k in model_keys if k in settings), None)
         tools = None
@@ -203,6 +209,7 @@
     "RunnableParallel",
     "RunnableAssign",
     "RunnableLambda",
+    "structured_outputs_parser",
     "",
 ]
 DEFAULT_TO_KEEP = ["retriever", "llm", "agent", "chain", "tool"]
diff --git a/literalai/version.py b/literalai/version.py
index 9cdd0a7..39b47d2 100644
--- a/literalai/version.py
+++ b/literalai/version.py
@@ -1 +1 @@
-__version__ = "0.0.628"
+__version__ = "0.0.629"
diff --git a/setup.py b/setup.py
index 3494b8d..73ffbc9 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name="literalai",
-    version="0.0.628",  # update version in literalai/version.py
+    version="0.0.629",  # update version in literalai/version.py
     description="An SDK for observability in Python applications",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",

From 03ae87cb611046d25d01d8c42cf1ce82056ec4aa Mon Sep 17 00:00:00 2001
From: Willy Douhard
Date: Thu, 7 Nov 2024 12:42:00 +0100
Subject: [PATCH 2/2] fix: tests

---
 literalai/event_processor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/literalai/event_processor.py b/literalai/event_processor.py
index 8cf1fce..aae1f61 100644
--- a/literalai/event_processor.py
+++ b/literalai/event_processor.py
@@ -32,6 +32,7 @@ class EventProcessor:
     batch_timeout: float = 5.0
 
     def __init__(self, api: "LiteralAPI", batch_size: int = 1, disabled: bool = False):
+        self.stop_event = threading.Event()
         self.batch_size = batch_size
         self.api = api
         self.event_queue = queue.Queue()
@@ -44,7 +45,6 @@ def __init__(self, api: "LiteralAPI", batch_size: int = 1, disabled: bool = Fals
         )
         if not self.disabled:
             self.processing_thread.start()
-        self.stop_event = threading.Event()
 
     def add_event(self, event: "StepDict"):
         with self.counter_lock:
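
Note on PATCH 1/2: chains built with LangChain's structured-output support can hand the callback a Pydantic model instance rather than a plain dict, which the previous code failed to JSON-serialize. The snippet below is a minimal sketch of the conversion the new process_content branch performs, assuming pydantic v2; the Movie schema is a hypothetical example, not part of the SDK.

    from pydantic import BaseModel

    class Movie(BaseModel):
        # Hypothetical structured-output schema, for illustration only.
        title: str
        year: int

    content = Movie(title="Arrival", year=2016)

    # Mirrors the branch added to process_content in this patch:
    if isinstance(content, BaseModel):
        content = content.model_dump()  # plain dict, safe to JSON-serialize

    print(content)  # {'title': 'Arrival', 'year': 2016}

Note on PATCH 2/2: it moves the self.stop_event = threading.Event() assignment ahead of self.processing_thread.start(), presumably so the worker thread can never read stop_event before it exists.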