From 90151a6228ff0f9e765f42f4fa57d6b3b69a1fb7 Mon Sep 17 00:00:00 2001
From: Michael Struwig
Date: Wed, 16 Oct 2024 16:29:08 +0200
Subject: [PATCH] Rename test, format

---
 .github/workflows/test.yml                  | 2 +-
 llama31-local-copilot/llama_copilot/main.py | 9 +++++----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0337274..c269c42 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -3,7 +3,7 @@ name: Test Copilots
 on: [pull_request]
 
 jobs:
-  example-copilot-test:
+  example-copilot:
     runs-on: ubuntu-latest
     strategy:
       matrix:
diff --git a/llama31-local-copilot/llama_copilot/main.py b/llama31-local-copilot/llama_copilot/main.py
index c11c70e..076085d 100644
--- a/llama31-local-copilot/llama_copilot/main.py
+++ b/llama31-local-copilot/llama_copilot/main.py
@@ -77,16 +77,17 @@ async def query(request: AgentQueryRequest) -> EventSourceResponse:
         chat_messages.append(UserMessage(content=sanitize_message(message.content)))
 
     if request.context:
-        chat_messages.insert(1, UserMessage(content=sanitize_message("# Context\n" + str(request.context))))
+        chat_messages.insert(
+            1,
+            UserMessage(content=sanitize_message("# Context\n" + str(request.context))),
+        )
 
     @chatprompt(
         SystemMessage(SYSTEM_PROMPT),
         *chat_messages,
         model=LitellmChatModel(model="ollama_chat/llama3.1:8b-instruct-q6_K"),
     )
-    async def _llm() -> AsyncStreamedStr:
-        ...
-
+    async def _llm() -> AsyncStreamedStr: ...
 
     result = await _llm()
     return EventSourceResponse(
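
Note: the second hunk reformats a magentic `@chatprompt` call, where the decorator turns a `...`-bodied stub into an LLM call and the `AsyncStreamedStr` return annotation requests token-by-token streaming. Below is a minimal, self-contained sketch of that pattern, assuming the magentic and sse-starlette packages this file imports from; the endpoint shape, prompt text, and event payload are illustrative, not taken from main.py.

from fastapi import FastAPI
from magentic import AsyncStreamedStr, SystemMessage, UserMessage, chatprompt
from magentic.chat_model.litellm_chat_model import LitellmChatModel
from sse_starlette.sse import EventSourceResponse

app = FastAPI()


@app.get("/query")
async def query(prompt: str) -> EventSourceResponse:
    # Illustrative endpoint, not the repo's AgentQueryRequest handler.
    # @chatprompt turns the stub below into an LLM call: the listed messages
    # become the conversation, and the AsyncStreamedStr return annotation asks
    # magentic to stream chunks as they arrive from the model.
    @chatprompt(
        SystemMessage("You are a helpful copilot."),  # stand-in for SYSTEM_PROMPT
        UserMessage(prompt),
        model=LitellmChatModel(model="ollama_chat/llama3.1:8b-instruct-q6_K"),
    )
    async def _llm() -> AsyncStreamedStr: ...

    result = await _llm()

    async def events():
        # Relay each streamed chunk to the client as a server-sent event.
        async for chunk in result:
            yield {"data": chunk}

    return EventSourceResponse(events())

Defining `_llm` inside the request handler (as main.py does) lets the decorator close over per-request messages, which is why the one-line stub form that black produces here is idiomatic.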