From 4b94449c88ff3710c9990e2ae596abe819144194 Mon Sep 17 00:00:00 2001
From: Gabriel Peterson <25187859+gabrieldpeterson@users.noreply.github.com>
Date: Tue, 29 Apr 2025 14:58:13 -0700
Subject: [PATCH] fix(kleidiai-llm-chatbot): add command to install specific httpx version to resolve compatibility issue

---
 .../pytorch-llama/pytorch-llama-frontend.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/content/learning-paths/servers-and-cloud-computing/pytorch-llama/pytorch-llama-frontend.md b/content/learning-paths/servers-and-cloud-computing/pytorch-llama/pytorch-llama-frontend.md
index 4f7a6cb1b..ffae9d430 100644
--- a/content/learning-paths/servers-and-cloud-computing/pytorch-llama/pytorch-llama-frontend.md
+++ b/content/learning-paths/servers-and-cloud-computing/pytorch-llama/pytorch-llama-frontend.md
@@ -22,6 +22,11 @@ Install the additional packages:
 pip3 install openai==1.45.0
 ```
 
+Roll back httpx to a version before 0.28 to prevent a `proxies` error with Streamlit:
+```sh
+pip3 install httpx==0.27.2
+```
+
 ### Running LLM Inference Backend Server
 
 Start the LLM Inference Backend Server in a new terminal window: