From 2fddbdd14861e8ac13634508a311374e9b1ca12a Mon Sep 17 00:00:00 2001
From: Hyesoo Kim <100982596+duper203@users.noreply.github.com>
Date: Fri, 4 Oct 2024 14:38:20 -0700
Subject: [PATCH 1/4] Created using Colab
---
Solar-Fullstack-LLM-101/80_gradio.ipynb | 408 +++++++++++++-----------
1 file changed, 229 insertions(+), 179 deletions(-)
diff --git a/Solar-Fullstack-LLM-101/80_gradio.ipynb b/Solar-Fullstack-LLM-101/80_gradio.ipynb
index 357e22b..2e9c63f 100644
--- a/Solar-Fullstack-LLM-101/80_gradio.ipynb
+++ b/Solar-Fullstack-LLM-101/80_gradio.ipynb
@@ -1,187 +1,237 @@
{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# 80. Gradio\n",
- "\n",
- "## Overview \n",
- "In this exercise, we will use Gradio to implement the RAG (Retrieval-Augmented Generation) system we previously learned as an application. By utilizing the Upstage Solar API and LangChain, we will generate context-based responses to user queries and create an easy-to-use interface for interaction using Gradio. This tutorial will guide you through building an application that combines these tools effectively.\n",
- " \n",
- "## Purpose of the Exercise\n",
- "The purpose of this exercise is to construct a simple and intuitive web interface using the Gradio library and implement an interactive chat interface to enable user interaction. By the end of this tutorial, users will be able to create applications that facilitate seamless communication and provide accurate responses based on user input, leveraging the power of RAG and Gradio.\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "!pip install -qU gradio python-dotenv langchain-upstage python-dotenv"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "# @title set API key\n",
- "import os\n",
- "import getpass\n",
- "from pprint import pprint\n",
- "import warnings\n",
- "\n",
- "warnings.filterwarnings(\"ignore\")\n",
- "\n",
- "from IPython import get_ipython\n",
- "\n",
- "if \"google.colab\" in str(get_ipython()):\n",
- " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
- " from google.colab import userdata\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
- "else:\n",
- " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
- " from dotenv import load_dotenv\n",
- "\n",
- " load_dotenv()\n",
- "\n",
- "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
- ]
-
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "import gradio as gr\n",
- "\n",
- "from langchain_upstage import ChatUpstage\n",
- "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
- "from langchain_core.output_parsers import StrOutputParser\n",
- "from langchain.schema import AIMessage, HumanMessage\n",
- "\n",
- "\n",
- "llm = ChatUpstage()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [],
- "source": [
- "# More general chat\n",
- "chat_with_history_prompt = ChatPromptTemplate.from_messages(\n",
- " [\n",
- " (\"system\", \"You are a helpful assistant.\"),\n",
- " MessagesPlaceholder(variable_name=\"history\"),\n",
- " (\"human\", \"{message}\"),\n",
- " ]\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [],
- "source": [
- "chain = chat_with_history_prompt | llm | StrOutputParser()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "def chat(message, history):\n",
- " history_langchain_format = []\n",
- " for human, ai in history:\n",
- " history_langchain_format.append(HumanMessage(content=human))\n",
- " history_langchain_format.append(AIMessage(content=ai))\n",
- "\n",
- " return chain.invoke({\"message\": message, \"history\": history_langchain_format})"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [],
- "source": [
- "with gr.Blocks() as demo:\n",
- " chatbot = gr.ChatInterface(\n",
- " chat,\n",
- " examples=[\n",
- " \"How to eat healthy?\",\n",
- " \"Best Places in Korea\",\n",
- " \"How to make a chatbot?\",\n",
- " ],\n",
- " title=\"Solar Chatbot\",\n",
- " description=\"Upstage Solar Chatbot\",\n",
- " )\n",
- " chatbot.chatbot.height = 300"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "view-in-github",
+ "colab_type": "text"
+ },
+ "source": [
+ "
"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "6IiAw5kOb04r"
+ },
+ "source": [
+ "# 80. Gradio\n",
+ "\n",
+ "## Overview \n",
+ "In this exercise, we will use Gradio to implement the RAG (Retrieval-Augmented Generation) system we previously learned as an application. By utilizing the Upstage Solar API and LangChain, we will generate context-based responses to user queries and create an easy-to-use interface for interaction using Gradio. This tutorial will guide you through building an application that combines these tools effectively.\n",
+ "\n",
+ "## Purpose of the Exercise\n",
+ "The purpose of this exercise is to construct a simple and intuitive web interface using the Gradio library and implement an interactive chat interface to enable user interaction. By the end of this tutorial, users will be able to create applications that facilitate seamless communication and provide accurate responses based on user input, leveraging the power of RAG and Gradio.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "AHaJUPo1b04s"
+ },
+ "outputs": [],
+ "source": [
+ "!pip install -qU gradio python-dotenv langchain-upstage python-dotenv langchain"
+ ]
+ },
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Running on local URL: http://127.0.0.1:7860\n",
- "\n",
- "To create a public link, set `share=True` in `launch()`.\n"
- ]
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "id": "I2mK6xJYb04t"
+ },
+ "outputs": [],
+ "source": [
+ "# @title set API key\n",
+ "import os\n",
+ "import getpass\n",
+ "from pprint import pprint\n",
+ "import warnings\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")\n",
+ "\n",
+ "from IPython import get_ipython\n",
+ "\n",
+ "if \"google.colab\" in str(get_ipython()):\n",
+ " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
+ " from google.colab import userdata\n",
+ " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
+ "else:\n",
+ " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
+ " from dotenv import load_dotenv\n",
+ "\n",
+ " load_dotenv()\n",
+ "\n",
+ "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
+ " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
+ ]
},
{
- "data": {
- "text/html": [
- "
"
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "id": "fIyuV4f7b04t"
+ },
+ "outputs": [],
+ "source": [
+ "from langchain_upstage import ChatUpstage\n",
+ "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+ "from langchain_core.output_parsers import StrOutputParser\n",
+ "from langchain.schema import AIMessage, HumanMessage\n",
+ "\n",
+ "\n",
+ "llm = ChatUpstage(model=\"solar-pro\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "id": "3Z7Vv1xwb04t"
+ },
+ "outputs": [],
+ "source": [
+ "# More general chat\n",
+ "chat_with_history_prompt = ChatPromptTemplate.from_messages(\n",
+ " [\n",
+ " (\"system\", \"You are a helpful assistant.\"),\n",
+ " MessagesPlaceholder(variable_name=\"history\"),\n",
+ " (\"human\", \"{message}\"),\n",
+ " ]\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "id": "dSqT2qEYb04u"
+ },
+ "outputs": [],
+ "source": [
+ "chain = chat_with_history_prompt | llm | StrOutputParser()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "id": "OJJjvRcnb04u"
+ },
+ "outputs": [],
+ "source": [
+ "def chat(message, history):\n",
+ " history_langchain_format = []\n",
+ " for human, ai in history:\n",
+ " history_langchain_format.append(HumanMessage(content=human))\n",
+ " history_langchain_format.append(AIMessage(content=ai))\n",
+ "\n",
+ " return chain.invoke({\"message\": message, \"history\": history_langchain_format})"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "id": "NdsTnOn8b04u"
+ },
+ "outputs": [],
+ "source": [
+ "import gradio as gr\n",
+ "\n",
+ "with gr.Blocks() as demo:\n",
+ " chatbot = gr.ChatInterface(\n",
+ " chat,\n",
+ " examples=[\n",
+ " \"How to eat healthy?\",\n",
+ " \"Best Places in Korea\",\n",
+ " \"How to make a chatbot?\",\n",
+ " ],\n",
+ " title=\"Solar Chatbot\",\n",
+ " description=\"Upstage Solar Chatbot\",\n",
+ " )\n",
+ " chatbot.chatbot.height = 300"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "id": "KhPNfTOFb04u",
+ "outputId": "df86f307-acc9-4151-9c5e-68e959b81479",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 645
+ }
+ },
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n",
+ "\n",
+ "Colab notebook detected. To show errors in colab notebook, set debug=True in launch()\n",
+ "Running on public URL: https://f888d908ca5729f8d5.gradio.live\n",
+ "\n",
+ "This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"
+ ]
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ ""
+ ],
+ "text/html": [
+ ""
+ ]
+ },
+ "metadata": {}
+ }
],
- "text/plain": [
- ""
+ "source": [
+ "if __name__ == \"__main__\":\n",
+ " demo.launch()"
]
- },
- "metadata": {},
- "output_type": "display_data"
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Example\n",
+ "\n",
+ ""
+ ],
+ "metadata": {
+ "id": "qh6DCd48dk6v"
+ }
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.10"
+ },
+ "colab": {
+ "provenance": [],
+ "include_colab_link": true
}
- ],
- "source": [
- "if __name__ == \"__main__\":\n",
- " demo.launch()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
},
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.10"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
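For quick reference, the cells this patch adds to 80_gradio.ipynb compose into roughly the following standalone script. This is a minimal sketch, not part of the patch itself: it assumes the packages the notebook installs (gradio, python-dotenv, langchain-upstage, langchain) and Gradio's tuple-style chat history, and it reads UPSTAGE_API_KEY from a local .env file rather than Colab Secrets.

import os

import gradio as gr
from dotenv import load_dotenv
from langchain.schema import AIMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_upstage import ChatUpstage

load_dotenv()  # expects UPSTAGE_API_KEY in .env or already in the environment
assert "UPSTAGE_API_KEY" in os.environ, "Please set the UPSTAGE_API_KEY environment variable"

llm = ChatUpstage(model="solar-pro")

# Prompt with a rolling history slot, as in the notebook.
chat_with_history_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{message}"),
    ]
)
chain = chat_with_history_prompt | llm | StrOutputParser()


def chat(message, history):
    # Gradio passes history as (human, ai) tuples; convert them to LangChain messages.
    history_langchain_format = []
    for human, ai in history:
        history_langchain_format.append(HumanMessage(content=human))
        history_langchain_format.append(AIMessage(content=ai))
    return chain.invoke({"message": message, "history": history_langchain_format})


with gr.Blocks() as demo:
    gr.ChatInterface(
        chat,
        examples=["How to eat healthy?", "Best Places in Korea", "How to make a chatbot?"],
        title="Solar Chatbot",
        description="Upstage Solar Chatbot",
    )

if __name__ == "__main__":
    demo.launch()
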
From 0ddba0a71a53ac43fc6be52e54c245a4f16f31db Mon Sep 17 00:00:00 2001
From: Hyesoo Kim <100982596+duper203@users.noreply.github.com>
Date: Mon, 7 Oct 2024 09:55:32 -0700
Subject: [PATCH 2/4] Update 82_gradio_chatpdf.ipynb : env setting
---
Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb b/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
index d3c0138..f0baf42 100644
--- a/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
+++ b/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
@@ -28,20 +28,19 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
+ "source": [
"# @title set API key\n",
- "import os\n",
- "import getpass\n",
"from pprint import pprint\n",
+ "import os\n",
+ "\n",
"import warnings\n",
"\n",
"warnings.filterwarnings(\"ignore\")\n",
"\n",
- "from IPython import get_ipython\n",
- "\n",
"if \"google.colab\" in str(get_ipython()):\n",
" # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
" from google.colab import userdata\n",
+ "\n",
" os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
"else:\n",
" # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
@@ -49,9 +48,10 @@
"\n",
" load_dotenv()\n",
"\n",
- "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
- ]
+ "assert (\n",
+ " \"UPSTAGE_API_KEY\" in os.environ\n",
+ "), \"Please set the UPSTAGE_API_KEY environment variable\""
+ ]
},
{
From 40e054ed40f412ce17fb8d2c9244371f1f28da8a Mon Sep 17 00:00:00 2001
From: Hyesoo Kim
Date: Mon, 7 Oct 2024 09:59:42 -0700
Subject: [PATCH 3/4] Revert "Update 82_gradio_chatpdf.ipynb : env setting"
This reverts commit 0ddba0a71a53ac43fc6be52e54c245a4f16f31db.
---
Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb b/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
index f0baf42..d3c0138 100644
--- a/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
+++ b/Solar-Fullstack-LLM-101/82_gradio_chatpdf.ipynb
@@ -28,19 +28,20 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
+ "source": [
"# @title set API key\n",
- "from pprint import pprint\n",
"import os\n",
- "\n",
+ "import getpass\n",
+ "from pprint import pprint\n",
"import warnings\n",
"\n",
"warnings.filterwarnings(\"ignore\")\n",
"\n",
+ "from IPython import get_ipython\n",
+ "\n",
"if \"google.colab\" in str(get_ipython()):\n",
" # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
" from google.colab import userdata\n",
- "\n",
" os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
"else:\n",
" # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
@@ -48,10 +49,9 @@
"\n",
" load_dotenv()\n",
"\n",
- "assert (\n",
- " \"UPSTAGE_API_KEY\" in os.environ\n",
- "), \"Please set the UPSTAGE_API_KEY environment variable\""
- ]
+ "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
+ " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
+ ]
},
{
From 282098d60e54d8705483643579ef743f9c63c89b Mon Sep 17 00:00:00 2001
From: Hyesoo Kim <100982596+duper203@users.noreply.github.com>
Date: Mon, 7 Oct 2024 10:01:09 -0700
Subject: [PATCH 4/4] Update 80_gradio.ipynb : env setting
---
Solar-Fullstack-LLM-101/80_gradio.ipynb | 48 ++++++++++++-------------
1 file changed, 24 insertions(+), 24 deletions(-)
diff --git a/Solar-Fullstack-LLM-101/80_gradio.ipynb b/Solar-Fullstack-LLM-101/80_gradio.ipynb
index 2e9c63f..f606df1 100644
--- a/Solar-Fullstack-LLM-101/80_gradio.ipynb
+++ b/Solar-Fullstack-LLM-101/80_gradio.ipynb
@@ -44,29 +44,29 @@
},
"outputs": [],
"source": [
- "# @title set API key\n",
- "import os\n",
- "import getpass\n",
- "from pprint import pprint\n",
- "import warnings\n",
- "\n",
- "warnings.filterwarnings(\"ignore\")\n",
- "\n",
- "from IPython import get_ipython\n",
- "\n",
- "if \"google.colab\" in str(get_ipython()):\n",
- " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
- " from google.colab import userdata\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
- "else:\n",
- " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
- " from dotenv import load_dotenv\n",
- "\n",
- " load_dotenv()\n",
- "\n",
- "if \"UPSTAGE_API_KEY\" not in os.environ:\n",
- " os.environ[\"UPSTAGE_API_KEY\"] = getpass.getpass(\"Enter your Upstage API key: \")\n"
- ]
+ "# @title set API key\n",
+ "from pprint import pprint\n",
+ "import os\n",
+ "\n",
+ "import warnings\n",
+ "\n",
+ "warnings.filterwarnings(\"ignore\")\n",
+ "\n",
+ "if \"google.colab\" in str(get_ipython()):\n",
+ " # Running in Google Colab. Please set the UPSTAGE_API_KEY in the Colab Secrets\n",
+ " from google.colab import userdata\n",
+ "\n",
+ " os.environ[\"UPSTAGE_API_KEY\"] = userdata.get(\"UPSTAGE_API_KEY\")\n",
+ "else:\n",
+ " # Running locally. Please set the UPSTAGE_API_KEY in the .env file\n",
+ " from dotenv import load_dotenv\n",
+ "\n",
+ " load_dotenv()\n",
+ "\n",
+ "assert (\n",
+ " \"UPSTAGE_API_KEY\" in os.environ\n",
+ "), \"Please set the UPSTAGE_API_KEY environment variable\""
+ ]
},
{
"cell_type": "code",
@@ -234,4 +234,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
\ No newline at end of file
+}
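Patch 4 settles 80_gradio.ipynb on the key-loading cell that patch 2 briefly introduced (and patch 3 reverted) in 82_gradio_chatpdf.ipynb: read UPSTAGE_API_KEY from Colab Secrets when running in Colab, otherwise load a local .env file, then fail fast with an assert instead of prompting via getpass. A minimal sketch of that cell, assuming a notebook context where get_ipython() is predefined:

import os
import warnings

warnings.filterwarnings("ignore")

if "google.colab" in str(get_ipython()):
    # Running in Google Colab: read the key from Colab Secrets.
    from google.colab import userdata

    os.environ["UPSTAGE_API_KEY"] = userdata.get("UPSTAGE_API_KEY")
else:
    # Running locally: read the key from a .env file.
    from dotenv import load_dotenv

    load_dotenv()

assert (
    "UPSTAGE_API_KEY" in os.environ
), "Please set the UPSTAGE_API_KEY environment variable"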