
chat: Add support for status messages #1862

Open · wants to merge 11 commits into base: main
6 changes: 6 additions & 0 deletions js/chat/chat.scss
@@ -139,6 +139,12 @@ shiny-chat-message {
  }
}

shiny-status-message {
  opacity: 0.8;
  text-align: center;
  font-size: 0.9em;
}

shiny-chat-input {
  --_input-padding-top: 0;
  --_input-padding-bottom: var(--_chat-container-padding, 0.25rem);
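These defaults are deliberately subtle (dimmed, centered, slightly smaller text). A minimal sketch of overriding them from app code, assuming only the custom element name above (not part of this PR):

from shiny import ui

# Hypothetical override of the shiny-status-message defaults above;
# include anywhere in the app's UI to restyle status messages.
ui.tags.style("shiny-status-message { opacity: 1; font-style: italic; }")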
65 changes: 64 additions & 1 deletion js/chat/chat.ts
@@ -18,6 +18,7 @@ type Message = {
  icon?: string;
  operation: "append" | null;
};

type ShinyChatMessage = {
  id: string;
  handler: string;
@@ -31,6 +32,12 @@ type UpdateUserInput = {
  focus?: false;
};

type StatusMessage = {
  content: string;
  content_type: Exclude<ContentType, "markdown">;
  type: "dynamic" | "static";
};

// https://github.com/microsoft/TypeScript/issues/28357#issuecomment-748550734
declare global {
  interface GlobalEventHandlersEventMap {
@@ -40,11 +47,13 @@ declare global {
    "shiny-chat-clear-messages": CustomEvent;
    "shiny-chat-update-user-input": CustomEvent<UpdateUserInput>;
    "shiny-chat-remove-loading-message": CustomEvent;
    "shiny-chat-append-status-message": CustomEvent<StatusMessage>;
  }
}

const CHAT_MESSAGE_TAG = "shiny-chat-message";
const CHAT_USER_MESSAGE_TAG = "shiny-user-message";
const CHAT_STATUS_MESSAGE_TAG = "shiny-status-message";
const CHAT_MESSAGES_TAG = "shiny-chat-messages";
const CHAT_INPUT_TAG = "shiny-chat-input";
const CHAT_CONTAINER_TAG = "shiny-chat-container";
@@ -112,6 +121,32 @@ class ChatUserMessage extends LightElement {
  }
}

class ChatStatusMessage extends LightElement {
  @property() content = "";
  @property() content_type: Exclude<ContentType, "markdown"> = "text";
  @property() type: "dynamic" | "static" = "static";

  render() {
    const content =
      this.content_type === "html" ? unsafeHTML(this.content) : this.content;
    return html`${content}`;
  }

  updated(changedProperties: Map<string, unknown>) {
    super.updated(changedProperties);
    if (
      changedProperties.has("content") ||
      changedProperties.has("content_type")
    ) {
      this.#scrollIntoView();
    }
  }

  #scrollIntoView() {
    this.scrollIntoView({ behavior: "smooth", block: "end" });
  }
}

class ChatMessages extends LightElement {
  render() {
    return html``;
@@ -277,7 +312,7 @@ class ChatContainer extends LightElement {
  }

  private get lastMessage(): ChatMessage | null {
-    const last = this.messages.lastElementChild;
+    const last = this.messages.querySelector("shiny-chat-message:last-child");
    return last ? (last as ChatMessage) : null;
  }

@@ -324,6 +359,10 @@ class ChatContainer extends LightElement {
      "shiny-chat-append-message-chunk",
      this.#onAppendChunk
    );
    this.addEventListener(
      "shiny-chat-append-status-message",
      this.#onAppendStatus
    );
    this.addEventListener("shiny-chat-clear-messages", this.#onClear);
    this.addEventListener(
      "shiny-chat-update-user-input",
@@ -349,6 +388,10 @@
      "shiny-chat-append-message-chunk",
      this.#onAppendChunk
    );
    this.removeEventListener(
      "shiny-chat-append-status-message",
      this.#onAppendStatus
    );
    this.removeEventListener("shiny-chat-clear-messages", this.#onClear);
    this.removeEventListener(
      "shiny-chat-update-user-input",
@@ -443,6 +486,25 @@
    }
  }

  #onAppendStatus(event: CustomEvent<StatusMessage>): void {
    if (event.detail.type === "dynamic") {
      if (this.messages.lastChild instanceof ChatStatusMessage) {
        if (this.messages.lastChild.type === "dynamic") {
          // Update the previous status message in place when the last
          // message is also a dynamic status item
          this.messages.lastChild.setAttribute("content", event.detail.content);
          this.messages.lastChild.setAttribute(
            "content_type",
            event.detail.content_type
          );
          return;
        }
      }
    }

    const status = createElement(CHAT_STATUS_MESSAGE_TAG, event.detail);
    this.messages.appendChild(status);
  }

  #onClear(): void {
    this.messages.innerHTML = "";
  }
@@ -523,6 +585,7 @@ class ChatContainer extends LightElement {

customElements.define(CHAT_MESSAGE_TAG, ChatMessage);
customElements.define(CHAT_USER_MESSAGE_TAG, ChatUserMessage);
customElements.define(CHAT_STATUS_MESSAGE_TAG, ChatStatusMessage);
customElements.define(CHAT_MESSAGES_TAG, ChatMessages);
customElements.define(CHAT_INPUT_TAG, ChatInput);
customElements.define(CHAT_CONTAINER_TAG, ChatContainer);
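For context, the new "shiny-chat-append-status-message" event is driven from the server via a custom Shiny message. A minimal sketch of dispatching it by hand; the "shinyChatMessage" handler name and the payload keys mirror the ShinyChatMessage type above, but treat them as assumptions rather than this PR's final API:

# Sketch only: dispatch the status event that ChatContainer handles above.
# Assumes the client routes "shinyChatMessage" payloads to the element with
# the given id and re-emits them as the named DOM event.
async def send_status(session, content: str, type_: str = "static"):
    await session.send_custom_message(
        "shinyChatMessage",
        {
            "id": "chat",
            "handler": "shiny-chat-append-status-message",
            "obj": {"content": content, "content_type": "text", "type": type_},
        },
    )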
5 changes: 5 additions & 0 deletions shiny/templates/chat/llms/ollama-multi/_template.json
@@ -0,0 +1,5 @@
{
  "type": "app",
  "id": "chat-ai-ollama-multi",
  "title": "Chat with local models using Ollama"
}
186 changes: 186 additions & 0 deletions shiny/templates/chat/llms/ollama-multi/app.py
@@ -0,0 +1,186 @@
# ------------------------------------------------------------------------------------
# Shiny Chat with local models, powered by Ollama
# ------------------------------------------------------------------------------------

# ChatOllama() requires an Ollama model server to be running locally.
# See the docs for more information on how to set up a local Ollama server.
# https://posit-dev.github.io/chatlas/reference/ChatOllama.html
import ollama
from chatlas import ChatOllama

from shiny import reactive, req
from shiny.express import input, render, ui
from shiny.reactive import ExtendedTask

# Get installed models and choose an initial model for the chat
models_all = [m.model for m in ollama.list().models]

# Prefer a llama3 model if one is installed; otherwise fall back to the
# first model returned by the Ollama server
default_model = models_all[0]
models_all.sort()
llama3_models = [x for x in models_all if x.startswith("llama3")]
if llama3_models:
    default_model = llama3_models[0]


# Set some Shiny page options
ui.page_opts(
    title="Hello Ollama Chat",
    fillable=True,
    fillable_mobile=True,
)

with ui.sidebar(title="Chat options"):
    ui.input_select(
        "model",
        "Model",
        choices=models_all,
        selected=default_model,
    )
    with ui.div():
        ui.input_slider("temperature", "Creativity", min=0, max=1, value=0.5, step=0.25)
        with ui.help_text(style="text-align: right;"):

            @render.text
            @reactive.event(input.temperature)
            def text_creativity():
                if input.temperature() < 0.25:
                    return "No creativity"
                elif input.temperature() < 0.5:
                    return "Low creativity"
                elif input.temperature() < 0.75:
                    return "Medium creativity"
                elif input.temperature() < 1:
                    return "High creativity"
                else:
                    return "Max creativity"

    ui.input_action_button("edit_last", "Edit last message", disabled=True)
    ui.input_action_button("clear", "Reset chat")


# Create and display a Shiny chat component
INITIAL_MESSAGE = "Hello! How can I help you today?"
chat = ui.Chat(id="chat", messages=[INITIAL_MESSAGE])

chat.ui()

chat_client = reactive.value[ChatOllama | None](None)


async def cancel_chat_stream(chat: ui.Chat, stream: ExtendedTask):
    if stream is not None and stream.status() == "running":
        # Cancel the current stream
        stream.cancel()
        # Tell the chat component that the message is complete
        stream_id = chat._current_stream_id
        await chat._append_message(
            {
                "type": "assistant",
                "content": "... [cancelled].",
            },
            chunk="end",
            stream_id=stream_id,
        )
        await chat.append_status_message(
            ui.div(
                "The previous response was cancelled.",
                class_="alert alert-warning",
                style="margin-inline: auto; width: max-content;",
            ),
            type="static",
        )


@reactive.effect
@reactive.event(input.model)
async def change_model():
    if chat_client.get() is None:
        client = ChatOllama(model=input.model())
        await chat.append_status_message(
            ui.HTML(f"Using model <code>{input.model()}</code>"), type="dynamic"
        )
    else:
        stream = streaming_task.get()
        await cancel_chat_stream(chat, stream)

        # TODO: Turns are broken when you cancel an in-progress stream
        turns = chat_client.get().get_turns()
        client = ChatOllama(model=input.model(), turns=turns)
        await chat.append_status_message(
            ui.HTML(f"Model switched to <code>{input.model()}</code>"), type="dynamic"
        )

    chat_client.set(client)


streaming_task = reactive.value[ExtendedTask | None](None)


# Generate a response when the user submits a message
@chat.on_user_submit
async def handle_user_input(user_input: str):
    response = chat_client.get().stream(
        user_input, kwargs={"temperature": input.temperature()}
    )
    task = await chat.append_message_stream(response)
    streaming_task.set(task)


@reactive.effect
@reactive.event(input.clear)
async def reset_chat():
    stream = streaming_task.get()
    if not isinstance(stream, ExtendedTask):
        return

    is_streaming = stream.status() == "running"

    if is_streaming:
        await cancel_chat_stream(chat, stream)
    else:
        await chat.clear_messages()
        chat_client.set(ChatOllama(model=input.model()))
        await chat.append_status_message(
            ui.HTML(f"Using model <code>{input.model()}</code>")
        )
        await chat.append_message(INITIAL_MESSAGE)


@reactive.effect
def toggle_last_message_button():
    task = streaming_task.get()
    if not isinstance(task, ExtendedTask):
        return

    is_streaming = task.status() == "running"
    ui.update_action_button("edit_last", disabled=is_streaming)
    ui.update_action_button(
        "clear", label="Cancel chat" if is_streaming else "Reset chat"
    )


@reactive.effect
@reactive.event(input.edit_last)
async def edit_last_message():
    req(streaming_task.get().status() != "running")

    messages = chat.messages()
    req(len(messages) > 1)

    # Find the index of the last user message
    last_user_index = next(
        (i for i, msg in reversed(list(enumerate(messages))) if msg["role"] == "user"),
        None,
    )
    if last_user_index is None:
        raise ValueError("No user messages found")

    last_user_msg = messages[last_user_index]["content"]
    messages = messages[:last_user_index]  # Keep only messages before the last user message

    # Reset chat UI state to just before the last user message
    await chat.clear_messages()
    for message in messages:
        await chat.append_message(message)

    chat.update_user_input(value=last_user_msg, focus=True)
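Worth noting from the #onAppendStatus handler in chat.ts: consecutive "dynamic" statuses update the previous status element in place, while "static" ones always append a new element. A short usage sketch, assuming the append_status_message API this PR adds:

await chat.append_status_message("Loading model...", type="dynamic")
await chat.append_status_message("Model ready", type="dynamic")  # updates the previous status in place
await chat.append_status_message("Session started", type="static")  # always appends a new element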
4 changes: 4 additions & 0 deletions shiny/templates/chat/llms/ollama-multi/requirements.txt
@@ -0,0 +1,4 @@
shiny
tokenizers
chatlas
ollama
2 changes: 1 addition & 1 deletion shiny/templates/chat/llms/ollama/_template.json
@@ -1,7 +1,7 @@
{
  "type": "app",
  "id": "chat-ai-ollama",
-  "title": "Chat AI using Ollama",
+  "title": "Basic Chat AI using Ollama",
  "next_steps": [
    "If you haven't already, download the Ollama executable from https://ollama.com/",
    "Run the executable and download the relevant model (llama3.2)",