
Commit 6bed925

demo: add chatgpt client
1 parent d8403ea commit 6bed925

3 files changed: +201 −1 lines changed

demos/chatgpt.py

Lines changed: 197 additions & 0 deletions
@@ -0,0 +1,197 @@
import json
import time
from typing import Dict, List

import openai

import pywebio_battery
from pywebio.input import *
from pywebio.output import *
from pywebio.pin import *
from pywebio.session import set_env, download


class ChatGPTStreamResponse:
    def __init__(self, response):
        self.response = response
        self.yielded = []
        self.finish_reason = None

    def __next__(self):
        # https://github.com/openai/openai-cookbook/blob/main/examples/How_to_stream_completions.ipynb
        chunk = next(self.response)
        self.finish_reason = chunk['choices'][0]['finish_reason']

        # delta is { "role": "assistant" } or { "content": "..."} or {}
        delta = chunk['choices'][0]['delta']
        content = delta.get('content', '')
        if content:
            self.yielded.append(content)
        return content  # empty string for role/finish chunks, so callers never receive None

    def __iter__(self):
        return self

    def result(self):
        return ''.join(self.yielded)


class ChatGPT:

    def __init__(self, messages: List[Dict] = None, model: str = "gpt-3.5-turbo", api_key=None, **model_kwargs):
        """
        Create a ChatGPT client.

        :param messages: A list of messages comprising the conversation so far.
            Each message is a dict with keys "role" and "content".
            See: https://platform.openai.com/docs/api-reference/chat/create#chat/create-messages
        :param model: The model to use.
        :param api_key: The OpenAI API key.
            Get your API key from https://platform.openai.com/account/api-keys
        :param model_kwargs: Other parameters to pass to the model.
            See https://platform.openai.com/docs/api-reference/chat
        """
        self._messages = list(messages or [])
        self.model_kwargs = dict(model=model, **model_kwargs)
        if api_key:
            self.model_kwargs['api_key'] = api_key

        self.pending_stream_reply: ChatGPTStreamResponse = None
        self.latest_nonstream_finish_reason = None

    def _ask(self, message: str, stream=True, **model_kwargs):
        # commit any pending streamed reply to the history before sending a new message
        if self.pending_stream_reply:
            self._messages.append({"role": "assistant", "content": self.pending_stream_reply.result()})
            self.pending_stream_reply = None

        self._messages.append({"role": "user", "content": message})

        resp = openai.ChatCompletion.create(
            **self.model_kwargs,
            **model_kwargs,
            messages=self._messages,
            stream=stream,
        )
        return resp

    def ask(self, message: str, **model_kwargs) -> str:
        """
        Send a message to ChatGPT and get the reply as a string.

        :param message: The message to send
        :param model_kwargs: Other parameters to pass to openai.ChatCompletion.create()
        :return: The reply from ChatGPT
        """
        resp = self._ask(message, stream=False, **model_kwargs)
        reply = resp['choices'][0]
        reply_content = reply['message']['content']
        self._messages.append({"role": "assistant", "content": reply_content})
        self.latest_nonstream_finish_reason = reply['finish_reason']

        return reply_content

    def ask_stream(self, message: str, **model_kwargs) -> ChatGPTStreamResponse:
        """
        Send a message to ChatGPT and get the reply as a stream.

        :param message: The message to send
        :param model_kwargs: Other parameters to pass to openai.ChatCompletion.create()
        :return: An iterator that yields the reply from ChatGPT.
            The iterator will be exhausted when the reply is complete.
        """
        resp = self._ask(message, stream=True, **model_kwargs)
        self.pending_stream_reply = ChatGPTStreamResponse(resp)
        return self.pending_stream_reply

    def latest_finish_reason(self) -> str:
        """The finish reason for the latest reply of ChatGPT.

        The possible values for finish_reason are:
            'stop': API returned complete model output
            'length': Incomplete model output due to max_tokens parameter or token limit
            'content_filter': Omitted content due to a flag from our content filters
            'null': API response still in progress or incomplete

        See: https://platform.openai.com/docs/guides/chat/response-format
        """
        if self.pending_stream_reply:
            return self.pending_stream_reply.finish_reason
        return self.latest_nonstream_finish_reason

    def messages(self) -> List[Dict]:
        """Get all messages of the conversation."""
        if self.pending_stream_reply:
            self._messages.append({"role": "assistant", "content": self.pending_stream_reply.result()})
            self.pending_stream_reply = None

        return self._messages


def main():
    """"""
    set_env(input_panel_fixed=False, output_animation=False)
    put_markdown("""
    # ChatGPT
    A ChatGPT client implemented with PyWebIO. [Source Code](https://github.com/pywebio/PyWebIO/blob/dev/demos/chatgpt.py)
    TIPS: refresh the page to open a new chat.
    """)

    put_select('model', ['gpt-3.5-turbo', 'gpt-4'], label='Model')

    # reuse the OpenAI config saved in the browser's localStorage, or ask for it once
    openai_config = json.loads(pywebio_battery.get_localstorage('openai_config') or '{}')
    if not openai_config:
        openai_config = input_group('OpenAI API Config', [
            input('API Key', name='api_key', type=TEXT, required=True,
                  help_text='Get your API key from https://platform.openai.com/account/api-keys'),
            input('API Server', name='api_base', type=TEXT, value='https://api.openai.com', required=True),
        ])
        pywebio_battery.set_localstorage('openai_config', json.dumps(openai_config))

    # normalize the API server address so it ends with '/v1'
    api_base = openai_config['api_base'].removesuffix('/v1').strip('/') + '/v1'
    bot = ChatGPT(api_key=openai_config['api_key'], api_base=api_base, model=pin.model)
    while True:
        form = input_group('', [
            input(name='msg', placeholder='Ask ChatGPT'),
            actions(name='cmd', buttons=['Send', 'Multi-line Input', 'Save Chat'])
        ])
        if form['cmd'] == 'Multi-line Input':
            form['msg'] = textarea(value=form['msg'])
        elif form['cmd'] == 'Save Chat':
            messages = [
                msg['content'] if msg['role'] == 'user' else f"> {msg['content']}"
                for msg in bot.messages()
            ]
            download(f"chatgpt_{time.strftime('%Y%m%d%H%M%S')}.md",
                     '\n\n'.join(messages).encode('utf8'))
            continue

        user_msg = form['msg']
        if not user_msg:
            continue

        put_info(put_text(user_msg, inline=True))

        with use_scope(f'reply-{int(time.time())}'):
            put_loading('grow', 'info')
            try:
                reply_chunks = bot.ask_stream(user_msg)
            except Exception as e:
                popup('ChatGPT Error', put_error(e))
                continue
            finally:
                clear()  # clear loading
            for chunk in reply_chunks:
                put_text(chunk, inline=True)
            clear()  # clear above text
            put_markdown(reply_chunks.result())

        if bot.latest_finish_reason() == 'length':
            put_error('Incomplete model output due to max_tokens parameter or token limit.')
        elif bot.latest_finish_reason() == 'content_filter':
            put_warning("Omitted content due to a flag from OpenAI's content filters.")


if __name__ == '__main__':
    from pywebio import start_server

    start_server(main, port=8085, debug=True, cdn=False)
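
For reference, the ChatGPT class added above can also be exercised outside a PyWebIO session. The snippet below is a minimal sketch of a console round-trip, assuming the pre-1.0 openai package (the openai.ChatCompletion API this demo relies on), that the class is importable as demos.chatgpt.ChatGPT, and that a valid key is available in an OPENAI_API_KEY environment variable; the import path, the environment variable, and the prompts are illustrative, not part of the commit.

import os

from demos.chatgpt import ChatGPT  # import path is an assumption; adjust to where chatgpt.py lives

# assumed: a valid key in the OPENAI_API_KEY environment variable
bot = ChatGPT(api_key=os.environ['OPENAI_API_KEY'], model='gpt-3.5-turbo')

# blocking call: returns the complete reply as a string
print(bot.ask('Describe PyWebIO in one sentence.'))

# streaming call: iterate over the reply chunks as they arrive
reply = bot.ask_stream('Now say it in five words.')
for chunk in reply:
    print(chunk, end='', flush=True)
print()

print(bot.latest_finish_reason())  # e.g. 'stop' when the reply completed normally
print(len(bot.messages()))         # conversation history: 2 questions + 2 replies = 4 messages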

demos/index.py

Lines changed: 2 additions & 0 deletions
@@ -12,6 +12,7 @@
 - [Input demo](./input_usage): Demonstrate the usage of PyWebIO input module
 - [Output demo](./output_usage): Demonstrate the usage of PyWebIO output module
 - [Theme preview](./theme): Demo page with various themes supported by PyWebIO
+- [ChatGPT](./chatgpt): A ChatGPT client implemented with PyWebIO
 
 ### Data visualization demo
 PyWebIO supports for data visualization with the third-party libraries.
@@ -56,6 +57,7 @@
 - [Input demo](./input_usage): Demonstrates the usage of the PyWebIO input module
 - [Output demo](./output_usage): Demonstrates the usage of the PyWebIO output module
 - [Theme preview](./theme): Showcases the various themes supported by PyWebIO
+- [ChatGPT](./chatgpt): A ChatGPT client written with PyWebIO
 - For more demos, see the online demos of the example code in the [documentation](https://pywebio.readthedocs.io)
 
 ### Data visualization demo

requirements.txt

Lines changed: 2 additions & 1 deletion
@@ -31,4 +31,5 @@ pywebio_battery
 
 # demo running requirements
 numpy
-matplotlib
+matplotlib
+openai
