forked from abetlen/llama-cpp-python
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathui.py
More file actions
76 lines (63 loc) · 2.08 KB
/
ui.py
File metadata and controls
76 lines (63 loc) · 2.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import gradio as gr
from llama_cpp.server.client import client
from llama_cpp.server.plugins import GradioPlugin
# Public API of this module: only the plugin class is exported.
__all__ = ['ChatbotPlugin']
# Currently selected model alias; mutated by change_model() (wired to the
# model dropdown) and sent with every chat-completion request in bot().
_model_alias = ''
def change_model(model):
    """Record *model* as the alias used for subsequent chat requests.

    Bound to the model dropdown's ``change`` event; updates the
    module-level ``_model_alias`` read by ``bot()``.
    """
    global _model_alias
    _model_alias = model
def add_text(history, text):
    """Append the user's typed message to the chat transcript.

    Returns the mutated history plus a cleared, temporarily disabled
    Textbox so the input is locked while the bot streams its reply.
    """
    history.append((text, None))
    return history, gr.Textbox(value="", interactive=False)
def add_file(history, file):
    """Append an uploaded file to the transcript as a ``(path,)`` message.

    Gradio's Chatbot renders a 1-tuple message as an inline file/image;
    the reply slot is left as ``None`` for ``bot()`` to fill in.
    """
    history.append(((file.name,), None))
    return history
def bot(history):
    """Stream the assistant's reply to the last user message in *history*.

    Sends the most recent user message to the llama.cpp server using the
    currently selected ``_model_alias`` and yields *history* after every
    streamed token so the Chatbot component updates live.
    """
    response = client.chat.completions.create(
        model=_model_alias,
        messages=[
            {'role': 'user', 'content': history[-1][0]}
        ],
        temperature=0,
        stream=True
    )
    # Replace the pending entry wholesale instead of assigning to index 1:
    # add_text/add_file append *tuples*, which do not support item
    # assignment. (Gradio normally round-trips the Chatbot value into
    # lists, but this is correct for either representation.)
    history[-1] = [history[-1][0], ""]
    for chunk in response:
        # delta.content is None on role/stop chunks; treat it as "".
        history[-1][1] += (chunk.choices[0].delta.content or '')
        yield history
# Top-level Gradio UI: a model selector, a chat transcript, a text input and
# an image-upload button, all wired into the streaming bot() handler.
with gr.Blocks(title='Llama C++ Server') as demo:
    with gr.Row():
        with gr.Accordion("Models"):
            dropdown = gr.Dropdown(
                # NOTE(review): _model_alias is '' at construction time, so the
                # first choice (and initial value) is empty — confirm intended.
                [_model_alias, 'llama-2-7b.Q2_K'],
                value=_model_alias,
                label="Select a model",
                elem_id="model_dropdown"
            )
    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        #bubble_full_width=False,
        #avatar_images=(None, (os.path.join(os.path.dirname(__file__), "avatar.png"))),
    )
    with gr.Row():
        txt = gr.Textbox(
            scale=4,
            show_label=False,
            placeholder="Enter text and press enter, or upload an image",
            container=False,
            elem_id="chatbot-input"
        )
        btn = gr.UploadButton("📁", file_types=["image",]) #"video", "audio"])
    # On submit: append the message and lock the textbox, then stream the
    # bot reply into the transcript. queue=False keeps the UI step instant.
    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, chatbot, chatbot, api_name="bot_response"
    )
    # Re-enable the textbox once the streamed reply has finished.
    txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
    # Uploads follow the same append-then-stream pattern as text messages.
    file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Keep the module-level model alias in sync with the dropdown selection.
    dropdown.change(change_model, dropdown)
# Enable Gradio's request queue (required for streaming generator handlers).
demo.queue()
class ChatbotPlugin(GradioPlugin):
    """Expose the chat UI as a server plugin.

    Presumably the server's plugin machinery mounts ``blocks`` at ``path``
    — verify against GradioPlugin's contract.
    """
    # The Blocks app built above, served at the site root.
    blocks = demo
    path='/'