| | from langchain.tools import AIPluginTool |
| | from langchain.utilities import WikipediaAPIWrapper |
| | from langchain.schema import ( |
| | AIMessage, |
| | HumanMessage, |
| | SystemMessage |
| | ) |
| | from langchain.tools import MoveFileTool, format_tool_to_openai_function |
| | from langchain.tools import BaseTool, StructuredTool, Tool, tool |
| | from langchain.chat_models import ChatOpenAI |
| | from langchain.agents import AgentType, initialize_agent, load_tools |
| | from langchain import LLMMathChain, SerpAPIWrapper |
| | import gradio as gr |
| | import os |
| | import openai |
| | import gradio as gr |
| | from gradio import ChatInterface |
| | import time |
| |
|
| | |
# Read the API key from the environment so no credential is hard-coded here.
openai.api_key = os.getenv("OPENAI_API_KEY")
| |
|
| | |
| |
|
| |
|
# System prompt used on every request. Previously this was assigned to the
# name `str` (shadowing the builtin) and then never used — the same text was
# re-hardcoded inside predict(). The string bytes (including the trailing
# space) are preserved exactly.
SYSTEM_PROMPT = (
    "You are a discord bot called 'QuteAI', make your response like human "
    "chatting, humans do not response using lists while explaining things "
    "and don't say long sentences. "
)


def predict(inputs, chatbot):
    """Stream an assistant reply for *inputs* given the prior chat history.

    Parameters
    ----------
    inputs : str
        The newest user message.
    chatbot : list[tuple[str, str]]
        Gradio-style history of (user, assistant) message pairs.

    Yields
    ------
    str
        The assistant reply accumulated so far, growing as chunks stream in.
    """
    # Rebuild the full OpenAI message list: system prompt, prior turns, new input.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    for user_msg, assistant_msg in chatbot:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": inputs})

    print(messages)
    # NOTE(review): base_url points at a third-party OpenAI-compatible proxy —
    # confirm this endpoint is intended/trusted before shipping.
    client = openai.OpenAI(base_url="https://api.chatanywhere.tech/v1")
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=0.7,
        stream=True,
    )

    partial = ""
    for chunk in completion:
        delta = chunk.choices[0].delta.content
        # Streamed chunks may carry an empty/None delta (e.g. the final stop
        # chunk); only emit when there is new content.
        if delta:
            print(delta, end="", flush=True)
            partial += delta
            print(partial)
            yield partial
    # The original appended the finished message to the local `messages` list
    # here; that was dead code (the list goes out of scope) and was removed.
| |
|
| |
|
# Wrap predict() as a chat interface; it is rendered inside a hidden Row so
# the /chat API endpoint exists for the discord bot without showing a chat UI.
interface = gr.ChatInterface(predict)

with gr.Blocks() as demo:
    # FIX: the ```python fence below was never closed before the closing
    # triple-quote, which broke the rendered markdown instructions.
    gr.Markdown(
        """
    # GPT 3.5 Discord Bot powered by gradio!
    To use this space as a discord bot, first install the gradio_client

    ```bash
    pip install gradio_client
    ```

    Then run the following command

    ```python
    client = grc.Client.duplicate("gradio-discord-bots/gpt-35-turbo", private=False, secrets={"OPENAI_API_KEY": "<your-key-here>"}, sleep_timeout=2880)
    client.deploy_discord(api_names=["chat"])
    ```
    """
    )
    with gr.Row(visible=False):
        interface.render()

# Enable queuing so the streaming generator responses work, then start the app.
demo.queue().launch()