"""Gradio chat UI backed by a LangChain structured-chat tool agent.

The agent combines file-management, Python-REPL, requests, math,
DuckDuckGo-search, terminal and Korean-Wikipedia tools around a hosted
Hugging Face LLM and serves it through a ``gr.ChatInterface``.
"""
# NOTE: HuggingFacePipeline (the original import) runs *local* pipelines and
# accepts neither ``repo_id`` nor a hub token; HuggingFaceEndpoint is the
# hosted-repo class that does.
from langchain_huggingface import HuggingFaceEndpoint as HF
from subprocess import Popen, PIPE as P
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
# NOTE: ``create_structured_react_agent`` does not exist in langchain.agents;
# the structured-chat constructor is ``create_structured_chat_agent`` and it
# requires a prompt (pulled from the hub below).
from langchain.agents import (
    load_tools,
    create_structured_chat_agent as Agent,
    AgentExecutor as Ex,
    AgentType as Type,
)
from langchain.agents.agent_toolkits import create_retriever_tool as crt
from langchain_community.agent_toolkits import FileManagementToolkit as FMT
from langchain.tools import Tool
from langchain.memory import ConversationBufferMemory as MEM, RedisChatMessageHistory as HIS
from langchain.schema import SystemMessage as SM, HumanMessage as HM, AIMessage as AM
from langchain import hub
import os
from langchain.retrievers import WikipediaRetriever as Wiki
import gradio as gr

chatbot = gr.Chatbot(
    label="SYAI4.1",
    show_copy_button=True,
    layout="panel",
)


def terminal(c):
    """Run shell command *c*; return combined stdout+stderr as text.

    SECURITY NOTE: ``shell=True`` on agent-supplied input is arbitrary
    command execution *by design* — this is the agent's terminal tool.
    Do not expose this app to untrusted users without sandboxing.
    """
    proc = Popen(c, shell=True, stdin=P, stdout=P, stderr=P)
    # communicate() drains both pipes concurrently; the original sequential
    # stdout.read() / stderr.read() can deadlock once a pipe buffer fills.
    out, err = proc.communicate()
    # LangChain tools must return str, not bytes.
    return (out + err).decode(errors="replace")


# Hosted LLM; the HF API token is expected in the "key" env var (raises
# KeyError at startup if missing, which is the desired fail-fast behavior).
llm = HF(
    repo_id="peterpeter8585/syai4.1",
    huggingfacehub_api_token=os.environ["key"],
)

# Assemble the tool belt: file management, Python REPL, HTTP requests,
# calculator + web search, raw terminal, and Korean Wikipedia lookup.
tools = FMT().get_tools()
tools.append(PYT())
tools.extend(load_tools(["requests_all"], allow_dangerous_tools=True))
tools.extend(load_tools(["llm-math", "ddg-search"], llm=llm))
tools.append(
    Tool.from_function(
        func=terminal,
        name="terminal",
        description="터미널 명령어실행에 적합함",
    )
)
tools.append(
    crt(
        name="wiki",
        description="위키 백과를 검색하여 정보를 가져온다",
        retriever=Wiki(lang="ko", top_k_results=1),
    )
)


def chat(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    chat_session="",
):
    """Answer one chat turn by running the tool agent; yields the reply text.

    ``max_tokens`` / ``temperature`` / ``top_p`` / ``chat_session`` are
    accepted for UI compatibility but are not currently forwarded to the
    LLM — TODO: wire them into the endpoint call.
    """
    # Rebuild the conversation as LangChain message objects.
    messages = [SM(content=system_message + "And, Your name is Chatchat")]
    for user_turn, ai_turn in history:
        if user_turn:
            messages.append(HM(content=user_turn))
        if ai_turn:
            messages.append(AM(content=ai_turn))
    messages.append(HM(content=message))

    # memory_key must match the "chat_history" placeholder of the
    # structured-chat prompt (original "history" never lined up with it).
    memory = MEM(memory_key="chat_history", return_messages=True)
    prompt = hub.pull("hwchase17/structured-chat-agent")
    agent = Ex(
        agent=Agent(llm=llm, tools=tools, prompt=prompt),
        tools=tools,
        memory=memory,
        verbose=True,
        handle_parsing_errors=True,
    )
    # AgentExecutor.invoke expects a dict keyed by the prompt's input
    # variables and returns a dict; yield only the answer text so
    # gr.ChatInterface can render it.
    result = agent.invoke({"input": message, "chat_history": messages})
    yield result["output"]


ai1 = gr.ChatInterface(
    chat,
    chatbot=chatbot,
    additional_inputs=[
        gr.Textbox(
            value="You are a helpful assistant.",
            label="System message",
            interactive=True,
        ),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.1, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.1,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
        gr.Textbox(label="chat_id(please enter the chat id!)"),
    ],
)
ai1.launch()