# AiAssistent/main.py

import json
import websockets.sync.server
import local
import openai
import huggingface
from websearch import WebSearch
from openai import OpenAI
from assistant import Assistant, ToolBox, PromptBuilder
# import logging
# logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
#
prompts = json.load(open("prompts.json"))
config = json.load(open("config.json"))
toolbox = ToolBox(websearch=WebSearch())
# llm_type = eval(config["llm"]["type"])
# llm_arguments = config["llm"]["arguments"]
# llm_intf = llm_type(**llm_arguments)
openai = OpenAI(**config["OpenAI"])
def assistant_websocket_handler(connection):
assistant = Assistant(aiInterface=openai, sysPrompt=config["sysPrompt"], toolbox=toolbox)
for wsmessage in connection:
if isinstance(wsmessage, str):
jmsg = json.loads(wsmessage)
if (jmsg["type"] == "chatMessage"):
answer = assistant.say(jmsg["message"])
connection.send(json.dumps({
"type": "chatMessage",
"message": answer
}))
else:
print("Binary Message: {}".format(repr(wsmessage)))
with websockets.sync.server.serve(assistant_websocket_handler, host="localhost", port=6565) as websocket_server:
websocket_server.serve_forever()
# for question in questions:
# assistant = Assistant(aiInterface=openai, toolbox=toolbox, sysPrompt=prompts["chat"])
# answer = assistant.say(question)
# print("Assistant:\n{}".format(answer["content"]))
#
# q = input("Nachfrage: ")
# answer = assistant.say(q)
#
# for message in assistant.messages:
# print("{:10}: {}".format(message["role"] or message["tool_calls"][0]["function"], message["content"][:64]))
#
# cleaner = lxml.html.clean.Cleaner()
# cleaner.comments = True
# cleaner.javascript = True
# cleaner.style = True
# cleaner.inline_style = True
# cleaner.embedded = True
# cleaner.kill_tags = ["video","audio","img","svg"]
# cleaner.safe_attrs_only = True
#
# promptBuilder = assistent.promptbuilder.PromptBuilder(filename="prompts.json", template="chat")
# websearch = WebSearch()
# SECURITY: a real HuggingFace API key was committed here in plain text.
# It has been redacted below — the original key must be revoked and any
# replacement loaded from config/environment, never hard-coded.
# huggingface = HuggingFaceServerless(key="hf_REDACTED",
#                                     m_embedding="BAAI/bge-m3")
# # huggingface = HuggingFaceDedicated(url="https://ivhpq2gn14ycap8m.us-east-1.aws.endpoints.huggingface.cloud", api_key="hf_REDACTED")
#
#
#
# def chat(aiInterface, question, messages=None, context=None):
# if context is None:
# context = {}
# if messages is None:
# messages = []
#
# # prompt = promptBuilder.build_search_prompt(chatMessages)
# systemPrompt = promptBuilder.build_system_prompt()
#
# chatMessages = list(messages)
# chatMessages.append({"role": "system", "content": systemPrompt})
# chatMessages.append({"role": "user", "content": question})
#
# searchAnswer = aiInterface.chat(chatMessages)
# print("\nsearchAnswer:\n{}\n".format(searchAnswer))
# xanswer = lxml.etree.fromstring("<answer>{}</answer>".format(searchAnswer))
#
# searches = xanswer.xpath("search")
# fetches = xanswer.xpath("fetch")
# ready = xanswer.xpath("ready")
#
# fetchlist = []
# for fetch in fetches:
# fetchlist.append(fetch.text)
#
# maxfetch = len(fetchlist) + 3
#
# for search in searches:
# results = websearch.query(search.text)
# for result in results:
# response = requests.get(result)
# if response.status_code == 200:
# context[result] = "\n".join([s for s in cleaner.clean_html(response.text).splitlines() if s.strip()])
# else:
# context[result] = results[result]
#
# # for c in context:
# # pd = pandoc.read(source=context[c], format='html')
# # context[c] = pandoc.write(pd, format="plain")
# # # context[c] = markdownify.markdownify(context[c])
#
# # for fetch in fetchlist:
# # response = requests.get(fetch)
# # if response.status_code == 200:
# # context[fetch] = response.text
#
# print("Searches: {}\nFetches: {}\nReady: {}".format(len(searches),len(fetches),len(ready)))
# # print(repr(fetchlist))
# # for c in context:
# # print("####### QUELLE: {} #######\n{}".format(c, context[c]))
#
# finalPrompt = promptBuilder.build_system_prompt(contexts=context)
# print("final prompt:\n\n{}\n".format(finalPrompt))
#
# chatMessages = list(messages)
# chatMessages.append({"role": "system", "content": finalPrompt})
# chatMessages.append({"role": "user", "content": question})
#
# answer = aiInterface.chat(chatMessages)
# return answer
#
#
#
# for ai in (openai, ):
# print("AI Interface: {}".format(ai))
# for question in questions:
# print("Question: {}".format(question))
# answer = chat(ai, question)
# print("Answer: {}".format(answer))
#
# # results = websearch.query(question)
# #
# # for link in results:
# # pl = results[link]
# # embeddings = huggingface.embeddings(question, pl)
# # if embeddings is not None:
# # emin = min(embeddings)
# # emax = max(embeddings)
# # emean = statistics.mean(embeddings)
# #
# # print("{}:\n#########################################################\n{}\n{} / {} / {}\n".format(link, embeddings, emin, emean, emax))
# #
# #