removed json loader because it was buggy

This commit is contained in:
Falko Victor Habel 2024-05-19 14:03:47 +02:00
parent 4dc355b2cb
commit 8d24965c67
3 changed files with 54 additions and 29 deletions

View File

@ -4,6 +4,9 @@ class OllamaChatBot:
def __init__(self, base_url, model, headers):
self.base_url = base_url
self.model = model
if self.is_empty(headers):
self.headers = ""
else:
self.headers = headers
self.messanges = []
@ -19,6 +22,10 @@ class OllamaChatBot:
headers = self.headers
)
def is_empty(self, dictionary):
return len(dictionary) == 1 and list(dictionary.keys())[0] == '' and list(dictionary.values())[0] == ''
def get_request(self, prompt):
messanges = []

View File

@ -25,6 +25,7 @@ class ChatGUI(CTk.CTk):
self.start_message_processing_thread()
def get_response_from_ollama(self, prompt, context):
try:
if context != "":
if self.context != context:
checks = self.rag.receive_data(file_path=context)
@ -38,6 +39,8 @@ class ChatGUI(CTk.CTk):
else:
return self.bot.get_request(prompt=prompt)
except ValueError:
return "An unexpected Error occuried"
def on_send(self, event=None):
message = self.entry_bar.get().strip()
@ -65,7 +68,8 @@ class ChatGUI(CTk.CTk):
def select_file(self):
file_path = filedialog.askopenfilename()
self.file_entry.insert(1, file_path)
self.file_entry.delete(0, "end")
self.file_entry.insert(0, file_path)
def create_widgets(self):
self.geometry("900x600")
@ -109,6 +113,8 @@ class ChatGUI(CTk.CTk):
for message in self.history:
message.pack_forget()
self.history = []
self.bot.messanges = []
self.rag.init_ollama()

View File

@ -6,8 +6,6 @@ from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_community.chat_models import ChatOllama
from langchain.chains import RetrievalQA
from pathlib import Path
import json
@ -21,7 +19,14 @@ class Rag:
self.base_url_llm = base_url_llm
self.base_url_embed = base_url_embed
if self.is_empty(base_header):
self.base_header = ""
else:
self.base_header = base_header
if self.is_empty(embeddings_header):
self.embeddings_header = ""
else:
self.embeddings_header = embeddings_header
self.embeddings = OllamaEmbeddings(model=embeddings, headers=self.embeddings_header, base_url=self.base_url_embed)
@ -49,8 +54,6 @@ class Rag:
case "html": # Corrected the typo in the variable name
loader = UnstructuredHTMLLoader(file_path=file_path)
data = loader.load()
case "json":
data = json.loads(Path(file_path).read_text())
case "md":
loader = UnstructuredMarkdownLoader(file_path=file_path)
data = loader.load()
@ -67,8 +70,13 @@ class Rag:
return True
def is_empty(self, dictionary):
return len(dictionary) == 1 and list(dictionary.keys())[0] == '' and list(dictionary.values())[0] == ''
def receive_data(self, file_path):
try:
if self.get_file(file_path):
text_splitter = RecursiveCharacterTextSplitter(chunk_size=250, chunk_overlap=0)
splitted = text_splitter.split_documents(self.data)
@ -76,8 +84,12 @@ class Rag:
return (False, "Success")
else:
return (True, f"'{file_path}' unsupported, read documentation for more information")
except (ValueError, AttributeError):
return (True, "An unexpected Error occuried")
def get_request(self, prompt):
qachain=RetrievalQA.from_chain_type(self.chat_ollama, retriever=self.retriever)
try:
return qachain.invoke({"query": prompt})["result"]
except ValueError:
return (True, "An unexpected Error occuried")