Added the UI chat classes (ChatGUI and TerminalBot)
parent e9940c9d8c
commit fe347ab150
@ -0,0 +1,121 @@
import customtkinter as CTk
from threading import Thread, Event
from scripts.Message import Reply  # Assuming this module correctly handles the message layout
from tkinter import filedialog
from scripts.BaseOllama import OllamaChatBot
from scripts.Rag import Rag
import queue

CODE_PREFIX = "[1337]"
USER = ("User", "Bot")


class ChatGUI(CTk.CTk):
    def __init__(self, base_url='http://localhost:11434', embeddings_url='http://localhost:11434', base_model='mistral',
                 embeddings_model='mxbai-embed-large', base_header=None, embeddings_header=None):
        super().__init__()
        CTk.set_appearance_mode("dark")
        CTk.set_default_color_theme("theme/havard_theme.json")
        self.message_queue = queue.Queue()  # Hands replies from worker threads to the UI
        self.stop_event = Event()  # Signals the message-processing loop to stop on close
        self.bot = OllamaChatBot(base_url=base_url, model=base_model, headers=base_header)
        self.rag = Rag(base_url_llm=base_url, base_url_embed=embeddings_url, model=base_model, embeddings=embeddings_model,
                       embeddings_header=embeddings_header, base_header=base_header)
        self.context = None
        self.history = []
        self.create_widgets()
        self.start_message_processing_thread()

    def get_response_from_ollama(self, prompt, context):
        if context != "":
            if self.context != context:
                # checks = (flag, message); if the flag is set, show the message instead of querying
                checks = self.rag.receive_data(file_path=context)
                if checks[0]:
                    return checks[1]
                else:
                    self.context = context
                    self.rag.init_ollama()

            return self.rag.get_request(prompt=prompt)
        else:
            return self.bot.get_request(prompt=prompt)

    def on_send(self, event=None):
        message = self.entry_bar.get().strip()
        context = self.file_entry.get().strip()
        if message:
            user = Reply(master=self.messages_frame, reply_type="You", message=message)
            self.history.append(user)
            user.pack(anchor="e", padx=5, pady=2)
            self.entry_bar.delete(0, CTk.END)  # Clear input field after sending
            # Fetch the response on a worker thread and hand it to the UI via the queue
            Thread(target=lambda q, arg1, arg2: q.put(self.get_response_from_ollama(arg1, arg2)),
                   args=(self.message_queue, message, context)).start()

    def show_reply(self, message):
        bot_reply = Reply(master=self.messages_frame, reply_type="Fabelous-AI-Bot", message=message)
        self.history.append(bot_reply)
        bot_reply.pack(anchor="w", padx=5, pady=2)
        self.message_queue.task_done()

    def process_messages(self):
        while not self.stop_event.is_set():
            try:
                message = self.message_queue.get(timeout=1)
                # Hand the reply back to the Tk main loop; widgets should only be created there
                self.after(0, self.show_reply, message)
            except queue.Empty:
                continue

    def select_file(self):
        file_path = filedialog.askopenfilename()
        if file_path:
            self.file_entry.delete(0, CTk.END)  # Replace any previously selected path
            self.file_entry.insert(0, file_path)

    def create_widgets(self):
        self.geometry("900x600")
        self.title("Fabelous-Ai-Chat")

        self.protocol("WM_DELETE_WINDOW", self.on_close)  # Define window close handler
        self.resizable(True, True)
        self.minsize(475, 300)

        # Making the grid and widgets expandable
        self.grid_columnconfigure(0, weight=1)
        self.grid_rowconfigure(1, weight=1)

        # File Loader Entry
        self.file_entry = CTk.CTkEntry(self, placeholder_text="Enter Filepath or press Load File... (Only .md, pdf, csv, html, and json files are supported)")
        self.file_entry.grid(row=0, column=0, sticky="ew", padx=10, pady=(10, 0), columnspan=2)
        # Load File Button
        load_file_btn = CTk.CTkButton(self, text="Load File", command=self.select_file)
        load_file_btn.grid(row=0, column=2, sticky="e", padx=10, pady=(10, 0))

        # Messages Frame
        self.messages_frame = CTk.CTkScrollableFrame(self, fg_color="#212121")
        self.messages_frame.grid(row=1, column=0, sticky="nswe", pady=(10, 0), columnspan=4)

        # Entry Bar Adjustment for Full Width
        self.entry_bar = CTk.CTkEntry(self, placeholder_text="Type a message...")
        self.entry_bar.grid(row=3, column=0, sticky="ew", padx=10, pady=(10, 10), columnspan=1)

        # Send Button
        send_button = CTk.CTkButton(self, text="Send", command=self.on_send)
        send_button.grid(row=3, column=1, sticky="w", padx=10, pady=(10, 10))
        self.entry_bar.bind("<Return>", self.on_send)

        # Clear UI Button
        clear_button = CTk.CTkButton(self, text="Clear", command=self.clear_chat)
        clear_button.grid(row=3, column=2, sticky="w", padx=10, pady=(10, 10))

    def clear_chat(self):
        for message in self.history:
            message.destroy()  # Remove the widget entirely instead of only hiding it
        self.history = []

    def start_message_processing_thread(self):
        # Start background thread for processing incoming messages
        Thread(target=self.process_messages, daemon=True).start()

    def on_close(self):
        self.stop_event.set()
        self.destroy()
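For reference, a minimal launch sketch for the new GUI class; the import path and entry-point module are assumptions, since the diff does not show where this file lives in the repository:

# Hypothetical entry point; adjust the import to the actual module path of ChatGUI.
from scripts.ChatGUI import ChatGUI  # assumed module path

if __name__ == "__main__":
    app = ChatGUI(base_url="http://localhost:11434", base_model="mistral")
    app.mainloop()  # CTk.CTk inherits Tk's main loop; replies arrive via the background queue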
@ -0,0 +1,99 @@
import re
from scripts.BaseOllama import OllamaChatBot
from scripts.Rag import Rag
from termcolor import colored

CODE_PREFIX = "[1337]"
CONFIG_FILE = "config/config.json"


class TerminalBot:
    def __init__(self, initial_prompt, context, base_url='http://localhost:11434', embeddings_url='http://localhost:11434', base_model='mistral',
                 embeddings_model='mxbai-embed-large', base_header=None, embeddings_header=None):
        self.init_prompt = initial_prompt
        self.context = context
        self.rag = Rag(embeddings=embeddings_model, model=base_model,
                       base_url_llm=base_url, base_url_embed=embeddings_url,
                       base_header=base_header, embeddings_header=embeddings_header)
        self.bot = OllamaChatBot(base_url=base_url, model=base_model, headers=base_header)
        self.rag.init_ollama()

    def start(self):
        if self.context is not None:
            # checks = (flag, message); if the flag is set, show the message and stop
            checks = self.rag.receive_data(file_path=self.context)
            if checks[0]:
                self.show_reply(checks[1])
            else:
                self.show_reply(self.rag.get_request(prompt=self.init_prompt))
                self.conversation_with_context()
        else:
            self.show_reply(self.bot.get_request(prompt=self.init_prompt))
            self.conversation_without_context()

    def conversation_with_context(self):
        prompt = self.print_for_input().strip()
        if prompt == "":
            return "Finished Conversation"
        self.show_reply(self.rag.get_request(prompt=prompt))
        self.conversation_with_context()

    def conversation_without_context(self):
        prompt = self.print_for_input().strip()
        if prompt == "":
            return "Finished Conversation"
        self.show_reply(self.bot.get_request(prompt=prompt))
        self.conversation_without_context()

    def print_for_input(self) -> str:
        message_lines = []
        print("Type in your prompt: (Finish with ctrl + d or ctrl + z)")
        while True:
            try:
                line = input(":")
                message_lines.append(line)
            except EOFError:
                break
        return '\n'.join(message_lines)  # Preserve line breaks in multi-line prompts

    def show_reply(self, message) -> None:
        message = self.extract_code(message)
        if isinstance(message, list):
            for part in message:
                if part.startswith(CODE_PREFIX):
                    part = part[len(CODE_PREFIX):]
                    print(colored(part, "light_red"))
                else:
                    print(colored(part, "white"))
        else:
            print(colored(message + "\n", "white"))

    def extract_code(self, input_string, replacement=CODE_PREFIX):
        # Split the input string on the ``` delimiter
        split_parts = re.split(r'(```)', input_string)  # Include the delimiter in the results

        # Initialize an empty list to store the output array
        output_array = []

        # Track whether the previous part was a ``` delimiter
        previously_delimiter = False

        for part in split_parts:
            # Check if the current part is a ``` delimiter
            if part == "```":
                previously_delimiter = True  # Set flag if a delimiter is found
                continue  # Skip adding the delimiter to the output

            # If the previous part was a delimiter, replace the first word with the specified string
            if previously_delimiter:
                part = re.sub(r'^\b\w+\b', replacement, part, count=1)  # Replace the first word
                previously_delimiter = False  # Reset the flag

            # Only add non-empty parts to the output array
            if part.strip():
                output_array.append(part)

        return output_array
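For reference, a minimal usage sketch for the terminal bot; the module path, prompt, and file name are assumptions for illustration, since the diff does not show where TerminalBot is defined or how it is invoked:

# Hypothetical driver; adjust the import to the actual module path of TerminalBot.
from scripts.TerminalBot import TerminalBot  # assumed module path

bot = TerminalBot("Summarize this file.", "notes.md")  # (initial prompt, optional context file)
bot.start()  # replies print to the terminal; fenced code comes back tagged with CODE_PREFIX and is shown in red

# extract_code splits on ``` fences and swaps the language tag for the marker, e.g.:
# bot.extract_code("Intro\n```python\nprint('hi')\n```") -> ["Intro\n", "[1337]\nprint('hi')\n"]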