updated code
Gitea Actions Demo / Explore-Gitea-Actions (push): Failing after 5m27s
parent: 30c0091795
commit: f6e12e0469
@@ -40,4 +40,4 @@ To use the Fake News Checker application, follow these steps:
 
 ## License
 
-This application is licensed under the MIT license. See the [LICENSE](LICENSE) file for more details.
+This application is licensed under the Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International license. See the [LICENSE](LICENSE) file for more details.
(New image file added, 118 KiB; binary file not shown.)
@@ -1,4 +1,5 @@
-from langchain_community.llms import Ollama
+from langchain_ollama import ChatOllama
+from langchain_core.messages import AIMessage
 import os
 
 class ArticleRater:
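For context on the import swap: `langchain_community.llms.Ollama` is the older text-completion wrapper, and this commit moves to the chat interface from the dedicated `langchain-ollama` package. A minimal sketch of the new call shape, assuming a reachable Ollama server and an installed `langchain-ollama`; the prompt text here is illustrative, not from the repo:

```python
from langchain_ollama import ChatOllama

# ChatOllama takes chat messages instead of a bare prompt string.
llm = ChatOllama(model="phi3.5:3.8b-mini-instruct-q4_K_M")

# invoke() returns an AIMessage; the text lives on .content.
reply = llm.invoke([("system", "Answer in one sentence."),
                    ("human", "What is fake news detection?")])
print(reply.content)
```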
@@ -6,6 +7,8 @@ class ArticleRater:
         self.client = "https://ai.fabelous.app/v1/ollama/generic"
         self.token = self._get_token()
         self.headers = {"Authorization": f"Token {self.token}"}
+        self.model = "phi3.5:3.8b-mini-instruct-q4_K_M"
+        self.llm = ChatOllama(model=self.model, client_kwargs={'headers': self.headers}, base_url=self.client)
 
     def _get_token(self):
         if os.path.exists("Token/Token.txt"):
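The two added lines move model selection and client construction into `__init__`, with the auth header passed through `client_kwargs`. A hedged sketch of that wiring, using the token path and header format shown in the diff; the `read_token` helper and its error handling are assumptions for illustration:

```python
import os
from langchain_ollama import ChatOllama

def read_token(path="Token/Token.txt"):
    # Return the stored API token, or None if the file is missing.
    if os.path.exists(path):
        with open(path) as f:
            return f.read().strip()
    return None

llm = ChatOllama(
    model="phi3.5:3.8b-mini-instruct-q4_K_M",
    base_url="https://ai.fabelous.app/v1/ollama/generic",
    # client_kwargs is forwarded to the underlying HTTP client, so the
    # Authorization header is attached to every request to the gateway.
    client_kwargs={"headers": {"Authorization": f"Token {read_token()}"}},
)
```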
@@ -15,22 +18,16 @@ class ArticleRater:
         return None
 
     def get_response(self, article, result, confidence):
-        ollama_params = {
-            "base_url": self.client,
-            "model": "mistral-nemo:12b-instruct-2407-q8_0",
-            "headers": self.headers,
-            "system": """A machine learning model has judged a text as to whether it is fake news or real news.
+        messages = [
+            ("system", """A machine learning model has judged a text as to whether it is fake news or real news.
             Explain in 1-2 sentences why this model reached its decision.
-            DO NOT EXPLAIN ANY HEADINGS or the like. You are given a TEXT and should explain how the RESULT came about"""
-        }
-
-        message = (f"{article}, result: {result}, confidence {confidence}")
-
-        # Initialize the Ollama object with the prepared parameters
-        llm = Ollama(**ollama_params)
-
-        # Return the response stream
-        return llm.stream(message)
+            DO NOT EXPLAIN ANY HEADINGS or the like. You are given a TEXT and should explain how the RESULT came about"""),
+            ("human", f"{article}, result: {result}, confidence {confidence}")
+        ]
+
+        # Return the response stream
+        return self.llm.stream(messages)
 
 # Usage
 if __name__ == "__main__":
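The rewritten `get_response` drops the old `system` kwarg and bare prompt string in favor of LangChain's list-of-tuples message format, where each entry is a `(role, content)` pair. A small sketch of that shape, reusing the `llm` built in the sketch above; the texts are illustrative:

```python
# (role, content) tuples are coerced into system/human chat messages.
messages = [
    ("system", "Explain in 1-2 sentences why the classifier decided this way."),
    ("human", "article text..., result: Fake, confidence 0.87"),
]

# stream() yields partial responses as they arrive instead of blocking
# until the full completion is ready.
for chunk in llm.stream(messages):
    print(chunk.content, end="", flush=True)
```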
@@ -43,4 +40,4 @@ if __name__ == "__main__":
     # Capture the stream response
     response_stream = article_rater.get_response(article, result, confidence=confidence)
     for chunk in response_stream:
-        print(chunk, end='', flush=True)
+        print(chunk.content, end="")
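The change from `print(chunk, ...)` to `print(chunk.content, ...)` follows from the interface swap: the old `Ollama` LLM streamed plain strings, while `ChatOllama.stream()` yields `AIMessageChunk` objects whose text sits on `.content`. A short sketch, assuming a reachable server; the model name and prompt are illustrative:

```python
from langchain_ollama import ChatOllama

llm = ChatOllama(model="phi3.5:3.8b-mini-instruct-q4_K_M")

full = None
for chunk in llm.stream("Summarize streaming in one sentence."):
    print(chunk.content, end="", flush=True)        # text is on .content
    full = chunk if full is None else full + chunk  # chunks concatenate with +
print()
# full.content now holds the complete reply once the stream ends.
```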
@@ -67,7 +67,7 @@ class MainFrameController:
         response_stream = self.rater.get_response(text_data.text, text_data.result, confidence)
 
         for chunk in response_stream:
-            self.frame.output_textbox.insert("end", chunk)
+            self.frame.output_textbox.insert("end", chunk.content)
             self.frame.output_textbox.see("end")
             self.frame.update_idletasks()
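The controller applies the same `.content` fix inside a Tkinter update loop: each chunk is appended to the output textbox, the view is scrolled to the end, and `update_idletasks()` forces a repaint so the answer appears incrementally. A self-contained sketch of that pattern, with a fake generator standing in for `self.rater.get_response(...)`; the widget names mirror the diff, the surrounding controller setup is assumed:

```python
import tkinter as tk

root = tk.Tk()
output_textbox = tk.Text(root, wrap="word")
output_textbox.pack(fill="both", expand=True)

def fake_stream():
    # Stand-in for the streamed chunk contents.
    yield from ["Streaming ", "text ", "into ", "a ", "Text ", "widget."]

for part in fake_stream():
    output_textbox.insert("end", part)  # append at the end of the buffer
    output_textbox.see("end")           # keep the newest line visible
    root.update_idletasks()             # repaint between chunks

root.mainloop()
```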