Compare commits
No commits in common. "main" and "tests" have entirely different histories.
@@ -40,4 +40,4 @@ To use the Fake News Checker application, follow these steps:
 
 ## License
 
-This application is licensed under the Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International. See the [LICENSE](LICENSE) file for more details.
+This application is licensed under the MIT license. See the [LICENSE](LICENSE) file for more details.
Binary file not shown. (Image removed; size before: 118 KiB.)
@@ -1,5 +1,4 @@
-from langchain_ollama import ChatOllama
-from langchain_core.messages import AIMessage
+from langchain_community.llms import Ollama
 import os
 
 class ArticleRater:
@@ -7,8 +6,6 @@ class ArticleRater:
         self.client = "https://ai.fabelous.app/v1/ollama/generic"
         self.token = self._get_token()
         self.headers = {"Authorization": f"Token {self.token}"}
-        self.model = "phi3.5:3.8b-mini-instruct-q4_K_M"
-        self.llm = ChatOllama(model=self.model, client_kwargs={'headers': self.headers}, base_url=self.client)
 
     def _get_token(self):
         if os.path.exists("Token/Token.txt"):
@@ -18,16 +15,22 @@ class ArticleRater:
         return None
 
     def get_response(self, article, result, confidence):
-        messages = [
-            ("system", """Ein Mashine Learning Model hat einen Text bewertet, ob es sich um FakeNews handelt oder um Reale News.
-            Erkläre in 1-2 Sätzen warum dieses Modell zu dieser Entscheidung.
-            DU SOLLST KEINE ÜBERSCHRIFTEN oder ähnliches ERKLÄREN. Du erhählst einen TEXT und sollst erklären wie das RESULTAT zustande kam"""),
-            ("human", f"{article}, result: {result}, confidence {confidence}")
-        ]
+        ollama_params = {
+            "base_url": self.client,
+            "model": "mistral-nemo:12b-instruct-2407-q8_0",
+            "headers": self.headers,
+            "system": """Ein Mashine Learning Model hat einen Text bewertet, ob es sich um FakeNews handelt oder um Reale News.
+            Erkläre in 1-2 Sätzen warum dieses Modell zu dieser Entscheidung.
+            DU SOLLST KEINE ÜBERSCHRIFTEN oder ähnliches ERKLÄREN. Du erhählst einen TEXT und sollst erklären wie das RESULTAT zustande kam"""
+        }
+
+        message = (f"{article}, result: {result}, confidence {confidence}")
+
+        # Initialize the Ollama object with the prepared parameters
+        llm = Ollama(**ollama_params)
 
         # Return the response stream
-        return self.llm.stream(messages)
+        return llm.stream(message)
 
 # Usage
 if __name__ == "__main__":
@@ -40,4 +43,4 @@ if __name__ == "__main__":
     # Capture the stream response
     response_stream = article_rater.get_response(article, result, confidence=confidence)
     for chunk in response_stream:
-        print(chunk.content, end="")
+        print(chunk, end='', flush=True)
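Taken together, the ArticleRater changes on the tests branch drop the chat-style ChatOllama client in favor of the community Ollama LLM: the prompt becomes a plain string and the stream yields str chunks rather than message objects with a .content attribute. A minimal sketch of the new calling pattern (endpoint, model name, and header layout are copied from the diff; the token value is a placeholder, and it is assumed that the installed langchain_community version accepts these constructor kwargs):

```python
from langchain_community.llms import Ollama

# Values mirror the diff; the Authorization token below is a placeholder.
llm = Ollama(
    base_url="https://ai.fabelous.app/v1/ollama/generic",
    model="mistral-nemo:12b-instruct-2407-q8_0",
    headers={"Authorization": "Token <your-token>"},
    system="Erkläre in 1-2 Sätzen, wie das Resultat zustande kam.",  # abbreviated system prompt
)

# The LLM interface streams plain strings, so no .content access is needed.
for chunk in llm.stream("Some article text, result: REAL, confidence 0.93"):
    print(chunk, end="", flush=True)
```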
@@ -67,7 +67,7 @@ class MainFrameController:
         response_stream = self.rater.get_response(text_data.text, text_data.result, confidence)
 
         for chunk in response_stream:
-            self.frame.output_textbox.insert("end", chunk.content)
+            self.frame.output_textbox.insert("end", chunk)
             self.frame.output_textbox.see("end")
             self.frame.update_idletasks()
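The controller loop changes only in what it inserts: because get_response now streams strings, each chunk goes straight into the textbox. A self-contained sketch of that pattern using a plain tkinter.Text (the project's customtkinter widgets are assumed to behave the same way; the sample chunks are made up):

```python
import tkinter as tk

root = tk.Tk()
output_textbox = tk.Text(root)
output_textbox.pack()

def show_stream(chunks):
    # Chunks are plain strings now, so they are inserted directly (no chunk.content).
    for chunk in chunks:
        output_textbox.insert("end", chunk)
        output_textbox.see("end")
        root.update_idletasks()

show_stream(iter(["This text was rated ", "REAL ", "because ..."]))
root.mainloop()
```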
@@ -3,6 +3,7 @@ import os
 import pytest
 from unittest.mock import MagicMock, patch
 
+# Add the src directory to the Python path
 src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'src'))
 sys.path.insert(0, src_dir)
 
@@ -33,6 +34,7 @@ def test_init(controller):
     assert isinstance(controller.db, MagicMock)
     assert isinstance(controller.rater, MagicMock)
 
+
 def test_get_text_data(controller):
     controller.frame.entry_url.get.return_value = "https://example.com"
     controller.frame.input_textbox.get.return_value = "Sample text"
@@ -63,19 +65,13 @@ def test_press_check_button(controller, result, expected_result_color, confidenc
     controller._predict = MagicMock(return_value=text_data)
     controller._add_to_db = MagicMock()
     controller.update_provider_list = MagicMock()
-
-    # Create a mock response chunk with a content attribute
-    class MockChunk:
-        def __init__(self, content):
-            self.content = content
-
-    # Mocking get_response to return a list of MockChunk instances
-    mock_response = [MockChunk("Sample response")]
-    controller.rater.get_response = MagicMock(return_value=mock_response)
+    controller.rater.get_response = MagicMock(return_value=iter(["Sample response"]))
 
     # Mock frame and its subcomponents
     controller.frame = MagicMock()
+    controller.frame.result_label = MagicMock()
     controller.frame.result_label.configure = MagicMock()
+    controller.frame.confidence_label = MagicMock()
     controller.frame.confidence_label.configure = MagicMock()
     controller.frame.output_textbox = MagicMock()
     controller.frame.output_textbox.insert = MagicMock()
@@ -88,9 +84,8 @@ def test_press_check_button(controller, result, expected_result_color, confidenc
     controller.frame.confidence_label.configure.assert_any_call(text=f"{confidence * 100:.2f}%")
     controller.frame.confidence_label.configure.assert_any_call(fg_color=expected_confidence_color)
 
-    # Additional assertion to verify that the output textbox is updated with the response content
-    controller.frame.output_textbox.insert.assert_called_with("end", mock_response[0].content)
+    # Additional assertion to verify that the output textbox is updated
+    controller.frame.output_textbox.insert.assert_called_with("end", "Sample response")
 
-
 def test_predict(controller):
     text_data = TextData(text="Sample text")
@@ -107,13 +102,15 @@ def test_predict(controller):
     assert result.is_fake_news == False
 
 def test_add_to_db(controller):
+    # Adjust the fields to match the actual insert_data arguments
     text_data = TextData(url="https://example.com", provider="Example Provider", is_fake_news=False)
     controller._add_to_db(text_data)
     controller.db.insert_data.assert_called_with(
         url="https://example.com",
-        anbieter="example.com",
+        anbieter="example.com",  # Adjusted to match actual expected field name
         is_fake_news=False
     )
 
 
 if __name__ == "__main__":
     pytest.main([__file__])
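Since the mocked get_response now returns an iterator of strings, the tests no longer need a helper chunk class with a .content attribute. A standalone sketch of that mocking pattern (function and variable names here are illustrative, not the project's):

```python
from unittest.mock import MagicMock

def consume(rater, textbox):
    # Mirrors the controller loop after the change: chunks are plain strings.
    for chunk in rater.get_response("article", "REAL", 0.9):
        textbox.insert("end", chunk)

def test_stream_is_inserted_verbatim():
    rater = MagicMock()
    rater.get_response = MagicMock(return_value=iter(["Sample response"]))
    textbox = MagicMock()

    consume(rater, textbox)

    textbox.insert.assert_called_with("end", "Sample response")
```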