From ba2789bd7d81618a42dc7f69706a7acfa591630a Mon Sep 17 00:00:00 2001
From: Christian Krinitsin
Date: Thu, 26 Jun 2025 07:33:21 +0000
Subject: add prompt-based classifier and first results with DeepSeek-R1:14b

---
 ollama/llm.py | 28 ----------------------------
 1 file changed, 28 deletions(-)
 delete mode 100755 ollama/llm.py

(limited to 'ollama/llm.py')

diff --git a/ollama/llm.py b/ollama/llm.py
deleted file mode 100755
index d6ea059c0..000000000
--- a/ollama/llm.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from ollama import chat, ChatResponse
-from re import sub
-from os import listdir, path
-
-model : str = "deepseek-r1:70b"
-directory : str = "../results/scraper/mailinglist"
-
-with open("preambel", "r") as file:
-    preambel = file.read()
-
-for name in listdir(directory):
-    with open(path.join(directory, name)) as file:
-        content = preambel + "\n" + file.read()
-
-    response : ChatResponse = chat(
-        model=model, messages=[
-            {
-                'role': 'user',
-                'content': content,
-            }
-        ]
-    )
-
-    no_think_response : str = sub(r'<think>(.|\n)*</think>\n\n', '', response.message.content)
-
-    print(no_think_response)
-    print("\n")
-    exit()
--
cgit 1.4.1