Created
May 19, 2025 20:41
-
-
Save Delivator/15a99557756542a126fe3c63dbb4d86c to your computer and use it in GitHub Desktop.
Summarize multiple emails using a local AI model powered by Ollama
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os

from ollama import chat
from ollama import ChatResponse

# Model to use for summarization.
# mistral-nemo:12b is used here because it runs well on an RTX 4080 with
# 16 GB of VRAM and has a relatively large context window of 128k tokens.
model = "mistral-nemo:12b"

# Directory containing the emails.
# They were exported from Thunderbird using the "ImportExportTools NG" add-on.
emails_dir = "/home/david/Desktop/Email-Export/"

# scandir() returns an iterator holding an OS handle; the `with` block closes
# it deterministically. Only regular files are kept (a stray subdirectory in
# the export folder would otherwise crash open() below), sorted by name for a
# stable processing order.
with os.scandir(emails_dir) as entries:
    emails = sorted((e for e in entries if e.is_file()), key=lambda e: e.name)

# Prompt in German, subject to change depending on the user.
prompt = "Schreibe mir eine Zusammenfassung der E-Mail. Achte darauf, dass du die wichtigsten Punkte und Informationen hervorhebst. Verwende eine klare und präzise Sprache. Halte die Zusammenfassung kurz und bündig.\n\n"

# Built outside the f-string: a backslash inside an f-string replacement field
# ({'\n' * 3}) is a SyntaxError on Python < 3.12.
separator = "\n" * 3 + "#" * 80

for email in emails:
    # Read the email content. Explicit UTF-8 avoids locale-dependent decoding;
    # errors="replace" keeps a single malformed message from aborting the run.
    with open(email.path, "r", encoding="utf-8", errors="replace") as f:
        email_content = f.read()

    # One user message: the instruction prompt followed by the raw email body.
    messages = [
        {
            'role': 'user',
            'content': prompt + email_content,
        },
    ]

    # Get the summary from the local model and print it with a visual divider.
    response: ChatResponse = chat(model, messages=messages)
    print(separator)
    print(f"Zusammenfassung von {email.path}\n")
    print(response.message.content)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment