Skip to content
This repository was archived by the owner on Nov 16, 2025. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,7 @@ run:
.PHONY: backend
backend:
python supervised/backend.py

.PHONY: summarize
summarize:
python supervised/summarize.py
17 changes: 16 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ scholarly = "^1.7.11"
supabase = "^2.12.0"
streamlit-supabase = "^0.5"
streamlit-extras = "^0.5.0"
ollama = "^0.4.7"


[build-system]
Expand Down
Binary file removed supervised/prof1.jpg
Binary file not shown.
Binary file removed supervised/prof2.jpg
Binary file not shown.
57 changes: 57 additions & 0 deletions supervised/summarize.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import List, Dict
import ollama
from supervised.cache import papers

class OllamaSummarizer:
    """Generate short ELI5-style one-sentence summaries of paper abstracts
    using a locally served Ollama chat model."""

    def __init__(self, model_name: str = "dolphin-mistral:latest"):
        # Name of the Ollama model used for every chat completion.
        self.model_name = model_name

    async def generate_summary(self, text: str) -> str:
        """Summarize a single abstract.

        Args:
            text: The paper abstract to summarize.

        Returns:
            The model's summary string, or "" if the request failed
            (failures are logged, not raised, so a batch keeps going).
        """
        prompt_template = (
            "Please provide a one sentence summary of the following academic paper abstract. "
            "15 words or less in an ELI5 manner.\n\n{text}\n\nSummary:"
        )
        prompt = prompt_template.format(text=text)

        try:
            # ollama.chat is a blocking call. asyncio.to_thread runs it on the
            # loop's shared default executor instead of creating and tearing
            # down a fresh ThreadPoolExecutor per request, and it avoids the
            # deprecated asyncio.get_event_loop() call inside a coroutine.
            response = await asyncio.to_thread(
                ollama.chat,
                model=self.model_name,
                messages=[{"role": "user", "content": prompt}],
            )
            return response["message"]["content"]
        except Exception as e:
            # Best effort: one failed summary yields "" rather than aborting
            # the whole batch in batch_summarize().
            print(f"Error generating summary: {str(e)}")
            return ""

    async def batch_summarize(self, texts: List[str]) -> List[str]:
        """Summarize many abstracts concurrently; results keep input order."""
        return await asyncio.gather(*(self.generate_summary(text) for text in texts))

    async def summarize_papers(self, papers: List[Dict]) -> List[Dict]:
        """Return shallow copies of *papers*, each with a "summary" key added.

        Each paper dict must have an "abstract" key; the inputs are not mutated.
        """
        summaries = await self.batch_summarize([paper["abstract"] for paper in papers])
        # {**paper, ...} is a shallow copy, same as paper.copy() + assignment.
        return [
            {**paper, "summary": summary}
            for paper, summary in zip(papers, summaries)
        ]

async def main():
    """Summarize the cached recent papers and print each title with its summary."""
    engine = OllamaSummarizer(model_name="llama2:latest")
    results = await engine.summarize_papers(papers["recent_papers"])

    for entry in results:
        print(f"Title: {entry['title']}")
        print(f"Summary: {entry['summary']}\n")

if __name__ == "__main__":
    asyncio.run(main())