import gradio as gr
import torch
from gradio.inputs import Textbox

from paper_rec import recommender, etl
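
# Main inference function: embeds the user's pasted titles/abstracts and
# returns the top matching recent papers from the recommender's index.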
def recommend(txt):
    if not txt.strip():
        return {"msg": "no recommendations available for the input text."}
    top_n = 10
    # model user preferences from the pasted titles/abstracts
    cleaned_txt = etl.clean_text(txt)
    sentences = etl.get_sentences_from_txt(cleaned_txt)
    rec = recommender.Recommender()
    # load data and model from the Hugging Face Hub
    rec.load_data()
    rec.load_model()
    # compute the user embedding
    user_embedding = torch.from_numpy(rec.embedding(sentences))
    # get candidate recommendations based on user preferences
    recs = rec.recommend(user_embedding, top_k=100)
    # deduplicate and keep only the top-n papers
    recs_output = []
    seen_paper = set()
    for p in recs:
        if p["id"] not in seen_paper:
            recs_output.append({
                "id": p["id"],
                "title": p["title"],
                "authors": p["authors"],
                "abstract": p["abstract"],
            })
            seen_paper.add(p["id"])
        if len(recs_output) >= top_n:
            break
    # report the top-n recommendations
    return recs_output
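
# Example call (assumes the paper_rec data and model can be loaded, e.g. from the Hugging Face Hub):
#   recs = recommend("Titles and abstracts of papers I like ...")
#   # -> list of up to 10 dicts with "id", "title", "authors", and "abstract" keys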

def inputs():
    # placeholder for custom input components; not used by the Interface below
    pass
title = "Interactive demo: paper-rec" | |
description = "Demo that recommends you what recent papers in AI/ML to read next based on what you like." | |
iface = gr.Interface(
    fn=recommend,
    inputs=[Textbox(lines=10, placeholder="Titles and abstracts from papers you like",
                    default="", label="Sample of what I like <3")],
    outputs="json",
    title=title,
    description=description,
    layout="vertical",
)
iface.launch()