Commit · 92130e3
Create
1 Parent(s): ef8ed39

Files changed: InferenceServer.py (+86 -0)

InferenceServer.py (ADDED)
@@ -0,0 +1,86 @@
import os
import re
import sys
import glob
import random
import pickle
from random import shuffle

import numpy as np
import pysos
import torch
import uvicorn
import spacy
import spacy_dbpedia_spotlight  # registers the 'dbpedia_spotlight' spaCy component
from fastapi import FastAPI, File, UploadFile
from PIL import Image
from sentence_transformers import SentenceTransformer, util

app = FastAPI()

print('Loading Models...')
# Make sure the small English pipeline is available, then attach DBpedia Spotlight
# for entity linking of ingredient mentions.
os.system("python -m spacy download en_core_web_sm")
nlp = spacy.load('en_core_web_sm')
nlp.add_pipe('dbpedia_spotlight')

# Measurement words and common fillers that should not be highlighted as entities.
stop_words = set(['chopped', 'freshly ground', 'skinless', 'freshly squeezed', 'dash', 'powder', 'rice', 'ice', 'noodles', 'pepper', 'milk', 'ced', 'cheese', 'sugar', 'salt', 'pkt', 'minced', 'onion', 'onions', 'garlic', 'butter', 'slices', 'ounce', 'sauce', 'freshly', 'grated', 'teaspoon', 'cup', 'oz', '⁄', 'to', 'or', 'diced', 'into', 'pound', 'dried', 'water', 'about', 'whole', 'small', 'vegetable', 'inch', 'tbsp', 'cooked', 'large', 'sliced', 'dry', 'optional', 'package', 'ounces', 'unsalted', 'lbs', 'green', 'flour', 'for', 'wine', 'crushed', 'drained', 'lb', 'frozen', 'tsp', 'finely', 'medium', 'tablespoon', 'tablespoons', 'juice', 'shredded', 'can', 'minced', 'fresh', 'cut', 'pieces', 'in', 'thinly', 'of', 'extract', 'teaspoons', 'ground', 'and', 'cups', 'peeled', 'taste', 'ml', 'lengths'])

# CLIP model for image/text embeddings, precomputed label embeddings,
# and the recipe lookup tables.
model = SentenceTransformer('clip-ViT-B-32')
with open("./Pretrained/labels.pkl", 'rb') as fIn:
    labels = pickle.load(fIn)
emb_filename = './Pretrained/food_embeddings.pkl'
text_emb = torch.load(emb_filename, map_location=torch.device('cpu'))
food2id = pysos.Dict("./Pretrained/food2id")
id2recipe = pysos.Dict("./Pretrained/id2recipe")


@app.get("/")
def read_root():
    return {"message": "WhatTheFood !"}


@app.get("/{food_input}")
def get_food(food_input: str):
    # food_input is treated as the path of an image readable by the server process.
    results = detect_food(food_input, 3)
    food_recognised, score = results[0]

    food_id = food2id[food_recognised]
    recipe_name = food_recognised.title()
    ingredients_list = id2recipe[food_id]['ingredients']
    highlighted_ingredients = get_spacy_dbpedia_highlights(ingredients_list)
    recipe = id2recipe[food_id]['instructions']
    dataset = " " + id2recipe[food_id]['dataset']
    nutritional_facts = id2recipe[food_id]['nutrition_facts']
    source = id2recipe[food_id]['recipesource']

    recipe_obj = {}
    recipe_obj['recipe_name'] = recipe_name
    recipe_obj['highlighted_ingredients'] = highlighted_ingredients
    recipe_obj['recipe'] = recipe
    recipe_obj['nutritional_facts'] = nutritional_facts
    recipe_obj['source'] = source

    return {"top3": results, "recipe": recipe_obj}


def get_spacy_dbpedia_highlights(ingredients):
    # Strip digits and punctuation before entity linking, but keep the raw string
    # so matched spans can be wrapped with highlight markup in place.
    raw_ingredients = ingredients
    ingredients = re.sub(r"[0-9,()/\-.]", "", ingredients)
    doc = nlp(ingredients)

    for ent in doc.ents:
        if ent.text.lower() not in stop_words and ent.text in raw_ingredients:
            replace_str = ('<mark style="color: green; background-color:yellow">'
                           ' <a href="' + ent.kb_id_ + '" target="_blank"> '
                           + ent.text + '</a> </mark>')
            raw_ingredients = raw_ingredients.replace(ent.text, replace_str)
    return raw_ingredients


def detect_food(query, k=1):
    # Encode the query image with CLIP and return the k closest food labels
    # from the precomputed text embeddings.
    print(os.system("pwd"))
    query_emb = model.encode(Image.open(query), convert_to_tensor=True, show_progress_bar=False)
    hits = util.semantic_search(query_emb, text_emb, top_k=k)[0]
    results = []
    for i, hit in enumerate(hits):
        results.append((labels[hit['corpus_id']], hit['score']))
        if i > 2:
            break
    return results
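
A minimal way to exercise this server locally might look like the sketch below. The port, the client-side requests dependency, and the sample filename pizza.jpg are assumptions for illustration, not part of the commit; the /{food_input} route opens the given path with PIL on the server side, so the image must exist in the server's working directory.

# Minimal local smoke test (assumptions: the server was started with
#   uvicorn InferenceServer:app --port 8000
# and an image file named "pizza.jpg" exists in the server's working directory).
import requests

resp = requests.get("http://localhost:8000/pizza.jpg")
payload = resp.json()
print(payload["top3"])                   # the three best [label, score] pairs from CLIP retrieval
print(payload["recipe"]["recipe_name"])  # recipe looked up for the top-scoring label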