WebashalarForML committed
Commit e9368dc • 1 Parent(s): 37ffd75
Update app.py
app.py CHANGED
@@ -86,7 +86,7 @@ def remove_file():
 
 @app.route('/process', methods=['POST'])
 def process_file():
-    uploaded_files = session.get('uploaded_files', [])
+    uploaded_files = session.get('uploaded_files', [])
     if not uploaded_files:
         flash('No files selected for processing')
         logging.warning("No files selected for processing")
@@ -97,6 +97,8 @@ def process_file():
     logging.info(f"Processing files: {file_paths}")
 
     extracted_text = {} # Initialize extracted_text
+    processed_Img = {} # Initialize processed_Img to avoid the UnboundLocalError
+
     try:
         # Extract text from all images
         extracted_text, processed_Img = extract_text_from_images(file_paths, RESULT_FOLDER)
@@ -114,24 +116,26 @@ def process_file():
         logging.error(f"Error during LLM processing: {e}")
         logging.info("Running backup model...")
 
-
-
-        # Run the backup model in case of an exception
+        LLMdata = {} # Ensure LLMdata is initialized
+
         if extracted_text:
             text = json_to_llm_str(extracted_text)
             LLMdata = NER_Model(text)
             logging.info(f"NER model data: {LLMdata}")
         else:
             logging.warning("No extracted text available for backup model")
-
+
+    # Processing results
     cont_data = process_extracted_text(extracted_text)
     logging.info(f"Contextual data: {cont_data}")
 
-    # Storing the parsed results
+    # Storing the parsed results
     processed_data = process_resume_data(LLMdata, cont_data, extracted_text)
+    logging.info(f"Processed data: {processed_data}")
+
+    # Store processed data and images in session
     session['processed_data'] = processed_data
-    session['processed_Img'] = processed_Img
-    session.modified = True # Ensure session is updated
+    session['processed_Img'] = processed_Img # This will be either empty or populated from the try block
     flash('Data processed and analyzed successfully')
     logging.info("Data processed and analyzed successfully")
     return redirect(url_for('result'))
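Taken together, the commit applies a standard pattern: bind every name that is read after (or inside) the except block before entering the try, so a failure partway through extraction or LLM processing cannot surface later as an UnboundLocalError. Below is a minimal, self-contained sketch of that pattern, not the Space's actual code; primary_model and backup_model are hypothetical stand-ins for the primary LLM call and the NER_Model backup used in app.py.

import logging

logging.basicConfig(level=logging.INFO)

def primary_model(text):
    # Hypothetical stand-in for the primary LLM call; fails on purpose here.
    raise RuntimeError("LLM unavailable")

def backup_model(text):
    # Hypothetical stand-in for a backup such as NER_Model(); returns a dict.
    return {"entities": text.split()}

def process(extracted_text):
    # Initialize everything that is used after the try/except up front,
    # mirroring the commit's extracted_text / processed_Img / LLMdata defaults.
    LLMdata = {}
    try:
        LLMdata = primary_model(" ".join(extracted_text.values()))
    except Exception as e:
        logging.error(f"Error during LLM processing: {e}")
        logging.info("Running backup model...")
        if extracted_text:
            LLMdata = backup_model(" ".join(extracted_text.values()))
        else:
            logging.warning("No extracted text available for backup model")
    # LLMdata is always bound here, even if both branches above were skipped.
    return LLMdata

if __name__ == "__main__":
    print(process({"resume1.png": "Jane Doe Python Flask"}))

Run as-is, the primary call raises, the backup fills LLMdata, and nothing downstream touches an unbound name; with an empty mapping of extracted text, the function still returns the initialized {} instead of crashing.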