upload app.py
app.py
ADDED
@@ -0,0 +1,55 @@
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification

# Authenticate with Hugging Face (only needed for private models)
# from huggingface_hub import login
# login("your_hugging_face_token_here")

# Load the pretrained model and tokenizer from the Hugging Face Hub
model_name = "distilbert-base-uncased-finetuned-sst-2-english"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Initialize the text-classification pipeline with the model
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)


def classify_text(text):
    """Classify a news snippet and return a human-readable assessment."""
    result = classifier(text)[0]
    label = result['label']
    score = result['score'] * 100  # convert the model confidence to a percentage

    # Map the raw label to a friendlier interpretation
    if label == "NEGATIVE":
        interpretation = "Fictional/Unreliable"
    else:
        interpretation = "Real/Trustworthy"

    # Build a short narrative from the label and the confidence score
    analysis = f"This article appears to be {interpretation}. "
    conjecture = f"I am {score:.2f}% confident in this assessment. "
    resolution = "It's always prudent to examine further and cross-check with other reliable sources."

    # Combine the pieces into a single readable verdict
    human_readable_output = f"{analysis}{conjecture}{resolution}"
    return human_readable_output


# Example articles for the interface; each entry is a one-element list: ["<news article snippet>"]
examples = [
    # Real news snippets
    ["Market Responds Favorably to Central Bank's Move on Interest Rates: In a decisive week for the financial markets, global stocks surged in response to the Central Bank's unexpected move to cut interest rates by 50 basis points. Experts argue that this measure will counter recent deflationary pressures, enhance consumer spending, and ultimately catalyze a much-needed recovery for the year."],
    ["Major Breakthrough in Renewable Energy with the World's Largest Ocean Turbine: Scientists have achieved a pivotal innovation in wind turbine engineering, inaugurating the world's largest floating wind turbine. Stated to power over 20,000 homes with clean, interminable power, this green initiative underlines a game-changing push towards attaining 2040's solid goal of a low-carbon future."],
    # Fictional news snippets
    ["Dinosaurs, Presumed Extinct, Found Vacationing in Bermuda: A trinity of a newsroom in Bermuda has unsettly brought the world to a gasp, spelling the tale of ten opal-tinted dinosaurs unpersuasively masquerading in guise across the country's remote east. Against all recent accordance, these exploratory frigate's portrayal, namely in their talk - postulated to originate from a covert warren hitherto anonymous to modern study - generated a deep rollick in the pool of beastly sociology and stately native tranquility."],
    ["Scientists Invent a 'Teleportation Device', Promising Human Commutes from Paris to Tokyo in Seconds: Surging past a pantheon of sci-fi omniscience, today's manuscript from the Multiverse Tinkerer papers spells out a term that witnessed what the previous generations fathomed as providence: the drawing board for a predawn 'Hub-linkage Teleporter'. Affirming to command the focus of figure to the behest of time, this debutante appurtenance avows to break traditions by converging the Eiffel Tower and Tokyo Tower within a nictation's leap, jellifying glances if not the vast potboiler's comprehension of piebald solidity."],
]

iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(lines=2, placeholder="Input the news article here..."),
    outputs="text",
    title="Real or Fictional News Recognition",
    description="This demo uses a DistilBERT model fine-tuned on the SST-2 dataset to flag fake news, distinguishing real news from fiction. Choose one of the preloaded examples below or enter your own article.",
    examples=examples,
)

iface.launch()
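
For reference, the sentiment pipeline that classify_text wraps returns a list containing a single label/score dictionary. The minimal sketch below is not part of app.py; the input sentence is illustrative, and it simply shows the raw output the function interprets.

from transformers import pipeline

# Same model as in app.py; pipeline() accepts the model name as a string.
clf = pipeline("text-classification",
               model="distilbert-base-uncased-finetuned-sst-2-english")
print(clf("Central bank cuts interest rates and markets rally."))
# e.g. [{'label': 'POSITIVE', 'score': 0.99...}] -- the 'label' and 'score'
# keys are what classify_text maps to its human-readable verdict.

Running python app.py locally starts the Gradio server, served by default at http://127.0.0.1:7860; on a Hugging Face Space the app is launched automatically.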