import os

import gradio as gr
import openai
from transformers import pipeline
from haystack.document_stores import FAISSDocumentStore
from haystack.nodes import EmbeddingRetriever

from utils import is_climate_change_related, make_pairs, set_openai_api_key
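
# Zero-shot classifier used to decide whether a query is climate-related before retrieving documents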
classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")
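# The system prompt (role and content) is read from environment variables so it can be configured per deployment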
system_template = {"role": os.environ["role"], "content": os.environ["content"]}
def gen_conv(query: str, report_type, history=[system_template], ipcc=True):
    """Answer a user query, optionally grounding it in retrieved report passages.

    Args:
        query (str): the user's question.
        report_type (str): which index to search; "giec" / "giec only" restricts the search to the IPCC-only index, anything else uses the full index.
        history (list, optional): conversation so far, as OpenAI chat messages. Defaults to [system_template].
        ipcc (bool, optional): whether document retrieval is enabled. Defaults to True.

    Returns:
        tuple: (chat history as Gradio pairs, updated message list, sources string)
    """
if report_type == "giec":
document_store = FAISSDocumentStore.load(
index_path="./documents/climate_gpt_only_giec.faiss",
config_path="./documents/climate_gpt_only_giec.json",
)
else:
document_store = FAISSDocumentStore.load(
index_path="./documents/climate_gpt.faiss",
config_path="./documents/climate_gpt.json",
)
dense = EmbeddingRetriever(
document_store=document_store,
embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1",
model_format="sentence_transformers",
)
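    # Only retrieve supporting passages when retrieval is enabled and the query looks climate-related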
    retrieve = ipcc and is_climate_change_related(query, classifier)
    sources = ""
    messages = history + [
        {"role": "user", "content": query},
    ]
    if retrieve:
        docs = dense.retrieve(query=query, top_k=5)
        sources = "\n\n".join(
            [os.environ["sources"]]
            + [
                f"{d.meta['file_name']} Page {d.meta['page_number']}\n{d.content}"
                for d in docs
            ]
        )
        messages.append({"role": "system", "content": sources})
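    # Ask gpt-3.5-turbo to answer, given the conversation so far (plus retrieved passages, if any)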
    answer = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=0.2,
        # max_tokens=200,
    )["choices"][0]["message"]["content"]
    if retrieve:
        messages.pop()
        # answer = "(top 5 documents retrieved) " + answer
        sources = "\n\n".join(
            f"{d.meta['file_name']} Page {d.meta['page_number']}:\n{d.content}"
            for d in docs
        )
    else:
        sources = "No environmental report was used to provide this answer."
    messages.append({"role": "assistant", "content": answer})
    gradio_format = make_pairs([a["content"] for a in messages[1:]])
    return gradio_format, messages, sources
# Gradio
css_code = ".gradio-container {background-image: url('file=background.png');background-position: top right}"
with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
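    # Warm up the default document store and retriever when the app starts (gen_conv reloads the index per request)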
    document_store = FAISSDocumentStore.load(
        index_path="./documents/climate_gpt.faiss",
        config_path="./documents/climate_gpt.json",
    )
    dense = EmbeddingRetriever(
        document_store=document_store,
        embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1",
        model_format="sentence_transformers",
    )
    openai.api_key = os.environ["api_key"]
gr.Markdown("### Welcome to Climate GPT π ! ")
gr.Markdown(
"""
Climate GPT is an interactive exploration tool designed to help you easily find relevant information based on of Environmental reports such as IPCCs and ??.
IPCC is a United Nations body that assesses the science related to climate change, including its impacts and possible response options. The IPCC is considered the leading scientific authority on all things related to global climate change.
"""
)
gr.Markdown(
"**How does it work:** This Chatbot is a combination of two technologies. FAISS search applied to a vast amount of scientific climate reports and TurboGPT to generate human-like text from the part of the document extracted from the database."
)
gr.Markdown(
"β οΈ Warning: Always refer to the source (on the right side) to ensure the validity of the information communicated"
)
    # gr.Markdown("""### Ask me anything, I'm a climate expert""")
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot()
            state = gr.State([system_template])
            with gr.Row():
                ask = gr.Textbox(
                    show_label=False,
                    placeholder="Enter text and press enter",
                    sample_inputs=["Which country pollutes the most?"],
                ).style(container=False)
                print(f"Type from ask textbox {ask.type}")
        with gr.Column(scale=1, variant="panel"):
            gr.Markdown("### Sources")
            sources_textbox = gr.Textbox(
                interactive=False, show_label=False, max_lines=50
            )
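    # Submitting the question sends it, the selected report scope and the chat state to gen_conv,
    # which returns the updated chat display, the new state and the sources panel content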
    ask.submit(
        fn=gen_conv,
        inputs=[ask, gr.inputs.Dropdown(["giec only", "all"], default="all"), state],
        outputs=[chatbot, state, sources_textbox],
    )
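    # Let users provide their own OpenAI API key instead of the one configured in the environment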
    with gr.Accordion("Add your personal OpenAI API key", open=False):
        openai_api_key_textbox = gr.Textbox(
            placeholder="Paste your OpenAI API key (sk-...) and hit Enter",
            show_label=False,
            lines=1,
            type="password",
        )
    openai_api_key_textbox.change(set_openai_api_key, inputs=[openai_api_key_textbox])
    openai_api_key_textbox.submit(set_openai_api_key, inputs=[openai_api_key_textbox])
demo.launch()