import logging
import os.path
import sys

from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.ollama import Ollama
from rich import print
from rich.console import Console
from rich.theme import Theme

# Send llama_index log output to stdout; basicConfig already attaches a
# stdout handler, so adding a second StreamHandler would print every log
# line twice.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

# Rich theme used to style the printed queries and responses.
custom_theme = Theme({
    "title": "bold white on orchid1",
    "text": "dim chartreuse1",
})
console = Console(theme=custom_theme)

# Embed locally with a HuggingFace model instead of the OpenAI default.
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5")
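# Note: the first run downloads the bge-base-en-v1.5 weights from the
# Hugging Face Hub, so it needs network access and a little patience.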

# Generate answers with a local phi3 model served by Ollama; the long
# timeout leaves room for slow CPU inference.
Settings.llm = Ollama(model="phi3", request_timeout=360.0)
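# Note: this assumes an Ollama server is running locally (by default at
# http://localhost:11434) and that the model was pulled beforehand,
# e.g. with `ollama pull phi3`.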

# Build the index on the first run and persist it; later runs reload it.
PERSIST_DIR = "./storage"
if not os.path.exists(PERSIST_DIR):
    # Load the documents from ./data and embed them into a vector index.
    documents = SimpleDirectoryReader("data").load_data()
    index = VectorStoreIndex.from_documents(documents)
    # Persist the index so the next run can skip re-embedding.
    index.storage_context.persist(persist_dir=PERSIST_DIR)
else:
    # Reload the previously persisted index from disk.
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)
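# Caveat: the index is rebuilt only when ./storage is missing, so if the
# files under ./data change, delete ./storage to force re-embedding.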

# Wrap the index in a query engine for question answering over the documents.
query_engine = index.as_query_engine()
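# One optional tweak (not in the original script): as_query_engine accepts
# similarity_top_k to control how many chunks are retrieved per query, e.g.:
#
#     query_engine = index.as_query_engine(similarity_top_k=3)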

queries = [
    "list the 3 main topics the author covers in this text",
]

# Print each query with the "title" style, then the model's answer.
for query in queries:
    console.print(query, style="title")
    response = query_engine.query(query)
    print(
        f"[italic chartreuse1 on grey7]{response}[/italic chartreuse1 on grey7]\n"
    )
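# Optional follow-up (not in the original script): a Response also carries
# the retrieved chunks, useful for checking what grounded each answer, e.g.:
#
#     for node_with_score in response.source_nodes:
#         print(node_with_score.score, node_with_score.node.get_content()[:200])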