import requests
import json
import networkx as nx
import matplotlib.pyplot as plt
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
from lib.memory import *
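# Note: MemoriaRobotNLP is assumed to be provided by lib.memory (not shown here). Based on
# how it is used below, it is assumed to expose agregar_concepto(nombre, [(valor, peso)])
# and obtener_conceptos_acotados(n); adjust these calls if the actual interface differs.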
class APIRequester:
    def __init__(self):
        pass

    def make_request(self, url):
        response = requests.get(url)
        if response.status_code == 200:
            return response.json()
        else:
            return None
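# Usage sketch: APIRequester().make_request(url) returns the parsed JSON body on HTTP 200
# and None on any other status, so callers should check for None before using the result.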
class Grapher:
    def __init__(self, memoria_nlp, threshold=70):
        self.threshold = threshold
        self.graph = nx.Graph()
        self.memoria_nlp = memoria_nlp

    def parse_json(self, data, parent=None):
        if isinstance(data, dict):
            for key, value in data.items():
                if parent:
                    self.graph.add_node(parent)
                    self.graph.add_node(key)
                    self.graph.add_edge(parent, key)
                    # Contract the new key into an existing node if they are similar enough.
                    for node in list(self.graph.nodes()):
                        if node not in (key, parent) and fuzz.ratio(str(node), key) >= self.threshold:
                            self.graph = nx.contracted_nodes(self.graph, node, key, self_loops=False)
                            break
                self.memoria_nlp.agregar_concepto("keys", [(key, 1.0)])
                if isinstance(value, (dict, list)):
                    self.parse_json(value, key)
                else:
                    valor = str(value)
                    self.memoria_nlp.agregar_concepto("values", [(valor, 1.0)])
                    if parent:
                        self.graph.add_node(valor)
                        self.graph.add_edge(key, valor)
                        # Contract the new value node into an existing similar node.
                        for node in list(self.graph.nodes()):
                            if node != valor and fuzz.ratio(str(node), valor) >= self.threshold:
                                self.graph = nx.contracted_nodes(self.graph, node, valor, self_loops=False)
                                break
        elif isinstance(data, list):
            for item in data:
                self.parse_json(item, parent)
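    # Illustration of the merge behaviour above (not part of the original code): with the
    # default threshold of 70, two keys such as "title" and "titles" (fuzz.ratio ≈ 91)
    # would be contracted into a single node, while unrelated keys like "id" and "body"
    # would stay separate.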
    def draw_graph(self):
        pos = nx.spring_layout(self.graph, seed=42)
        nx.draw(self.graph, pos, with_labels=True, node_size=700, node_color='skyblue', font_size=10, font_weight='bold')
        plt.title("JSON Graph")
        plt.show()
    def guardar_en_memoria(self):
        keys = self.memoria_nlp.obtener_conceptos_acotados(100)
        with open("memoria.json", "w") as file:
            json.dump(keys, file)
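    # Assumption: obtener_conceptos_acotados(100) is taken to return a JSON-serialisable
    # structure (e.g. a list or dict of the strongest concepts); if MemoriaRobotNLP returns
    # something else, json.dump above will need an adapter.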
    def buscar_nodo(self, nodo):
        # Pass a plain list so fuzzywuzzy treats the nodes as choices rather than a mapping.
        return process.extractOne(nodo, list(self.graph.nodes()))[0]

    def eliminar_nodo(self, nodo):
        self.graph.remove_node(nodo)

    def agregar_nodo(self, nodo):
        self.graph.add_node(nodo)

    def distancia_entre_nodos(self, nodo1, nodo2):
        return nx.shortest_path_length(self.graph, source=nodo1, target=nodo2)

    def ruta_entre_nodos(self, nodo1, nodo2):
        return nx.shortest_path(self.graph, source=nodo1, target=nodo2)
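    # Note: nx.shortest_path_length / nx.shortest_path raise NodeNotFound if either node is
    # missing and NetworkXNoPath if the nodes lie in disconnected components, so callers may
    # want to wrap these helpers in a try/except.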
    def unir_grafos(self, otro_grafo, umbral):
        for nodo in otro_grafo.nodes():
            # Try to map the incoming node onto an existing, sufficiently similar node.
            resultado = process.extractOne(str(nodo), list(self.graph.nodes()))
            if resultado and fuzz.ratio(str(nodo), str(resultado[0])) >= umbral:
                destino = resultado[0]
            else:
                self.graph.add_node(nodo)
                destino = nodo
            # Carry the incoming node's edges over to the node it was mapped onto.
            for vecino in otro_grafo.neighbors(nodo):
                self.graph.add_edge(destino, vecino)
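    # Design note: neighbours from the other graph are attached as-is; they are not
    # themselves fuzzy-matched against existing nodes, which a stricter merge might also do.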
if __name__ == "__main__":
    # Example usage
    memoria_nlp = MemoriaRobotNLP(max_size=100)
    json_parser = Grapher(memoria_nlp)
    api_requester = APIRequester()

    url = "https://jsonplaceholder.typicode.com/posts"
    data = api_requester.make_request(url)

    if data:
        # A root label is passed so that top-level keys have a parent to attach to;
        # without one, parse_json only records concepts and adds no nodes.
        json_parser.parse_json(data, "root")
        json_parser.draw_graph()

        otro_parser = Grapher(MemoriaRobotNLP(max_size=100))
        otro_parser.parse_json({"id": 101, "title": "New Title", "userId": 11}, "root")

        print("Merging graphs...")
        json_parser.unir_grafos(otro_parser.graph, umbral=80)

        print("Merged graph:")
        json_parser.draw_graph()

        json_parser.guardar_en_memoria()
    else:
        print("Error making the API request.")