cryptocalypse committed · Commit 8b82e6e · Parent(s): 0343ea2

clases refactor draft, I class

Files changed:
- app.py +3 -0
- lattice.py +30 -0
- lib/__pycache__/gematria.cpython-39.pyc +0 -0
- lib/__pycache__/torah.cpython-39.pyc +0 -0
- lib/events.py +122 -0
- lib/{grepher.py → grapher.py} +0 -0
- lib/latticegrid.py +47 -0
- lib/me.py +98 -0
- lib/memory.py +17 -13
- lib/pipes.py +70 -19
- lib/spell.py +91 -48
- lib/triggers.py +82 -0
app.py
CHANGED
@@ -9,6 +9,9 @@ from lib.ziruph import encrypt,decrypt
 from lib.entropy import *
 from torahcodes.resources.func.torah import *
 from lib.sonsofstars import *
+#from lib.memory import *
+#from lib.pipes import *
+
 
 ## UTILS
 import math
lattice.py
ADDED
@@ -0,0 +1,30 @@
+import matplotlib.pyplot as plt
+import numpy as np
+from matplotlib.patches import RegularPolygon  # pyplot itself has no RegularPolygon
+def draw_hexagonal_grid(rows, cols, text):
+    fig, ax = plt.subplots()
+    ax.set_aspect('equal')
+
+    # Create a hexagonal lattice
+    for i in range(rows):
+        for j in range(cols):
+            if i % 2 == 0:
+                y = i * np.sqrt(3) / 2
+                x = j * 3 / 2
+            else:
+                y = (i + 0.5) * np.sqrt(3) / 2
+                x = (j + 0.5) * 3 / 2
+            ax.add_patch(RegularPolygon((x, y), numVertices=6, radius=0.5, orientation=np.pi/6, fill=None))
+
+            # Add text inside each cell, cycling over the string so short texts do not overflow the grid
+            ax.text(x, y, text[(i * cols + j) % len(text)], ha='center', va='center')
+
+    ax.autoscale_view()
+    ax.axis('off')
+    plt.show()
+
+# Example usage
+text = "HELLO"
+rows = 5
+cols = 5
+draw_hexagonal_grid(rows, cols, text)
lib/__pycache__/gematria.cpython-39.pyc
DELETED
Binary file (7.07 kB)
lib/__pycache__/torah.cpython-39.pyc
DELETED
Binary file (8.29 kB)
lib/events.py
ADDED
@@ -0,0 +1,122 @@
+import pandas as pd
+import matplotlib.pyplot as plt
+from sklearn.model_selection import train_test_split
+from sklearn.ensemble import RandomForestRegressor
+from sklearn.metrics import mean_squared_error
+from scipy.stats import pearsonr
+import numpy as np
+from scipy.fft import fft
+
+
+class EventManager:
+    def __init__(self):
+        self.events = []
+
+    def add_event(self, event_title, time_dataset, probability_fork, quantity, common_tag_event_dataset,
+                  quantity_correlation_dataset, event_max_quantity, event_min_quantity, event_middle_quantity,
+                  sentiment_direction):
+        event = {
+            "event_title": event_title,
+            "time_dataset": time_dataset,
+            "probability_fork": probability_fork,
+            "quantity": quantity,
+            "common_tag_event_dataset": common_tag_event_dataset,
+            "quantity_correlation_dataset": quantity_correlation_dataset,
+            "event_max_quantity": event_max_quantity,
+            "event_min_quantity": event_min_quantity,
+            "event_middle_quantity": event_middle_quantity,
+            "sentiment_direction": sentiment_direction
+        }
+        self.events.append(event)
+
+    def remove_event(self, event_title):
+        self.events = [event for event in self.events if event['event_title'] != event_title]
+
+    def get_events_by_tag(self, tag):
+        return [event for event in self.events if tag in event['common_tag_event_dataset']]
+
+    def get_events_by_sentiment(self, sentiment):
+        return [event for event in self.events if event['sentiment_direction'] == sentiment]
+
+    def get_events_by_quantity_range(self, min_quantity, max_quantity):
+        return [event for event in self.events if min_quantity <= event['quantity'] <= max_quantity]
+
+    def predict_time_series(self, event_title):
+        event = next((event for event in self.events if event['event_title'] == event_title), None)
+        if event:
+            time_series = event['time_dataset']
+            # A time-series prediction model can be plugged in here;
+            # for example, a regression model such as scikit-learn's RandomForestRegressor
+            X = np.arange(len(time_series)).reshape(-1, 1)
+            y = np.array(time_series)
+            X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+            model = RandomForestRegressor()
+            model.fit(X_train, y_train)
+            predictions = model.predict(X_test)
+            return predictions
+        else:
+            return None
+
+    def plot_event_parameters_over_time(self, event_title):
+        event = next((event for event in self.events if event['event_title'] == event_title), None)
+        if event:
+            time_series = event['time_dataset']
+            plt.plot(time_series)
+            plt.xlabel('Time')
+            plt.ylabel('Value')
+            plt.title('Parameters of event "{}" over time'.format(event_title))
+            plt.show()
+
+    def plot_prediction(self, event_title):
+        predictions = self.predict_time_series(event_title)
+        if predictions is not None:
+            plt.plot(predictions, label='Prediction')
+            plt.xlabel('Time')
+            plt.ylabel('Value')
+            plt.title('Prediction for event "{}"'.format(event_title))
+            plt.legend()
+            plt.show()
+
+    def check_correlation(self, event_title1, event_title2):
+        event1 = next((event for event in self.events if event['event_title'] == event_title1), None)
+        event2 = next((event for event in self.events if event['event_title'] == event_title2), None)
+        if event1 and event2:
+            correlation, _ = pearsonr(event1['quantity_correlation_dataset'], event2['quantity_correlation_dataset'])
+            return correlation
+        else:
+            return None
+
+    def fourier_transform(self, event_title):
+        event = next((event for event in self.events if event['event_title'] == event_title), None)
+        if event:
+            time_series = event['time_dataset']
+            transformed_data = fft(time_series)
+            return transformed_data
+        else:
+            return None
+
+
+# Example usage
+event_manager = EventManager()
+
+# Add events
+event_manager.add_event("Evento 1", [1, 2, 3, 4, 5], 0.8, 100, ["tag1", "tag2"], [0.1, 0.2, 0.3, 0.4, 0.5],
+                        150, 50, 100, "good when up")
+event_manager.add_event("Evento 2", [2, 4, 6, 8, 10], 0.6, 200, ["tag2", "tag3"], [0.2, 0.4, 0.6, 0.8, 1.0],
+                        250, 150, 200, "bad when down")
+
+# Run the time-series prediction and plot it
+event_manager.plot_event_parameters_over_time("Evento 1")
+event_manager.plot_prediction("Evento 1")
+
+# Check the correlation between two events
+correlation = event_manager.check_correlation("Evento 1", "Evento 2")
+if correlation is not None:
+    print("Correlation between Evento 1 and Evento 2:", correlation)
+else:
+    print("One of the events does not exist.")
+
+# Fourier transform
+transformed_data = event_manager.fourier_transform("Evento 1")
+print("Fourier transform of Evento 1:", transformed_data)
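Note: scipy.stats.pearsonr, as used by check_correlation, requires the two quantity_correlation_dataset lists to have the same length (and at least two points). A minimal, hypothetical guard one might wrap around the call is sketched below; the helper name safe_correlation is illustrative and not part of the commit.

# Hypothetical guard around EventManager.check_correlation (not in the commit):
# pearsonr needs equal-length inputs with at least two samples.
def safe_correlation(manager, title1, title2):
    e1 = next((e for e in manager.events if e["event_title"] == title1), None)
    e2 = next((e for e in manager.events if e["event_title"] == title2), None)
    if not e1 or not e2:
        return None
    a = e1["quantity_correlation_dataset"]
    b = e2["quantity_correlation_dataset"]
    if len(a) != len(b) or len(a) < 2:
        return None
    return manager.check_correlation(title1, title2)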
lib/{grepher.py → grapher.py}
RENAMED
File without changes
lib/latticegrid.py
CHANGED
@@ -0,0 +1,47 @@
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.patches import RegularPolygon
+
+
+## set text over lattice grid
+def hex_lattice(text, size=1, figsize=(8, 8)):
+    fig, ax = plt.subplots(figsize=figsize)
+    ax.set_aspect('equal')
+    ax.axis('off')
+
+    # Define hexagonal lattice parameters
+    radius = size * np.sqrt(3) / 2
+    x_offset = size * 1.5
+    y_offset = size * np.sqrt(3)
+
+    # Function to plot a hexagon
+    def hexagon(x, y, color='white'):
+        hexagon = RegularPolygon((x, y), numVertices=6, radius=radius, orientation=np.pi/2, facecolor=color, edgecolor='black')
+        ax.add_patch(hexagon)
+
+    # Generate lattice grid
+    rows = len(text)
+    cols = max(len(row) for row in text)
+    for r in range(rows):
+        for c in range(cols):
+            x = c * x_offset
+            y = r * y_offset
+            if r % 2 == 1:
+                x += x_offset / 2
+            if r < len(text) and c < len(text[r]):
+                hexagon(x, y, color='lightblue')
+                ax.text(x, y, text[r][c], ha='center', va='center', fontsize=12)
+
+    plt.show()
+
+
+if __name__ == "__main__":
+
+    # Example usage:
+    text_to_display = [
+        ['A', 'B', 'C', 'D'],
+        ['E', 'F', 'G'],
+        ['H', 'I', 'J', 'K', 'L']
+    ]
+
+    hex_lattice(text_to_display, size=1, figsize=(10, 8))
lib/me.py
ADDED
@@ -0,0 +1,98 @@
+from lib.memory import *
+from lib.grapher import *
+from lib.pipes import *
+from lib.entropy import *
+
+class I:
+    def __init__(self, frases_yo, preferencias, propiedades_persona):
+        self.frases_yo = frases_yo
+        self.preferencias = preferencias
+        self.propiedades_persona = propiedades_persona
+        self.dopamina = 0.0
+
+    def obtener_paths_grafo(self, grafo_ngx):
+        # Obtain the paths of an ngx graph
+        pass
+
+    def crear_circuito_logico(self):
+        # Build a logic circuit with a specific algorithm
+        pass
+
+    def tomar_decision_sentimiento(self, pipa_sentimiento):
+        # Make a boolean decision from a sentiment-analysis pipe
+        pass
+
+    def hacer_predicciones_texto(self, texto):
+        # Predict future text by similarity
+        pass
+
+    def agregar_preferencia(self, preferencia):
+        # Add an entry to the preferences dataset
+        self.preferencias.append(preferencia)
+
+    def agregar_frase_yo(self, frase):
+        # Add a phrase to the "yo" phrases dataset
+        self.frases_yo.append(frase)
+
+    def eliminar_preferencia(self, preferencia):
+        # Remove an entry from the preferences dataset
+        if preferencia in self.preferencias:
+            self.preferencias.remove(preferencia)
+
+    def eliminar_frase_yo(self, frase):
+        # Remove a phrase from the "yo" phrases dataset
+        if frase in self.frases_yo:
+            self.frases_yo.remove(frase)
+
+    def generar_pregunta(self, prompt):
+        # Generate a question about a prompt
+        pregunta = prompt + " ¿Qué opinas sobre esto?"
+        return pregunta
+
+    def responder_pregunta(self, pregunta):
+        # Answer a question
+        respuesta = "No estoy seguro de qué opinar sobre eso."
+        return respuesta
+
+    def discriminar_y_agregar(self, informacion, dataset):
+        # Discriminate and route information into the datasets
+        if "yo" in informacion.lower():
+            self.agregar_frase_yo(informacion)
+        elif "preferencia" in informacion.lower():
+            self.agregar_preferencia(informacion)
+        elif "propiedad" in informacion.lower():
+            # Logic to update the person's properties could go here
+            pass
+        else:
+            # Other kinds of information could be handled here
+            pass
+
+
+if __name__ == "__main__":
+
+    # Example usage:
+    frases_yo = ["Yo soy inteligente", "Yo puedo lograr lo que me proponga"]
+    preferencias = ["Cine", "Música", "Viajar"]
+    propiedades_persona = {"carisma": 0.8, "destreza": 0.6, "habilidad": 0.9}
+    yo = I(frases_yo, preferencias, propiedades_persona)
+
+    # Generate a question
+    pregunta_generada = yo.generar_pregunta("Hoy es un día soleado.")
+    print("Generated question:", pregunta_generada)
+
+    # Answer the question
+    respuesta = yo.responder_pregunta(pregunta_generada)
+    print("Answer:", respuesta)
+
+    # Discriminate and add information
+    informacion = "Me gusta ir al cine."
+    yo.discriminar_y_agregar(informacion, yo.preferencias)
+    print("Updated preferences:", yo.preferencias)
lib/memory.py
CHANGED
@@ -41,22 +41,26 @@ class MemoriaRobotNLP:
 
 
 # Example usage
-memoria_robot = MemoriaRobotNLP(max_size=100)
 
-memoria_robot.agregar_concepto("animales", [("perro", 0.8), ("gato", 0.7), ("pájaro", 0.5)])
-memoria_robot.agregar_concepto("colores", [("rojo", 0.9), ("verde", 0.6), ("azul", 0.7)])
+if __name__ == "__main__":
+    memoria_robot = MemoriaRobotNLP(max_size=100)
 
-
-print(memoria_robot.memoria)
+    memoria_robot.agregar_concepto("animales", [("perro", 0.8), ("gato", 0.7), ("pájaro", 0.5)])
+    memoria_robot.agregar_concepto("colores", [("rojo", 0.9), ("verde", 0.6), ("azul", 0.7)])
 
-memoria_robot
-memoria_robot.eliminar_string("colores", "verde")
-memoria_robot.eliminar_concepto("colores")
+    print("Full memory:")
+    print(memoria_robot.memoria)
+
+    memoria_robot.agregar_string("animales", "pez", 0.6)
+    memoria_robot.eliminar_string("colores", "verde")
+    memoria_robot.eliminar_concepto("colores")
+
 
-
-
+    print("\nMemory after modifications:")
+    print(memoria_robot.memoria)
+
 
-
-print(
-
+    conceptos_acotados = memoria_robot.obtener_conceptos_acotados(50)
+    print("\nConcepts bounded to a maximum memory size:")
+    print(conceptos_acotados)
 
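Note: the MemoriaRobotNLP class body sits above this hunk and is not shown in the diff. The sketch below is a hypothetical reconstruction of the interface the demo block assumes, with method names taken from the call sites; the bodies and the exact semantics of max_size and obtener_conceptos_acotados are guesses, not the actual lib/memory.py implementation.

# Hypothetical sketch of the MemoriaRobotNLP interface assumed by the demo above
# (reconstructed from the call sites; not the code in this commit).
class MemoriaRobotNLP:
    def __init__(self, max_size=100):
        self.max_size = max_size
        self.memoria = {}  # concept -> list of (string, weight) pairs

    def agregar_concepto(self, concepto, strings):
        self.memoria[concepto] = list(strings)

    def agregar_string(self, concepto, string, peso):
        self.memoria.setdefault(concepto, []).append((string, peso))

    def eliminar_string(self, concepto, string):
        self.memoria[concepto] = [(s, p) for s, p in self.memoria.get(concepto, []) if s != string]

    def eliminar_concepto(self, concepto):
        self.memoria.pop(concepto, None)

    def obtener_conceptos_acotados(self, max_size):
        # One plausible reading: keep the highest-weight strings until the size budget is spent.
        acotados = {}
        total = 0
        for concepto, strings in self.memoria.items():
            seleccion = []
            for s, p in sorted(strings, key=lambda sp: sp[1], reverse=True):
                if total >= max_size:
                    break
                seleccion.append((s, p))
                total += 1
            acotados[concepto] = seleccion
        return acotados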
lib/pipes.py
CHANGED
@@ -3,11 +3,26 @@ from diffusers import DiffusionPipeline
 from transformers import AutoModelForSeq2SeqLM
 from samplings import top_p_sampling, temperature_sampling
 import torch
+from sentence_transformers import SentenceTransformer, util
 
 class AIAssistant:
     def __init__(self):
         pass
 
+    ## grammatical classifier
+    def grammatical_pos_tagger(self, text):
+        nlp_pos = pipeline(
+            "ner",
+            model="mrm8488/bert-spanish-cased-finetuned-pos",
+            tokenizer=(
+                'mrm8488/bert-spanish-cased-finetuned-pos',
+                {"use_fast": False}
+            ))
+
+        return nlp_pos(text)
+
+
+    ## entity classifier
     def entity_pos_tagger(self, example):
         tokenizer = AutoTokenizer.from_pretrained("Davlan/bert-base-multilingual-cased-ner-hrl")
         model = AutoModelForTokenClassification.from_pretrained("Davlan/bert-base-multilingual-cased-ner-hrl")
@@ -15,6 +30,47 @@
         ner_results = nlp(example)
         return ner_results
 
+
+    ## sentiment analysis
+    def sentiment_tags(self, text):
+        distilled_student_sentiment_classifier = pipeline(
+            model="lxyuan/distilbert-base-multilingual-cased-sentiments-student",
+            return_all_scores=True
+        )
+
+        # english
+        return distilled_student_sentiment_classifier(text)
+
+    ## check similarity among sentences (groups of tokens, i.e. words)
+    def similarity_tag(self, sentenceA, sentenceB):
+        res = []
+        model = SentenceTransformer('abbasgolestani/ag-nli-bert-mpnet-base-uncased-sentence-similarity-v1')  # alternative: nli-mpnet-base-v2
+
+        # Two lists of sentences
+        #sentences1 = ['I am honored to be given the opportunity to help make our company better',
+        #              'I love my job and what I do here',
+        #              'I am excited about our company's vision']
+
+        #sentences2 = ['I am hopeful about the future of our company',
+        #              'My work is aligning with my passion',
+        #              'Definitely our company vision will be the next breakthrough to change the world and I'm so happy and proud to work here']
+
+        sentences1 = sentenceA
+        sentences2 = sentenceB
+        # Compute embeddings for both lists
+        embeddings1 = model.encode(sentences1, convert_to_tensor=True)
+        embeddings2 = model.encode(sentences2, convert_to_tensor=True)
+
+        # Compute cosine similarities
+        cosine_scores = util.cos_sim(embeddings1, embeddings2)
+
+        # Output the pairs with their score
+        for i in range(len(sentences1)):
+            res.append({"A": sentences1[i], "B": sentences2[i], "score": cosine_scores[i][i]})
+            #print("{} \t\t {} \t\t Score: {:.4f}".format(sentences1[i], sentences2[i], cosine_scores[i][i]))
+
+        return res
+
+    ## text to Stable Diffusion generated image
     def text_to_image_generation(self, prompt, n_steps=40, high_noise_frac=0.8):
         base = DiffusionPipeline.from_pretrained(
             "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16, variant="fp16", use_safetensors=True
@@ -44,16 +100,8 @@
         ).images[0]
         return image
 
-    def grammatical_pos_tagger(self, text):
-        nlp_pos = pipeline(
-            "ner",
-            model="mrm8488/bert-spanish-cased-finetuned-pos",
-            tokenizer=(
-                'mrm8488/bert-spanish-cased-finetuned-pos',
-                {"use_fast": False}
-            ))
-        return nlp_pos(text)
 
+    ## pass text prompt to music
     def text_to_music(self, text, max_length=1024, top_p=0.9, temperature=1.0):
         tokenizer = AutoTokenizer.from_pretrained('sander-wood/text-to-music')
         model = AutoModelForSeq2SeqLM.from_pretrained('sander-wood/text-to-music')
@@ -86,17 +134,20 @@
         return tune
         break
 
-# Example usage
-assistant = AIAssistant()
-ner_results = assistant.entity_pos_tagger("Nader Jokhadar had given Syria the lead with a well-struck header in the seventh minute.")
-print(ner_results)
 
-
-
+if __name__ == "__main__":
+
+    # Example usage
+    assistant = AIAssistant()
+    ner_results = assistant.entity_pos_tagger("Nader Jokhadar had given Syria the lead with a well-struck header in the seventh minute.")
+    print(ner_results)
+
+    image = assistant.text_to_image_generation("A majestic lion jumping from a big stone at night")
+    print(image)
 
-pos_tags = assistant.grammatical_pos_tagger('Mis amigos están pensando en viajar a Londres este verano')
-print(pos_tags)
+    pos_tags = assistant.grammatical_pos_tagger('Mis amigos están pensando en viajar a Londres este verano')
+    print(pos_tags)
 
-tune = assistant.text_to_music("This is a traditional Irish dance music.")
-print(tune)
+    tune = assistant.text_to_music("This is a traditional Irish dance music.")
+    print(tune)
 
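Note: the new sentiment_tags and similarity_tag helpers are not exercised by the __main__ block above. A minimal usage sketch, assuming the AIAssistant class from this diff and that the named Hugging Face models load correctly; the example strings are illustrative and not part of the commit.

# Hypothetical usage of the new pipes.py helpers (not in the commit)
assistant = AIAssistant()

# Per-label sentiment scores for a short text
print(assistant.sentiment_tags("I love this project"))

# Pairwise similarity: both lists should be the same length, since the method
# reads cosine_scores[i][i] (the diagonal of the score matrix)
pairs = assistant.similarity_tag(
    ["I love my job"],
    ["My work is aligning with my passion"],
)
for p in pairs:
    print(p["A"], "<->", p["B"], "score:", float(p["score"]))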
lib/spell.py
CHANGED
@@ -1,22 +1,19 @@
-import datetime
-
-# Table of angel names by zodiacal station
-angels_by_zodiac_station = {
-    "Aries": ["sha'aphon", "behemoth", "bekemesheb/bekemekesheb", "qotzien"],
-    "Tauro": ["dierenavor", "heniethebol", "siemegedel", "morepheker"],
-    "Geminis": ["sheneron", "phelehedien", "volereked", "akeneseb"],
-    "Cancer": ["Qedoqoredi", "Qoheleren", "Phereshetial", "Memenial"],
-    "Leo": ["Bephopher", "Lieshebeker", "Shehenen", "shehelekek"],
-    "Virgo": ["Siemosial", "Sebodeh", "Siegel", "Teremothiteh"],
-    "Libra": ["A'ariegol", "Mereton", "Qa'aberi", "Legoshmelek"],
-    "Escorpio": ["Therepiethz", "Phetza'an", "Shemophethen", "Thokesed"],
-    "Sagitario": ["Aketen", "Kephron", "Oliphiel", "Yosel"],
-    "Capricornio": ["Ameni", "Bieker", "Depheri", "Menenial"],
-    "Acuario": ["Meta'am", "Theberien", "Shethoqoeh", "Danial"],
-    "Piscis": ["Sha'aphenen", "Aniesien", "Sethered", "Qohemehogov"]
-}
 
-
+angels_by_zodiac_station = {
+    "Aries": ["sha'aphon", "behemoth", "bekemesheb/bekemekesheb", "qotzien"],
+    "Tauro": ["dierenavor", "heniethebol", "siemegedel", "morepheker"],
+    "Geminis": ["sheneron", "phelehedien", "volereked", "akeneseb"],
+    "Cancer": ["Qedoqoredi", "Qoheleren", "Phereshetial", "Memenial"],
+    "Leo": ["Bephopher", "Lieshebeker", "Shehenen", "shehelekek"],
+    "Virgo": ["Siemosial", "Sebodeh", "Siegel", "Teremothiteh"],
+    "Libra": ["A'ariegol", "Mereton", "Qa'aberi", "Legoshmelek"],
+    "Escorpio": ["Therepiethz", "Phetza'an", "Shemophethen", "Thokesed"],
+    "Sagitario": ["Aketen", "Kephron", "Oliphiel", "Yosel"],
+    "Capricornio": ["Ameni", "Bieker", "Depheri", "Menenial"],
+    "Acuario": ["Meta'am", "Theberien", "Shethoqoeh", "Danial"],
+    "Piscis": ["Sha'aphenen", "Aniesien", "Sethered", "Qohemehogov"]
+}
+
 angels_by_month_station = {
     "Nisan": ["Asegesenek", "Mesokenek", "Deriegemon", "Shethenovesenov"],
     "Ayer": ["Phemetor", "Qotenebial", "Ma'agol", "Goberethial"],
@@ -32,9 +29,8 @@ angels_by_month_station = {
     "Adar": ["Koneled", "Ba'aren", "Sebiebekera'a", "Qoromeqore"]
 }
 
-# Table of angel names by day and station
 angels_by_day_station = {
-    1: ["Phiegenochen", "Tenekien", "
+    1: ["Phiegenochen", "Tenekien", "Kophethen", "Makeleched"],
     2: ["Tga'sher", "Menechethor", "Qoleneheren", "Shegedon"],
     3: ["Sheriyachetz", "Qohebereneden", "Pherezen", "Hegelomoth"],
     4: ["Pheniov Lavor", "Miyeshor", "Degiem", "Betheroqa"],
@@ -42,7 +38,6 @@ angels_by_day_station = {
     6: ["Qola'azeran", "Deremthok", "Akethenor", "Arieh"]
 }
 
-# Table of angel names by moon sign
 angels_by_moon_sign = {
     "Leberenieth": ["shaitan", "therezien", "sheneremi", "Gabrial"],
     "Seletheleb": ["Yieshieshieh", "Abererehon", "Sheheqonek", "Bal Menael"],
@@ -50,7 +45,6 @@ angels_by_moon_sign = {
     "Sheherieph": ["Biyom", "Bieth", "Rothep", "Danial"]
 }
 
-# Table of the angels that minister the moon by zodiac sign
 moon_ministers_by_zodiac = {
     "Aries": ["Zerem", "Behemi", "Pheloneh", "Qonosh"],
     "Tauro": ["Deketon", "Mezekerien", "Thederenael", "Amiena"],
@@ -66,7 +60,6 @@ moon_ministers_by_zodiac = {
     "Piscis": ["Sha'aphenen", "Aniesien", "Sethered", "Qohemehogov"]
 }
 
-# Table of angel names by Earth station
 angels_by_earth_station = {
     1: ["Memegien", "Yibesheh", "Thebel", "Hezeh Dovem"],
     2: ["Mechemed Lov", "Bel Ached", "Aseberon", "Qohelorek"],
@@ -74,7 +67,6 @@ angels_by_earth_station = {
     4: ["Yihelederek", "Mephenial", "Mephenial", ""]
 }
 
-# Table of angel names by station of the Malechims
 angels_by_malechim_station = {
     1: ["Akeberon", "Amereneh", "Mazeniem", "Meneshor"],
     2: ["Qoherok", "Aberiek", "Siegor", "Pheniemor"],
@@ -82,33 +74,84 @@ angels_by_malechim_station = {
     4: ["Beriekoch", "Kephor", "Avor", ""]
 }
 
-# Get the current date
-current_date = datetime.datetime.now()
-
-# Get the current month
-current_month = current_date.month
-
-# Get the current day
-current_day = current_date.day
-
-angel_malechim_station = angels_by_malechim_station[list(angels_by_malechim_station.keys())[current_month - 1]][int(zodiac_sign[-1]) - 1]
-
-#
-
-print("Angel for the zodiac sign:", angel_zodiac)
-print("Angel for the moon sign:", angel_moon_sign)
-print("Angel that ministers the zodiac sign:", angel_moon_minister)
-print("Angel for the Earth station:", angel_earth_station)
-print("Angel for the station of the Malechims:", angel_malechim_station)
 
+
+class AngelSearch:
+    def __init__(self, angels_by_zodiac_station, angels_by_month_station, angels_by_day_station,
+                 angels_by_moon_sign, moon_ministers_by_zodiac, angels_by_earth_station,
+                 angels_by_malechim_station):
+        self.angels_by_zodiac_station = angels_by_zodiac_station
+        self.angels_by_month_station = angels_by_month_station
+        self.angels_by_day_station = angels_by_day_station
+        self.angels_by_moon_sign = angels_by_moon_sign
+        self.moon_ministers_by_zodiac = moon_ministers_by_zodiac
+        self.angels_by_earth_station = angels_by_earth_station
+        self.angels_by_malechim_station = angels_by_malechim_station
+
+    def search_patterns(self, pattern):
+        results = []
+
+        # Search the table of angel names by zodiacal station
+        for zodiac, angels in self.angels_by_zodiac_station.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((zodiac, angel))
+
+        # Search the table of angel names by month and station
+        for month, angels in self.angels_by_month_station.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((month, angel))
+
+        # Search the table of angel names by day and station
+        for day, angels in self.angels_by_day_station.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((day, angel))
+
+        # Search the table of angel names by moon sign
+        for sign, angels in self.angels_by_moon_sign.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((sign, angel))
+
+        # Search the table of the angels that minister the moon by zodiac sign
+        for zodiac, angels in self.moon_ministers_by_zodiac.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((zodiac, angel))
+
+        # Search the table of angel names by Earth station
+        for station, angels in self.angels_by_earth_station.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((station, angel))
+
+        # Search the table of angel names by station of the Malechims
+        for station, angels in self.angels_by_malechim_station.items():
+            for angel in angels:
+                if pattern in angel:
+                    results.append((station, angel))
+
+        return results
+
+if __name__ == "__main__":
+    # Angel name tables defined above
+
+    # Create an AngelSearch instance
+    angel_search = AngelSearch(angels_by_zodiac_station, angels_by_month_station, angels_by_day_station,
+                               angels_by_moon_sign, moon_ministers_by_zodiac, angels_by_earth_station,
+                               angels_by_malechim_station)
+
+    # Search for angels that contain the pattern "Qo"
+    pattern = "Qo"
+    results = angel_search.search_patterns(pattern)
+
+    # Print search results
+    print(f"Search results for pattern '{pattern}':")
+    if results:
+        for result in results:
+            print(result)
+    else:
+        print("No matches found for the given pattern.")
lib/triggers.py
ADDED
@@ -0,0 +1,82 @@
+import datetime
+import requests
+
+class Trigger:
+    def __init__(self, trigger_tags, comparison_tags, time_definition, event_name, included=True):
+        self.trigger_tags = set(trigger_tags)
+        self.comparison_tags = set(comparison_tags)
+        self.time_definition = time_definition
+        self.event_name = event_name
+        self.included = included
+        self.threshold = 0
+        self.actions = []
+        self.sources = []
+
+    def add_action(self, action):
+        self.actions.append(action)
+
+    def remove_action(self, action):
+        if action in self.actions:
+            self.actions.remove(action)
+        else:
+            print("Action not found")
+
+    def add_source(self, source):
+        self.sources.append(source)
+
+    def remove_source(self, source):
+        if source in self.sources:
+            self.sources.remove(source)
+        else:
+            print("Source not found")
+
+    def check_trigger(self, current_tags, current_time):
+        if self.included:
+            if current_time in self.time_definition and self.trigger_tags.issubset(current_tags):
+                self.threshold += 1
+            else:
+                self.threshold = 0
+        else:
+            if current_time in self.time_definition and not self.trigger_tags.intersection(current_tags):
+                self.threshold += 1
+            else:
+                self.threshold = 0
+
+        if self.threshold >= len(self.time_definition):
+            self.fire_actions()
+            self.make_requests()
+
+    def fire_actions(self):
+        for action in self.actions:
+            action(self.event_name)
+
+    def make_requests(self):
+        for source in self.sources:
+            try:
+                response = requests.get(source)
+                # Process the response here if needed
+                print(f"Request made to {source}. Status code: {response.status_code}")
+            except requests.exceptions.RequestException as e:
+                print(f"Error making request to {source}: {e}")
+
+# Example usage:
+
+def action_function(event_name):
+    print(f"Trigger fired for event: {event_name}")
+
+
+if __name__ == "__main__":
+
+    # Define a trigger
+    trigger = Trigger(["tag1", "tag2"], ["tag3", "tag4"], [datetime.time(10, 0), datetime.time(15, 0)], "Event1")
+
+    # Add an action to the trigger
+    trigger.add_action(action_function)
+
+    # Add a source to the trigger
+    trigger.add_source("https://example.com/api/data")
+
+    # Simulate a periodic trigger check (in practice this would run in a real-time loop)
+    current_tags = {"tag1", "tag2", "tag3"}
+    current_time = datetime.datetime.now().time()
+    trigger.check_trigger(current_tags, current_time)