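# Gradio Space app: collects simple ratings (name, rate, celsci), stores them in
# a local SQLite database, and periodically pushes backups to the
# CognitiveScience/csdhdata dataset repo on the Hugging Face Hub. Background
# schedulers also run a python_actr simulation at fixed intervals.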
import gradio as gr
from bs4 import BeautifulSoup
import requests
from acogsphere import acf
from bcogsphere import bcf
import math
import sqlite3
import huggingface_hub
import pandas as pd
import shutil
import os
import datetime
from apscheduler.schedulers.background import BackgroundScheduler
import random
import time
from huggingface_hub import hf_hub_download
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./reviews.csv")
from huggingface_hub import login
from datasets import load_dataset
#dataset = load_dataset("csv", data_files="./data.csv")
DB_FILE = "./reviews.db"
TOKEN = os.environ.get('HF_KEY')
repo = huggingface_hub.Repository(
    local_dir="data",
    repo_type="dataset",
    clone_from="CognitiveScience/csdhdata",
    use_auth_token=TOKEN,
)
repo.git_pull()
#TOKEN2 = HF_TOKEN
#login(token=TOKEN2)
# Set db to latest
#shutil.copyfile("./data/reviews01.db", DB_FILE)
# Create table if it doesn't already exist
db = sqlite3.connect(DB_FILE)
try:
    db.execute("SELECT * FROM reviews").fetchall()
    db.close()
except sqlite3.OperationalError:
    db.execute(
        '''
        CREATE TABLE reviews (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                              created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                              name TEXT, rate INTEGER, celsci TEXT)
        '''
    )
    db.commit()
    db.close()
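# Return the 100 most recent reviews as a pandas DataFrame, plus the total row count.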
def get_latest_reviews(db: sqlite3.Connection):
    reviews = db.execute("SELECT * FROM reviews ORDER BY id DESC LIMIT 100").fetchall()
    total_reviews = db.execute("SELECT COUNT(id) FROM reviews").fetchone()[0]
    reviews = pd.DataFrame(reviews, columns=["id", "date_created", "name", "rate", "celsci"])
    return reviews, total_reviews
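# Insert a new review, forward the (name, celsci) pair to a companion Space's
# /api/predict endpoint, and return the refreshed table and count for the UI.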
def ccogsphere(name: str, rate: int, celsci: str):
    db = sqlite3.connect(DB_FILE)
    cursor = db.cursor()
    cursor.execute("INSERT INTO reviews(name, rate, celsci) VALUES(?,?,?)", [name, rate, celsci])
    db.commit()
    reviews, total_reviews = get_latest_reviews(db)
    db.close()
    r = requests.post(
        url="https://ccml-persistent-data2.hf.space/api/predict/",
        json={"data": [name, celsci]},
    )
    #demo.load()
    return reviews, total_reviews
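# Run a Rock-Paper-Scissors simulation between two procedural players using the
# local dcogsphere models (built on python_actr).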
def run_actr():
    from python_actr import log_everything
    #code1="tim = MyAgent()"
    #code2="subway=MyEnv()"
    #code3="subway.agent=tim"
    #code4="log_everything(subway)"
    from dcogsphere import RockPaperScissors
    from dcogsphere import ProceduralPlayer
    #from dcogsphere import logy
    env = RockPaperScissors()
    env.model1 = ProceduralPlayer()
    env.model1.choice = env.choice1
    env.model2 = ProceduralPlayer()
    env.model2.choice = env.choice2
    env.run()
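# Fetch the latest reviews and total count for the UI (used by demo.load and the
# change listeners below).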
def load_data():
    db = sqlite3.connect(DB_FILE)
    reviews, total_reviews = get_latest_reviews(db)
    db.close()
    return reviews, total_reviews
css="footer {visibility: hidden}"
# Applying style to highlight the maximum value in each row
#styler = df.style.highlight_max(color = 'lightgreen', axis = 0)
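# UI: a dataframe and counter showing the latest reviews, plus three textboxes
# and a submit button for entering a new rating.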
with gr.Blocks(css=css) as demo:
    with gr.Row():
        with gr.Column():
            data = gr.Dataframe()  #styler)
            count = gr.Number(label="Rates!")
    with gr.Row():
        with gr.Column():
            name = gr.Textbox(label="a")  #, placeholder="What is your name?")
            rate = gr.Textbox(label="b")  #, placeholder="What is your name?") #gr.Radio(label="How satisfied are you with using gradio?", choices=[1, 2, 3, 4, 5])
            celsci = gr.Textbox(label="c")  #, lines=10, placeholder="Do you have any feedback on gradio?")
            #run_actr()
            submit = gr.Button(value=".")

    submit.click(ccogsphere, [name, rate, celsci], [data, count])
    demo.load(load_data, None, [data, count])

    # Reload the page client-side (via _js) whenever any textbox changes, and
    # re-read the database on the Python side.
    @name.change(inputs=name, outputs=celsci, _js="window.location.reload()")
    @rate.change(inputs=rate, outputs=name, _js="window.location.reload()")
    @celsci.change(inputs=celsci, outputs=rate, _js="window.location.reload()")
    def secwork(name):
        #if name=="abc":
        #    run_code()
        load_data()
        #return "Hello " + name + "!"
def backup_db():
    shutil.copyfile(DB_FILE, "./reviews1.db")
    db = sqlite3.connect(DB_FILE)
    reviews = db.execute("SELECT * FROM reviews").fetchall()
    pd.DataFrame(reviews).to_csv("./reviews.csv", index=False)
    print("updating db")
    repo.push_to_hub(blocking=False, commit_message=f"Updating data at {datetime.datetime.now()}")
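# CSV-oriented backup: dump the reviews to reviews2.csv, reload them with
# datasets, and push the repo to the Hub.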
def backup_db_csv():
    shutil.copyfile(DB_FILE, "./reviews2.db")
    db = sqlite3.connect(DB_FILE)
    reviews = db.execute("SELECT * FROM reviews").fetchall()
    pd.DataFrame(reviews).to_csv("./reviews2.csv", index=False)
    print("updating db csv")
    dataset = load_dataset("csv", data_files="./reviews2.csv")
    repo.push_to_hub(blocking=False)  #, commit_message=f"Updating data-csv at {datetime.datetime.now()}")
#path1=hf_hub_url()
#print (path1)
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./*.csv")
#hf_hub_download(repo_id="CognitiveScience/csdhdata", filename="./*.db")
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./*.md")
#hf_hub_download(repo_id="CognitiveScience/csdhdata", filename="./*.md")
#def load_data2():
# db = sqlite3.connect(DB_FILE)
# reviews, total_reviews = get_latest_reviews(db)
# #db.close()
# demo.load(load_data,None, [reviews, total_reviews])
# #return reviews, total_reviews
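# Background jobs: run the python_actr simulation hourly and back up the
# database and CSV export on long fixed intervals.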
scheduler1 = BackgroundScheduler()
scheduler1.add_job(func=run_actr, trigger="interval", seconds=3600)
scheduler1.start()

scheduler2 = BackgroundScheduler()
scheduler2.add_job(func=backup_db, trigger="interval", seconds=3633000)
scheduler2.start()

scheduler3 = BackgroundScheduler()
scheduler3.add_job(func=backup_db_csv, trigger="interval", seconds=3666000)
scheduler3.start()
demo.launch()