import gradio as gr
from bs4 import BeautifulSoup
import requests
from acogsphere import acf
from bcogsphere import bcf
from ecogsphere import ecf
import pandas as pd
import math
import json
import sqlite3
import huggingface_hub
#import pandas as pd
import shutil
import os
import datetime
from apscheduler.schedulers.background import BackgroundScheduler
import random
import time
#import requests
from huggingface_hub import hf_hub_download
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./reviews.csv")
from huggingface_hub import login
from datasets import load_dataset
#dataset = load_dataset("csv", data_files="./data.csv")
DB_FILE = "./reviews.db"
TOKEN = os.environ.get('HF_KEY')
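
# Clone the "CognitiveScience/csdhdata" dataset repo into ./data so the SQLite
# database and CSV exports can be pushed back to the Hub for persistence; HF_KEY
# is assumed to hold a write-enabled Hugging Face token.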
repo = huggingface_hub.Repository(
    local_dir="data",
    repo_type="dataset",
    clone_from="CognitiveScience/csdhdata",
    use_auth_token=TOKEN
)
repo.git_pull()
#TOKEN2 = HF_TOKEN
#login(token=TOKEN2)
# Set db to latest
#shutil.copyfile("./reviews.db", DB_FILE)
# Create table if it doesn't already exist
db = sqlite3.connect(DB_FILE)
try:
    db.execute("SELECT * FROM reviews").fetchall()
    #db.execute("SELECT * FROM reviews2").fetchall()
    db.close()
except sqlite3.OperationalError:
    db.execute(
        '''
        CREATE TABLE reviews (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                              created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
                              name TEXT, rate INTEGER, celsci TEXT)
        ''')
    db.commit()
    db.close()
db = sqlite3.connect(DB_FILE)
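
# Query helpers: return the 100 most recent rows as a DataFrame plus the total row
# count, for the "reviews" ratings table and the "reviews2" video-results table.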
def get_latest_reviews(db: sqlite3.Connection):
    reviews = db.execute("SELECT * FROM reviews ORDER BY id DESC LIMIT 100").fetchall()
    total_reviews = db.execute("SELECT COUNT(id) FROM reviews").fetchone()[0]
    reviews = pd.DataFrame(reviews, columns=["id", "date_created", "name", "rate", "celsci"])
    return reviews, total_reviews

def get_latest_reviews2(db: sqlite3.Connection):
    reviews2 = db.execute("SELECT * FROM reviews2 ORDER BY id DESC LIMIT 100").fetchall()
    total_reviews2 = db.execute("SELECT COUNT(id) FROM reviews2").fetchone()[0]
    reviews2 = pd.DataFrame(reviews2, columns=["id", "title", "link", "channel", "description", "views", "uploaded", "duration", "durationString"])
    return reviews2, total_reviews2
def ccogsphere(name: str, rate: int, celsci: str):
    db = sqlite3.connect(DB_FILE)
    cursor = db.cursor()
    try:
        # Build a "First+Last" search query from the celsci field and append the
        # JSON-encoded video results returned by ecf() to the stored value.
        celsci2 = celsci.split()
        print("split", celsci2, celsci)
        celsci2 = celsci2[0] + "+" + celsci2[1]
        celsci2 = ecf(celsci2)
        celsci2 = json.dumps(celsci2["videos"])
    except Exception:
        celsci2 = " No Info Found"
    celsci = celsci + celsci2
    cursor.execute("INSERT INTO reviews(name, rate, celsci) VALUES(?,?,?)", [name, rate, celsci])
    db.commit()
    reviews, total_reviews = get_latest_reviews(db)
    db.close()
    # POST the new entry to the ccml-persistent-data2 Space's predict endpoint (response is ignored).
    r = requests.post(url='https://ccml-persistent-data2.hf.space/api/predict/', json={"data": [name, celsci]})
    #demo.load()
    return reviews, total_reviews
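
# Optional python_actr demo: wires two procedural players into a RockPaperScissors
# environment from dcogsphere and runs the simulation (only referenced from a
# commented-out call in the UI below).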
def run_actr():
    from python_actr import log_everything
    #code1="tim = MyAgent()"
    #code2="subway=MyEnv()"
    #code3="subway.agent=tim"
    #code4="log_everything(subway)"
    from dcogsphere import RockPaperScissors
    from dcogsphere import ProceduralPlayer
    #from dcogsphere import logy
    env = RockPaperScissors()
    env.model1 = ProceduralPlayer()
    env.model1.choice = env.choice1
    env.model2 = ProceduralPlayer()
    env.model2.choice = env.choice2
    env.run()
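
# Fetch video search results via ecf(), store them in the "reviews2" table, and
# return the latest rows; not currently wired to any UI event.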
def run_ecs(inp):
    try:
        result = ecf(inp)
        df = pd.DataFrame.from_dict(result["videos"])
        df = df.drop(df.columns[4], axis=1)  # drop one unused column before storing
    except sqlite3.OperationalError:
        print("db error")
        df = pd.DataFrame()
    db = sqlite3.connect(DB_FILE)
    #cursor = db.cursor()
    #cursor.execute("INSERT INTO reviews2(title, link, thumbnail,channel, description, views, uploaded, duration, durationString) VALUES(?,?,?,?,?,?,?,?,?)", [title, link, thumbnail,channel, description, views, uploaded, duration, durationString])
    df.to_sql('reviews2', db, if_exists='replace', index=False)
    #db.commit()
    reviews2, total_reviews2 = get_latest_reviews2(db)  # read back from the reviews2 table just written
    db.close()
    #print ("print000", total_reviews2,reviews2)
    return reviews2, total_reviews2
def load_data():
    db = sqlite3.connect(DB_FILE)
    reviews, total_reviews = get_latest_reviews(db)
    db.close()
    return reviews, total_reviews

def load_data2():
    db = sqlite3.connect(DB_FILE)
    reviews2, total_reviews2 = get_latest_reviews2(db)
    db.close()
    return reviews2, total_reviews2
css="footer {visibility: hidden}"
# Applying style to highlight the maximum value in each row
#styler = df.style.highlight_max(color = 'lightgreen', axis = 0)
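
# Gradio UI: a dataframe and counter showing the latest ratings, three text inputs
# (name, rate, celsci), and a button that inserts a row and refreshes the view.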
with gr.Blocks(css=css) as demo:
    with gr.Row():
        with gr.Column():
            data = gr.Dataframe() #styler)
            count = gr.Number(label="Rates!")
    with gr.Row():
        with gr.Column():
            name = gr.Textbox(label="a") #, placeholder="What is your name?")
            rate = gr.Textbox(label="b") #gr.Radio(label="How satisfied are you with using gradio?", choices=[1, 2, 3, 4, 5])
            celsci = gr.Textbox(label="c") #, lines=10, placeholder="Do you have any feedback on gradio?")
            #run_actr()
            submit = gr.Button(value=".")
    submit.click(ccogsphere, [name, rate, celsci], [data, count])
    demo.load(load_data, None, [data, count])

    @name.change(inputs=name, outputs=celsci, _js="window.location.reload()")
    @rate.change(inputs=rate, outputs=name, _js="window.location.reload()")
    @celsci.change(inputs=celsci, outputs=rate, _js="window.location.reload()")
    def secwork(name):
        #if name=="abc":
        #    run_code()
        load_data()
        #return "Hello " + name + "!"
def backup_db():
    # Copy the live database and a CSV export into the cloned dataset repo ("./data")
    # so the push below picks them up; copying DB_FILE onto itself would raise
    # shutil.SameFileError.
    shutil.copyfile(DB_FILE, "./data/reviews.db")
    db = sqlite3.connect(DB_FILE)
    reviews = db.execute("SELECT * FROM reviews").fetchall()
    db.close()
    pd.DataFrame(reviews).to_csv("./data/reviews.csv", index=False)
    print("updating db")
    repo.push_to_hub(blocking=False, commit_message=f"Updating data at {datetime.datetime.now()}")
def backup_db_csv():
    # Same idea as backup_db(), but snapshots to reviews2.* inside the repo clone;
    # the CSV is also loaded back with datasets, though the result is not used further.
    shutil.copyfile(DB_FILE, "./data/reviews2.db")
    db = sqlite3.connect(DB_FILE)
    reviews = db.execute("SELECT * FROM reviews").fetchall()
    db.close()
    pd.DataFrame(reviews).to_csv("./data/reviews2.csv", index=False)
    print("updating db csv")
    dataset = load_dataset("csv", data_files="./data/reviews2.csv")
    repo.push_to_hub(blocking=False, commit_message=f"Updating data-csv at {datetime.datetime.now()}")
#path1=hf_hub_url()
#print (path1)
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./*.csv")
#hf_hub_download(repo_id="CognitiveScience/csdhdata", filename="./*.db")
#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./*.md")
#hf_hub_download(repo_id="CognitiveScience/csdhdata", filename="./*.md")
#def load_data2():
# db = sqlite3.connect(DB_FILE)
# reviews, total_reviews = get_latest_reviews(db)
# #db.close()
# demo.load(load_data,None, [reviews, total_reviews])
# #return reviews, total_reviews
#scheduler1 = BackgroundScheduler()
#scheduler1.add_job(func=run_actr, trigger="interval", seconds=3600)
#scheduler1.start()
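
# Push database/CSV backups to the Hub roughly once an hour; the run_actr job above
# is left disabled.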
scheduler2 = BackgroundScheduler()
scheduler2.add_job(func=backup_db, trigger="interval", seconds=3633)
scheduler2.start()
scheduler3 = BackgroundScheduler()
scheduler3.add_job(func=backup_db_csv, trigger="interval", seconds=3666)
scheduler3.start()
demo.launch()