amaye15 committed
Commit: 10c2fec
Parent(s): c30b770

webhook test
app.py CHANGED

```diff
@@ -1,14 +1,16 @@
 import os
 import shutil
 import logging
-import pretty_errors
-from datasets import Dataset, load_dataset
 from huggingface_hub import WebhooksServer, WebhookPayload, webhook_endpoint
+from datasets import Dataset, load_dataset, disable_caching
+
+disable_caching()
 
 # Set up the logger
 logger = logging.getLogger("basic_logger")
 logger.setLevel(logging.INFO)
 
+# Set up the console handler with a simple format
 console_handler = logging.StreamHandler()
 console_handler.setLevel(logging.INFO)
 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
@@ -17,6 +19,7 @@ logger.addHandler(console_handler)
 
 DS_NAME = "amaye15/object-segmentation"
 DATA_DIR = "data"
+TARGET_REPO = "amaye15/tmp"
 
 
 def get_data():
@@ -35,7 +38,7 @@ def get_data():
 
 def process_and_push_data():
     """
-    Function to process and push new data.
+    Function to process and push new data to the target repository.
     """
     p = os.path.join(os.getcwd(), DATA_DIR)
 
@@ -45,7 +48,8 @@ def process_and_push_data():
     os.mkdir(p)
 
     ds_processed = Dataset.from_generator(get_data)
-    ds_processed.push_to_hub(
+    ds_processed.push_to_hub(TARGET_REPO)
+    logger.info("Data processed and pushed to the hub.")
 
 
 # Initialize the WebhooksServer
@@ -55,7 +59,7 @@ app = WebhooksServer(webhook_secret="my_secret_key")
 @webhook_endpoint
 async def trigger_processing(payload: WebhookPayload):
     """
-    Webhook endpoint that triggers data processing when
+    Webhook endpoint that triggers data processing when the dataset is updated.
     """
     if payload.repo.type == "dataset" and payload.event.action == "update":
         logger.info(f"Dataset {payload.repo.name} updated. Triggering processing.")
@@ -66,5 +70,5 @@ async def trigger_processing(payload: WebhookPayload):
     return {"message": "Event ignored."}
 
 
-# Start the server
+# Start the webhook server
 app.launch()
```
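A note on the substance of the change: calling disable_caching() from datasets makes Dataset.from_generator regenerate the data on every webhook delivery instead of silently reloading a cached copy with a matching fingerprint, which matters here because the whole point of the endpoint is to rebuild the dataset after each update.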
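Given the commit message, one way to smoke-test the endpoint locally is to post a fake payload. The sketch below is not part of the commit and leans on several assumptions: that the WebhooksServer is listening on Gradio's default port 7860, that @webhook_endpoint mounts the handler at /webhooks/trigger_processing, that the secret is read from the X-Webhook-Secret header, and that the trimmed payload passes WebhookPayload validation (a real Hub payload carries more fields).

```python
# Hypothetical local smoke test for the webhook endpoint (not part of the commit).
# Assumptions: server on localhost:7860, route /webhooks/trigger_processing,
# secret sent via the X-Webhook-Secret header, and a trimmed payload that
# still satisfies WebhookPayload validation.
import requests

payload = {
    "event": {"action": "update", "scope": "repo.content"},
    "repo": {
        "type": "dataset",
        "name": "amaye15/object-segmentation",
        "id": "000000000000000000000000",  # placeholder id
        "private": False,
        "url": {
            "web": "https://huggingface.co/datasets/amaye15/object-segmentation",
            "api": "https://huggingface.co/api/datasets/amaye15/object-segmentation",
        },
        "owner": {"id": "000000000000000000000000"},  # placeholder id
    },
    "webhook": {"id": "000000000000000000000000", "version": 3},
}

resp = requests.post(
    "http://localhost:7860/webhooks/trigger_processing",
    json=payload,
    headers={"X-Webhook-Secret": "my_secret_key"},
)
print(resp.status_code, resp.json())
```

If everything lines up, the handler should log the update and kick off process_and_push_data(), pushing the regenerated dataset to TARGET_REPO; any other action or repo type returns the "Event ignored." message.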