from atproto import CAR, FirehoseSubscribeReposClient, models, parse_subscribe_repos_message
from datasets import BuilderConfig, DatasetInfo, Features, GeneratorBasedBuilder, SplitGenerator, Value
from typing import Any, Dict, Iterator, List
from dataclasses import dataclass
from queue import Empty, Full, Queue
from threading import Event, Thread
import time

@dataclass
class BskyConfig(BuilderConfig):
    """BuilderConfig for Bluesky posts."""

    batch_size: int = 1  # How many posts to return per iteration
    max_buffer_size: int = 10  # Maximum number of posts held in the buffer


class BskyPostsDataset(GeneratorBasedBuilder):
    """Dataset for streaming Bluesky posts using the AtProto firehose."""

    VERSION = "1.0.0"
    BUILDER_CONFIG_CLASS = BskyConfig
    BUILDER_CONFIGS = [
        BskyConfig(name="default", description="Default configuration for the Bluesky posts dataset")
    ]

    def __init__(self, *args, **kwargs):
        # Initialize thread-related attributes first so that __del__ and
        # _cleanup_firehose are safe even if super().__init__ raises.
        self._stop_event = Event()
        self._firehose_thread = None
        self._current_batch = []
        self._post_buffer = None
        self._client = None
        # super().__init__ resolves self.config from BUILDER_CONFIGS...
        super().__init__(*args, **kwargs)
        # ...so the buffer can only be sized afterwards.
        self._post_buffer = Queue(maxsize=self.config.max_buffer_size)

    def _info(self) -> DatasetInfo:
        return DatasetInfo(
            description="A dataset of Bluesky posts fetched in real time using the AtProto firehose API.",
            features=Features({
                "uri": Value("string"),
                "cid": Value("string"),
                "text": Value("string"),
                "created_at": Value("string"),
                "author_did": Value("string"),
            }),
            supervised_keys=None,
            homepage="https://bsky.app",
            citation="",
        )

    def _split_generators(self, dl_manager: Any) -> List[SplitGenerator]:
        # Nothing to download; the single "train" split streams live data.
        return [
            SplitGenerator(
                name="train",
                gen_kwargs={"split": "train"},
            ),
        ]

    def _handle_message(self, message) -> None:
        """Process a single message from the firehose."""
        try:
            commit = parse_subscribe_repos_message(message)
            if not isinstance(commit, models.ComAtprotoSyncSubscribeRepos.Commit):
                return
            # Some commits carry no block data; CAR.from_bytes would fail on them.
            if not commit.blocks:
                return
            car = CAR.from_bytes(commit.blocks)
            for op in commit.ops:
                if op.action == 'create' and op.path.startswith('app.bsky.feed.post'):
                    try:
                        record = car.blocks.get(op.cid)
                        if not record or not record.get('text'):
                            continue
                        post_data = {
                            "uri": f"at://{commit.repo}/{op.path}",
                            "cid": str(op.cid),
                            "text": record.get('text', ''),
                            "created_at": record.get('createdAt', ''),
                            "author_did": commit.repo,
                        }
                        if self._post_buffer is not None and not self._stop_event.is_set():
                            try:
                                self._post_buffer.put(post_data, timeout=1)
                                if self._post_buffer.qsize() >= self.config.batch_size:
                                    # Enough posts for one batch: signal the consumer.
                                    self._stop_event.set()
                                    return
                            except Full:
                                # Buffer is full; drop the post rather than block the stream.
                                pass
                    except Exception:
                        # Skip records that cannot be decoded.
                        continue
        except Exception:
            # Ignore malformed frames and keep the stream alive.
            pass

    def _cleanup_firehose(self):
        """Stop the firehose client and join its worker thread."""
        self._stop_event.set()
        if self._client is not None:
            try:
                self._client.stop()
            except Exception:
                pass
            self._client = None
        if self._firehose_thread and self._firehose_thread.is_alive():
            self._firehose_thread.join(timeout=1)
        self._firehose_thread = None

    def _start_firehose(self) -> None:
        """Start the firehose client in a daemon thread."""
        self._stop_event.clear()
        self._client = FirehoseSubscribeReposClient()
        client = self._client

        def on_message_handler(message):
            if self._stop_event.is_set():
                # The client ignores the callback's return value, so the
                # stream has to be stopped explicitly once a batch is ready.
                client.stop()
                return
            self._handle_message(message)

        self._firehose_thread = Thread(target=self._client.start, args=(on_message_handler,))
        self._firehose_thread.daemon = True
        self._firehose_thread.start()

    def _generate_examples(self, split: str) -> Iterator[Dict]:
        """Yield examples from the firehose buffer, one batch per loop."""
        try:
            while True:
                self._current_batch = []
                # (Re)start the firehose whenever its thread is not running.
                if not self._firehose_thread or not self._firehose_thread.is_alive():
                    self._start_firehose()
                    time.sleep(0.1)
                try:
                    while len(self._current_batch) < self.config.batch_size:
                        try:
                            post_data = self._post_buffer.get(timeout=5)
                            self._current_batch.append(post_data)
                        except Empty:
                            if not self._current_batch:
                                continue  # Keep waiting for the first post.
                            break  # Yield a partial batch rather than stall.
                    for post in self._current_batch:
                        yield post["uri"], post
                    self._current_batch = []
                except Exception:
                    # On unexpected errors, flush whatever was collected.
                    if self._current_batch:
                        for post in self._current_batch:
                            yield post["uri"], post
                finally:
                    self._cleanup_firehose()
        except GeneratorExit:
            self._cleanup_firehose()

    def __del__(self):
        """Clean up the firehose thread when the builder is garbage-collected."""
        self._cleanup_firehose()
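

# --- Usage sketch (not part of the original script) ---
# A minimal, illustrative way to pull a few live posts through this builder.
# It assumes this file is saved locally as "bsky_posts.py"; the script path
# and the number of posts printed are assumptions, not taken from the source.
if __name__ == "__main__":
    from datasets import load_dataset

    ds = load_dataset("bsky_posts.py", streaming=True, split="train")
    for i, post in enumerate(ds):
        print(post["author_did"], post["text"][:80])
        if i >= 4:
            break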