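"""Streaming Bluesky posts dataset.

Connects to the AtProto firehose in a background thread, buffers
app.bsky.feed.post records in a queue, and yields them in batches
through a Hugging Face GeneratorBasedBuilder.
"""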

from atproto import FirehoseSubscribeReposClient, parse_subscribe_repos_message, CAR, models
from datasets import GeneratorBasedBuilder, SplitGenerator, DatasetInfo, Features, Value, BuilderConfig
from typing import Any, Dict, Iterator, List
from dataclasses import dataclass
from queue import Queue, Empty, Full
from threading import Thread, Event
import time


@dataclass
class BskyConfig(BuilderConfig):
    """BuilderConfig for Bluesky posts."""

    batch_size: int = 1  # How many posts to yield per iteration
    max_buffer_size: int = 10  # Maximum number of posts held in the buffer


class BskyPostsDataset(GeneratorBasedBuilder):
    """Dataset for streaming Bluesky posts using the AtProto firehose."""

    VERSION = "1.0.0"
    BUILDER_CONFIG_CLASS = BskyConfig
    BUILDER_CONFIGS = [
        BskyConfig(name="default", description="Default configuration for the Bluesky Posts dataset")
    ]

    def __init__(self, *args, **kwargs):
        # Initialize thread-related attributes before super().__init__,
        # which resolves the builder config.
        self._stop_event = Event()
        self._firehose_thread = None
        self._current_batch = []
        self._post_buffer = None
        self._client = None
        super().__init__(*args, **kwargs)
        # The queue size depends on the resolved config, so create it last.
        self._post_buffer = Queue(maxsize=self.config.max_buffer_size)

    def _info(self) -> DatasetInfo:
        return DatasetInfo(
            description="A dataset of Bluesky posts fetched in real time using the AtProto firehose API.",
            features=Features({
                "uri": Value("string"),
                "cid": Value("string"),
                "text": Value("string"),
                "created_at": Value("string"),
                "author_did": Value("string"),
            }),
            supervised_keys=None,
            homepage="https://bsky.app",
            citation="",
        )

    def _split_generators(self, dl_manager: Any) -> List[SplitGenerator]:
        return [
            SplitGenerator(
                name="train",
                gen_kwargs={"split": "train"},
            ),
        ]

    def _handle_message(self, message) -> None:
        """Process a single message from the firehose."""
        try:
            commit = parse_subscribe_repos_message(message)
            if not isinstance(commit, models.ComAtprotoSyncSubscribeRepos.Commit):
                return
            if not commit.blocks:
                # Some commits carry no block data; nothing to decode.
                return
            car = CAR.from_bytes(commit.blocks)
            for op in commit.ops:
                if op.action != 'create' or not op.path.startswith('app.bsky.feed.post'):
                    continue
                record = car.blocks.get(op.cid)
                if not record or not record.get('text'):
                    continue
                post_data = {
                    "uri": f"at://{commit.repo}/{op.path}",
                    "cid": str(op.cid),
                    "text": record.get('text', ''),
                    "created_at": record.get('createdAt', ''),
                    "author_did": commit.repo,
                }
                if self._post_buffer is not None and not self._stop_event.is_set():
                    try:
                        self._post_buffer.put(post_data, timeout=1)
                        if self._post_buffer.qsize() >= self.config.batch_size:
                            # Enough posts for one batch; signal the
                            # firehose thread to shut down.
                            self._stop_event.set()
                            return
                    except Full:
                        # The consumer is not keeping up; drop the post.
                        pass
        except Exception:
            # Malformed messages are expected on the firehose; skip them.
            pass

    def _cleanup_firehose(self):
        """Stop the firehose client and join its thread."""
        self._stop_event.set()
        if self._client is not None:
            try:
                self._client.stop()
            except Exception:
                pass
            self._client = None
        if self._firehose_thread and self._firehose_thread.is_alive():
            self._firehose_thread.join(timeout=1)
        self._firehose_thread = None

    def _start_firehose(self) -> None:
        """Start the firehose client in a daemon thread."""
        self._stop_event.clear()
        self._client = FirehoseSubscribeReposClient()

        def on_message_handler(message):
            self._handle_message(message)
            if self._stop_event.is_set():
                # Closing the client unblocks client.start() so the
                # daemon thread can exit.
                self._client.stop()

        self._firehose_thread = Thread(target=self._client.start, args=(on_message_handler,), daemon=True)
        self._firehose_thread.start()

    def _generate_examples(self, split: str) -> Iterator[Dict]:
        """Yield examples from the firehose buffer, one batch at a time."""
        try:
            while True:
                self._current_batch = []
                if not self._firehose_thread or not self._firehose_thread.is_alive():
                    self._start_firehose()
                    time.sleep(0.1)  # Give the connection a moment to come up.
                try:
                    while len(self._current_batch) < self.config.batch_size:
                        try:
                            post_data = self._post_buffer.get(timeout=5)
                            self._current_batch.append(post_data)
                        except Empty:
                            if not self._current_batch:
                                continue  # Nothing buffered yet; keep waiting.
                            break  # Yield the partial batch we have.
                    for post in self._current_batch:
                        yield post["uri"], post
                    self._current_batch = []
                except Exception:
                    # Flush whatever was collected before the error.
                    for post in self._current_batch:
                        yield post["uri"], post
                finally:
                    self._cleanup_firehose()
        except GeneratorExit:
            self._cleanup_firehose()

    def __del__(self):
        """Clean up the firehose thread when the dataset object is collected."""
        self._cleanup_firehose()
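

# A minimal usage sketch, assuming this file is loaded as a local dataset
# script (the "bsky_data.py" path below is illustrative). Streaming mode is
# important: the firehose never ends, so a non-streaming load would try to
# materialize an infinite dataset. Recent versions of `datasets` also require
# trust_remote_code=True for script-based datasets.
if __name__ == "__main__":
    from datasets import load_dataset

    ds = load_dataset("bsky_data.py", split="train", streaming=True, trust_remote_code=True)
    for i, post in enumerate(ds):
        print(post["created_at"], post["text"][:80])
        if i >= 4:  # Stop after a handful of live posts.
            break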