serpxe committed on
Commit
0aa536c
1 Parent(s): b1f0bc7

initial commit

Files changed (2)
  1. README.md +84 -3
  2. bsky_data.py +160 -0
README.md CHANGED
@@ -1,3 +1,84 @@
- ---
- license: apache-2.0
- ---
+ ---
+ pretty_name: ∞ blue skies
+ language:
+ - en
+ - multilingual
+ license:
+ - mit
+ task_categories:
+ - text-generation
+ - text-classification
+ size_categories:
+ - n>1T
+ ---
+
+ # Infinite Blue Skies
+
+ A streaming dataset providing real-time access to public posts from the Bluesky social network via the AtProto API.
+
+ ## Dataset Summary
+
+ This dataset streams public posts from the Bluesky social network through the AtProto Firehose in real time. It is particularly useful for researchers and developers interested in social media analysis, content moderation, language modeling, and trend detection.
+
+ ## Supported Tasks and Leaderboards
+
+ The dataset can be used for various tasks, including:
+
+ - **Text Generation**: Training language models on social media content
+ - **Text Classification**: Content moderation, topic classification, sentiment analysis (see the sketch after this list)
+ - **Social Media Analysis**: Trend detection, user behavior analysis
+ - **Content Analysis**: Hashtag analysis, URL pattern analysis (see the sketch at the end of the usage section)
+
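+ For example, a first pass at sentiment classification over the live stream could look like the following. This is a minimal sketch, assuming `transformers` is installed; the default pipeline checkpoint is illustrative, and the `load_dataset` call mirrors the usage section below:
+
+ ```python
+ from itertools import islice
+
+ from datasets import load_dataset
+ from transformers import pipeline
+
+ dataset = load_dataset(
+     "serpxe/infinite_blue_skies",
+     streaming=True,
+     trust_remote_code=True,
+     split="train",
+ )
+
+ # Illustrative default model; swap in any text-classification checkpoint.
+ classifier = pipeline("sentiment-analysis")
+
+ # Label the first 20 posts, truncating long posts to fit the model.
+ for post in islice(dataset, 20):
+     result = classifier(post["text"][:512])[0]
+     print(f'{result["label"]} ({result["score"]:.2f}) | {post["text"][:60]}')
+ ```
+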
+ ## Dataset Structure
+
+ ### Data Instances
+
+ Each instance in the dataset represents a Bluesky post with the following fields:
+
+ ```json
+ {
+   "uri": "at://did:plc:..../app.bsky.feed.post/...",
+   "cid": "baf...",
+   "text": "The content of the post...",
+   "created_at": "2024-03-21T12:34:56.789Z",
+   "author_did": "did:plc:..."
+ }
+ ```
+
+ ### Data Fields
+
+ - `uri`: Unique identifier (AT URI) of the post
+ - `cid`: Content identifier (CID) of the post record
+ - `text`: Content of the post
+ - `created_at`: ISO 8601 timestamp of when the post was created
+ - `author_did`: Decentralized identifier (DID) of the author
+
+ ### Data Splits
+
+ This is a streaming dataset and does not have traditional splits. Data is accessed in real time through an iterator.
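+
+ If you need split-like windows anyway, `IterableDataset.take()` and `IterableDataset.skip()` carve them out lazily. A minimal sketch, assuming the default configuration; note that each derived dataset opens a fresh connection to the live stream, so the two windows are approximate rather than strictly disjoint:
+
+ ```python
+ from datasets import load_dataset
+
+ dataset = load_dataset(
+     "serpxe/infinite_blue_skies",
+     streaming=True,
+     trust_remote_code=True,
+     split="train",
+ )
+
+ # Lazily split the stream: roughly the first 100 posts vs. the rest.
+ pseudo_eval = dataset.take(100)
+ pseudo_train = dataset.skip(100)
+
+ for post in pseudo_eval:
+     print(post["uri"])
+ ```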
+
+ ## How to Use
+
+ This dataset is designed to be used with the Hugging Face Datasets library. Here's how to get started:
+
+ ```python
+ from datasets import load_dataset
+
+ dataset = load_dataset(
+     "serpxe/infinite_blue_skies",
+     streaming=True,
+     trust_remote_code=True,
+     split="train",
+     batch_size=5,  # forwarded to the loader config; posts are buffered in batches of 5
+ )
+
+ # Iterate one-by-one
+ for i, post in enumerate(dataset):
+     print(post)
+     if i == 9:
+         break
+ # Prints 10 posts
+
+ # Explicit iterator
+ iterable_dataset = iter(dataset)
+ for i in range(10):
+     print(next(iterable_dataset))
+ # Also prints 10 posts; the loader fetches them from the firehose in batches of 5
+ ```
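+
+ Building on the snippet above (it reuses the same `dataset` object), here is a hedged sketch of the hashtag analysis mentioned under Supported Tasks; the regex is a rough approximation of tag syntax, not the canonical Bluesky facet parser:
+
+ ```python
+ import re
+ from collections import Counter
+ from itertools import islice
+
+ hashtag_re = re.compile(r"#\w+")
+ counts = Counter()
+
+ # Tally hashtags over the next 200 posts from the stream.
+ for post in islice(dataset, 200):
+     counts.update(tag.lower() for tag in hashtag_re.findall(post["text"]))
+
+ print(counts.most_common(10))
+ ```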
bsky_data.py ADDED
@@ -0,0 +1,160 @@
+ from atproto import FirehoseSubscribeReposClient, parse_subscribe_repos_message, CAR, models
+ from datasets import GeneratorBasedBuilder, SplitGenerator, DatasetInfo, Features, Value, BuilderConfig
+ from typing import Any, Iterator, Dict, List
+ from dataclasses import dataclass
+ from queue import Queue, Empty, Full
+ from threading import Thread, Event
+ import time
+
+ @dataclass
+ class BskyConfig(BuilderConfig):
+     """BuilderConfig for Bluesky Posts."""
+     batch_size: int = 1        # How many posts to return per iteration
+     max_buffer_size: int = 10  # Maximum size of the buffer
+
+ class BskyPostsDataset(GeneratorBasedBuilder):
+     """Dataset for streaming Bluesky posts using AtProto Firehose."""
+
+     VERSION = "1.0.0"
+     BUILDER_CONFIG_CLASS = BskyConfig
+     BUILDER_CONFIGS = [
+         BskyConfig(name="default", description="Default configuration for Bluesky Posts dataset")
+     ]
+
+     def __init__(self, *args, **kwargs):
+         # Initialize thread-related attributes first
+         self._stop_event = Event()
+         self._firehose_thread = None
+         self._current_batch = []
+         self._post_buffer = None
+         self._client = None
+         # Then call super().__init__, which resolves self.config
+         super().__init__(*args, **kwargs)
+         # Finally initialize the queue with the resolved config
+         self._post_buffer = Queue(maxsize=self.config.max_buffer_size)
+
+     def _info(self) -> DatasetInfo:
+         return DatasetInfo(
+             description="A dataset of Bluesky posts fetched in real-time using the AtProto Firehose API.",
+             features=Features({
+                 "uri": Value("string"),
+                 "cid": Value("string"),
+                 "text": Value("string"),
+                 "created_at": Value("string"),
+                 "author_did": Value("string"),
+             }),
+             supervised_keys=None,
+             homepage="https://bsky.app",
+             citation="",
+         )
+
+     def _split_generators(self, dl_manager: Any) -> List[SplitGenerator]:
+         return [
+             SplitGenerator(
+                 name="train",
+                 gen_kwargs={"split": "train"},
+             ),
+         ]
+
+     def _handle_message(self, message) -> bool:
+         """Process one firehose message; return False to stop the client."""
+         try:
+             commit = parse_subscribe_repos_message(message)
+
+             if not isinstance(commit, models.ComAtprotoSyncSubscribeRepos.Commit):
+                 return True
+
+             car = CAR.from_bytes(commit.blocks)
+
+             for op in commit.ops:
+                 if (op.action == 'create' and
+                         op.path.startswith('app.bsky.feed.post')):
+                     try:
+                         record = car.blocks.get(op.cid)
+                         if not record or not record.get('text'):
+                             continue
+
+                         post_data = {
+                             "uri": f"at://{commit.repo}/{op.path}",
+                             "cid": str(op.cid),
+                             "text": record.get('text', ''),
+                             "created_at": record.get('createdAt', ''),
+                             "author_did": commit.repo,
+                         }
+
+                         if self._post_buffer is not None and not self._stop_event.is_set():
+                             try:
+                                 self._post_buffer.put(post_data, timeout=1)
+                                 # A full batch is buffered: signal the consumer
+                                 # and stop the firehose until the next fetch.
+                                 if self._post_buffer.qsize() >= self.config.batch_size:
+                                     self._stop_event.set()
+                                     return False
+                             except Full:
+                                 pass  # Buffer is full; drop this post.
+
+                     except Exception:
+                         pass  # Skip records that fail to decode.
+
+         except Exception:
+             pass  # Ignore malformed messages.
+         return True
+
+     def _cleanup_firehose(self):
+         """Clean up firehose resources."""
+         self._stop_event.set()
+         if self._firehose_thread and self._firehose_thread.is_alive():
+             self._firehose_thread.join(timeout=1)
+         self._firehose_thread = None
+
+     def _start_firehose(self) -> None:
+         """Start the firehose in a separate thread."""
+         self._stop_event.clear()
+         self._client = FirehoseSubscribeReposClient()
+
+         def on_message_handler(message):
+             if self._stop_event.is_set():
+                 return False
+             return self._handle_message(message)
+
+         self._firehose_thread = Thread(target=self._client.start, args=(on_message_handler,))
+         self._firehose_thread.daemon = True
+         self._firehose_thread.start()
+
+     def _generate_examples(self, split: str) -> Iterator[Dict]:
+         """Generate examples from the firehose buffer."""
+         try:
+             while True:
+                 self._current_batch = []
+
+                 # (Re)start the firehose; _handle_message stops it after each batch.
+                 if not self._firehose_thread or not self._firehose_thread.is_alive():
+                     self._start_firehose()
+                     time.sleep(0.1)
+
+                 try:
+                     while len(self._current_batch) < self.config.batch_size:
+                         try:
+                             post_data = self._post_buffer.get(timeout=5)
+                             self._current_batch.append(post_data)
+                         except Empty:
+                             if not self._current_batch:
+                                 continue  # Keep waiting for the first post.
+                             break  # Flush a partial batch after a lull.
+
+                     for post in self._current_batch:
+                         yield post["uri"], post
+
+                     self._current_batch = []
+
+                 except Exception:
+                     # Flush whatever was collected before the error.
+                     if self._current_batch:
+                         for post in self._current_batch:
+                             yield post["uri"], post
+                 finally:
+                     self._cleanup_firehose()
+
+         except GeneratorExit:
+             self._cleanup_firehose()
+
+     def __del__(self):
+         """Cleanup when the dataset is deleted."""
+         self._cleanup_firehose()
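+
+ if __name__ == "__main__":
+     # Minimal smoke test (a sketch, not required by `datasets`): drive the
+     # example generator directly instead of going through `load_dataset`.
+     # Requires live network access to the Bluesky firehose.
+     builder = BskyPostsDataset()
+     examples = builder._generate_examples(split="train")
+     for _ in range(3):
+         key, post = next(examples)
+         print(key, post["text"][:80])
+     builder._cleanup_firehose()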