---
dataset_info:
  features:
  - name: dates
    dtype: string
  - name: type
    struct:
    - name: authorAssociation
      dtype: string
    - name: comment
      dtype: bool
    - name: issue
      dtype: bool
  splits:
  - name: peft
    num_bytes: 132311
    num_examples: 3776
  - name: hub_docs
    num_bytes: 113192
    num_examples: 3122
  - name: evaluate
    num_bytes: 58174
    num_examples: 1674
  - name: huggingface_hub
    num_bytes: 235148
    num_examples: 6476
  - name: accelerate
    num_bytes: 285562
    num_examples: 7866
  - name: datasets
    num_bytes: 761951
    num_examples: 21461
  - name: optimum
    num_bytes: 171865
    num_examples: 4687
  - name: pytorch_image_models
    num_bytes: 140079
    num_examples: 3821
  - name: gradio
    num_bytes: 780098
    num_examples: 21500
  - name: tokenizers
    num_bytes: 179223
    num_examples: 5026
  - name: diffusers
    num_bytes: 953692
    num_examples: 27245
  - name: transformers
    num_bytes: 4185909
    num_examples: 116880
  - name: safetensors
    num_bytes: 42048
    num_examples: 1120
  download_size: 2466949
  dataset_size: 8039252
configs:
- config_name: default
  data_files:
  - split: peft
    path: data/peft-*
  - split: hub_docs
    path: data/hub_docs-*
  - split: evaluate
    path: data/evaluate-*
  - split: huggingface_hub
    path: data/huggingface_hub-*
  - split: accelerate
    path: data/accelerate-*
  - split: datasets
    path: data/datasets-*
  - split: optimum
    path: data/optimum-*
  - split: pytorch_image_models
    path: data/pytorch_image_models-*
  - split: gradio
    path: data/gradio-*
  - split: tokenizers
    path: data/tokenizers-*
  - split: diffusers
    path: data/diffusers-*
  - split: transformers
    path: data/transformers-*
  - split: safetensors
    path: data/safetensors-*
---
# Dataset Card for "issues"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)