---
dataset_info:
  features:
  - name: dates
    dtype: string
  - name: type
    struct:
    - name: authorAssociation
      dtype: string
    - name: comment
      dtype: bool
    - name: issue
      dtype: bool
  splits:
  - name: peft
    num_bytes: 136951
    num_examples: 4018
  - name: hub_docs
    num_bytes: 112385
    num_examples: 3190
  - name: evaluate
    num_bytes: 59167
    num_examples: 1689
  - name: huggingface_hub
    num_bytes: 233351
    num_examples: 6701
  - name: accelerate
    num_bytes: 286487
    num_examples: 8152
  - name: datasets
    num_bytes: 759386
    num_examples: 21614
  - name: optimum
    num_bytes: 167553
    num_examples: 4824
  - name: pytorch_image_models
    num_bytes: 132620
    num_examples: 3848
  - name: gradio
    num_bytes: 812110
    num_examples: 22390
  - name: tokenizers
    num_bytes: 174301
    num_examples: 5073
  - name: diffusers
    num_bytes: 988818
    num_examples: 28327
  - name: transformers
    num_bytes: 4189676
    num_examples: 118517
  - name: safetensors
    num_bytes: 40320
    num_examples: 1162
  download_size: 2525218
  dataset_size: 8093125
configs:
- config_name: default
  data_files:
  - split: peft
    path: data/peft-*
  - split: hub_docs
    path: data/hub_docs-*
  - split: evaluate
    path: data/evaluate-*
  - split: huggingface_hub
    path: data/huggingface_hub-*
  - split: accelerate
    path: data/accelerate-*
  - split: datasets
    path: data/datasets-*
  - split: optimum
    path: data/optimum-*
  - split: pytorch_image_models
    path: data/pytorch_image_models-*
  - split: gradio
    path: data/gradio-*
  - split: tokenizers
    path: data/tokenizers-*
  - split: diffusers
    path: data/diffusers-*
  - split: transformers
    path: data/transformers-*
  - split: safetensors
    path: data/safetensors-*
---
# Dataset Card for "issues"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
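
A minimal loading sketch, assuming this dataset is hosted on the Hugging Face Hub (the repository id below is a placeholder). Each split in the config above corresponds to one repository (`peft`, `gradio`, `transformers`, ...), and each example has a `dates` string plus a `type` struct with `authorAssociation` (string), `comment` (bool) and `issue` (bool) fields:

```python
from datasets import load_dataset

# Placeholder repository id; replace with the actual Hub path of this dataset.
repo_id = "your-namespace/issues"

# Load a single split; split names match the repositories listed in the YAML header.
ds = load_dataset(repo_id, split="transformers")

example = ds[0]
print(example["dates"])                       # date string for this event
print(example["type"]["authorAssociation"])   # GitHub author association string
print(example["type"]["issue"], example["type"]["comment"])  # booleans: issue vs. comment
```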