Upload 8 files
- .gitattributes +5 -10
- .gitignore +2 -0
- README.md +48 -2
- config.json +39 -0
- dls_eng_Batch4.pkl +3 -0
- history_epoch1.csv +4 -0
- pytorch_model.bin +3 -0
- tokenizer.json +0 -0
.gitattributes
CHANGED
@@ -1,34 +1,29 @@
 *.7z filter=lfs diff=lfs merge=lfs -text
 *.arrow filter=lfs diff=lfs merge=lfs -text
 *.bin filter=lfs diff=lfs merge=lfs -text
+*.bin.* filter=lfs diff=lfs merge=lfs -text
 *.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
 *.ftz filter=lfs diff=lfs merge=lfs -text
 *.gz filter=lfs diff=lfs merge=lfs -text
 *.h5 filter=lfs diff=lfs merge=lfs -text
 *.joblib filter=lfs diff=lfs merge=lfs -text
 *.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
 *.model filter=lfs diff=lfs merge=lfs -text
 *.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.parquet filter=lfs diff=lfs merge=lfs -text
 *.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
 *.pt filter=lfs diff=lfs merge=lfs -text
 *.pth filter=lfs diff=lfs merge=lfs -text
 *.rar filter=lfs diff=lfs merge=lfs -text
-
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.tar.* filter=lfs diff=lfs merge=lfs -text
 *.tflite filter=lfs diff=lfs merge=lfs -text
 *.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
 *.xz filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
-*.
+*.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.csv filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
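These attribute patterns decide which of the uploaded files are stored through Git LFS rather than as regular blobs. Below is a minimal Python sketch (standard library only; `fnmatch` only approximates gitattributes glob semantics, which is fine for these flat patterns) that checks this upload's filenames against the newly added rules:

```python
from fnmatch import fnmatch

# Subset of the LFS rules from the updated .gitattributes above.
lfs_patterns = ["*.bin", "*.csv", "*.pkl", "*.zip", "*tfevents*"]

# Filenames from this upload.
files = [
    "pytorch_model.bin",
    "history_epoch1.csv",
    "dls_eng_Batch4.pkl",
    "config.json",
    "tokenizer.json",
]

for name in files:
    tracked = any(fnmatch(name, pattern) for pattern in lfs_patterns)
    print(f"{name}: {'LFS pointer' if tracked else 'regular git blob'}")
```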
.gitignore
ADDED
@@ -0,0 +1,2 @@
+*.zip
+*.csv
README.md
CHANGED
@@ -1,4 +1,50 @@
 ---
+language: en
+tags:
+- text-generation
 license: apache-2.0
-
-
+widget:
+- text: "Maximize your bedroom space without sacrificing style with the storage bed."
+- text: "Handcrafted of solid acacia in weathered gray, our round Jozy drop-leaf dining table is a space-saving."
+- text: "Our plush and luxurious Emmett modular sofa brings custom comfort to your living space."
+---
+
+## GPT2-Home
+
+This model is fine-tuned from GPT-2 on Amazon home-product metadata.
+It can generate descriptions for your **home** products from a text prompt.
+
+### Model description
+
+
+[GPT-2](https://openai.com/blog/better-language-models/) is a large [transformer](https://arxiv.org/abs/1706.03762)-based language model with 1.5 billion parameters, trained on a dataset of 8 million web pages. GPT-2 is trained with a simple objective: predict the next word, given all of the previous words within some text. The diversity of the dataset causes this simple goal to contain naturally occurring demonstrations of many tasks across diverse domains. GPT-2 is a direct scale-up of GPT, with more than 10X the parameters, trained on more than 10X the amount of data.
+
+### Live Demo
+To test the model with a custom configuration, please visit the [Demo](https://huggingface.co/spaces/HamidRezaAttar/gpt2-home).
+
+### Blog Post
+For more detailed information about the project's development, please refer to my [blog post](https://hamidrezaattar.github.io/blog/markdown/2022/02/17/gpt2-home.html).
+
+### How to use
+For the best experience and clean outputs, use the Live Demo mentioned above; you can also use the notebook in my [GitHub](https://github.com/HamidRezaAttar/GPT2-Home) repository.
+
+You can use this model directly with a pipeline for text generation.
+```python
+>>> from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+>>> tokenizer = AutoTokenizer.from_pretrained("HamidRezaAttar/gpt2-product-description-generator")
+>>> model = AutoModelForCausalLM.from_pretrained("HamidRezaAttar/gpt2-product-description-generator")
+>>> generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
+>>> generated_text = generator("This bed is very comfortable.", max_length=100)
+```
+
+### Citation info
+```bibtex
+@misc{GPT2-Home,
+  author = {HamidReza Fatollah Zadeh Attar},
+  title = {GPT2-Home the English home product description generator},
+  year = {2021},
+  publisher = {GitHub},
+  journal = {GitHub repository},
+  howpublished = {\url{https://github.com/HamidRezaAttar/GPT2-Home}},
+}
+```
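To make the README snippet above concrete, here is a hedged usage sketch. It assumes the hosted repo id from the snippet and passes `do_sample=True` to mirror the `task_specific_params` recorded in `config.json` below; `max_length=100` and the number of return sequences are illustrative choices, and the prompts are borrowed from the widget examples.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "HamidRezaAttar/gpt2-product-description-generator"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Prompts borrowed from the widget examples in the model card.
prompts = [
    "Maximize your bedroom space without sacrificing style with the storage bed.",
    "This bed is very comfortable.",
]

for prompt in prompts:
    # Sampling matches the model card's own generation settings.
    for out in generator(prompt, do_sample=True, max_length=100, num_return_sequences=2):
        print(out["generated_text"])
        print("-" * 40)
```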
config.json
ADDED
@@ -0,0 +1,39 @@
+{
+  "_name_or_path": "gpt2",
+  "activation_function": "gelu_new",
+  "architectures": [
+    "GPT2LMHeadModel"
+  ],
+  "attn_pdrop": 0.1,
+  "bos_token_id": 50256,
+  "embd_pdrop": 0.1,
+  "eos_token_id": 50256,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 768,
+  "n_head": 12,
+  "n_inner": null,
+  "n_layer": 12,
+  "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "summary_activation": null,
+  "summary_first_dropout": 0.1,
+  "summary_proj_to_labels": true,
+  "summary_type": "cls_index",
+  "summary_use_proj": true,
+  "task_specific_params": {
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 50
+    }
+  },
+  "torch_dtype": "float32",
+  "transformers_version": "4.12.3",
+  "use_cache": true,
+  "vocab_size": 50257
+}
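This is the standard GPT-2 "small" architecture (12 layers, 12 attention heads, 768-dimensional embeddings, 1024-token context, 50257-token vocabulary). As a sketch of how the file is consumed, assuming a local copy named `config.json`, the architecture can be rebuilt with `transformers`; note that the weights below are randomly initialized, while the fine-tuned ones ship in `pytorch_model.bin`.

```python
from transformers import GPT2Config, GPT2LMHeadModel

# Read the hyperparameters shown above from a local config.json.
config = GPT2Config.from_json_file("config.json")
print(config.n_layer, config.n_head, config.n_embd, config.vocab_size)  # 12 12 768 50257

# Build the architecture only; weights here are randomly initialized.
# The fine-tuned weights are loaded instead with
# GPT2LMHeadModel.from_pretrained(<repo id or local directory>).
model = GPT2LMHeadModel(config)
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")  # ~124M
```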
dls_eng_Batch4.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f209b0c3d8e5952c3b59cf3f63c366784b92704e7cc555623707d515164f008c
+size 138085231
history_epoch1.csv
ADDED
@@ -0,0 +1,4 @@
+epoch,train_loss,valid_loss,accuracy,perplexity,corpus_bleu,time
+0,3.172159194946289,3.6873743534088135,0.3663671314716339,39.939842224121094,0.15033567443004403,1:32:04
+1,2.736553907394409,3.388481378555298,0.41142186522483826,29.620935440063477,0.19465439777610855,1:32:59
+2,2.4418771266937256,3.2502505779266357,0.43725576996803284,25.796802520751953,0.22225003041097682,1:32:47
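The reported perplexity column is consistent with the loss columns: perplexity = exp(valid_loss), e.g. exp(3.6874) ≈ 39.94 for epoch 0. A small standard-library sketch, assuming a local copy of `history_epoch1.csv`, that recomputes it:

```python
import csv
import math

# Recompute perplexity from valid_loss for each row of history_epoch1.csv.
with open("history_epoch1.csv", newline="") as f:
    for row in csv.DictReader(f):
        valid_loss = float(row["valid_loss"])
        print(
            f"epoch {row['epoch']}: exp({valid_loss:.4f}) = {math.exp(valid_loss):.2f} "
            f"(reported {float(row['perplexity']):.2f})"
        )
```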
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8fc26b7d1d527276fd1e034c5de137c4288a33c156e16a5e71202346c522b78
+size 510403817
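Like `dls_eng_Batch4.pkl`, this file is committed as a Git LFS pointer: three `key value` lines recording the spec version, the SHA-256 of the real payload, and its size in bytes (510,403,817 ≈ 487 MiB here). A minimal sketch, assuming a local checkout where the LFS objects have not been pulled, that parses such a pointer:

```python
def read_lfs_pointer(path):
    """Parse a Git LFS pointer file: 'key value' lines (version, oid, size)."""
    pointer = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            pointer[key] = value
    return pointer

info = read_lfs_pointer("pytorch_model.bin")
print(info["oid"])                               # sha256:a8fc26b7d1d5...
print(f"{int(info['size']) / 1024**2:.1f} MiB")  # ~486.8 MiB payload
```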
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
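`tokenizer.json` is the serialized fast tokenizer inherited from the base `gpt2` model (byte-level BPE; its vocabulary should match `vocab_size: 50257` in `config.json`). A hedged sketch of loading it through the hosted repo id used in the README and tokenizing one of the widget prompts:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("HamidRezaAttar/gpt2-product-description-generator")
print(tokenizer.vocab_size)  # 50257, matching config.json

# Tokenize one of the widget prompts from the model card and decode it back.
text = "Maximize your bedroom space without sacrificing style with the storage bed."
ids = tokenizer(text)["input_ids"]
print(len(ids), ids[:8])
print(tokenizer.decode(ids))
```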