asoria (HF staff) committed
Commit 6d0709a
Parent(s): 421b068

Generate file to download

Files changed (2):
  1. .gitattributes +1 -0
  2. app.py +55 -12
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.ipynb filter=lfs diff=lfs merge=lfs -text
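
The added rule tracks notebook files through Git LFS alongside the existing binary patterns in this repo. As a rough illustration only (the pattern list and helper below are hypothetical, not part of the commit), the matching behaviour can be sketched with fnmatch:

from fnmatch import fnmatch

# Hypothetical sketch: LFS-tracked patterns after this commit,
# mirroring the .gitattributes entries shown above.
LFS_PATTERNS = ["*.zip", "*.zst", "*tfevents*", "*.ipynb"]

def is_lfs_tracked(filename: str) -> bool:
    # Simplified: real .gitattributes matching also handles anchored
    # paths, macros, and last-match-wins semantics.
    return any(fnmatch(filename, pattern) for pattern in LFS_PATTERNS)

print(is_lfs_tracked("asoria-en-text.ipynb"))  # True once *.ipynb is listed
print(is_lfs_tracked("app.py"))                # False
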
app.py CHANGED
@@ -3,8 +3,6 @@ from gradio_huggingfacehub_search import HuggingfaceHubSearch
 import nbformat as nbf
 from huggingface_hub import HfApi
 
-api = HfApi()
-
 def create_notebook_file(cell_commands, notebook_name="generated_notebook.ipynb"):
     nb = nbf.v4.new_notebook()
     nb['cells'] = [nbf.v4.new_code_cell(command) for command in cell_commands]
@@ -14,11 +12,27 @@ def create_notebook_file(cell_commands, notebook_name="generated_notebook.ipynb"):
 
     print(f"Notebook '{notebook_name}' created successfully.")
 
-def generate_notebook(dataset_id):
+def push_notebook(file_path, dataset_id, token):
+    api = HfApi(token=token)
+    api.upload_file(
+        path_or_fileobj=file_path,
+        path_in_repo="dataset_analysis.ipynb",
+        repo_id=dataset_id,
+        repo_type="dataset",
+    )
+    print("Notebook uploaded to Huggingface Hub.")
+    link = f"https://huggingface.co/datasets/{dataset_id}/blob/main/dataset_analyst.ipynb"
+    return f'<a target="_blank" href="{link}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">See notebook</a>'
+
+def generate_notebook(dataset_id, token):
+    api = HfApi(token=token)
+    # TODO: Handle auth error
+    # TODO: Get first config and split? or generate a dataframe per each split maybe?
     commands = [
-        f"from datasets import load_dataset",
-        f"dataset = load_dataset('{dataset_id}')",
-        f"dataset",
+        f"!pip install pandas",
+        f"import pandas as pd",
+        f"df = pd.read_parquet('hf://datasets/{dataset_id}/data/train-00000-of-00001.parquet')",
+        f"df.head()",
     ]
     notebook_name = f"{dataset_id.replace('/', '-')}.ipynb"
     create_notebook_file(commands, notebook_name=notebook_name)
@@ -28,11 +42,9 @@ def generate_notebook(dataset_id):
         repo_id="asoria/en-text",
         repo_type="dataset",
     )
+    # TODO: Handle permission error
     print("Notebook uploaded to Huggingface Hub.")
-    link = f"https://huggingface.co/datasets/{dataset_id}/blob/main/dataset_analyst.ipynb"
-    return f'<a target="_blank" href="{link}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">See notebook</a>'
-
-
+    return notebook_name
 
 with gr.Blocks() as demo:
     gr.Markdown("# 🤖 Dataset auto analyst creator 🕵️")
@@ -57,7 +69,38 @@ with gr.Blocks() as demo:
         """
         return gr.HTML(value=html_code)
 
-    generate_btn = gr.Button("Generate notebook")
+    generate_btn = gr.Button("Generate notebook and push to repo", visible=True)
+
+    download_link = gr.File(label="Download Notebook")
+    generate_btn.click(generate_notebook, inputs=[dataset_name], outputs=[download_link])
+    with gr.Row() as auth_page:
+        with gr.Column():
+            auth_title = gr.Markdown(
+                "Enter your token ([settings](https://huggingface.co/settings/tokens)):"
+            )
+            token_box = gr.Textbox("", label="token", placeholder="hf_xxx", type="password"
+            )
+            auth_error = gr.Markdown("", visible=False)
+
+    def auth(token):
+        if not token:
+            return {
+                auth_error: gr.Markdown(value="", visible=False),
+                push_btn: gr.Row(visible=False)
+            }
+        return {
+            auth_error: gr.Markdown(value="", visible=False),
+            push_btn: gr.Row(visible=True)
+        }
+
+
+    push_btn = gr.Button("Push notebook to repo", visible=False)
+    token_box.change(
+        auth,
+        inputs=token_box,
+        outputs=[auth_error, push_btn],
+    )
     output_lbl = gr.HTML(value="")
-    generate_btn.click(generate_notebook, inputs=[dataset_name], outputs=[output_lbl])
+
+    push_btn.click(push_notebook, inputs=[download_link, dataset_name, token_box], outputs=[output_lbl])
 demo.launch()
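
Taken together, the updated app builds a small pandas-preview notebook for a dataset, exposes it as a downloadable file, and pushes it to the dataset repo once a token is entered. Below is a minimal standalone sketch of that flow using the same nbformat and huggingface_hub calls as the diff; the dataset id, token, and single-shard parquet path are placeholders (the hard-coded shard mirrors the TODO in the commit):

import nbformat as nbf
from huggingface_hub import HfApi

dataset_id = "user/dataset"  # placeholder dataset id
token = "hf_xxx"             # placeholder write token

# Build the same kind of notebook generate_notebook() writes:
# install pandas, read the first parquet shard over hf://, preview it.
commands = [
    "!pip install pandas",
    "import pandas as pd",
    f"df = pd.read_parquet('hf://datasets/{dataset_id}/data/train-00000-of-00001.parquet')",
    "df.head()",
]
nb = nbf.v4.new_notebook()
nb["cells"] = [nbf.v4.new_code_cell(command) for command in commands]
notebook_name = f"{dataset_id.replace('/', '-')}.ipynb"
with open(notebook_name, "w") as f:
    nbf.write(nb, f)

# Upload to the dataset repo, as push_notebook() does once a token is provided.
api = HfApi(token=token)
api.upload_file(
    path_or_fileobj=notebook_name,
    path_in_repo="dataset_analysis.ipynb",
    repo_id=dataset_id,
    repo_type="dataset",
)

In the Space itself, generate_notebook() returns the notebook path so gr.File can offer it for download, and push_notebook() reuses that path plus the token from token_box for the upload.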