Commit: 395ee78
Parent(s): dbe4d33

Update app.py
app.py
CHANGED
@@ -8,7 +8,6 @@ if is_spaces:
     import spaces
     from huggingface_hub import whoami

-
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 import sys

@@ -31,13 +30,13 @@ from slugify import slugify
 from transformers import AutoProcessor, AutoModelForCausalLM

 if not is_spaces:
+    sys.path.insert(0, "ai-toolkit")
     from toolkit.job import get_job
     gr.OAuthProfile = None
     gr.OAuthToken = None

 MAX_IMAGES = 150

-
 def load_captioning(uploaded_images, concept_sentence):
     gr.Info("Images uploaded!")
     updates = []
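Note: a minimal sketch of what the added `sys.path.insert(0, "ai-toolkit")` line accomplishes when running outside Spaces. It assumes ai-toolkit has been cloned into an `ai-toolkit` folder next to `app.py` (the folder name comes from the diff; the clone's internal layout is assumed):

```py
# Sketch only: prepending the ai-toolkit clone to sys.path lets a local run
# resolve the `toolkit` package that ships inside that repository.
import sys

sys.path.insert(0, "ai-toolkit")   # search the clone before site-packages
from toolkit.job import get_job    # presumably resolves inside the ai-toolkit/ clone
```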
@@ -136,8 +135,6 @@ if is_spaces:
     run_captioning = spaces.GPU()(run_captioning)

 def start_training(
-    profile: Union[gr.OAuthProfile, None],
-    oauth_token: Union[gr.OAuthToken, None],
     lora_name,
     concept_sentence,
     steps,
@@ -147,6 +144,8 @@ def start_training(
     sample_1,
     sample_2,
     sample_3,
+    profile: Union[gr.OAuthProfile, None],
+    oauth_token: Union[gr.OAuthToken, None],
 ):
     if not lora_name:
         raise gr.Error("You forgot to insert your LoRA name! This name has to be unique.")
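For context on the two parameters that move to the end of the signature: Gradio fills arguments annotated with `gr.OAuthProfile` / `gr.OAuthToken` from the Hugging Face login session rather than from the event's `inputs`. A minimal, self-contained illustration of that pattern (not code from this repo, and it assumes OAuth is enabled for the Space):

```py
import gradio as gr
from typing import Union

def greet(name, profile: Union[gr.OAuthProfile, None]):
    # `profile` is injected by Gradio for signed-in users; it is not listed in
    # the `inputs` of the click event below.
    user = profile.username if profile else "anonymous"
    return f"Hello {name} (logged in as {user})"

with gr.Blocks() as demo:
    gr.LoginButton()
    name = gr.Textbox(label="Name")
    out = gr.Textbox(label="Greeting")
    gr.Button("Greet").click(greet, inputs=[name], outputs=[out])

demo.launch()
```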
@@ -154,7 +153,7 @@ def start_training(
     slugged_lora_name = slugify(lora_name)

     # Load the default config
-    with open("train_lora_flux_24gb.yaml" if is_spaces else "config/examples/train_lora_flux_24gb.yaml", "r") as f:
+    with open("train_lora_flux_24gb.yaml" if is_spaces else "ai-toolkit/config/examples/train_lora_flux_24gb.yaml", "r") as f:
         config = yaml.safe_load(f)

     # Update the config with user inputs
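The changed path feeds the usual load-then-override flow: the example YAML is parsed with `yaml.safe_load` and the UI values are then written into the resulting structure, as the "Update the config with user inputs" comment indicates. A rough sketch, assuming the file sits at the new `ai-toolkit/config/examples/` location shown above (the override key is a placeholder, not the real schema):

```py
import yaml

with open("ai-toolkit/config/examples/train_lora_flux_24gb.yaml", "r") as f:
    config = yaml.safe_load(f)   # YAML file -> nested Python dicts/lists

# UI values would then overwrite fields of the parsed structure, e.g.:
# config["name"] = slugged_lora_name   # placeholder key, for illustration only
```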
@@ -347,15 +346,15 @@ with gr.Blocks(theme=theme, css=css) as demo:
     gr.Markdown(
         f"""To use FLUX LoRA Ease locally with this UI, you can clone this repository (yes, HF Spaces are git repos!). You'll need ~23GB of VRAM
 ```bash
-git clone https://huggingface.co/spaces/
-cd flux-lora-
+git clone https://huggingface.co/spaces/autotrain-projects/flux-lora-ease
+cd flux-lora-ease
 ## Optional, start a venv environment ##
 python3 -m venv venv
 source venv/bin/activate
 # .\venv\Scripts\activate on windows
 # install torch first
 ## End of optional ##
-pip install requirements_local.txt
+pip install -r requirements_local.txt
 ```

 Then you can install ai-toolkit
@@ -373,7 +372,7 @@ with gr.Blocks(theme=theme, css=css) as demo:
 huggingface-cli login
 ```

-
+Finally, you can run FLUX LoRA Ease locally with a UI by doing a simple
 ```py
 python app.py
 ```