zetavg committed
Commit 7dd8f96 • 1 parent: 3391607
latest peft makes model.save_pretrained in finetune.py save a 443 B adapter_model.bin, which is clearly incorrect (normally adapter_model.bin should be > 16 MB)
Files changed:
- LLaMA_LoRA.ipynb (+1 -1)
- README.md (+3 -3)
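For context on the symptom described in the commit message: a quick way to see whether a fine-tuning run actually saved LoRA weights is to check the size of the resulting adapter_model.bin. This is an illustrative check only; the output directory name below is a hypothetical example, not taken from this repository.

```bash
# Illustrative sanity check (the ./lora_output path is a hypothetical example):
# a healthy LoRA checkpoint is typically tens of MB, so a ~443 B
# adapter_model.bin means the adapter weights were effectively not saved.
ls -lh ./lora_output/adapter_model.bin
```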
LLaMA_LoRA.ipynb CHANGED

@@ -220,7 +220,7 @@
    "source": [
     "![ ! -d llama_lora ] && git clone -b {llama_lora_project_branch} --filter=tree:0 {llama_lora_project_url} llama_lora\n",
     "!cd llama_lora && git add --all && git stash && git fetch origin {llama_lora_project_branch} && git checkout {llama_lora_project_branch} && git reset origin/{llama_lora_project_branch} --hard\n",
-    "![ ! -f llama-lora-requirements-installed ] && cd llama_lora && pip install -r requirements.txt && touch ../llama-lora-requirements-installed"
+    "![ ! -f llama-lora-requirements-installed ] && cd llama_lora && pip install -r requirements.lock.txt && touch ../llama-lora-requirements-installed"
    ],
    "metadata": {
     "id": "JGYz2VDoAzC8"
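The notebook now installs from requirements.lock.txt instead of requirements.txt (the README below receives the same change), so dependencies such as peft stay pinned to versions known to produce a valid adapter_model.bin. As a rough sketch of how such a lock file is commonly produced (an assumed, generic workflow, not necessarily how this repository generated its requirements.lock.txt):

```bash
# Assumed, generic lock-file workflow (not taken from this commit):
# install the loosely-pinned dependencies, confirm fine-tuning saves a
# correctly sized adapter_model.bin, then freeze the exact versions.
pip install -r requirements.txt
pip freeze > requirements.lock.txt
```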
README.md CHANGED

@@ -56,7 +56,7 @@ file_mounts:
   # Clone the LLaMA-LoRA repo and install its dependencies.
   setup: |
     git clone https://github.com/zetavg/LLaMA-LoRA.git llama_lora
-    cd llama_lora && pip install -r requirements.txt
+    cd llama_lora && pip install -r requirements.lock.txt
     cd ..
     echo 'Dependencies installed.'

@@ -86,13 +86,13 @@ When you are done, run `sky stop <cluster_name>` to stop the cluster. To termina
 <summary>Prepare environment with conda</summary>

 ```bash
-conda create -y -n llama-lora-multitool
+conda create -y python=3.8 -n llama-lora-multitool
 conda activate llama-lora-multitool
 ```
 </details>

 ```bash
-pip install -r requirements.txt
+pip install -r requirements.lock.txt
 python app.py --data_dir='./data' --base_model='decapoda-research/llama-7b-hf' --share
 ```