Commit 990bec6 by winglian
Parent: 0c46806

docker layer caching, build w axolotl from base build

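In short: both CI workflows now cache Docker layers through the GitHub Actions cache backend, docker/Dockerfile builds on top of the published winglian/axolotl base image (selected via the BASE_TAG build arg) instead of downloading prebuilt wheels itself, and flash-attn / deepspeed move out of requirements.txt into a new 'extras' extra in setup.py.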
.github/workflows/base.yml CHANGED
@@ -9,6 +9,7 @@ on:
 jobs:
   build-base:
     if: github.repository_owner == 'OpenAccess-AI-Collective'
+    # this job needs to be run on self-hosted GPU runners...
     runs-on: self-hosted
     steps:
       - name: Checkout
@@ -33,3 +34,5 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.metadata.outputs.tags }}
           labels: ${{ steps.metadata.outputs.labels }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
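Note: the type=gha cache backend needs a Buildx builder (the default docker driver cannot export cache), so the job is assumed to already run a docker/setup-buildx-action step before the build; mode=max additionally exports intermediate layers rather than only the layers of the final image. A minimal sketch of the relevant steps, with illustrative step names and only the cache-related inputs spelled out:

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          cache-from: type=gha
          cache-to: type=gha,mode=max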
.github/workflows/main.yml CHANGED
@@ -8,8 +8,10 @@ on:
 
 jobs:
   build-axolotl:
-    if: github.repository_owner == 'OpenAccess-AI-Collective'
-    runs-on: self-hosted
+    # We specify this so it doesn't run automatically in our other github org
+    # that we use to build the base on self-hosted GPU runners
+    if: github.repository_owner == 'winglian'
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -29,7 +31,11 @@ jobs:
         uses: docker/build-push-action@v4
         with:
           context: .
+          build-args: |
+            BASE_TAG=dev
           file: ./docker/Dockerfile
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.metadata.outputs.tags }}
           labels: ${{ steps.metadata.outputs.labels }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
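The build-args block pairs with the ARG BASE_TAG=main declared before FROM in docker/Dockerfile below: this CI job builds the app image on top of winglian/axolotl:dev, while a plain build with no build arg falls back to the :main tag. A roughly equivalent local invocation (illustrative) would be docker build --build-arg BASE_TAG=dev -f docker/Dockerfile . The cache-from / cache-to pair enables the same GitHub Actions layer cache as in base.yml.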
docker/Dockerfile CHANGED
@@ -1,22 +1,16 @@
-FROM huggingface/transformers-pytorch-deepspeed-latest-gpu:latest
+ARG BASE_TAG=main
+FROM winglian/axolotl:$BASE_TAG
 
 ARG TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 8.6+PTX"
+
 RUN apt-get update && \
-    apt-get install -y build-essential ninja-build vim git-lfs && \
-    git lfs install --skip-repo && \
-    mkdir /tmp/wheels && \
-    cd /tmp/wheels && \
-    curl -L -O https://github.com/winglian/axolotl/raw/wheels/wheels/deepspeed-0.9.2%2B7ddc3b01-cp38-cp38-linux_x86_64.whl && \
-    curl -L -O https://github.com/winglian/axolotl/raw/wheels/wheels/flash_attn-1.0.4-cp38-cp38-linux_x86_64.whl && \
-    pip install deepspeed-0.9.2%2B7ddc3b01-cp38-cp38-linux_x86_64.whl && \
-    pip install flash_attn-1.0.4-cp38-cp38-linux_x86_64.whl && \
-    pip install "peft @ git+https://github.com/huggingface/peft.git@main" --force-reinstall --no-dependencies && \
-    pip install awscli
+    apt-get install -y vim \
 
 WORKDIR /workspace
+
+# The base image ships with `pydantic==1.8.2` which is not working
+RUN python3 -m pip install -U --no-cache-dir pydantic
+
 ARG REF=main
 RUN git clone https://github.com/winglian/axolotl && cd axolotl && git checkout $REF && \
     pip install -e .[int4]
-
-RUN pip3 install --force-reinstall https://download.pytorch.org/whl/nightly/cu117/torch-2.0.0.dev20230301%2Bcu117-cp38-cp38-linux_x86_64.whl --index-url https://download.pytorch.org/whl/nightly/cu117
-
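Two notes on the rewritten Dockerfile: an ARG declared before FROM is only in scope for the FROM line itself and would have to be re-declared after FROM to be referenced later, and the pydantic upgrade works around the base image pinning pydantic==1.8.2, as the inline comment says. The deepspeed / flash-attn wheels and the torch nightly pin are dropped here entirely; the winglian/axolotl base image built from docker/Dockerfile-base is expected to provide that stack, which is what lets this image build on a standard ubuntu-latest runner.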
 
 
 
docker/Dockerfile-base CHANGED
@@ -66,4 +66,7 @@ RUN git lfs install --skip-repo
 RUN pip3 install "peft @ git+https://github.com/huggingface/peft.git@main" \
         "accelerate @ git+https://github.com/huggingface/accelerate.git@main" \
         "transformers @ git+https://github.com/huggingface/transformers.git@main" && \
-    pip3 install awscli
+    pip3 install awscli && \
+    # The base image ships with `pydantic==1.8.2` which is not working
+    pip3 install -U --no-cache-dir pydantic
+
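The comment added inside the chained RUN is safe: the Dockerfile parser strips full-line # comments before resolving line continuations, so the trailing && \ still chains into the pip3 install -U --no-cache-dir pydantic command on the next line.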
requirements.txt CHANGED
@@ -9,7 +9,5 @@ datasets
 accelerate
 sentencepiece
 wandb
-flash-attn
-deepspeed
 einops
 xformers
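flash-attn and deepspeed are no longer installed by default from requirements.txt; they move into the optional 'extras' group added to setup.py below.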
setup.py CHANGED
@@ -23,5 +23,9 @@ setup(
         'int4_triton': [
             "alpaca_lora_4bit[triton] @ git+https://github.com/winglian/alpaca_lora_4bit.git@setup_pip",
         ],
+        'extras': [
+            'flash-attn',
+            'deepspeed',
+        ]
     },
 )
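With this, the heavy optional dependencies become opt-in: something like pip install -e .[extras] (or a combined pip install -e .[int4,extras]) pulls in flash-attn and deepspeed, while a plain pip install -e . no longer does.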