name: ci-cd
on:
  push:
    branches:
      - "main"
      - "dev"
jobs:
  build-axolotl:
    if: github.repository_owner == 'OpenAccess-AI-Collective'
    # this job needs to be run on self-hosted GPU runners...
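    # Each matrix entry below builds one image variant: cuda/pytorch select the
    # base image tag, and axolotl_extras (empty for the default build, e.g. gptq
    # for an extras build) is appended to both the base tag and the pushed tag.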
    strategy:
      matrix:
        include:
          - cuda: cu118
            cuda_version: 11.8.0
            pytorch: 2.0.0
            axolotl_extras:
          - cuda: cu118
            cuda_version: 11.8.0
            pytorch: 2.0.0
            axolotl_extras: gptq
          - cuda: cu117
            cuda_version: 11.7.0
            pytorch: 1.13.1
            axolotl_extras:
    runs-on: self-hosted
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: winglian/axolotl
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
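      # GitHub Actions expressions have no ternary operator; the
      # `axolotl_extras != '' && '-' || ''` idiom below appends a "-" separator
      # (and then the extras name) only when axolotl_extras is set.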
      - name: Build
        uses: docker/build-push-action@v4
        with:
          context: .
          build-args: |
            BASE_TAG=${{ github.ref_name }}-base-${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
          file: ./docker/Dockerfile
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.metadata.outputs.tags }}-${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
          labels: ${{ steps.metadata.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
  build-axolotl-runpod:
    needs: build-axolotl
    if: github.repository_owner == 'OpenAccess-AI-Collective'
    # this job needs to be run on self-hosted GPU runners...
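    # BASE_TAG in this job matches the tag pushed by build-axolotl (no "-base"
    # segment), so Dockerfile-runpod presumably layers the RunPod image on top
    # of that freshly built image; hence the `needs: build-axolotl` above.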
    strategy:
      matrix:
        include:
          - cuda: cu118
            cuda_version: 11.8.0
            pytorch: 2.0.0
            axolotl_extras:
          - cuda: cu118
            cuda_version: 11.8.0
            pytorch: 2.0.0
            axolotl_extras: gptq
          - cuda: cu117
            cuda_version: 11.7.0
            pytorch: 1.13.1
            axolotl_extras:
    runs-on: self-hosted
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: winglian/axolotl-runpod
      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build
        uses: docker/build-push-action@v4
        with:
          context: .
          build-args: |
            BASE_TAG=${{ github.ref_name }}-${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
          file: ./docker/Dockerfile-runpod
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.metadata.outputs.tags }}-${{ matrix.cuda }}-${{ matrix.pytorch }}${{ matrix.axolotl_extras != '' && '-' || '' }}${{ matrix.axolotl_extras }}
          labels: ${{ steps.metadata.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max