# Copyright Jiaqi Liu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
---
name: CI/CD

"on":
  pull_request:
  push:
    branches:
      - master

env:
  PYTHON_VERSION: "3.10"
  NEO4J_ENTERPRISE_VERSION: 5.24-enterprise
  NEO4J_URI: neo4j://localhost:7687
  NEO4J_DATABASE: neo4j
  # Authentication is disabled in CI (the Neo4j container runs with NEO4J_AUTH=none),
  # so these credentials are placeholders only
  NEO4J_USERNAME: not used
  NEO4J_PASSWORD: not used

jobs:
  cancel-previous:
    name: Cancel Previous Runs in Order to Allocate Action Resources Immediately for the Current Run
    runs-on: ubuntu-latest
    steps:
      - name: Cancel previous
        uses: styfle/cancel-workflow-action@0.10.1
        with:
          access_token: ${{ github.token }}

  yaml-lint:
    name: YAML Style Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actionshub/yamllint@main

  linkChecker:
    name: Link Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Link Checker
        uses: lycheeverse/lychee-action@v1.9.0
        with:
          fail: true

  code-style:
    name: Python Code Style Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: pip3 install -r requirements.txt
      - name: Check import orders
        run: isort --check .
      - name: pep8
        run: pycodestyle .

  unit-tests:
    name: Unit Tests
    needs: [yaml-lint, linkChecker, code-style]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: pip3 install -r requirements.txt
      - name: Run tests
        run: python -m unittest

  sync-to-huggingface-space:
    needs: unit-tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          lfs: true
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install dependencies
        run: pip3 install -r requirements.txt
      - name: Generate Hugging Face Datasets
        run: |
          cd huggingface
          python3 ./generate_datasets.py
      - name: Push to hub
        if: github.ref == 'refs/heads/master'
        # Force-push the regenerated LFS-tracked .jsonl datasets to the Hugging Face
        # dataset repo; the local master branch maps onto the Hub repo's main branch
        run: |
          git config --global user.name "QubitPi"
          git config --global user.email "jack20220723@gmail.com"
          git lfs install
          git lfs track "*-graph-data.jsonl"
          git add *-graph-data.jsonl
          git commit -m "Generate Hugging Face Datasets"
          git push https://QubitPi:$HF_TOKEN@huggingface.co/datasets/QubitPi/wilhelm-vocabulary master:main -f
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}

  docker:
    name: Test Docker Build, Run Acceptance Tests, and Publish Data Image to DockerHub
    needs: unit-tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      # The container starts in the background; the next step blocks until the
      # Neo4j HTTP endpoint on port 7474 becomes reachable
      - name: Start up a Neo4J database
        run: |
          docker run \
              --publish=7474:7474 \
              --publish=7687:7687 \
              --env=NEO4J_AUTH=none \
              --env=NEO4J_ACCEPT_LICENSE_AGREEMENT=yes \
              -e NEO4JLABS_PLUGINS=\[\"apoc\"\] \
              --name data-loader \
              neo4j:${{ env.NEO4J_ENTERPRISE_VERSION }} &
      - name: Wait for database to be ready
        uses: iFaxity/wait-on-action@v1
        with:
          resource: http://localhost:7474
      - name: Load all languages into the intermediate container
        run: |
          git clone https://github.com/QubitPi/wilhelm-data-loader.git ../wilhelm-data-loader
          cd ../wilhelm-data-loader
          pip3 install -r requirements.txt
          pip3 install -e .
          python3 wilhelm_vocabulary/load_all_in_parallel.py
        env:
          NEO4J_URI: ${{ env.NEO4J_URI }}
          NEO4J_DATABASE: ${{ env.NEO4J_DATABASE }}
          NEO4J_USERNAME: ${{ env.NEO4J_USERNAME }}
          NEO4J_PASSWORD: ${{ env.NEO4J_PASSWORD }}
      - name: Install dependencies
        run: pip3 install -r requirements.txt
      - name: Run acceptance tests
        run: behave
      - name: Copy over data onto host
        run: docker cp data-loader:/data .
      - name: Test image build
        uses: docker/build-push-action@v3
        with:
          context: .
          push: false
      - name: Login to DockerHub
        if: github.ref == 'refs/heads/master'
        uses: docker/login-action@v2
        with:
          username: jack20191124
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Push image to DockerHub
        if: github.ref == 'refs/heads/master'
        uses: docker/build-push-action@v3
        with:
          context: .
          push: true
          tags: jack20191124/wilhelm-vocabulary:latest
      - name: Push Image Description
        if: github.ref == 'refs/heads/master'
        uses: peter-evans/dockerhub-description@v4
        with:
          username: jack20191124
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          repository: jack20191124/wilhelm-vocabulary
          readme-filepath: README.md

  triggering:
    name: Triggering data model acceptance tests CI/CD
    needs: [sync-to-huggingface-space, docker]
    if: github.ref == 'refs/heads/master'
    runs-on: ubuntu-latest
    steps:
      - name: Trigger wilhelm-data-loader to load all vocabularies into graph database
        uses: peter-evans/repository-dispatch@v2
        with:
          token: ${{ secrets.QUBITPI_DOWNSTREAM_CICD_TRIGGERING_TOKEN }}
          repository: QubitPi/wilhelm-data-loader
          event-type: wilhelm-vocabulary-changes