Updated the table of content #30
Workflow file for this run
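The workflow loads shared configuration from a .env file, runs the producer and consumer test suites against RabbitMQ and PostgreSQL service containers, then combines their coverage data and publishes the report to GitHub Pages.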

name: Producer Consumer CI Test
on:
  push:
    branches: ["main", "dev"]
  workflow_dispatch:
jobs:
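  # load-dotenv: sources the repository's .env file and republishes every value
  # as a job output, so downstream jobs can read shared configuration through
  # needs.load-dotenv.outputs.* instead of parsing .env themselves.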
  load-dotenv:
    runs-on: ubuntu-latest
    outputs:
      target-file-dir: ${{ steps.load-dotenv.outputs.TARGET_FILE_DIR }}
      target-file-extension: ${{ steps.load-dotenv.outputs.TARGET_FILE_EXTENSION }}
      postgres-version-tag: ${{ steps.load-dotenv.outputs.POSTGRES_VERSION_TAG }}
      postgres-port: ${{ steps.load-dotenv.outputs.POSTGRES_PORT }}
      postgres-username: ${{ steps.load-dotenv.outputs.POSTGRES_USERNAME }}
      postgres-password: ${{ steps.load-dotenv.outputs.POSTGRES_PASSWORD }}
      postgres-database: ${{ steps.load-dotenv.outputs.POSTGRES_DATABASE }}
      rabbitmq-version-tag: ${{ steps.load-dotenv.outputs.RABBITMQ_VERSION_TAG }}
      rabbitmq-port: ${{ steps.load-dotenv.outputs.RABBITMQ_PORT }}
      rabbitmq-username: ${{ steps.load-dotenv.outputs.RABBITMQ_USERNAME }}
      rabbitmq-password: ${{ steps.load-dotenv.outputs.RABBITMQ_PASSWORD }}
      rabbitmq-queue-name: ${{ steps.load-dotenv.outputs.RABBITMQ_QUEUE_NAME }}
      rabbitmq-socket-timeout: ${{ steps.load-dotenv.outputs.RABBITMQ_SOCKET_TIMEOUT }}
      csv-parser-delimiter: ${{ steps.load-dotenv.outputs.CSV_PARSER_DELIMITER }}
      csv-parser-file-extension: ${{ steps.load-dotenv.outputs.CSV_PARSER_FILE_EXTENSION }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Load dotenv
        id: load-dotenv
        run: |
          set -o allexport
          source .env
          set +o allexport
          echo "TARGET_FILE_DIR=$TARGET_FILE_DIR" >> $GITHUB_OUTPUT
          echo "TARGET_FILE_EXTENSION=$TARGET_FILE_EXTENSION" >> $GITHUB_OUTPUT
          echo "POSTGRES_VERSION_TAG=$POSTGRES_VERSION_TAG" >> $GITHUB_OUTPUT
          echo "POSTGRES_PORT=$POSTGRES_PORT" >> $GITHUB_OUTPUT
          echo "POSTGRES_USERNAME=$POSTGRES_USERNAME" >> $GITHUB_OUTPUT
          echo "POSTGRES_PASSWORD=$POSTGRES_PASSWORD" >> $GITHUB_OUTPUT
          echo "POSTGRES_DATABASE=$POSTGRES_DATABASE" >> $GITHUB_OUTPUT
          echo "RABBITMQ_VERSION_TAG=$RABBITMQ_VERSION_TAG" >> $GITHUB_OUTPUT
          echo "RABBITMQ_PORT=$RABBITMQ_PORT" >> $GITHUB_OUTPUT
          echo "RABBITMQ_USERNAME=$RABBITMQ_USERNAME" >> $GITHUB_OUTPUT
          echo "RABBITMQ_PASSWORD=$RABBITMQ_PASSWORD" >> $GITHUB_OUTPUT
          echo "RABBITMQ_QUEUE_NAME=$RABBITMQ_QUEUE_NAME" >> $GITHUB_OUTPUT
          echo "RABBITMQ_SOCKET_TIMEOUT=$RABBITMQ_SOCKET_TIMEOUT" >> $GITHUB_OUTPUT
          echo "CSV_PARSER_DELIMITER=$CSV_PARSER_DELIMITER" >> $GITHUB_OUTPUT
          echo "CSV_PARSER_FILE_EXTENSION=$CSV_PARSER_FILE_EXTENSION" >> $GITHUB_OUTPUT
  test-producer:
    needs: load-dotenv
    runs-on: ubuntu-latest
    env:
      WATCH_FILE_PATTERNS: |
        producer/**/*.py
        producer/requirements-dev.txt
      COVERAGE_FILE: .coverage_producer
      WORKDIR: producer
    outputs:
      coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
      coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
    services:
      rabbitmq:
        image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
        env:
          RABBITMQ_DEFAULT_USER: ${{ needs.load-dotenv.outputs.rabbitmq-username }}
          RABBITMQ_DEFAULT_PASS: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
        options: >-
          --health-cmd "rabbitmq-diagnostics -q check_running"
          --health-interval 5s
          --health-timeout 30s
          --health-retries 3
        ports:
          - ${{ needs.load-dotenv.outputs.rabbitmq-port }}:5672
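    # The producer coverage data file is cached keyed on a hash of the watched
    # sources; on a cache hit the setup, install and test steps below are
    # skipped and the cached .coverage_producer is reused by the coverage job.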
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/cache@v2
        id: cache
        with:
          path: ${{ env.COVERAGE_FILE }}
          key: ${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
          restore-keys: |
            ${{ runner.os }}-coverage-producer-
      - uses: actions/setup-python@v4
        if: steps.cache.outputs.cache-hit != 'true'
        with:
          python-version: '3.11'
          cache: 'pip'
          cache-dependency-path: ${{ env.WORKDIR }}/requirements-dev.txt
      - name: Install dependencies
        if: steps.cache.outputs.cache-hit != 'true'
        working-directory: ${{ env.WORKDIR }}
        run: pip install -r requirements-dev.txt
      - name: Run tests
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          coverage run -m pytest -v producer/tests
        env:
          RABBITMQ_HOST: localhost
          RABBITMQ_PORT: ${{ needs.load-dotenv.outputs.rabbitmq-port }}
          RABBITMQ_USERNAME: ${{ needs.load-dotenv.outputs.rabbitmq-username }}
          RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
          RABBITMQ_QUEUE_NAME: ${{ needs.load-dotenv.outputs.rabbitmq-queue-name }}
          RABBITMQ_SOCKET_TIMEOUT: ${{ needs.load-dotenv.outputs.rabbitmq-socket-timeout }}
          TARGET_FILE_DIR: ${{ needs.load-dotenv.outputs.target-file-dir }}
          TARGET_FILE_EXTENSION: ${{ needs.load-dotenv.outputs.target-file-extension }}
      - name: Output coverage file
        id: output-coverage-file
        run: |
          echo "COVERAGE_FILE_CACHE_PATH=${{ env.COVERAGE_FILE }}" >> $GITHUB_OUTPUT
          echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
  test-consumer:
    needs: load-dotenv
    runs-on: ubuntu-latest
    env:
      WATCH_FILE_PATTERNS: |
        consumer/**/*.py
        consumer/requirements-dev.txt
      COVERAGE_FILE: .coverage_consumer
      WORKDIR: consumer
    outputs:
      coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
      coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
    services:
      rabbitmq:
        image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
        env:
          RABBITMQ_DEFAULT_USER: ${{ needs.load-dotenv.outputs.rabbitmq-username }}
          RABBITMQ_DEFAULT_PASS: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
        options: >-
          --health-cmd "rabbitmq-diagnostics -q check_running"
          --health-interval 5s
          --health-timeout 30s
          --health-retries 3
        ports:
          - ${{ needs.load-dotenv.outputs.rabbitmq-port }}:5672
      postgres:
        image: postgres:${{ needs.load-dotenv.outputs.postgres-version-tag }}
        env:
          POSTGRES_USER: ${{ needs.load-dotenv.outputs.postgres-username }}
          POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
          POSTGRES_DB: ${{ needs.load-dotenv.outputs.postgres-database }}
        options: >-
          --health-cmd pg_isready
          --health-interval 5s
          --health-timeout 30s
          --health-retries 3
        ports:
          - ${{ needs.load-dotenv.outputs.postgres-port }}:5432
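    # Both services are reached on localhost through the host ports mapped
    # above, which is how the test environment variables below are wired up.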
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/cache@v2
        id: cache
        with:
          path: ${{ env.COVERAGE_FILE }}
          key: ${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
          restore-keys: |
            ${{ runner.os }}-coverage-consumer-
      - uses: actions/setup-python@v4
        if: steps.cache.outputs.cache-hit != 'true'
        with:
          python-version: '3.11'
          cache: 'pip'
          cache-dependency-path: ${{ env.WORKDIR }}/requirements-dev.txt
      - name: Install dependencies
        if: steps.cache.outputs.cache-hit != 'true'
        working-directory: ${{ env.WORKDIR }}
        run: pip install -r requirements-dev.txt
      - name: Run tests
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          coverage run -m pytest -v consumer/tests
        env:
          POSTGRES_HOST: localhost
          POSTGRES_PORT: ${{ needs.load-dotenv.outputs.postgres-port }}
          POSTGRES_USERNAME: ${{ needs.load-dotenv.outputs.postgres-username }}
          POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
          POSTGRES_DATABASE: ${{ needs.load-dotenv.outputs.postgres-database }}
          RABBITMQ_HOST: localhost
          RABBITMQ_PORT: ${{ needs.load-dotenv.outputs.rabbitmq-port }}
          RABBITMQ_USERNAME: ${{ needs.load-dotenv.outputs.rabbitmq-username }}
          RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
          RABBITMQ_QUEUE_NAME: ${{ needs.load-dotenv.outputs.rabbitmq-queue-name }}
          RABBITMQ_SOCKET_TIMEOUT: ${{ needs.load-dotenv.outputs.rabbitmq-socket-timeout }}
          CSV_PARSER_DELIMITER: ${{ needs.load-dotenv.outputs.csv-parser-delimiter }}
          CSV_PARSER_FILE_EXTENSION: ${{ needs.load-dotenv.outputs.csv-parser-file-extension }}
      - name: Output coverage file
        id: output-coverage-file
        run: |
          echo "COVERAGE_FILE_CACHE_PATH=${{ env.COVERAGE_FILE }}" >> $GITHUB_OUTPUT
          echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
  coverage:
    needs: [test-producer, test-consumer]
    runs-on: ubuntu-latest
    permissions:
      contents: write
      id-token: write
      pages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Retrieve producer coverage file
        uses: actions/cache@v2
        id: producer-cache
        with:
          path: ${{ needs.test-producer.outputs.coverage-file-cache-path }}
          key: ${{ needs.test-producer.outputs.coverage-file-cache-key }}
          restore-keys: |
            ${{ runner.os }}-coverage-producer-
      - name: Retrieve consumer coverage file
        uses: actions/cache@v2
        id: consumer-cache
        with:
          path: ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
          key: ${{ needs.test-consumer.outputs.coverage-file-cache-key }}
          restore-keys: |
            ${{ runner.os }}-coverage-consumer-
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: pip install coverage
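      # coverage combine merges the restored .coverage_producer and
      # .coverage_consumer data files into a single .coverage database, which
      # the report and HTML steps below read from.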
      - name: Combine coverage files
        run: |
          coverage combine ${{ needs.test-producer.outputs.coverage-file-cache-path }} ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
      - name: Generate coverage report
        run: |
          coverage report --omit="*/tests/*" -m
          coverage html --omit="*/tests/*"
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v1
        with:
          path: ./htmlcov/
      - name: Deploy to GitHub Pages
        uses: actions/deploy-pages@v2
        id: deployment
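      # The remaining steps regenerate coverage.svg and commit and push it back
      # to the branch only when the badge actually changed.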
      - name: Coverage Badge
        uses: tj-actions/coverage-badge-py@v2
      - name: Verify Changed files
        uses: tj-actions/verify-changed-files@v16
        id: verify-changed-files
        with:
          files: coverage.svg
      - name: Commit files
        if: steps.verify-changed-files.outputs.files_changed == 'true'
        run: |
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"
          git add coverage.svg
          git commit -m "Updated coverage.svg"
      - name: Push changes
        if: steps.verify-changed-files.outputs.files_changed == 'true'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: ${{ github.ref }}