Updated the CI/CD test pipeline
alex-au-922 committed Dec 2, 2023
1 parent 2ec6ac2 commit 0460810
Showing 34 changed files with 220 additions and 99 deletions.
Binary file added .coverage
4 changes: 2 additions & 2 deletions .env
@@ -1,12 +1,12 @@
POSTGRES_VERSION_TAG=15.3-alpine3.17
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_USERNAME=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DATABASE=records
POSTGRES_BATCH_UPSERT_SIZE=1000

RABBITMQ_VERSION_TAG=3.12.10-management
RABBITMQ_USER=rabbitmq
RABBITMQ_USERNAME=rabbitmq
RABBITMQ_PASSWORD=rabbitmq
RABBITMQ_PORT=5672
RABBITMQ_WEBAPP_PORT=15672
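The .env keys POSTGRES_USER and RABBITMQ_USER are renamed to POSTGRES_USERNAME and RABBITMQ_USERNAME, and docker-compose.yml, docker-compose.test.yml, and the Makefile below are updated to match. A minimal sketch of how a Python config module could read the renamed variables follows; the class and attribute names are illustrative assumptions, not code from this repository.

import os

# Sketch only: class and attribute names are assumed, not taken from the repo.
class PostgresConfig:
    HOST = os.environ.get("POSTGRES_HOST", "localhost")
    PORT = int(os.environ.get("POSTGRES_PORT", "5432"))
    USERNAME = os.environ["POSTGRES_USERNAME"]  # renamed from POSTGRES_USER
    PASSWORD = os.environ["POSTGRES_PASSWORD"]
    DATABASE = os.environ["POSTGRES_DATABASE"]

class RabbitMQConfig:
    HOST = os.environ.get("RABBITMQ_HOST", "localhost")
    PORT = int(os.environ.get("RABBITMQ_PORT", "5672"))
    USERNAME = os.environ["RABBITMQ_USERNAME"]  # renamed from RABBITMQ_USER
    PASSWORD = os.environ["RABBITMQ_PASSWORD"]
    QUEUE = os.environ.get("QUEUE_NAME", "filenames")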
148 changes: 135 additions & 13 deletions .github/workflows/test.yml
@@ -36,13 +36,18 @@ jobs:
echo "RABBITMQ_USER=$RABBITMQ_USER" >> $GITHUB_OUTPUT
echo "RABBITMQ_PASSWORD=$RABBITMQ_PASSWORD" >> $GITHUB_OUTPUT
echo "QUEUE_NAME=$QUEUE_NAME" >> $GITHUB_OUTPUT
test:
test-producer:
needs: load-dotenv
runs-on: ubuntu-latest
permissions:
pages: write
contents: write
id-token: write
env:
WATCH_FILE_PATTERNS: |
producer/**/*.py
producer/requirements-dev.txt
COVERAGE_FILE: .coverage_producer
WORKDIR: producer
outputs:
coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
services:
rabbitmq:
image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
@@ -63,18 +68,96 @@
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: |
producer/requirements-dev.txt
consumer/requirements-dev.txt
cache-dependency-path: ${{env.WORKDIR}}/requirements-dev.txt
- uses: actions/cache@v2
id: cache
with:
path: ${{env.COVERAGE_FILE}}
key: ${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
restore-keys: |
${{ runner.os }}-coverage-producer-
- name: Install dependencies
if: steps.cache.outputs.cache-hit != 'true'
working-directory: ${{env.WORKDIR}}
run: pip install -r requirements-dev.txt
- name: Run tests
run: |
pip install -r producer/requirements-dev.txt
pip install -r consumer/requirements-dev.txt
coverage run -m pytest -v producer/tests
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: ${{ needs.load-dotenv.outputs.postgres-port }}
POSTGRES_USER: ${{ needs.load-dotenv.outputs.postgres-user }}
POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
POSTGRES_DATABASE: ${{ needs.load-dotenv.outputs.postgres-database }}
RABBITMQ_HOST: localhost
RABBITMQ_PORT: ${{ needs.load-dotenv.outputs.rabbitmq-port }}
RABBITMQ_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
QUEUE_NAME: ${{ needs.load-dotenv.outputs.queue-name }}
- name: Output coverage file
id: output-coverage-file
if: steps.cache.outputs.cache-hit != 'true'
run: |
echo "COVERAGE_FILE_CACHE_PATH=${{env.COVERAGE_FILE}}" >> $GITHUB_OUTPUT
echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
test-consumer:
needs: load-dotenv
runs-on: ubuntu-latest
env:
WATCH_FILE_PATTERNS: |
consumer/**/*.py
consumer/requirements-dev.txt
COVERAGE_FILE: .coverage_consumer
WORKDIR: consumer
outputs:
coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
services:
rabbitmq:
image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
env:
RABBITMQ_DEFAULT_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
RABBITMQ_DEFAULT_PASS: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
options: >-
--health-cmd "rabbitmq-diagnostics -q check_running"
--health-interval 5s
--health-timeout 30s
--health-retries 3
ports:
- ${{ needs.load-dotenv.outputs.rabbitmq-port }}:5672
postgres:
image: postgres:${{ needs.load-dotenv.outputs.postgres-version-tag }}
env:
POSTGRES_USER: ${{ needs.load-dotenv.outputs.postgres-user }}
POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
POSTGRES_DB: ${{ needs.load-dotenv.outputs.postgres-database }}
options: >-
--health-cmd pg_isready
--health-interval 5s
--health-timeout 30s
--health-retries 3
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: ${{env.WORKDIR}}/requirements-dev.txt
- uses: actions/cache@v2
id: cache
with:
path: ${{env.COVERAGE_FILE}}
key: ${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
restore-keys: |
${{ runner.os }}-coverage-consumer-
- name: Install dependencies
if: steps.cache.outputs.cache-hit != 'true'
working-directory: ${{env.WORKDIR}}
run: pip install -r requirements-dev.txt
- name: Run tests
run: |
coverage run -m pytest -v producer/tests consumer/tests
coverage html
coverage report -m
coverage run -m pytest -v consumer/tests
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: ${{ needs.load-dotenv.outputs.postgres-port }}
@@ -86,6 +169,45 @@
RABBITMQ_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
QUEUE_NAME: ${{ needs.load-dotenv.outputs.queue-name }}
- name: Output coverage file
id: output-coverage-file
if: steps.cache.outputs.cache-hit != 'true'
run: |
echo "COVERAGE_FILE_CACHE_PATH=${{env.COVERAGE_FILE}}" >> $GITHUB_OUTPUT
echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
coverage:
needs: [test-producer, test-consumer]
runs-on: ubuntu-latest
permissions:
contents: write
id-token: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Retrieve producer coverage file
uses: actions/cache@v2
id: producer-cache
with:
path: ${{ needs.test-producer.outputs.coverage-file-cache-path }}
key: ${{ needs.test-producer.outputs.coverage-file-cache-key }}
restore-keys: |
${{ runner.os }}-coverage-producer-
- name: Retrieve consumer coverage file
uses: actions/cache@v2
id: consumer-cache
with:
path: ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
key: ${{ needs.test-consumer.outputs.coverage-file-cache-key }}
restore-keys: |
${{ runner.os }}-coverage-consumer-
- name: Combine coverage files
run: |
coverage combine ${{ needs.test-producer.outputs.coverage-file-cache-path }} ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
- name: Generate coverage report
run: |
coverage report -m
coverage html
- name: upload artifact
uses: actions/upload-pages-artifact@v1
with:
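The workflow now runs producer and consumer tests as separate jobs, caches each job's coverage data file, and merges them in a dedicated coverage job using the coverage CLI (coverage combine, coverage report -m, coverage html). For reference, the same combine-and-report step is sketched below with coverage.py's Python API; the data file names mirror the COVERAGE_FILE values above, and the block is an illustration rather than part of the workflow.

from coverage import Coverage

# Merge the per-service data files produced by the test jobs, then emit the
# same terminal and HTML reports as the workflow's coverage job.
cov = Coverage()
cov.combine([".coverage_producer", ".coverage_consumer"], keep=True)
cov.save()
cov.report(show_missing=True)  # equivalent to: coverage report -m
cov.html_report()              # equivalent to: coverage html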
31 changes: 30 additions & 1 deletion Makefile
@@ -1,3 +1,8 @@
include .env

POSTGRES_HOST=localhost
RABBITMQ_HOST=localhost

build:
docker compose build
up:
@@ -13,5 +18,29 @@ export_requirements:
poetry export -f requirements.txt --output requirements.txt --without-hashes && \
cd ../consumer && \
poetry export -f requirements.txt --output requirements.txt --without-hashes
test_env:
setup_test_env:
docker compose -f docker-compose.test.yml up -d
test_producer:
export POSTGRES_HOST=localhost && \
export POSTGRES_PORT=$(POSTGRES_PORT) && \
export POSTGRES_USERNAME=$(POSTGRES_USERNAME) && \
export POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) && \
export POSTGRES_DATABASE=$(POSTGRES_DB) && \
export RABBITMQ_HOST=localhost && \
export RABBITMQ_PORT=$(RABBITMQ_PORT) && \
export RABBITMQ_USERNAME=$(RABBITMQ_USERNAME) && \
export RABBITMQ_PASSWORD=$(RABBITMQ_PASSWORD) && \
export QUEUE_NAME=$(QUEUE_NAME) && \
COVERAGE_FILE=.coverage_producer coverage run -m pytest -vx producer/tests
test_consumer:
export POSTGRES_HOST=localhost && \
export POSTGRES_PORT=$(POSTGRES_PORT) && \
export POSTGRES_USERNAME=$(POSTGRES_USERNAME) && \
export POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) && \
export POSTGRES_DATABASE=$(POSTGRES_DB) && \
export RABBITMQ_HOST=localhost && \
export RABBITMQ_PORT=$(RABBITMQ_PORT) && \
export RABBITMQ_USERNAME=$(RABBITMQ_USERNAME) && \
export RABBITMQ_PASSWORD=$(RABBITMQ_PASSWORD) && \
export QUEUE_NAME=$(QUEUE_NAME) && \
COVERAGE_FILE=.coverage_consumer coverage run -m pytest -vx consumer/tests
5 changes: 4 additions & 1 deletion README.md
@@ -1 +1,4 @@
# producer_consumer_csv
# producer_consumer_csv

![Build Status](https://github.com/github/docs/actions/workflows/test.yml/badge.svg)
![Code Coverage](./coverage.svg)
Empty file added __init__.py
Empty file.
2 changes: 0 additions & 2 deletions consumer/reuqirements-dev.txt

This file was deleted.

2 changes: 1 addition & 1 deletion consumer/src/adapters/fetch_filenames/rabbitmq.py
@@ -1,5 +1,5 @@
from contextlib import contextmanager
from usecases import FetchFilenameClient
from ...usecases import FetchFilenameClient
import pika
from pika.adapters.blocking_connection import BlockingChannel
from pika.spec import Basic, BasicProperties
4 changes: 2 additions & 2 deletions consumer/src/adapters/file_parse_iot_records/csv.py
@@ -3,8 +3,8 @@
from decimal import Decimal
from typing import Iterator, Optional, overload, Sequence
from typing_extensions import override
from entities import IOTRecord
from usecases import FileParseIOTRecordsClient
from ...entities import IOTRecord
from ...usecases import FileParseIOTRecordsClient
import csv
import logging

4 changes: 2 additions & 2 deletions consumer/src/adapters/upsert_iot_records/postgres.py
@@ -4,8 +4,8 @@
from typing_extensions import override
import psycopg2
from psycopg2.extensions import connection
from usecases import UpsertIOTRecordsClient
from entities import IOTRecord
from ...usecases import UpsertIOTRecordsClient
from ...entities import IOTRecord
from collections.abc import Callable

T = TypeVar("T")
10 changes: 5 additions & 5 deletions consumer/src/deployments/scripts/main.py
@@ -1,10 +1,10 @@
from adapters.fetch_filenames.rabbitmq import RabbitMQFetchFilenamesClient
from adapters.file_parse_iot_records.csv import CSVParseIOTRecordsClient
from adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
from config import RabbitMQConfig, PostgresConfig, CSVParserConfig
from ...adapters.fetch_filenames.rabbitmq import RabbitMQFetchFilenamesClient
from ...adapters.file_parse_iot_records.csv import CSVParseIOTRecordsClient
from ...adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
from .config import RabbitMQConfig, PostgresConfig, CSVParserConfig
from setup_logging import setup_logging
import logging
from entities import IOTRecord
from ...entities import IOTRecord

setup_logging()

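The consumer modules switch from absolute imports (from entities import IOTRecord) to package-relative imports (from ...entities import IOTRecord), which fits the empty __init__.py files added in this commit and stops the code from depending on src/ being on sys.path. The runnable snippet below illustrates the behaviour with a throwaway package; the package and module names are hypothetical and exist only to demonstrate the import style.

import importlib
import pathlib
import sys
import tempfile

# Build a tiny throwaway package: mypkg/entities.py and mypkg/usecases.py,
# where usecases uses a relative import just like the consumer code now does.
with tempfile.TemporaryDirectory() as tmp:
    pkg = pathlib.Path(tmp) / "mypkg"
    pkg.mkdir()
    (pkg / "__init__.py").write_text("")
    (pkg / "entities.py").write_text("class IOTRecord: ...\n")
    (pkg / "usecases.py").write_text("from .entities import IOTRecord\n")

    sys.path.insert(0, tmp)  # only the package root is needed, not mypkg itself
    mod = importlib.import_module("mypkg.usecases")
    print(mod.IOTRecord)     # <class 'mypkg.entities.IOTRecord'>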
2 changes: 1 addition & 1 deletion consumer/src/usecases/file_parse_iot_records.py
@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
from typing import Iterator, overload, Sequence
from entities import IOTRecord
from ..entities import IOTRecord


class FileParseIOTRecordsClient(ABC):
2 changes: 1 addition & 1 deletion consumer/src/usecases/upsert_iot_records.py
@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
from typing import overload, Sequence
from entities import IOTRecord
from ..entities import IOTRecord


class UpsertIOTRecordsClient(ABC):
Empty file.
Empty file.
Empty file.
Empty file.
@@ -0,0 +1,2 @@
def test_helloworld():
assert True
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
@@ -0,0 +1 @@
from src.adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
4 changes: 2 additions & 2 deletions docker-compose.test.yml
@@ -10,7 +10,7 @@ services:
POSTGRES_VERSION_TAG: ${POSTGRES_VERSION_TAG}
environment:
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_USER: ${POSTGRES_USERNAME}
POSTGRES_DB: ${POSTGRES_DATABASE}
ports:
- ${POSTGRES_PORT}:5432
@@ -19,7 +19,7 @@
image: rabbitmq:${RABBITMQ_VERSION_TAG}
container_name: records_rabbitmq
environment:
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER}
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME}
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
ports:
- ${RABBITMQ_WEBAPP_PORT}:15672
4 changes: 2 additions & 2 deletions docker-compose.yml
@@ -10,7 +10,7 @@ services:
POSTGRES_VERSION_TAG: ${POSTGRES_VERSION_TAG}
environment:
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_USER: ${POSTGRES_USERNAME}
POSTGRES_DB: ${POSTGRES_DATABASE}
ports:
- ${POSTGRES_PORT}:5432
@@ -19,7 +19,7 @@
image: rabbitmq:${RABBITMQ_VERSION_TAG}
container_name: records_rabbitmq
environment:
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER}
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME}
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
ports:
- ${RABBITMQ_WEBAPP_PORT}:15672
@@ -1,40 +1,32 @@
from src.adapters.publish_filenames.rabbitmq import RabbitMQPublishFilenamesClient
from src.deployments.script.config import RabbitMQConfig
import pika
import pytest
from pytest import MonkeyPatch


@pytest.fixture(scope="session")
def rabbitmq_config() -> dict:
return {
"host": "localhost",
"port": 5672,
"credentials_service": lambda: ("rabbitmq", "rabbitmq"),
"queue": "filenames",
}


@pytest.fixture(scope="function")
def rabbitmq_publish_filenames_client(
rabbitmq_config: dict,
) -> RabbitMQPublishFilenamesClient:
return RabbitMQPublishFilenamesClient(**rabbitmq_config)
def rabbitmq_publish_filenames_client() -> RabbitMQPublishFilenamesClient:
return RabbitMQPublishFilenamesClient(
host=RabbitMQConfig.HOST,
port=RabbitMQConfig.PORT,
credentials_service=lambda: (RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD),
queue=RabbitMQConfig.QUEUE,
)


@pytest.fixture(scope="function")
def raw_rabbitmq_pika_conn_config(
rabbitmq_config: dict,
) -> tuple[pika.BaseConnection, str]:
def raw_rabbitmq_pika_conn_config() -> tuple[pika.BaseConnection, str]:
pika_conn = pika.BlockingConnection(
pika.ConnectionParameters(
host=rabbitmq_config["host"],
port=rabbitmq_config["port"],
host=RabbitMQConfig.HOST,
port=RabbitMQConfig.PORT,
credentials=pika.PlainCredentials(
*rabbitmq_config["credentials_service"]()
RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD
),
)
)
return pika_conn, rabbitmq_config["queue"]
return pika_conn, RabbitMQConfig.QUEUE


@pytest.fixture(scope="function", autouse=True)
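The producer test fixtures now read connection settings from RabbitMQConfig instead of a local rabbitmq_config dict, so the tests and the application share a single source of configuration. A hypothetical test using these fixtures might look like the sketch below; the publish method name and payload are assumptions for illustration, not taken from this commit.

# Hypothetical usage of the fixtures above; the client's publish API is assumed.
def test_published_filename_reaches_queue(
    rabbitmq_publish_filenames_client,
    raw_rabbitmq_pika_conn_config,
):
    conn, queue = raw_rabbitmq_pika_conn_config
    rabbitmq_publish_filenames_client.publish("data.csv")  # assumed method name

    channel = conn.channel()
    method_frame, _properties, body = channel.basic_get(queue=queue, auto_ack=True)
    assert method_frame is not None
    assert body == b"data.csv"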