diff --git a/.coverage b/.coverage
new file mode 100644
index 0000000..c091ac8
Binary files /dev/null and b/.coverage differ
diff --git a/.env b/.env
index a97e897..8255943 100644
--- a/.env
+++ b/.env
@@ -1,12 +1,12 @@
 POSTGRES_VERSION_TAG=15.3-alpine3.17
 POSTGRES_PORT=5432
-POSTGRES_USER=postgres
+POSTGRES_USERNAME=postgres
 POSTGRES_PASSWORD=postgres
 POSTGRES_DATABASE=records
 POSTGRES_BATCH_UPSERT_SIZE=1000
 
 RABBITMQ_VERSION_TAG=3.12.10-management
-RABBITMQ_USER=rabbitmq
+RABBITMQ_USERNAME=rabbitmq
 RABBITMQ_PASSWORD=rabbitmq
 RABBITMQ_PORT=5672
 RABBITMQ_WEBAPP_PORT=15672
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 9d0242e..a4997c9 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -36,13 +36,18 @@ jobs:
           echo "RABBITMQ_USER=$RABBITMQ_USER" >> $GITHUB_OUTPUT
           echo "RABBITMQ_PASSWORD=$RABBITMQ_PASSWORD" >> $GITHUB_OUTPUT
           echo "QUEUE_NAME=$QUEUE_NAME" >> $GITHUB_OUTPUT
-  test:
+  test-producer:
     needs: load-dotenv
     runs-on: ubuntu-latest
-    permissions:
-      pages: write
-      contents: write
-      id-token: write
+    env:
+      WATCH_FILE_PATTERNS: |
+        producer/**/*.py
+        producer/requirements-dev.txt
+      COVERAGE_FILE: .coverage_producer
+      WORKDIR: producer
+    outputs:
+      coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
+      coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
     services:
       rabbitmq:
         image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
@@ -63,18 +68,96 @@ jobs:
         with:
           python-version: '3.11'
           cache: 'pip'
-          cache-dependency-path: |
-            producer/requirements-dev.txt
-            consumer/requirements-dev.txt
+          cache-dependency-path: ${{env.WORKDIR}}/requirements-dev.txt
+      - uses: actions/cache@v2
+        id: cache
+        with:
+          path: ${{env.COVERAGE_FILE}}
+          key: ${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
+          restore-keys: |
+            ${{ runner.os }}-coverage-producer-
       - name: Install dependencies
+        if: steps.cache.outputs.cache-hit != 'true'
+        working-directory: ${{env.WORKDIR}}
+        run: pip install -r requirements-dev.txt
+      - name: Run tests
         run: |
-          pip install -r producer/requirements-dev.txt
-          pip install -r consumer/requirements-dev.txt
+          coverage run -m pytest -v producer/tests
+        env:
+          POSTGRES_HOST: localhost
+          POSTGRES_PORT: ${{ needs.load-dotenv.outputs.postgres-port }}
+          POSTGRES_USER: ${{ needs.load-dotenv.outputs.postgres-user }}
+          POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
+          POSTGRES_DATABASE: ${{ needs.load-dotenv.outputs.postgres-database }}
+          RABBITMQ_HOST: localhost
+          RABBITMQ_PORT: ${{ needs.load-dotenv.outputs.rabbitmq-port }}
+          RABBITMQ_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
+          RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
+          QUEUE_NAME: ${{ needs.load-dotenv.outputs.queue-name }}
+      - name: Output coverage file
+        id: output-coverage-file
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: |
+          echo "COVERAGE_FILE_CACHE_PATH=${{env.COVERAGE_FILE}}" >> $GITHUB_OUTPUT
+          echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-producer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
+  test-consumer:
+    needs: load-dotenv
+    runs-on: ubuntu-latest
+    env:
+      WATCH_FILE_PATTERNS: |
+        consumer/**/*.py
+        consumer/requirements-dev.txt
+      COVERAGE_FILE: .coverage_consumer
+      WORKDIR: consumer
+    outputs:
+      coverage-file-cache-path: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_PATH }}
+      coverage-file-cache-key: ${{ steps.output-coverage-file.outputs.COVERAGE_FILE_CACHE_KEY }}
+    services:
+      rabbitmq:
+        image: rabbitmq:${{ needs.load-dotenv.outputs.rabbitmq-version-tag }}
+        env:
+          RABBITMQ_DEFAULT_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
+          RABBITMQ_DEFAULT_PASS: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
+        options: >-
+          --health-cmd "rabbitmq-diagnostics -q check_running"
+          --health-interval 5s
+          --health-timeout 30s
+          --health-retries 3
+        ports:
+          - ${{ needs.load-dotenv.outputs.rabbitmq-port }}:5672
+      postgres:
+        image: postgres:${{ needs.load-dotenv.outputs.postgres-version-tag }}
+        env:
+          POSTGRES_USER: ${{ needs.load-dotenv.outputs.postgres-user }}
+          POSTGRES_PASSWORD: ${{ needs.load-dotenv.outputs.postgres-password }}
+          POSTGRES_DB: ${{ needs.load-dotenv.outputs.postgres-database }}
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 5s
+          --health-timeout 30s
+          --health-retries 3
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+          cache: 'pip'
+          cache-dependency-path: ${{env.WORKDIR}}/requirements-dev.txt
+      - uses: actions/cache@v2
+        id: cache
+        with:
+          path: ${{env.COVERAGE_FILE}}
+          key: ${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}
+          restore-keys: |
+            ${{ runner.os }}-coverage-consumer-
+      - name: Install dependencies
+        if: steps.cache.outputs.cache-hit != 'true'
+        working-directory: ${{env.WORKDIR}}
+        run: pip install -r requirements-dev.txt
       - name: Run tests
         run: |
-          coverage run -m pytest -v producer/tests consumer/tests
-          coverage html
-          coverage report -m
+          coverage run -m pytest -v consumer/tests
         env:
           POSTGRES_HOST: localhost
           POSTGRES_PORT: ${{ needs.load-dotenv.outputs.postgres-port }}
@@ -86,6 +169,45 @@ jobs:
           RABBITMQ_USER: ${{ needs.load-dotenv.outputs.rabbitmq-user }}
           RABBITMQ_PASSWORD: ${{ needs.load-dotenv.outputs.rabbitmq-password }}
           QUEUE_NAME: ${{ needs.load-dotenv.outputs.queue-name }}
+      - name: Output coverage file
+        id: output-coverage-file
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: |
+          echo "COVERAGE_FILE_CACHE_PATH=${{env.COVERAGE_FILE}}" >> $GITHUB_OUTPUT
+          echo "COVERAGE_FILE_CACHE_KEY=${{ runner.os }}-coverage-consumer-${{ hashFiles(env.WATCH_FILE_PATTERNS) }}" >> $GITHUB_OUTPUT
+  coverage:
+    needs: [test-producer, test-consumer]
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      id-token: write
+      pull-requests: write
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Retrieve producer coverage file
+        uses: actions/cache@v2
+        id: producer-cache
+        with:
+          path: ${{ needs.test-producer.outputs.coverage-file-cache-path }}
+          key: ${{ needs.test-producer.outputs.coverage-file-cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-coverage-producer-
+      - name: Retrieve consumer coverage file
+        uses: actions/cache@v2
+        id: consumer-cache
+        with:
+          path: ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
+          key: ${{ needs.test-consumer.outputs.coverage-file-cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-coverage-consumer-
+      - name: Combine coverage files
+        run: |
+          coverage combine ${{ needs.test-producer.outputs.coverage-file-cache-path }} ${{ needs.test-consumer.outputs.coverage-file-cache-path }}
+      - name: Generate coverage report
+        run: |
+          coverage report -m
+          coverage html
       - name: upload artifact
         uses: actions/upload-pages-artifact@v1
         with:
diff --git a/Makefile b/Makefile
index 8393a90..f00e9cb 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,8 @@
+include .env
+
+POSTGRES_HOST=localhost
+RABBITMQ_HOST=localhost
+
 build:
 	docker compose build
 up:
@@ -13,5 +18,29 @@ export_requirements:
 	poetry export -f requirements.txt --output requirements.txt --without-hashes && \
 	cd ../consumer && \
 	poetry export -f requirements.txt --output requirements.txt --without-hashes
-test_env:
+setup_test_env:
 	docker compose -f docker-compose.test.yml up -d
+test_producer:
+	export POSTGRES_HOST=localhost && \
+	export POSTGRES_PORT=$(POSTGRES_PORT) && \
+	export POSTGRES_USERNAME=$(POSTGRES_USERNAME) && \
+	export POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) && \
+	export POSTGRES_DATABASE=$(POSTGRES_DB) && \
+	export RABBITMQ_HOST=localhost && \
+	export RABBITMQ_PORT=$(RABBITMQ_PORT) && \
+	export RABBITMQ_USERNAME=$(RABBITMQ_USERNAME) && \
+	export RABBITMQ_PASSWORD=$(RABBITMQ_PASSWORD) && \
+	export QUEUE_NAME=$(QUEUE_NAME) && \
+	COVERAGE_FILE=.coverage_producer coverage run -m pytest -vx producer/tests
+test_consumer:
+	export POSTGRES_HOST=localhost && \
+	export POSTGRES_PORT=$(POSTGRES_PORT) && \
+	export POSTGRES_USERNAME=$(POSTGRES_USERNAME) && \
+	export POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) && \
+	export POSTGRES_DATABASE=$(POSTGRES_DB) && \
+	export RABBITMQ_HOST=localhost && \
+	export RABBITMQ_PORT=$(RABBITMQ_PORT) && \
+	export RABBITMQ_USERNAME=$(RABBITMQ_USERNAME) && \
+	export RABBITMQ_PASSWORD=$(RABBITMQ_PASSWORD) && \
+	export QUEUE_NAME=$(QUEUE_NAME) && \
+	COVERAGE_FILE=.coverage_consumer coverage run -m pytest -vx consumer/tests
diff --git a/README.md b/README.md
index fc212c1..dc121e5 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,4 @@
-# producer_consumer_csv
\ No newline at end of file
+# producer_consumer_csv
+
+![Build Status](https://github.com/github/docs/actions/workflows/test.yml/badge.svg)
+![Code Coverage](./coverage.svg)
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/reuqirements-dev.txt b/consumer/reuqirements-dev.txt
deleted file mode 100644
index 6ed92fd..0000000
--- a/consumer/reuqirements-dev.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pika==1.3.2 ; python_version >= "3.11" and python_version < "4.0"
-psycopg2-binary==2.9.9 ; python_version >= "3.11" and python_version < "4.0"
diff --git a/consumer/src/adapters/fetch_filenames/rabbitmq.py b/consumer/src/adapters/fetch_filenames/rabbitmq.py
index 1b2b89e..f712f21 100644
--- a/consumer/src/adapters/fetch_filenames/rabbitmq.py
+++ b/consumer/src/adapters/fetch_filenames/rabbitmq.py
@@ -1,5 +1,5 @@
 from contextlib import contextmanager
-from usecases import FetchFilenameClient
+from ...usecases import FetchFilenameClient
 import pika
 from pika.adapters.blocking_connection import BlockingChannel
 from pika.spec import Basic, BasicProperties
diff --git a/consumer/src/adapters/file_parse_iot_records/csv.py b/consumer/src/adapters/file_parse_iot_records/csv.py
index cc562ec..072fb20 100644
--- a/consumer/src/adapters/file_parse_iot_records/csv.py
+++ b/consumer/src/adapters/file_parse_iot_records/csv.py
@@ -3,8 +3,8 @@
 from decimal import Decimal
 from typing import Iterator, Optional, overload, Sequence
 from typing_extensions import override
-from entities import IOTRecord
-from usecases import FileParseIOTRecordsClient
+from ...entities import IOTRecord
+from ...usecases import FileParseIOTRecordsClient
 import csv
 import logging
 
diff --git a/consumer/src/adapters/upsert_iot_records/postgres.py b/consumer/src/adapters/upsert_iot_records/postgres.py
index 1e19758..266b5df 100644
--- a/consumer/src/adapters/upsert_iot_records/postgres.py
+++ b/consumer/src/adapters/upsert_iot_records/postgres.py
@@ -4,8 +4,8 @@
 from typing_extensions import override
 import psycopg2
 from psycopg2.extensions import connection
-from usecases import UpsertIOTRecordsClient
-from entities import IOTRecord
+from ...usecases import UpsertIOTRecordsClient
+from ...entities import IOTRecord
 from collections.abc import Callable
 
 T = TypeVar("T")
diff --git a/consumer/src/deployments/scripts/main.py b/consumer/src/deployments/scripts/main.py
index 21c76ee..ab51d84 100644
--- a/consumer/src/deployments/scripts/main.py
+++ b/consumer/src/deployments/scripts/main.py
@@ -1,10 +1,10 @@
-from adapters.fetch_filenames.rabbitmq import RabbitMQFetchFilenamesClient
-from adapters.file_parse_iot_records.csv import CSVParseIOTRecordsClient
-from adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
-from config import RabbitMQConfig, PostgresConfig, CSVParserConfig
+from ...adapters.fetch_filenames.rabbitmq import RabbitMQFetchFilenamesClient
+from ...adapters.file_parse_iot_records.csv import CSVParseIOTRecordsClient
+from ...adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
+from .config import RabbitMQConfig, PostgresConfig, CSVParserConfig
 from setup_logging import setup_logging
 import logging
-from entities import IOTRecord
+from ...entities import IOTRecord
 
 
 setup_logging()
diff --git a/consumer/src/usecases/file_parse_iot_records.py b/consumer/src/usecases/file_parse_iot_records.py
index 5005f4b..ca2276c 100644
--- a/consumer/src/usecases/file_parse_iot_records.py
+++ b/consumer/src/usecases/file_parse_iot_records.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from typing import Iterator, overload, Sequence
-from entities import IOTRecord
+from ..entities import IOTRecord
 
 
 class FileParseIOTRecordsClient(ABC):
diff --git a/consumer/src/usecases/upsert_iot_records.py b/consumer/src/usecases/upsert_iot_records.py
index 7c6b5f7..90a326c 100644
--- a/consumer/src/usecases/upsert_iot_records.py
+++ b/consumer/src/usecases/upsert_iot_records.py
@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from typing import overload, Sequence
-from entities import IOTRecord
+from ..entities import IOTRecord
 
 
 class UpsertIOTRecordsClient(ABC):
diff --git a/consumer/tests/test_adapters/__init__.py b/consumer/tests/test_adapters/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_fetch_filenames/__init__.py b/consumer/tests/test_adapters/test_fetch_filenames/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/__init__.py b/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/conftest.py b/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/conftest.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/test_helloworld.py b/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/test_helloworld.py
new file mode 100644
index 0000000..68c495a
--- /dev/null
+++ b/consumer/tests/test_adapters/test_fetch_filenames/test_rabbitmq/test_helloworld.py
@@ -0,0 +1,2 @@
+def test_helloworld():
+    assert True
diff --git a/consumer/tests/test_adapters/test_file_parse_iot_records/__init__.py b/consumer/tests/test_adapters/test_file_parse_iot_records/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_file_parse_iot_records/test_csv/__init__.py b/consumer/tests/test_adapters/test_file_parse_iot_records/test_csv/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_file_parse_iot_records/test_csv/conftest.py b/consumer/tests/test_adapters/test_file_parse_iot_records/test_csv/conftest.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/__init__.py b/consumer/tests/test_adapters/test_upsert_iot_records/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/__init__.py b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/conftest.py b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/conftest.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_failed_conn.py b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_failed_conn.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_failed_upsert.py b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_failed_upsert.py
new file mode 100644
index 0000000..e69de29
diff --git a/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_successful_upsert.py b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_successful_upsert.py
new file mode 100644
index 0000000..1d443f1
--- /dev/null
+++ b/consumer/tests/test_adapters/test_upsert_iot_records/test_postgres/test_successful_upsert.py
@@ -0,0 +1 @@
+from src.adapters.upsert_iot_records.postgres import PostgresUpsertIOTRecordsClient
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
index 53a4dbc..46275ec 100644
--- a/docker-compose.test.yml
+++ b/docker-compose.test.yml
@@ -10,7 +10,7 @@ services:
         POSTGRES_VERSION_TAG: ${POSTGRES_VERSION_TAG}
     environment:
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_USER: ${POSTGRES_USERNAME}
       POSTGRES_DB: ${POSTGRES_DATABASE}
     ports:
       - ${POSTGRES_PORT}:5432
@@ -19,7 +19,7 @@ services:
     image: rabbitmq:${RABBITMQ_VERSION_TAG}
     container_name: records_rabbitmq
     environment:
-      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER}
+      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME}
       RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
     ports:
       - ${RABBITMQ_WEBAPP_PORT}:15672
diff --git a/docker-compose.yml b/docker-compose.yml
index 6d94f27..a91bc37 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -10,7 +10,7 @@ services:
         POSTGRES_VERSION_TAG: ${POSTGRES_VERSION_TAG}
     environment:
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_USER: ${POSTGRES_USERNAME}
       POSTGRES_DB: ${POSTGRES_DATABASE}
     ports:
       - ${POSTGRES_PORT}:5432
@@ -19,7 +19,7 @@ services:
     image: rabbitmq:${RABBITMQ_VERSION_TAG}
     container_name: records_rabbitmq
     environment:
-      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER}
+      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USERNAME}
       RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
     ports:
       - ${RABBITMQ_WEBAPP_PORT}:15672
diff --git a/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/conftest.py b/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/conftest.py
index 165a300..80b13d9 100644
--- a/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/conftest.py
+++ b/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/conftest.py
@@ -1,40 +1,32 @@
 from src.adapters.publish_filenames.rabbitmq import RabbitMQPublishFilenamesClient
+from src.deployments.script.config import RabbitMQConfig
 import pika
 import pytest
 from pytest import MonkeyPatch
 
 
-@pytest.fixture(scope="session")
-def rabbitmq_config() -> dict:
-    return {
-        "host": "localhost",
-        "port": 5672,
-        "credentials_service": lambda: ("rabbitmq", "rabbitmq"),
-        "queue": "filenames",
-    }
-
-
 @pytest.fixture(scope="function")
-def rabbitmq_publish_filenames_client(
-    rabbitmq_config: dict,
-) -> RabbitMQPublishFilenamesClient:
-    return RabbitMQPublishFilenamesClient(**rabbitmq_config)
+def rabbitmq_publish_filenames_client() -> RabbitMQPublishFilenamesClient:
+    return RabbitMQPublishFilenamesClient(
+        host=RabbitMQConfig.HOST,
+        port=RabbitMQConfig.PORT,
+        credentials_service=lambda: (RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD),
+        queue=RabbitMQConfig.QUEUE,
+    )
 
 
 @pytest.fixture(scope="function")
-def raw_rabbitmq_pika_conn_config(
-    rabbitmq_config: dict,
-) -> tuple[pika.BaseConnection, str]:
+def raw_rabbitmq_pika_conn_config() -> tuple[pika.BaseConnection, str]:
     pika_conn = pika.BlockingConnection(
         pika.ConnectionParameters(
-            host=rabbitmq_config["host"],
-            port=rabbitmq_config["port"],
+            host=RabbitMQConfig.HOST,
+            port=RabbitMQConfig.PORT,
             credentials=pika.PlainCredentials(
-                *rabbitmq_config["credentials_service"]()
+                RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD
             ),
         )
     )
-    return pika_conn, rabbitmq_config["queue"]
+    return pika_conn, RabbitMQConfig.QUEUE
 
 
 @pytest.fixture(scope="function", autouse=True)
diff --git a/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/test_failed_conn.py b/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/test_failed_conn.py
index 43cecaf..7f161a4 100644
--- a/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/test_failed_conn.py
+++ b/producer/tests/test_adapters/test_publish_filenames/test_rabbitmq/test_failed_conn.py
@@ -1,6 +1,7 @@
 import pytest
 from .utils import random_filenames
 from src.adapters.publish_filenames.rabbitmq import RabbitMQPublishFilenamesClient
+from src.deployments.script.config import RabbitMQConfig
 import pika
 from pytest import MonkeyPatch
 
@@ -70,14 +71,14 @@ def mocked_failed_conn(
 @pytest.mark.smoke
 @pytest.mark.parametrize("filename", random_filenames())
 def test_publish_single_wrong_credentials(
-    rabbitmq_config: dict,
     raw_rabbitmq_pika_conn_config: tuple[pika.BaseConnection, str],
     filename: str,
 ):
-    copied_rabbitmq_config = rabbitmq_config.copy()
-    copied_rabbitmq_config["credentials_service"] = lambda: ("wrong", "wrong")
     rabbitmq_publish_filenames_client = RabbitMQPublishFilenamesClient(
-        **copied_rabbitmq_config
+        host=RabbitMQConfig.HOST,
+        port=RabbitMQConfig.PORT,
+        credentials_service=lambda: ("wrong", "wrong"),
+        queue=RabbitMQConfig.QUEUE,
     )
 
     with pytest.raises(Exception) as e:
@@ -95,14 +96,14 @@
 @pytest.mark.smoke
 @pytest.mark.parametrize("filename", random_filenames())
 def test_publish_single_wrong_host(
-    rabbitmq_config: dict,
     raw_rabbitmq_pika_conn_config: tuple[pika.BaseConnection, str],
     filename: str,
 ):
-    copied_rabbitmq_config = rabbitmq_config.copy()
-    copied_rabbitmq_config["host"] = "wrong"
     rabbitmq_publish_filenames_client = RabbitMQPublishFilenamesClient(
-        **copied_rabbitmq_config
+        host="wrong",
+        port=RabbitMQConfig.PORT,
+        credentials_service=lambda: (RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD),
+        queue=RabbitMQConfig.QUEUE,
     )
 
     with pytest.raises(Exception) as e:
diff --git a/producer/tests/test_deployments/test_main/conftest.py b/producer/tests/test_deployments/test_main/conftest.py
index 280f24c..06c3090 100644
--- a/producer/tests/test_deployments/test_main/conftest.py
+++ b/producer/tests/test_deployments/test_main/conftest.py
@@ -4,44 +4,29 @@
 import pytest
 from pytest import TempdirFactory
 import pathlib
-import os
-
-
-@pytest.fixture(scope="session")
-def mock_rabbitmq_config() -> Type[RabbitMQConfig]:
-    class MockedRabbitMQConfig(RabbitMQConfig):
-        HOST = "localhost"
-        PORT = 5672
-        USERNAME = "rabbitmq"
-        PASSWORD = "rabbitmq"
-        QUEUE = "filenames"
-
-    return MockedRabbitMQConfig
 
 
 @pytest.fixture(scope="session")
 def mock_project_config(tmpdir_factory: TempdirFactory) -> None:
     class MockedProjectConfig(ProjectConfig):
         TARGET_FILE_DIR = str(tmpdir_factory.mktemp("artifact"))
-        TARGET_FILE_EXTENSION = ".csv"
+        TARGET_FILE_EXTENSION = ProjectConfig.TARGET_FILE_EXTENSION
 
     return MockedProjectConfig
 
 
 @pytest.fixture(scope="function")
-def raw_rabbitmq_pika_conn_config(
-    mock_rabbitmq_config: Type[RabbitMQConfig],
-) -> tuple[pika.BaseConnection, str]:
+def raw_rabbitmq_pika_conn_config() -> tuple[pika.BaseConnection, str]:
     pika_conn = pika.BlockingConnection(
         pika.ConnectionParameters(
-            host=mock_rabbitmq_config.HOST,
-            port=mock_rabbitmq_config.PORT,
+            host=RabbitMQConfig.HOST,
+            port=RabbitMQConfig.PORT,
             credentials=pika.PlainCredentials(
-                mock_rabbitmq_config.USERNAME, mock_rabbitmq_config.PASSWORD
+                RabbitMQConfig.USERNAME, RabbitMQConfig.PASSWORD
             ),
         )
     )
-    return pika_conn, mock_rabbitmq_config.QUEUE
+    return pika_conn, RabbitMQConfig.QUEUE
 
 
 @pytest.fixture(scope="function", autouse=True)
diff --git a/producer/tests/test_deployments/test_main/test_main_function_failed.py b/producer/tests/test_deployments/test_main/test_main_function_failed.py
index 573d1ef..27a18e9 100644
--- a/producer/tests/test_deployments/test_main/test_main_function_failed.py
+++ b/producer/tests/test_deployments/test_main/test_main_function_failed.py
@@ -1,5 +1,5 @@
 from src.deployments.script.main import main
-from src.deployments.script.config import ProjectConfig, RabbitMQConfig
+from src.deployments.script.config import ProjectConfig
 from typing import Type
 import pytest
 from .utils import random_csv_filenames
@@ -12,7 +12,6 @@
     [random_csv_filenames() for _ in range(5)],
 )
 def test_main_flow_has_failed_files(
-    mock_rabbitmq_config: Type[RabbitMQConfig],
     mock_project_config: Type[ProjectConfig],
     random_csv_filenames: list[str],
     monkeypatch: MonkeyPatch,
@@ -32,11 +31,6 @@ def test_main_flow_has_failed_files(
         "TARGET_FILE_EXTENSION",
         mock_project_config.TARGET_FILE_EXTENSION,
     )
-    monkeypatch.setattr(RabbitMQConfig, "HOST", mock_rabbitmq_config.HOST)
-    monkeypatch.setattr(RabbitMQConfig, "PORT", mock_rabbitmq_config.PORT)
-    monkeypatch.setattr(RabbitMQConfig, "USERNAME", mock_rabbitmq_config.USERNAME)
-    monkeypatch.setattr(RabbitMQConfig, "PASSWORD", mock_rabbitmq_config.PASSWORD)
-    monkeypatch.setattr(RabbitMQConfig, "QUEUE", mock_rabbitmq_config.QUEUE)
 
     monkeypatch.setattr(
         "src.adapters.publish_filenames.rabbitmq.RabbitMQPublishFilenamesClient.publish",
diff --git a/producer/tests/test_deployments/test_main/test_main_function_successful.py b/producer/tests/test_deployments/test_main/test_main_function_successful.py
index fe8f72a..2e5ba8a 100644
--- a/producer/tests/test_deployments/test_main/test_main_function_successful.py
+++ b/producer/tests/test_deployments/test_main/test_main_function_successful.py
@@ -1,5 +1,5 @@
 from src.deployments.script.main import main
-from src.deployments.script.config import ProjectConfig, RabbitMQConfig
+from src.deployments.script.config import ProjectConfig
 from typing import Type
 import pytest
 from .utils import random_csv_filenames
@@ -12,7 +12,6 @@
     [random_csv_filenames() for _ in range(5)],
 )
 def test_main_flow_no_failed_files(
-    mock_rabbitmq_config: Type[RabbitMQConfig],
     mock_project_config: Type[ProjectConfig],
     random_csv_filenames: list[str],
     monkeypatch: MonkeyPatch,
@@ -32,11 +31,6 @@ def test_main_flow_no_failed_files(
         "TARGET_FILE_EXTENSION",
         mock_project_config.TARGET_FILE_EXTENSION,
     )
-    monkeypatch.setattr(RabbitMQConfig, "HOST", mock_rabbitmq_config.HOST)
-    monkeypatch.setattr(RabbitMQConfig, "PORT", mock_rabbitmq_config.PORT)
-    monkeypatch.setattr(RabbitMQConfig, "USERNAME", mock_rabbitmq_config.USERNAME)
-    monkeypatch.setattr(RabbitMQConfig, "PASSWORD", mock_rabbitmq_config.PASSWORD)
-    monkeypatch.setattr(RabbitMQConfig, "QUEUE", mock_rabbitmq_config.QUEUE)
 
     with caplog.at_level("INFO"):
         assert main() is None