diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml
index 3430770bd6..574c3f9bc2 100644
--- a/.github/workflows/python_build.yml
+++ b/.github/workflows/python_build.yml
@@ -77,12 +77,12 @@ jobs:
         run: make setup-dat
 
       - name: Run tests
-        run: uv run pytest -m '((s3 or azure) and integration) or not integration and not benchmark' --doctest-modules
+        run: uv run --no-sync pytest -m '((s3 or azure) and integration) or not integration and not benchmark' --doctest-modules
 
       - name: Test without pandas
         run: |
           uv pip uninstall pandas
-          uv run pytest -m "not pandas and not integration and not benchmark"
+          uv run --no-sync pytest -m "not pandas and not integration and not benchmark"
           uv pip install pandas
 
   test-lakefs:
@@ -99,7 +99,7 @@ jobs:
         uses: ./.github/actions/setup-env
 
       - name: Start emulated services
-        run: docker compose up -d lakefs
+        run: docker compose -f docker-compose-lakefs.yml up -d
 
       - name: Build and install deltalake
         run: make develop
@@ -108,7 +108,7 @@
         run: make setup-dat
 
       - name: Run tests
-        run: uv run pytest -m '(lakefs and integration)' --doctest-modules
+        run: uv run --no-sync pytest -m '(lakefs and integration)' --doctest-modules
 
   test-pyspark:
     name: PySpark Integration Tests
diff --git a/crates/aws/tests/repair_s3_rename_test.rs b/crates/aws/tests/repair_s3_rename_test.rs
index d9e19de7b7..fa8dceb2f5 100644
--- a/crates/aws/tests/repair_s3_rename_test.rs
+++ b/crates/aws/tests/repair_s3_rename_test.rs
@@ -120,7 +120,7 @@ fn create_s3_backend(
         .with_allow_http(true)
         .build_storage()
         .unwrap()
-        .object_store();
+        .object_store(None);
 
     let delayed_store = DelayedObjectStore {
         inner: store,
diff --git a/docker-compose-lakefs.yml b/docker-compose-lakefs.yml
new file mode 100644
index 0000000000..bbaa38005b
--- /dev/null
+++ b/docker-compose-lakefs.yml
@@ -0,0 +1,33 @@
+services:
+  lakefs:
+    image: docker.io/treeverse/lakefs:1.47
+    ports:
+      - "8000:8000"
+    environment:
+      - LAKEFS_DATABASE_TYPE=local
+      - LAKEFS_BLOCKSTORE_TYPE=local
+      - LAKEFS_AUTH_ENCRYPT_SECRET_KEY=some random secret string
+      - LAKEFS_LOGGING_LEVEL=INFO
+      - LAKEFS_STATS_ENABLED=${LAKEFS_STATS_ENABLED:-1}
+      - LAKEFS_INSTALLATION_USER_NAME=delta
+      - LAKEFS_INSTALLATION_ACCESS_KEY_ID=LAKEFSID
+      - LAKECTL_CREDENTIALS_ACCESS_KEY_ID=LAKEFSID
+      - LAKEFS_INSTALLATION_SECRET_ACCESS_KEY=LAKEFSKEY
+      - LAKECTL_CREDENTIALS_SECRET_ACCESS_KEY=LAKEFSKEY
+      - LAKECTL_SERVER_ENDPOINT_URL=http://localhost:8000
+    entrypoint: ["/bin/sh", "-c"]
+    command:
+      - |
+        lakefs run --local-settings &
+        echo "---- Creating repository ----"
+        wait-for -t 60 lakefs:8000 -- lakectl repo create lakefs://bronze local://bronze || true
+        echo ""
+        echo "lakeFS Web UI: http://127.0.0.1:8000/       >(._.)<"
+        echo "                                             ( )_ "
+        echo ""
+        echo "  Access Key ID    : $$LAKEFS_INSTALLATION_ACCESS_KEY_ID"
+        echo "  Secret Access Key: $$LAKEFS_INSTALLATION_SECRET_ACCESS_KEY"
+        echo ""
+        echo "-------- Let's go and have axolotl fun! --------"
+        echo ""
+        wait
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index bad16e6729..86406b158e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,37 +1,4 @@
 services:
-  lakefs:
-    profiles: ["lakefs"]
-    image: treeverse/lakefs:1.47
-    ports:
-      - "8000:8000"
-    environment:
-      - LAKEFS_DATABASE_TYPE=local
-      - LAKEFS_BLOCKSTORE_TYPE=local
-      - LAKEFS_AUTH_ENCRYPT_SECRET_KEY=some random secret string
-      - LAKEFS_LOGGING_LEVEL=INFO
-      - LAKEFS_STATS_ENABLED=${LAKEFS_STATS_ENABLED:-1}
-      - LAKEFS_INSTALLATION_USER_NAME=delta
-      - LAKEFS_INSTALLATION_ACCESS_KEY_ID=LAKEFSID
-      - LAKECTL_CREDENTIALS_ACCESS_KEY_ID=LAKEFSID
-      - LAKEFS_INSTALLATION_SECRET_ACCESS_KEY=LAKEFSKEY
-      - LAKECTL_CREDENTIALS_SECRET_ACCESS_KEY=LAKEFSKEY
-      - LAKECTL_SERVER_ENDPOINT_URL=http://localhost:8000
-    entrypoint: ["/bin/sh", "-c"]
-    command:
-      - |
-        lakefs run --local-settings &
-        echo "---- Creating repository ----"
-        wait-for -t 60 lakefs:8000 -- lakectl repo create lakefs://bronze local://bronze || true
-        echo ""
-        echo "lakeFS Web UI: http://127.0.0.1:8000/       >(._.)<"
-        echo "                                             ( )_ "
-        echo ""
-        echo "  Access Key ID    : $$LAKEFS_INSTALLATION_ACCESS_KEY_ID"
-        echo "  Secret Access Key: $$LAKEFS_INSTALLATION_SECRET_ACCESS_KEY"
-        echo ""
-        echo "-------- Let's go and have axolotl fun! --------"
-        echo ""
-        wait
   localstack:
     image: localstack/localstack:0.14
     ports:
diff --git a/python/Makefile b/python/Makefile
index fa377965b1..5588bfdba4 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -49,8 +49,8 @@ format: ## Format the code
 	$(info --- Rust format ---)
 	cargo fmt
 	$(info --- Python format ---)
-	uv run ruff check . --fix
-	uv run ruff format .
+	uv run --no-sync ruff check . --fix
+	uv run --no-sync ruff format .
 
 .PHONY: check-rust
 check-rust: ## Run check on Rust
@@ -62,30 +62,30 @@ check-rust: ## Run check on Rust
 .PHONY: check-python
 check-python: ## Run check on Python
 	$(info Check Python format)
-	uv run ruff format --check --diff .
+	uv run --no-sync ruff format --check --diff .
 	$(info Check Python linting)
-	uv run ruff check .
+	uv run --no-sync ruff check .
 	$(info Check Python mypy)
-	uv run mypy
+	uv run --no-sync mypy
 
 .PHONY: unit-test
 unit-test: ## Run unit test
 	$(info --- Run Python unit-test ---)
-	uv run pytest --doctest-modules
+	uv run --no-sync pytest --doctest-modules
 
 .PHONY: test-cov
 test-cov: ## Create coverage report
 	$(info --- Run Python unit-test ---)
-	uv run pytest --doctest-modules --cov --cov-config=pyproject.toml --cov-report=term --cov-report=html
+	uv run --no-sync pytest --doctest-modules --cov --cov-config=pyproject.toml --cov-report=term --cov-report=html
 
 .PHONY: test-pyspark
 test-pyspark:
-	uv run pytest -m 'pyspark and integration'
+	uv run --no-sync pytest -m 'pyspark and integration'
 
 .PHONY: build-documentation
 build-documentation: ## Build documentation with Sphinx
 	$(info --- Run build of the Sphinx documentation ---)
-	uv run sphinx-build -Wn -b html -d ./docs/build/doctrees ./docs/source ./docs/build/html
+	uv run --no-sync sphinx-build -Wn -b html -d ./docs/build/doctrees ./docs/source ./docs/build/html
 
 .PHONY: build-docs
 build-docs: ## Build documentation with mkdocs