Skip to content

Commit

Permalink
Merge pull request #47 from no10ds/release/v7.0.5-v0.1.3
Browse files Browse the repository at this point in the history
Release - v7.0.6/v0.1.4
  • Loading branch information
TobyDrane authored Oct 18, 2023
2 parents be3eff8 + bc1abc9 commit b8b50af
Show file tree
Hide file tree
Showing 33 changed files with 720 additions and 98 deletions.
5 changes: 3 additions & 2 deletions .github/workflows/dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,9 @@ jobs:
- name: SDK Test
run: make sdk-test

- name: SDK Test Deploy
run: make sdk-release-test
# TODO: Add back in
# - name: SDK Test Deploy
# run: make sdk-release-test

ui-dev:
needs:
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/ui-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ jobs:
cd ui
npx playwright install-deps
npm install @playwright/test -D
npx playwright install
- name: Run playwright tests
run: make ui-test-e2e
Expand Down
6 changes: 3 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ infra-scan: ## Print infrastructure output: make infra-output block=<infra-blo
##----- SDK -----
##
sdk-setup: ## Setup Python required for the sdk
@cd sdk/; $(MAKE) python; $(MAKE) venv;
@cd sdk/; $(MAKE) venv; . .venv/bin/activate; $(MAKE) reqs

# SDK Testing --------------------
##
Expand All @@ -143,10 +143,10 @@ sdk-test: ## Run sdk unit tests
# SDK Release --------------------
##
sdk-release-test: ## Build and release sdk to testpypi
@cd sdk/; $(MAKE) deploy-test
@cd sdk/; . .venv/bin/activate; $(MAKE) deploy-test

sdk-release: ## Build and release sdk to pypi
@cd sdk/; $(MAKE) deploy
@cd sdk/; . .venv/bin/activate; $(MAKE) deploy

##
##----- UI -----
Expand Down
19 changes: 15 additions & 4 deletions api/api/application/services/data_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,8 +269,14 @@ def generate_results_download_url_async(
def _build_query(self, schema: Schema) -> SQLQuery:
date_columns = schema.get_columns_by_type(DateType)
date_range_queries = [
*[f"max({column.name}) as max_{column.name}" for column in date_columns],
*[f"min({column.name}) as min_{column.name}" for column in date_columns],
*[
f"cast(max({column.name}) as date) as max_{column.name}"
for column in date_columns
],
*[
f"cast(min({column.name}) as date) as min_{column.name}"
for column in date_columns
],
]
columns_to_query = [
"count(*) as data_size",
Expand All @@ -292,14 +298,19 @@ def _enrich_metadata(
def _enrich_columns(
self, schema: Schema, statistics_dataframe: pd.DataFrame
) -> List[EnrichedColumn]:
strftime_format = "%Y-%m-%d"
enriched_columns = []
date_columns = schema.get_columns_by_type(DateType)
for column in schema.columns:
statistics = None
if column in date_columns:
statistics = {
"max": statistics_dataframe.at[0, f"max_{column.name}"],
"min": statistics_dataframe.at[0, f"min_{column.name}"],
"max": statistics_dataframe.at[0, f"max_{column.name}"].strftime(
strftime_format
),
"min": statistics_dataframe.at[0, f"min_{column.name}"].strftime(
strftime_format
),
}
enriched_columns.append(
EnrichedColumn(**column.dict(), statistics=statistics)
Expand Down
24 changes: 12 additions & 12 deletions api/test/api/application/services/test_data_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -667,8 +667,8 @@ def test_get_schema_information(self):
self.athena_adapter.query.return_value = pd.DataFrame(
{
"data_size": [48718],
"max_date": ["2021-07-01"],
"min_date": ["2014-01-01"],
"max_date": [pd.to_datetime("2021-07-01")],
"min_date": [pd.to_datetime("2014-01-01")],
}
)
dataset_metadata = DatasetMetadata("raw", "some", "other", 2)
Expand All @@ -679,8 +679,8 @@ def test_get_schema_information(self):
SQLQuery(
select_columns=[
"count(*) as data_size",
"max(date) as max_date",
"min(date) as min_date",
"cast(max(date) as date) as max_date",
"cast(min(date) as date) as min_date",
]
),
)
Expand Down Expand Up @@ -761,10 +761,10 @@ def test_get_schema_information_for_multiple_dates(self):
self.athena_adapter.query.return_value = pd.DataFrame(
{
"data_size": [48718],
"max_date": ["2021-07-01"],
"min_date": ["2014-01-01"],
"max_date2": ["2020-07-01"],
"min_date2": ["2015-01-01"],
"max_date": [pd.to_datetime("2021-07-01")],
"min_date": [pd.to_datetime("2014-01-01")],
"max_date2": [pd.to_datetime("2020-07-01")],
"min_date2": [pd.to_datetime("2015-01-01")],
}
)

Expand All @@ -778,10 +778,10 @@ def test_get_schema_information_for_multiple_dates(self):
SQLQuery(
select_columns=[
"count(*) as data_size",
"max(date) as max_date",
"max(date2) as max_date2",
"min(date) as min_date",
"min(date2) as min_date2",
"cast(max(date) as date) as max_date",
"cast(max(date2) as date) as max_date2",
"cast(min(date) as date) as min_date",
"cast(min(date2) as date) as min_date2",
]
),
)
Expand Down
2 changes: 1 addition & 1 deletion docs/api/schema.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ The behaviour of the API when a new file is uploaded to the dataset. The possibl
- `APPEND` - New files will be added to the dataset, there are no duplication checks so new data must be unique. This is the default behaviour.
- `OVERWRITE` - Any new file will overwrite the current content. Overwrites happen per partition, so an old partition that is not present in the new dataset is left untouched.

### Column headings
### Column heading style guide

Column heading names should follow a strict format. The [requirements](https://docs.aws.amazon.com/glue/latest/dg/add-classifier.html) are:

Expand Down
17 changes: 16 additions & 1 deletion docs/changelog.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,18 @@
# Changelog

## v7.0.6 / v0.1.4 (sdk) - _2023-10-18_

### Features

- New UI page that allows for the ability to delete users and clients easily.
- Clients can now be created and deleted via the sdk.

### Fixes

- Fixed an issue where the sdk did not report that schemas were created successfully, because it expected the wrong response code.
- Fixed an issue where calling dataset info on columns with a date type caused a Pydantic validation error.
- Tweaked the documentation so that searching for "column heading style guide" matches the wording the API returns in its error message.

## v7.0.5 / v0.1.3 (sdk) - _2023-09-20_

### Fixes
Expand Down Expand Up @@ -57,7 +70,9 @@

- See the [migration doc](migration.md) for details on how to migrate to v7 from v6.

[Unreleased changes]: https://github.com/no10ds/rapid/compare/v7.0.4...HEAD
[Unreleased changes]: https://github.com/no10ds/rapid/compare/v7.0.6...HEAD
[v7.0.6 / v0.1.4 (sdk)]: https://github.com/no10ds/rapid/compare/v7.0.5...v7.0.6
[v7.0.5 / v0.1.3 (sdk)]: https://github.com/no10ds/rapid/compare/v7.0.4...v7.0.5
[v7.0.4 / v0.1.2 (sdk)]: https://github.com/no10ds/rapid/compare/v7.0.3...v7.0.4
[v7.0.3 / v0.1.2 (sdk)]: https://github.com/no10ds/rapid/compare/v7.0.2...v7.0.3
[v7.0.2 / v0.1.2 (sdk)]: https://github.com/no10ds/rapid/compare/v7.0.1...v7.0.2
Expand Down
6 changes: 0 additions & 6 deletions sdk/Makefile
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
python:
pyenv install --skip-existing $(PYTHON_VERSION)
echo 'eval "$(pyenv init --path)"' >> ~/.bashrc
pyenv local $(PYTHON_VERSION)

venv:
python3 -m venv .venv
. .venv/bin/activate
make reqs

reqs:
pip install -r requirements.txt
Expand Down
16 changes: 16 additions & 0 deletions sdk/rapid/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,3 +68,19 @@ class SchemaInitialisationException(Exception):

class ColumnNotDifferentException(Exception):
pass


class InvalidPermissionsException(Exception):
    """Raised when one or more supplied permissions are invalid or duplicated."""


class SubjectAlreadyExistsException(Exception):
    """Raised when creating a subject (client/user) that already exists."""


class SubjectNotFoundException(Exception):
    """Raised when an operation references a subject that does not exist."""


class SubjectDeletionFailedException(Exception):
    """Raised when the API fails to delete a subject."""
85 changes: 84 additions & 1 deletion sdk/rapid/rapid.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@
UnableToFetchJobStatusException,
DatasetInfoFailedException,
DatasetNotFoundException,
InvalidPermissionsException,
SubjectAlreadyExistsException,
SubjectNotFoundException,
)


Expand Down Expand Up @@ -300,7 +303,7 @@ def create_schema(self, schema: Schema):
data=json.dumps(schema_dict),
timeout=TIMEOUT_PERIOD,
)
if response.status_code == 200:
if response.status_code == 201:
pass
elif response.status_code == 409:
raise SchemaAlreadyExistsException("The schema already exists")
Expand Down Expand Up @@ -330,3 +333,83 @@ def update_schema(self, schema: Schema):
if response.status_code == 200:
return data
raise SchemaUpdateFailedException("Could not update schema", data)

def create_client(self, client_name: str, client_permissions: list[str]):
    """
    Create a new client on the API with the given permissions.

    Args:
        client_name (str): Name for the new client.
        client_permissions (list[str]): Permissions to grant the client.

    Returns:
        The decoded JSON payload describing the created client (including
        its generated credentials).

    Raises:
        rapid.exceptions.SubjectAlreadyExistsException: If a client with this
            name already exists (API responds 400).
        rapid.exceptions.InvalidPermissionsException: If any provided
            permission is invalid or duplicated.
    """
    payload = {"client_name": client_name, "permissions": client_permissions}
    response = requests.post(
        f"{self.auth.url}/client",
        headers=self.generate_headers(),
        data=json.dumps(payload),
        timeout=TIMEOUT_PERIOD,
    )
    data = json.loads(response.content.decode("utf-8"))
    if response.status_code == 201:
        return data
    if response.status_code == 400:
        raise SubjectAlreadyExistsException(
            f"The client {client_name} already exists"
        )
    # Any other status is treated as a permissions problem by the API.
    raise InvalidPermissionsException(
        "One or more of the provided permissions is invalid or duplicated"
    )

def delete_client(self, client_id: str) -> None:
    """
    Delete a client from the API by its id.

    Args:
        client_id (str): The id of the client to delete.

    Raises:
        rapid.exceptions.SubjectNotFoundException: If no client with this
            id exists (any non-200 response).
    """
    response = requests.delete(
        f"{self.auth.url}/client/{client_id}",
        headers=self.generate_headers(),
        timeout=TIMEOUT_PERIOD,
    )
    if response.status_code != 200:
        raise SubjectNotFoundException(
            f"Failed to delete client with id: {client_id}, ensure it exists."
        )
    return None

def update_subject_permissions(self, subject_id: str, permissions: list[str]):
    """
    Update the permissions of a subject on the API.

    Args:
        subject_id (str): The id of the subject to update.
        permissions (list[str]): The permissions to update the subject with.

    Returns:
        The decoded JSON response body on success.

    Raises:
        rapid.exceptions.InvalidPermissionsException: If any provided
            permission is invalid or duplicated.
    """
    body = {"subject_id": subject_id, "permissions": permissions}
    response = requests.put(
        f"{self.auth.url}/subject/permissions",
        headers=self.generate_headers(),
        data=json.dumps(body),
        timeout=TIMEOUT_PERIOD,
    )
    data = json.loads(response.content.decode("utf-8"))
    if response.status_code == 200:
        return data
    raise InvalidPermissionsException(
        "One or more of the provided permissions is invalid or duplicated"
    )
60 changes: 59 additions & 1 deletion sdk/tests/test_rapid.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
SchemaUpdateFailedException,
UnableToFetchJobStatusException,
DatasetInfoFailedException,
InvalidPermissionsException,
SubjectNotFoundException,
SubjectAlreadyExistsException,
)
from .conftest import RAPID_URL, RAPID_TOKEN

Expand Down Expand Up @@ -310,7 +313,7 @@ def test_generate_schema_failure(self, requests_mock: Mocker, rapid: Rapid):
def test_create_schema_success(self, requests_mock: Mocker, rapid: Rapid):
    # A 201 from the schema endpoint means creation succeeded; the SDK
    # returns None rather than the response body.
    requests_mock.post(
        f"{RAPID_URL}/schema", json={"data": "dummy"}, status_code=201
    )
    assert rapid.create_schema(Schema(**DUMMY_SCHEMA)) is None

Expand Down Expand Up @@ -347,3 +350,58 @@ def test_update_schema_failure(self, requests_mock: Mocker, rapid: Rapid):
requests_mock.put(f"{RAPID_URL}/schema", json=mocked_response, status_code=400)
with pytest.raises(SchemaUpdateFailedException):
rapid.update_schema(schema)

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_create_client_success(self, requests_mock: Mocker, rapid: Rapid):
    # On a 201 the API echoes the client details plus generated credentials,
    # and the SDK returns that payload unchanged.
    created_client = {
        "client_name": "client",
        "permissions": ["READ_ALL"],
        "client_id": "xxx-yyy-zzz",
        "client_secret": "1234567",
    }
    requests_mock.post(
        f"{RAPID_URL}/client", json=created_client, status_code=201
    )
    assert rapid.create_client("client", ["READ_ALL"]) == created_client

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_create_client_failure(self, requests_mock: Mocker, rapid: Rapid):
    # A 400 from the API is mapped to SubjectAlreadyExistsException.
    requests_mock.post(
        f"{RAPID_URL}/client", json={"data": "dummy"}, status_code=400
    )
    with pytest.raises(SubjectAlreadyExistsException):
        rapid.create_client("client", ["READ_ALL"])

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_delete_client_success(self, requests_mock: Mocker, rapid: Rapid):
    # Successful deletion (200) returns no payload to the caller.
    requests_mock.delete(
        f"{RAPID_URL}/client/xxx-yyy-zzz", json={"data": "dummy"}, status_code=200
    )
    assert rapid.delete_client("xxx-yyy-zzz") is None

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_delete_client_failure(self, requests_mock: Mocker, rapid: Rapid):
    # A non-200 response is mapped to SubjectNotFoundException.
    requests_mock.delete(
        f"{RAPID_URL}/client/xxx-yyy-zzz", json={"data": "dummy"}, status_code=400
    )
    with pytest.raises(SubjectNotFoundException):
        rapid.delete_client("xxx-yyy-zzz")

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_update_subject_permissions_success(
    self, requests_mock: Mocker, rapid: Rapid
):
    """A 200 from the permissions endpoint returns the response body."""
    # Renamed with the `test_` prefix so pytest discovers it; without the
    # prefix this test was silently never collected or run.
    mocked_response = {"data": "dummy"}
    requests_mock.put(
        f"{RAPID_URL}/subject/permissions", json=mocked_response, status_code=200
    )
    res = rapid.update_subject_permissions("xxx-yyy-zzz", ["READ_ALL"])
    assert res == mocked_response

@pytest.mark.usefixtures("requests_mock", "rapid")
def test_update_subject_permissions_failure(
    self, requests_mock: Mocker, rapid: Rapid
):
    """A 400 from the permissions endpoint raises InvalidPermissionsException."""
    # Renamed with the `test_` prefix so pytest discovers it; without the
    # prefix this test was silently never collected or run.
    mocked_response = {"data": "dummy"}
    requests_mock.put(
        f"{RAPID_URL}/subject/permissions", json=mocked_response, status_code=400
    )
    with pytest.raises(InvalidPermissionsException):
        rapid.update_subject_permissions("xxx-yyy-zzz", ["READ_ALL"])
Loading

0 comments on commit b8b50af

Please sign in to comment.