diff --git a/.coveragerc b/.coveragerc index a07d3ed..22249f6 100644 --- a/.coveragerc +++ b/.coveragerc @@ -14,4 +14,4 @@ omit = ;sort = Cover sort = Name skip_covered = True -show_missing = True \ No newline at end of file +show_missing = True diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a2818df --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,77 @@ +name: CI + +on: + pull_request: + push: + branches: + - "develop" + - "main" + +jobs: + test: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: [3.9, 3.10.9] + test_env: [python, precommit, mypy] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Display system info + run: | + python -c "import sys; print(sys.version)" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install tox tox-gh-actions + - name: Test with tox + run: tox -e ${{ matrix.test_env }} + integration-tests: + name: Run integration tests + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Install Python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements-test.txt + - name: Build and run stack + run: | + printenv + docker volume create --name=pyseed_media + # verify that the stack wasn't cached + docker-compose -f tests/integration/docker-compose.yml stop + docker-compose -f tests/integration/docker-compose.yml rm -f + docker-compose -f tests/integration/docker-compose.yml up -d + - name: Wait for web server + uses: nev7n/wait_for_response@v1 + with: + url: "http://localhost:8000/" + responseCode: 200 + timeout: 120000 + interval: 2000 + - name: Wait another 30s + uses: jakejarvis/wait-action@master + with: + time: "30s" + - name: Dump docker logs before tests + uses: jwalton/gh-docker-logs@v1 + - name: Extract API credentials from SEED docker instance + run: | + docker exec seed_web ./manage.py create_test_user_json --username user@seed-platform.org --host http://localhost:8000 --pyseed > seed-config.json + - name: Run tests with pytest + run: | + pytest -m "integration" -s + - name: Dump docker logs on failure + if: failure() + uses: jwalton/gh-docker-logs@v1 diff --git a/.gitignore b/.gitignore index 94553eb..4311468 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,8 @@ ENV/ *.bak *.old .pytest_cache/ +.idea +.vscode + +# Seed config files +seed-config*.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..49ad510 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,51 @@ +exclude: | + (?x)( + ^docs/conf.py| + ^docs/license.rst + ) + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: check-added-large-files + args: ["--maxkb=50000"] + - id: check-ast + - id: check-json + - id: check-merge-conflict + - id: check-xml + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: mixed-line-ending + args: ["--fix=auto"] + - repo: https://github.com/pre-commit/mirrors-autopep8 + rev: v2.0.1 + hooks: + - id: autopep8 + args: + [ + "--in-place", + "--aggressive", + "--aggressive", + "--recursive", + "--max-line-length=100", + 
"--ignore=E501,E402,W503,W504,E731", + ] + - repo: https://github.com/pycqa/flake8 + rev: 6.0.0 + hooks: + - id: flake8 + args: ["--ignore=E501,E402,W503,W504,E731,F401"] + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0-alpha.4 + hooks: + - id: prettier + types_or: [css, yaml, markdown, html, scss, javascript] + - repo: https://github.com/pre-commit/mirrors-isort + rev: v5.10.1 + hooks: + - id: isort + args: ["-m=VERTICAL_HANGING_INDENT"] # vertical hanging diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 30a9e49..78584cf 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,21 @@ Changelog ========= +0.3.0 +----- + +## What's Changed +* Add instance info and fix a couple bugs by @nllong in https://github.com/SEED-platform/py-seed/pull/16 +* Fix building list and client information by @nllong in https://github.com/SEED-platform/py-seed/pull/17 +* get and create meters and meter readings by @nllong in https://github.com/SEED-platform/py-seed/pull/18 +* Add GeoJSON Area Calc by @nllong in https://github.com/SEED-platform/py-seed/pull/19 + +**Full Changelog**: https://github.com/SEED-platform/py-seed/commits/v0.3.0 + +0.2.0 +----- +* Unknown updates + 0.1.0 [2018-02-16] ------------------ -* OpenSource release \ No newline at end of file +* OpenSource release diff --git a/README.rst b/README.rst index 8107c34..9d97054 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,54 @@ Py-SEED ======= -A python API client for the SEED Platform +.. image:: https://github.com/seed-platform/py-seed/actions/workflows/ci.yml/badge.svg?branch=develop + :target: https://github.com/seed-platform/py-seed/actions/workflows/ci.yml/badge.svg +.. image:: https://badge.fury.io/py/py-seed.svg + :target: https://pypi.python.org/pypi/py-seed/ + +A python API client for the SEED Platform. This is an updated version of the Client. It is compatible with the latest version of the SEED Platform (>2.17.4). This client still has access to the previous format of generating a lower level API client by accessing `seed_client_base.SEEDOAuthReadOnlyClient`, `seed_client_base.SEEDOAuthReadWriteClient`, `seed_client_base.SEEDReadOnlyClient`, and `seed_client_base.SEEDReadWriteClient`. This lower level API is documented below under the `Low-Level Documentation` Documentation ------------- +The SEED client is a read-write client. To install the client run: + +.. code-block:: bash + + pip install pyseed + +Within Python you can use the client like this: + +.. code-block:: python + + from pathlib import Path + from pyseed.seed_client import SeedClient + + # The seed-config.json file defines the hosting locaiton and credentials for your SEED instance. + # If running SEED locally for testing, then you can run the following from your SEED root directory: + # ./manage.py create_test_user_json --username user@seed-platform.org --host http://localhost:8000 --file ./seed-config.json --pyseed + + config_file = Path('seed-config.json') + organization_id = 1 + seed_client = SeedClient(organization_id, connection_config_filepath=config_file) + + # Get/create the new cycle and upload the data. 
Make sure to set the cycle ID so that the + # data end up in the correct cycle + cycle = seed_client.get_or_create_cycle( + 'pyseed-api-test', date(2021, 6, 1), date(2022, 6, 1), set_cycle_id=True + ) + + seed_client.upload_and_match_datafile( + 'pyseed-properties-test', + 'tests/data/test-seed-data.xlsx', + 'Single Step Column Mappings', + 'tests/data/test-seed-data-mappings.csv' + ) + + # See the projects unit tests for more examples. + +Low-Level Documentation +----------------------- This provides two user authentication based Python clients and two OAuth2 authentication based Python clients for interacting with the SEED Platform Api:: @@ -15,14 +58,17 @@ This provides two user authentication based Python clients and two OAuth2 authen SEEDReadWriteClient + (The OAuthMixin is constructed around the the JWTGrantClient found in jwt-oauth2lib. see https://github.com/GreenBuildingRegistry/jwt_oauth2) SEED (Standard Energy Efficiency Data Platform™) is an open source "web-based application that helps organizations easily manage data on the energy performance of large groups of buildings" funded by the United States Department of Energy. More information can be found here: + * https://energy.gov/eere/buildings/standard-energy-efficiency-data-platform -* http://seedinfo.lbl.gov/ +* https://seed-platform.org * https://github.com/SEED-platform +* https://buildingdata.energy.gov/#/seed Note the clients do not provide per api-call methods, but does provide the standard CRUD methods: get, list, put, post, patch, delete @@ -52,8 +98,6 @@ Usage: seed_client.get(property_pk, endpoint='properties') -Contributing ------------- License ------- @@ -61,7 +105,6 @@ py-SEED is released under the terms of the MIT license. Full details in LICENSE Changelog --------- -py-SEED was developed for use in the greenbuildingregistry project. -For a full changelog see `CHANGELOG.rst `_. +py-SEED was developed for use in the greenbuildingregistry project but has been extended for various uses, including Salesforce data transfer and SEED data analysis. -N.B. this client is undergoing development and should be considered experimental. +For a full changelog see `CHANGELOG.rst `_. diff --git a/pyseed/__init__.py b/pyseed/__init__.py index dd86008..ad527fd 100644 --- a/pyseed/__init__.py +++ b/pyseed/__init__.py @@ -1,5 +1,5 @@ # Local Imports -from pyseed.seedclient import ( +from pyseed.seed_client_base import ( # noqa SEEDOAuthReadOnlyClient, SEEDOAuthReadWriteClient, SEEDReadOnlyClient, diff --git a/pyseed/apibase.py b/pyseed/apibase.py index a239f76..aa44c13 100755 --- a/pyseed/apibase.py +++ b/pyseed/apibase.py @@ -5,10 +5,8 @@ Functionality for calls to external API's""" -# Imports from Standard Library -import re - # Imports from Third Party Modules +import re # Imports from External Modules import requests @@ -32,7 +30,9 @@ def add_pk(url, pk, required=True, slash=False): else: url = "{}{}".format(url, pk) if slash: - url = "{}/".format(url) + # Only add the trailing slash if it's not already there + if not url.endswith('/'): + url = "{}/".format(url) return url @@ -55,7 +55,7 @@ def __init__(self, url=None, use_ssl=True, timeout=None, use_json=False, ..Note: if use_auth is True the default is to use http basic authentication if self.auth is not set. (You will need to - to this by overidding __init__ and setting this before + to this by overriding __init__ and setting this before calling super. 
This requires username and password to be supplied as @@ -93,8 +93,6 @@ def _construct_payload(self, params): except AttributeError: msg = "{} is a compulsory field".format(param) raise APIClientError(msg) - if self.auth: # pragma: no cover - params['auth'] = self.auth return params def _construct_url(self, urlstring, use_ssl=None): @@ -130,7 +128,7 @@ def _construct_url(self, urlstring, use_ssl=None): return url def check_call_success(self, response): - """Return true if api call was successfull.""" + """Return true if api call was successful.""" # pylint: disable=no-self-use, no-member return response.status_code == requests.codes.ok @@ -140,7 +138,7 @@ def _get(self, url=None, use_ssl=None, **kwargs): params = self._construct_payload(kwargs) payload = { 'timeout': self.timeout, - 'headers': params.get('headers', None) + 'headers': params.pop('headers', None) } if params: payload['params'] = params @@ -157,7 +155,7 @@ def _post(self, url=None, use_ssl=None, params=None, files=None, **kwargs): params = self._construct_payload(params) payload = { 'timeout': self.timeout, - 'headers': params.get('headers', None) + 'headers': params.pop('headers', None) } if params: payload['params'] = params @@ -166,9 +164,26 @@ def _post(self, url=None, use_ssl=None, params=None, files=None, **kwargs): if self.auth: # pragma: no cover payload['auth'] = self.auth if self.use_json: - payload['json'] = kwargs + data = kwargs.pop('json', None) + if data: + payload['json'] = data + else: + # just put the remaining kwargs into the json field + payload['json'] = kwargs else: - payload['data'] = kwargs + data = kwargs.pop('data', None) + if data: + payload['data'] = data + else: + # just put the remaining kwargs into the data field + payload['data'] = kwargs + + # if there are any remaining kwargs, then put them into the params + if 'params' not in payload: + payload['params'] = {} + payload['params'].update(**kwargs) + + # now do the actual call to post! 
api_call = requests.post(url, **payload) return api_call @@ -181,7 +196,7 @@ def _put(self, url=None, use_ssl=None, params=None, files=None, params = self._construct_payload(params) payload = { 'timeout': self.timeout, - 'headers': params.get('headers', None) + 'headers': params.pop('headers', None) } if params: payload['params'] = params @@ -190,9 +205,25 @@ def _put(self, url=None, use_ssl=None, params=None, files=None, if self.auth: # pragma: no cover payload['auth'] = self.auth if self.use_json: - payload['json'] = kwargs + data = kwargs.pop('json', None) + if data: + payload['json'] = data + else: + # just put the remaining kwargs into the json field + payload['json'] = kwargs else: - payload['data'] = kwargs + data = kwargs.pop('data', None) + if data: + payload['data'] = data + else: + # just put the remaining kwargs into the data field + payload['data'] = kwargs + + # if there are any remaining kwargs, then put them into the params + if 'params' not in payload: + payload['params'] = {} + payload['params'].update(**kwargs) + api_call = requests.put(url, **payload) return api_call @@ -205,7 +236,7 @@ def _patch(self, url=None, use_ssl=None, params=None, files=None, params = self._construct_payload(params) payload = { 'timeout': self.timeout, - 'headers': params.get('headers', None) + 'headers': params.pop('headers', None) } if params: payload['params'] = params @@ -214,9 +245,24 @@ def _patch(self, url=None, use_ssl=None, params=None, files=None, if self.auth: # pragma: no cover payload['auth'] = self.auth if self.use_json: - payload['json'] = kwargs + data = kwargs.pop('json', None) + if data: + payload['json'] = data + else: + # just put the remaining kwargs into the json field + payload['json'] = kwargs else: - payload['data'] = kwargs + data = kwargs.pop('data', None) + if data: + payload['data'] = data + else: + # just put the remaining kwargs into the data field + payload['data'] = kwargs + + # if there are any remaining kwargs, then put them into the params + if 'params' not in payload: + payload['params'] = {} + payload['params'].update(**kwargs) api_call = requests.patch(url, **payload) return api_call @@ -226,7 +272,7 @@ def _delete(self, url=None, use_ssl=None, **kwargs): params = self._construct_payload(kwargs) payload = { 'timeout': self.timeout, - 'headers': params.get('headers', None) + 'headers': params.pop('headers', None) } if params: payload['params'] = params @@ -296,9 +342,8 @@ class OAuthMixin(object): def _get_access_token(self): """Generate OAuth access token""" - config = getattr(self, 'config') - private_key_file = config.get('private_key_location', default=None) - client_id = config.get('client_id', default=None) + private_key_file = getattr(self, 'private_key_location', None) + client_id = getattr(self, 'client_id', None) username = getattr(self, 'username', None) with open(private_key_file, 'r') as pk_file: sig = pk_file.read() diff --git a/pyseed/seed_client.py b/pyseed/seed_client.py new file mode 100644 index 0000000..bec4ff0 --- /dev/null +++ b/pyseed/seed_client.py @@ -0,0 +1,1277 @@ +""" +**************************************************************************************************** +:copyright (c) 2019-2022, Alliance for Sustainable Energy, LLC, and other contributors. + +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions +and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions +and the following disclaimer in the documentation and/or other materials provided with the +distribution. + +Neither the name of the copyright holder nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written permission. + +Redistribution of this software, without modification, must refer to the software by the same +designation. Redistribution of a modified version of this software (i) may not refer to the +modified version by the same designation, or by any confusingly similar designation, and +(ii) must refer to the underlying software originally provided by Alliance as “URBANopt”. Except +to comply with the foregoing, the term “URBANopt”, or any confusingly similar designation may +not be used to refer to any modified version of this software or any modified version of the +underlying software originally provided by Alliance without the prior written consent of Alliance. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +**************************************************************************************************** +""" + +# Imports from Standard Library +from typing import Any, Dict, List, Optional, Set, Tuple, Union + +# Imports from Third Party Modules +import json +import logging +import time +from collections import Counter +from datetime import date +from pathlib import Path +from urllib.parse import _NetlocResultMixinStr + +# Local Imports +from pyseed.seed_client_base import SEEDReadWriteClient +from pyseed.utils import read_map_file + +logger = logging.getLogger(__name__) + + +class SeedClientWrapper(object): + """This is a wrapper around the SEEDReadWriteClient. If you need access + to the READOnly client, or the OAuth client, then you will need to create another class""" + + def __init__( + self, + organization_id: int, + connection_params: Optional[dict] = None, + connection_config_filepath: Optional[Path] = None, + ) -> None: + """wrapper around SEEDReadWriteClient. + + Args: + organization_id (int): _description_ + connection_params (dict, optional): parameters to connect to SEED. Defaults to None. 
If using, then must contain the following: + { + "name": "not used - can be any string", + "base_url": "http://127.0.0.1", + "username": "user@somedomain.com", + "api_key": "1b5ea1ee220c8628789c61d66253d90398e6ad03", + "port": 8000, + "use_ssl": false + } + connection_config_filepath (Path, optional): path to the parameters (JSON file). Defaults to None. + + Raises: + Exception: SeedClientWrapper + """ + if not connection_params and not connection_config_filepath: + raise Exception( + "Must provide either connection_params or connection_config_filepath" + ) + + # favor the connection params over the config file + self.payload = {} + if connection_params: + # the connetion params are simply squashed on SEEDReadWriteClient init + self.payload = connection_params + elif connection_config_filepath: + self.payload = SeedClientWrapper.read_connection_config_file( + connection_config_filepath + ) + # read in from config file + + self.client = SEEDReadWriteClient(organization_id, **self.payload) + + @classmethod + def read_connection_config_file(cls, filepath: Path) -> dict: + """Read in the connection config file and return the connection params. This + file can be mostly created by calling the following from the SEED root directory: + + ./manage.py create_test_user_json --username user@seed-platform.org --host http://localhost:80 --pyseed --file api_test_user.json + + Content must contain: + { + "name": "not used - can be any string", + "base_url": "http://127.0.0.1", + "username": "user@somedomain.com", + "api_key": "1b5ea1ee220c8628789c61d66253d90398e6ad03", + "port": 8000, + "use_ssl": false, + "seed_org_name: "test-org" + } + + Args: + filepath (str): path to the connection config file + """ + if not filepath.exists(): + raise Exception(f"Cannot find connection config file: {str(filepath)}") + + connection_params = json.load(open(filepath)) + return connection_params + + +class SeedClient(SeedClientWrapper): + """SEED Client with several property related + helper methods implemented.""" + + def __init__( + self, + organization_id: int, + connection_params: dict = None, + connection_config_filepath: Path = None, + ) -> None: + super().__init__(organization_id, connection_params, connection_config_filepath) + + # set org if you can + if self.payload and self.payload.get('seed_org_name', None): + self.get_org_by_name(self.payload['seed_org_name'], set_org_id=True) + + def get_org_id(self) -> int: + """Return the org ID that is set""" + return self.client.org_id + + def get_org_by_name(self, org_name: str, set_org_id: bool = False) -> dict: + """Set the current organization by name. + + Args: + org_name (str): name of the organization to set + set_org_id (bool): set the org_id on the object for later use. Defaults to None. + + Returns: + dict: { + org data + } + """ + orgs = self.get_organizations() + for org in orgs: + if org["name"] == org_name: + if set_org_id: + self.client.org_id = org["id"] + return org + + raise ValueError(f"Organization '{org_name}' not found") + + def instance_information(self) -> dict: + """Return the instance information. 
+ + Returns: + dict: instance information + """ + # http://localhost:8000/api/version/ + # add in URL to the SEED instance + # add in username (but not the password/api key) + info = self.client.get(None, required_pk=False, endpoint="version", data_name='all') + info["host"] = self.client.base_url + info["username"] = self.client.username + return info + + def get_organizations(self, brief: bool = True) -> Dict: + """Get a list organizations (that one is allowed to view) + + Args: + brief (bool, optional): if True, then only return the organization id with some other basic info. Defaults to True. + Returns: + Dict: [ + { + "name": "test-org", + "org_id": 1, + "parent_id": null, + "is_parent": true, + "id": 1, + "user_role": "owner", + "display_decimal_places": 2 + }, + ... + ] + """ + orgs = self.client.list( + endpoint="organizations", + data_name="organizations", + brief="true" if brief else "false", + ) + return orgs + + def get_buildings(self) -> List[dict]: + total_qry = self.client.list(endpoint="properties", data_name="pagination", per_page=100) + + # print(f" total: {total_qry}") + # step through each page of the results + buildings: List[dict] = [] + for i in range(1, total_qry['num_pages'] + 1): + buildings = buildings + self.client.list( + endpoint="properties", + data_name="results", + per_page=100, + page=i, + cycle=self.cycle_id, + ) + # print(f"number of buildings retrieved: {len(buildings)}") + + return buildings + + def get_property_view(self, property_view_id: int) -> dict: + """Return a single property (view and state) by the property view id. It is + recommended to use the more verbose version of `get_property` below. + + Args: + property_view_id (int): ID of the property to return. This is the ID that is in the URL http://SEED_URL/app/#/properties/{property_view_id} and resolves to {host}/api/v3/property_views/{property_view_id} + + Returns: + dict: { + 'id': property_view_id, + 'state': { + 'extra_data': {}, + }, + 'measures': [], + ... + } + """ + return self.client.get( + property_view_id, endpoint="property_views", data_name="property_views" + ) + + def get_property(self, property_id: int) -> dict: + """Return a single property by the property id. + + Args: + property__id (int): ID of the property to return. This is the ID that is in the URL http://SEED_URL/app/#/properties/{property_view_id} + + Returns: + dict: { + 'state': { + 'extra_data': {}, + }, + 'cycle': {...}, + 'property': {...}, + 'labels': {...}, + 'measures': {...} + ... + } + """ + # NOTE: this seems to be the call that OEP uses (returns property and labels dictionaries) + return self.client.get( + property_id, endpoint="properties", data_name="properties" + ) + + def search_buildings( + self, identifier_filter: str = None, identifier_exact: str = None + ) -> dict: + payload = { + "cycle": self.cycle_id, + } + if identifier_filter is not None: + payload["identifier"] = identifier_filter + + if identifier_exact is not None: + payload["identifier_exact"] = identifier_exact + + properties = self.client.get( + None, required_pk=False, endpoint="properties_search", **payload + ) + return properties + + def get_labels(self, filter_by_name: list = None) -> list: + """Get a list of all the labels in the organization. Filter by name if desired. + + Args: + filter_by_name (list, optional): List of subset of labels to return. Defaults to None. 
+ + Returns: + list: [ + { + 'id': 8, + 'name': 'Call', + 'color': 'blue', + 'organization_id': 1, + 'show_in_list': False + }, { + 'id': 14, + 'name': 'Change of Ownership', + 'color': 'blue', + 'organization_id': 1, + 'show_in_list': False + }, ... + ] + """ + labels = self.client.list(endpoint="labels") + if filter_by_name is not None: + labels = [label for label in labels if label["name"] in filter_by_name] + return labels + + def get_or_create_label( + self, label_name: str, color: str = "blue", show_in_list: bool = False + ) -> dict: + """_summary_ + + Args: + label_name (str): Name of label. SEED enforces uniqueness of label names within an organization. + color (str, optional): Default color of the label. Must be from red, blue, light blue, green, white, orange, gray. 'blue' is the default. + show_in_list (bool, optional): If true, then the label is shown in the inventory list page as part of the column. Defaults to False. + + Returns: + dict: { + 'id': 87, + 'name': 'label name', + 'color': 'green', + 'organization_id': 1, + 'show_in_list': true + } + """ + # First check if the label exists + label = self.get_labels(filter_by_name=[label_name]) + if len(label) == 1: + return label[0] + + payload = {"name": label_name, "color": color, "show_in_list": show_in_list} + return self.client.post(endpoint="labels", json=payload) + + def update_label( + self, + label_name: str, + new_label_name: str = None, + new_color: str = None, + new_show_in_list: bool = None, + ) -> dict: + """Update an existing label with the new_* fields. If the new_* fields are not provided, then the existing values are used. + + Args: + label_name (str): Name of existing label. This is required and must match an existing label name for the organization + new_label_name (str, optional): New name of the label. Defaults to None. + new_color (str, optional): New color of the label. Must be from red, blue, light blue, green, white, orange, gray. Defaults to None + new_show_in_list (bool, optional): New boolean on whether to show the label in the inventory list page. Defaults to None. + + Raises: + Exception: If the label does not exist, then throw an error. + + Returns: + dict: { + 'id': 87, + 'name': 'label name', + 'color': 'green', + 'organization_id': 1, + 'show_in_list': true + } + """ + # color (str, optional): Default color of the label. Must be from red, blue, light blue, green, white, orange, gray. 'blue' is the default. + # get the existing label + label = self.get_labels(filter_by_name=[label_name]) + if len(label) != 1: + raise Exception(f"Could not find label to update of {label_name}") + current_label = label[0] + + if new_label_name is not None: + current_label["name"] = new_label_name + + if new_color is not None: + current_label["color"] = new_color + + if new_show_in_list is not None: + current_label["show_in_list"] = new_show_in_list + + # remove the org id from the json data + current_label.pop("organization_id") + + return self.client.put( + current_label["id"], endpoint="labels", json=current_label + ) + + def delete_label(self, label_name: str) -> dict: + """Deletes an existing label. This method will look up the ID of the label to delete. + + Args: + label_name (str): Name of the label to delete. 
+ + Returns: + dict: _description_ + """ + label = self.get_labels(filter_by_name=[label_name]) + if len(label) != 1: + raise Exception(f"Could not find label to delete with name {label_name}") + id = label[0]["id"] + + return self.client.delete(id, endpoint="labels") + + def get_view_ids_with_label(self, label_names: list = []) -> list: + """Get the view IDs of the properties with a given label name. + + Note that with labels, the data.selected field is for property view ids! SEED was updated + in June 2022 to add in the label_names to filter on. + + Args: + label_names (list, optional): list of the labels to filter on. Defaults to []. + + Returns: + list: list of labels and the views they are associated with + """ + properties = self.client.post( + endpoint="properties_labels", + cycle=self.cycle_id, + json={"label_names": label_names}, + ) + return properties + + def update_labels_of_buildings( + self, + add_label_names: list, + remove_label_names: list, + building_ids: list, + inventory_type: str = "property", + ) -> dict: + """Add label names to the passed building ids. + + Args: + add_label_names (list): list of label names to add, will be converted to IDs + remove_label_names (list): list of label names to remove, will be converted to IDs + building_ids (list): list of building IDs (property_view_id) to add/remove labels + inventory_type (str, optional): taxlot or property inventory. Defaults to 'property'. + + Raises: + ValueError: if you don't pass the inventory type correction it will error out + + Returns: + dict: { + 'status': 'success', + 'num_updated': 3, + 'labels': [ + {'id': 3, 'color': 'blue', 'name': 'Violation'} + {'id': 16, 'color': 'green', 'name': 'Complied'} + ] + } + """ + if inventory_type == "property": + endpoint = "labels_property" + elif inventory_type == "tax_lot": + endpoint = "labels_taxlot" + else: + raise ValueError("inventory_type must be either property or tax_lot") + + # first make sure that the labels exist + labels = self.client.list(endpoint="labels") + # create a label id look up + label_id_lookup = {label["name"]: label["id"] for label in labels} + + # now find the IDs of the labels that we want to add and remove + add_label_ids = [] + remove_label_ids = [] + for label_name in add_label_names: + if label_name in label_id_lookup: + add_label_ids.append(label_id_lookup[label_name]) + else: + logger.warning(f"label name {label_name} not found in SEED, skipping") + + for label_name in remove_label_names: + if label_name in label_id_lookup: + remove_label_ids.append(label_id_lookup[label_name]) + else: + logger.warning(f"label name {label_name} not found in SEED, skipping") + + payload = { + "inventory_ids": building_ids, + "add_label_ids": add_label_ids, + "remove_label_ids": remove_label_ids, + } + result = self.client.put( + None, required_pk=False, endpoint=endpoint, json=payload + ) + return result + + def get_cycles(self) -> list: + """Return a list of all the cycles for the organization. + + Returns: + list: [ + { + 'name': '2021 Calendar Year', + 'start': '2020-12-31T23:53:00-08:00', + 'end': '2021-12-31T23:53:00-08:00', + 'organization': 1, + 'user': None, + 'id': 2 + }, + { + 'name': '2023', + 'start': '2023-01-01T00:00:00-08:00', + 'end': '2023-12-31T00:00:00-08:00', + 'organization': 1, + 'user': 1, + 'id': 3 + } + ... 
+ ] + """ + # first list the cycles + cycles = self.client.list(endpoint="cycles") + return cycles["cycles"] + + def create_cycle(self, cycle_name: str, start_date: date, end_date: date) -> dict: + """Name of the cycle to create. If the cycle already exists, then it will + create a new one. This is the default behavior of SEED. + + Args: + cycle_name (str): Name of the cycle + start_date (date): MM/DD/YYYY of start date cycle + end_date (date): MM/DD/YYYY of end data for cycle + + Returns: + dict: { + 'name': 'new cycle 351cd7e1', + 'start': '2021-01-01T00:00:00-08:00', + 'end': '2022-01-01T00:00:00-08:00', + 'organization': 1, + 'user': 1, + 'id': 24 + } + """ + post_data = { + "name": cycle_name, + "start": start_date.strftime("%Y-%m-%d"), + "end": end_date.strftime("%Y-%m-%d"), + } + + # before creating, check if the name already exists. SEED allows the same name of cycles, + # but we really shouldn't + existing_cycles = self.get_cycles() + for cycle in existing_cycles: + if cycle["name"] == cycle_name: + raise Exception( + f"A cycle with this name already exists: '{cycle_name}'" + ) + + cycles = self.client.post(endpoint="cycles", json=post_data) + return cycles["cycles"] + + def get_or_create_cycle( + self, + cycle_name: str, + start_date: date, + end_date: date, + set_cycle_id: bool = False, + ) -> dict: + """Get or create a new cycle. If the cycle_name already exists, then it simply returns the existing cycle. However, if the cycle_name does not exist, then it will create a new cycle. + + Args: + cycle_name (str): name of the cycle to get or create + start_date (date): MM/DD/YYYY of start date cycle + end_date (date): MM/DD/YYYY of end data for cycle + set_cycle_id (str): Set the object's cycle_id to the resulting cycle that is returned (either existing or newly created) + + Returns: + dict: { + 'name': 'Calendar Year 2022', + 'start': '2021-01-01T00:00:00-08:00', + 'end': '2022-01-01T00:00:00-08:00', + 'organization': 1, + 'user': 1, + 'id': 24 + } + """ + cycles = self.get_cycles() + + # force the name of the cycle to be a string! + cycle_name = str(cycle_name) + + # note that this picks the first one it finds, even if there are more + # than one cycle with the name name + cycle_names = [cycle["name"] for cycle in cycles] + counts = Counter(cycle_names) + for i_cycle_name, count in counts.items(): + if count > 1: + msg = f"More than one cycle named '{i_cycle_name}' exists [found {count}]. Using the first one." + logger.warning(msg) + print(msg) + + selected = None + for cycle in cycles: + if cycle["name"] == cycle_name: + selected = cycle + break + + if selected is None: + cycle = self.create_cycle(cycle_name, start_date, end_date) + # only return the cycle portion of the response so that it + # matches the result from the "already exists"-case + selected = cycle + + if set_cycle_id: + self.cycle_id = selected["id"] + + # to keep the response consistent add back in the status + return selected + + def get_cycle_by_name(self, cycle_name: str, set_cycle_id: bool = None) -> dict: + """Set the current cycle by name. + + Args: + cycle_name (str): name of the cycle to set + set_cycle_id (bool): set the cycle_id on the object for later use. Defaults to None. 
+ + Returns: + dict: { + 'name': 'Calendar Year 2022', + 'start': '2021-01-01T00:00:00-08:00', + 'end': '2022-01-01T00:00:00-08:00', + 'organization': 1, + 'user': 1, + 'id': 24 + } + """ + cycles = self.get_cycles() + for cycle in cycles: + if cycle["name"] == cycle_name: + if set_cycle_id: + self.cycle_id = cycle["id"] + return cycle + + raise ValueError(f"cycle '{cycle_name}' not found") + + def delete_cycle(self, cycle_id: str) -> dict: + """Delete the cycle. This will only work if there are no properties or tax lots in the cycle + + Args: + cycle_id (str): ID of the cycle to delete + + Returns: + dict: + """ + return self.client.delete(cycle_id, endpoint="cycles") + + def get_or_create_dataset(self, dataset_name: str) -> dict: + """Get or create a SEED dataset which is used to hold + data files that are uploaded to SEED. + + Args: + dataset_name (str): dataset name to get or create. Names can be duplicated! + + Returns: + dict: resulting dataset record + """ + post_data = {"name": dataset_name} + + datasets = self.client.list(endpoint="datasets", data_name="datasets") + for dataset in datasets: + if dataset["name"] == dataset_name: + logger.info(f"Dataset already created, returning {dataset['name']}") + return dataset + + # create a new dataset - this doesn't return the entire dict back + # so after creating go and get the individual dataset + dataset = self.client.post(endpoint="datasets", json=post_data) + selected = {} + if dataset["status"] == "success": + selected = self.client.get( + dataset["id"], endpoint="datasets", data_name="dataset" + ) + return selected + + def upload_datafile( + self, dataset_id: int, data_file: str, upload_datatype: str + ) -> dict: + """Upload a datafile file + + Args: + dataset_id (int): id of the SEED dataset to where the data file will be saved + data_file (str): full path to file + upload_datatype (str): Type of data in file ('Assessed Raw', 'Portfolio Raw') + + Returns: + dict: uploaded file record + { + "import_file_id": 54, + "success": true, + "filename": "DataforSEED_dos15.csv" + } + """ + params = { + "import_record": dataset_id, + "source_type": upload_datatype, + } + + files_params = [ + ("file", (Path(data_file).name, open(Path(data_file).resolve(), "rb"))), + ] + + return self.client.post( + "upload", + params=params, + files=files_params, + ) + + def track_progress_result(self, progress_key) -> dict: + """Delays the sequence until progress is at 100 percent + + Args: + progress_key (str): the key to track + + Returns: + dict: progress_result + { + 'status': 'success', # 'not_started', 'in_progress', 'parsing', 'success', 'error' + 'status_message': '', + 'progress': 100, + 'progress_key': ':1:SEED:save_raw_data:PROG:57', + 'unique_id': 57, + 'func_name': 'save_raw_data', + 'message': None, + 'stacktrace': None, + 'summary': None, + 'total': 1 + } + + """ + if not progress_key: + raise Exception("No progress key provided") + try: + progress_result = self.client.get( + None, + required_pk=False, + endpoint="progress", + url_args={"PROGRESS_KEY": progress_key}, + ) + except Exception: + logger.error("Other unknown exception caught") + progress_result = None + + if progress_result and progress_result["progress"] == 100: + return progress_result + else: + # wait a couple seconds before checking the status again + time.sleep(2) + progress_result = self.track_progress_result(progress_key) + + return progress_result + + def get_column_mapping_profiles(self, profile_type: str = "All") -> dict: + """get the list of column mapping profiles. 
If profile_type is provided + then return the list of profiles of that type. + + Args: + profile_type (str, optional): Type of column mappings to return, can be 'Normal', 'BuildingSync Default'. Defaults to 'All', which includes both Normal and BuildingSync. + + Returns: + dict: column mapping profiles + """ + result = self.client.post(endpoint="column_mapping_profiles_filter") + indices_to_remove = [] + for index, item in enumerate(result): + if profile_type == "All": + continue + elif item["profile_type"] != profile_type: + indices_to_remove.append(index) + + # return only the unmarked indices + if indices_to_remove: + result = [ + item + for index, item in enumerate(result) + if index not in indices_to_remove + ] + + return result + + def get_column_mapping_profile( + self, column_mapping_profile_name: str + ) -> Optional[dict]: + """get a specific column mapping profile. Currently, filter does not take an + argument by name, so return them all and find the one that matches the + column_mapping_profile_name. + + Args: + column_mapping_profile_name (str): Name of column_mapping_profile to return + + Returns: + dict: single column mapping profile + """ + results = self.client.post(endpoint="column_mapping_profiles_filter") + for item in results: + if item["name"] == column_mapping_profile_name: + return item + + # if nothing, then return none + return None + + def create_or_update_column_mapping_profile( + self, mapping_profile_name: str, mappings: list + ) -> dict: + """Create or update an existing mapping profile from a list of mappings + + This only works for 'Normal' column mapping profiles, that is, it does not work for + BuildingSync column mapping profiles. Use this with caution since it will update + an already existing profile if it is there. + + Args: + mapping_profile_name (str): cription_ + mappings (list): list of mappings in the form of + [ + { + "from_field": "Address 1", + "from_units": null, + "to_table_name": "PropertyState" + "to_field": "address_line_1", + }, + { + "from_field": "address1", + "from_units": null, + "to_table_name": "PropertyState" + "to_field": "address_line_1", + }, + ... + ] + + Returns: + dict: { + 'id': 1 + 'profile_type': 'Normal', + 'name': 'Profile Name', + 'mappings': [ + ... + ] + } + """ + # see if the column mapping profile already exists + profile = self.get_column_mapping_profile(mapping_profile_name) + result = None + if not profile: + # The profile doesn't exist, so create a new one. Note that seed does not + # enforce uniqueness of the name, so we can use the same name for multiple + # column mapping profiles (for better or worse) + payload = { + "name": mapping_profile_name, + "mappings": mappings, + "profile_type": "Normal", + } + result = self.client.post(endpoint="column_mapping_profiles", json=payload) + else: + payload = { + "mappings": mappings, + } + result = self.client.put( + profile["id"], endpoint="column_mapping_profiles", json=payload + ) + + return result + + def create_or_update_column_mapping_profile_from_file( + self, mapping_profile_name: str, mapping_file: str + ) -> dict: + """creates or updates a mapping profile. The format of the mapping file is a CSV with the following format: + + Raw Columns, units, SEED Table, SEED Columns\n + PM Property ID, , PropertyState, pm_property_id\n + Building ID, , PropertyState, custom_id_1\n + ...\n + + This only works for 'Normal' column mapping profiles, that is, it does not work for + BuildingSync column mapping profiles. 
Use this with caution since it will update + an already existing profile if it is there. + + Args: + mapping_profile_name (str): _description_ + mapping_file (str): _description_ + + Returns: + dict: { + 'id': 1 + 'profile_type': 'Normal', + 'name': 'Profile Name', + 'mappings': [ + ... + ] + } + """ + # grab the mappings from the file, then pass to the other method + if not Path(mapping_file).exists(): + raise Exception(f"Could not find mapping file: {mapping_file}") + + return self.create_or_update_column_mapping_profile( + mapping_profile_name, read_map_file(mapping_file) + ) + + def set_import_file_column_mappings( + self, import_file_id: int, mappings: list + ) -> dict: + """Sets the column mappings onto the import file record. + + Args: + import_file_id (int): ID of the import file of interet + mappings (list): list of column mappings in the form of the results of column mapping profiles + + Returns: + dict: dict of status + """ + return self.client.post( + "org_column_mapping_import_file", + url_args={"ORG_ID": self.client.org_id}, + params={"import_file_id": import_file_id}, + json={"mappings": mappings}, + ) + + def get_meters(self, property_id: int) -> list: + """Return the list of meters assigned to a property (the property view id). + Note that meters are attached to the property (not the state nor the property view). + + Args: + property_id (int): property id to get the meters + + Returns: + dict: [ + { + 'id': 584, + 'type': 'Cost', + 'source': 'PM', + 'source_id': '1', + 'scenario_id': None, + 'scenario_name': None + }, + ... + ] + """ + meters = self.client.get(None, required_pk=False, endpoint='properties_meters', + url_args={"PK": property_id}) + return meters + + def get_meter(self, property_view_id: int, meter_type: str, source: str, source_id: str) -> Union[dict, None]: + """get a meter for a property view. + + Args: + property_view_id (int): property view id + meter_type (str): Type of meter, based off the enums in the SEED Meter model + source (str): Of GreenButton, Portfolio Manager, or Custom Meter + source_id (str): Identifier, if GreenButton, then format is xpath like + + Returns: + dict: meter object + """ + # return all the meters for the property and see if the meter exists, if so, return it + meters = self.get_meters(property_view_id) + for meter in meters: + if meter['type'] == meter_type and meter['source'] == source and meter['source_id'] == source_id: + return meter + else: + return None + + def get_or_create_meter(self, property_view_id: int, meter_type: str, source: str, source_id: str) -> Optional[Dict[Any, Any]]: + """get or create a meter for a property view. + + Args: + property_view_id (int): property view id + meter_type (str): Type of meter, based off the enums in the SEED Meter model + source (str): Of GreenButton, Portfolio Manager, or Custom Meter + source_id (str): Identifier, if GreenButton, then format is xpath like + + Returns: + dict: meter object + """ + # return all the meters for the property and see if the meter exists, if so, return it + meter = self.get_meter(property_view_id, meter_type, source, source_id) + if meter: + return meter + else: + # create the meter + payload = { + 'type': meter_type, + 'source': source, + 'source_id': source_id, + } + + meter = self.client.post( + endpoint='properties_meters', url_args={"PK": property_view_id}, json=payload + ) + + return meter + + def delete_meter(self, property_view_id: int, meter_id: int) -> dict: + """Delete a meter from the property. 
+ + Args: + property_view_id (int): property view id + meter_id (int): meter id + + Returns: + dict: status of the delete + """ + return self.client.delete( + meter_id, endpoint='properties_meters', url_args={"PK": property_view_id} + ) + + def upsert_meter_readings_bulk(self, property_view_id: int, meter_id: int, data: list) -> dict: + """Upsert meter readings for a property's meter with the bulk method. + + Args: + property_id (int): property id + meter_id (int): meter id + data (list): list of dictioanries of meter readings + + Returns: + dict: list of all meter reading objects + """ + # get the meter data for the property + readings = self.client.post( + endpoint='properties_meters_reading', url_args={"PK": property_view_id, "METER_PK": meter_id}, json=data + ) + return readings + + def get_meter_data(self, property_id, interval: str = 'Exact', excluded_meter_ids: list = []): + """Return the meter data from the property. + + Args: + property_id (_type_): property view id + interval (str, optional): How to aggregate the data, can be 'Exact', 'Month', or 'Year'. Defaults to 'Exact'. + excluded_meter_ids (list, optional): IDs to exclude. Defaults to []]. + """ + payload = { + "interval": interval, + "excluded_meter_ids": excluded_meter_ids, + } + meter_data = self.client.post(endpoint='properties_meter_usage', url_args={"PK": property_id}, json=payload) + return meter_data + + def save_meter_data(self, property_id: int, meter_id: int, meter_data) -> dict: + pass + + def start_save_data(self, import_file_id: int) -> dict: + """start the background process to save the data file to the database. + This is the state before the mapping. + + Args: + import_file_id (int): id of the import file to save + + Returns: + dict: progress key + { + "status": "success", + "progress_key": ":1:SEED:start_save_data:PROG:90", + "unique_id": "90", + } + """ + return self.client.post( + "import_files_start_save_data_pk", + url_args={"PK": import_file_id}, + json={"cycle_id": self.cycle_id}, + ) + + def start_map_data(self, import_file_id: int) -> dict: + """start the background process to save the data file to the database. + This is the state before the mapping. + + Args: + import_file_id (int): id of the import file to save + + Returns: + dict: progress key + { + "status": "success", + "progress_key": ":1:SEED:map_data:PROG:90", + "unique_id": "90", + } + """ + return self.client.post( + "import_files_start_map_data_pk", + url_args={"PK": import_file_id}, + json={"remap": True}, + ) + + def start_system_matching_and_geocoding(self, import_file_id: int) -> dict: + """start the background process save mappings and start system matching/geocoding. + This is the state after the mapping. 
+ + Args: + import_file_id (int): id of the import file to save + + Returns: + dict: progress key + { + "progress_data": { + "status": "success", + "status_message": "Pairing data", + "progress": 100, + "progress_key": ":1:SEED:match_buildings:PROG:106", + "unique_id": "106", + "func_name": "match_buildings", + "message": null, + "stacktrace": null, + "summary": null, + "total": 5 + }, + "sub_progress_data": { + "status": "not-started", + "status_message": "", + "progress": 0, + "progress_key": ":1:SEED:match_sub_progress:PROG:106", + "unique_id": "106", + "func_name": "match_sub_progress", + "message": null, + "stacktrace": null, + "summary": null, + "total": 100 + } + } + """ + return self.client.post( + "import_files_start_matching_pk", url_args={"PK": import_file_id} + ) + + def get_matching_results(self, import_file_id: int) -> dict: + """matching results summary + + Args: + import_file_id (int): ID of the import file + + Returns: + dict: { + 'initial_incoming': 0, + 'duplicates_against_existing': 0, + 'duplicates_within_file': 0, + 'merges_against_existing': 0, + 'merges_between_existing': 0, + 'merges_within_file': 0, + 'new': 0, + 'geocoded_high_confidence': 0, + 'geocoded_low_confidence': 0, + 'geocoded_manually': 0, + 'geocode_not_possible': 0 + } + """ + return self.client.get( + None, + required_pk=False, + endpoint="import_files_matching_results", + url_args={"PK": import_file_id}, + ) + + def check_meters_tab_exist(self, import_file_id: int) -> bool: + """Check if the imported file has a meter and meter readings tab. If so + this tab can be used to import meter data into SEED. + + Args: + import_file_id (int): ID of the import file to check + + Returns: bool + """ + response = self.client.get( + None, + required_pk=False, + endpoint="import_files_check_meters_tab_exists_pk", + url_args={"PK": import_file_id}, + ) + # if the data is set to True, then return such + return response + + def import_files_reuse_inventory_file_for_meters(self, import_file_id: int) -> dict: + """Reuse an import file to create all the meter entries. This method is used + for ESPM related data files. The result will be another import_file ID for the + meters that will then need to be "resaved". Note that the returning import_file_id + is not the same as the argument import file. + + Args: + import_file_id (int): ID of the import file to reuse. + + Returns: + dict: { + "status": "success", + "import_file_id": 16 + } + """ + payload = {"import_file_id": import_file_id} + response = self.client.post( + endpoint="import_files_reuse_inventory_file_for_meters", json=payload + ) + return response + + def upload_and_match_datafile( + self, + dataset_name: str, + datafile: str, + column_mapping_profile_name: str, + column_mappings_file: str, + import_meters_if_exist: bool = False, + **kwargs, + ) -> dict: + """Upload a file to the cycle_id that is defined in the constructor. This carries the + upload of the file through the whole ingestion process (map, merge, pair, geocode). + + Args: + dataset_name (str): Name of the dataset to upload to + datafile (str): Full path to the datafile to upload + column_mapping_profile_name (str): Name of the column mapping profile to use + column_mappings_file (str): Mapping that will be uploaded to the column_mapping_profile_name + import_meters_if_exist (bool): If true, will import meters from the meter tab if they exist in the datafile. Defaults to False. 
+ + Returns: + dict: { + matching summary + } + """ + datafile_type = kwargs.pop("datafile_type", "Assessed Raw") + dataset = self.get_or_create_dataset(dataset_name) + result = self.upload_datafile(dataset["id"], datafile, datafile_type) + import_file_id = result["import_file_id"] + + # start processing + result = self.start_save_data(import_file_id) + progress_key = result.get("progress_key", None) + + # wait until upload is complete + result = self.track_progress_result(progress_key) + + # create/retrieve the column mappings + result = self.create_or_update_column_mapping_profile_from_file( + column_mapping_profile_name, column_mappings_file + ) + + # set the column mappings for the dataset + result = self.set_import_file_column_mappings( + import_file_id, result["mappings"] + ) + + # now start the mapping + result = self.start_map_data(import_file_id) + progress_key = result.get("progress_key", None) + + # wait until upload is complete + result = self.track_progress_result(progress_key) + + # save the mappings, call system matching/geocoding + result = self.start_system_matching_and_geocoding(import_file_id) + progress_data = result.get("progress_data", None) + progress_key = progress_data.get("progress_key", None) + + # wait until upload is complete + result = self.track_progress_result(progress_key) + + # return summary + matching_results = self.get_matching_results(import_file_id) + + # check if we need to import meters and if they exist + if import_meters_if_exist and self.check_meters_tab_exist(import_file_id): + reuse_file = self.import_files_reuse_inventory_file_for_meters( + import_file_id + ) + + meter_import_file_id = reuse_file["import_file_id"] + + result = self.start_save_data(meter_import_file_id) + progress_key = result.get("progress_key", None) + + # wait until upload is complete + result = self.track_progress_result(progress_key) + + return matching_results diff --git a/pyseed/seedclient.py b/pyseed/seed_client_base.py similarity index 56% rename from pyseed/seedclient.py rename to pyseed/seed_client_base.py index 58475a9..a7f7499 100644 --- a/pyseed/seedclient.py +++ b/pyseed/seed_client_base.py @@ -22,45 +22,85 @@ """ -# Imports from Standard Library -import inspect - # Imports from Third Party Modules +import inspect import requests # Local Imports from pyseed.apibase import JSONAPI, OAuthMixin, UserAuthMixin, add_pk from pyseed.exceptions import SEEDError -# Constants +# Constants (Should end with a slash) URLS = { - 'columns': '/api/v2/columns/', - 'column_mappings': '/api/v2/column_mappings/', - 'cycles': '/api/v2/cycles/', - 'datasets': '/api/v2/datasets/', - 'gbr_properties': '/api/v2/gbr_properties/', - 'green_assessment': '/api/v2/green_assessments/', - 'green_assessment_property': '/api/v2/green_assessment_properties/', - 'green_assessment_url': '/api/v2/green_assessment_urls/', - 'labels': '/api/v2/labels/', - 'import_files': '/api/v2/import_files/', - 'projects': '/api/v2/projects/', - 'properties': '/api/v2/properties/', - 'property_states': '/api/v2/property_states/', - 'property_views': '/api/v2/property_views/', - 'taxlots': '/api/v2/taxlots/', - 'users': '/api/v2/users/', + 'v3': { + 'columns': '/api/v3/columns/', + 'column_mapping_profiles': '/api/v3/column_mapping_profiles/', + 'column_mapping_profiles_filter': '/api/v3/column_mapping_profiles/filter/', + 'cycles': '/api/v3/cycles/', + 'datasets': '/api/v3/datasets/', + 'gbr_properties': '/api/v3/gbr_properties/', + 'green_assessment': '/api/v3/green_assessments/', + 'green_assessment_property': 
'/api/v3/green_assessment_properties/', + 'green_assessment_url': '/api/v3/green_assessment_urls/', + 'labels': '/api/v3/labels/', + 'labels_property': '/api/v3/labels_property/', + 'labels_taxlot': '/api/v3/labels_taxlot/', + 'import_files': '/api/v3/import_files/', + 'import_files_reuse_inventory_file_for_meters': '/api/v3/import_files/reuse_inventory_file_for_meters/', + 'organizations': '/api/v3/organizations/', + 'properties': '/api/v3/properties/', + 'properties_labels': '/api/v3/properties/labels/', + 'properties_search': '/api/v3/properties/search/', + 'property_states': '/api/v3/property_states/', + 'property_views': '/api/v3/property_views/', + 'taxlots': '/api/v3/taxlots/', + 'upload': '/api/v3/upload/', + 'users': '/api/v3/users/', + # No versioning endpoints + 'version': '/api/version/', + # POSTs with replaceable keys + 'import_files_start_save_data_pk': '/api/v3/import_files/PK/start_save_data/', + 'import_files_start_map_data_pk': '/api/v3/import_files/PK/map/', + 'import_files_start_matching_pk': '/api/v3/import_files/PK/start_system_matching_and_geocoding/', + 'import_files_check_meters_tab_exists_pk': '/api/v3/import_files/PK/check_meters_tab_exists/', + 'org_column_mapping_import_file': 'api/v3/organizations/ORG_ID/column_mappings/', + 'properties_meters_reading': '/api/v3/properties/PK/meters/METER_PK/readings/', + # GETs with replaceable keys + 'import_files_matching_results': '/api/v3/import_files/PK/matching_and_geocoding_results/', + 'progress': '/api/v3/progress/PROGRESS_KEY/', + 'properties_meters': '/api/v3/properties/PK/meters/', + 'properties_meter_usage': '/api/v3/properties/PK/meter_usage/', + 'properties_meters_reading': '/api/v3/properties/PK/meters/METER_PK/readings/', + }, + 'v2': { + 'columns': '/api/v2/columns/', + 'column_mappings': '/api/v2/column_mappings/', + 'cycles': '/api/v2/cycles/', + 'datasets': '/api/v2/datasets/', + 'gbr_properties': '/api/v2/gbr_properties/', + 'green_assessment': '/api/v2/green_assessments/', + 'green_assessment_property': '/api/v2/green_assessment_properties/', + 'green_assessment_url': '/api/v2/green_assessment_urls/', + 'labels': '/api/v2/labels/', + 'import_files': '/api/v2/import_files/', + 'projects': '/api/v2/projects/', + 'properties': '/api/v2/properties/', + 'property_states': '/api/v2/property_states/', + 'property_views': '/api/v2/property_views/', + 'taxlots': '/api/v2/taxlots/', + 'users': '/api/v2/users/', + } } # Private Classes and Functions -def _get_urls(base_url, url_map=None): +def _get_urls(base_url, url_map=None, version=None): """Populate URL""" + version = version if version else 'v3' if not url_map: - url_map = URLS + url_map = URLS[version] return { - key: '{}/{}'.format(base_url.rstrip('/'), val.lstrip('/')) - for key, val in url_map.items() + key: '{}/{}'.format(base_url.rstrip('/'), val.lstrip('/')) for key, val in url_map.items() } @@ -79,7 +119,12 @@ def _set_default(obj, key, val, required=True): return val -# Public Classes and Functions +def _replace_url_args(url, url_args): + """Replace any custom string URL items with values in args""" + if url_args: + for key, value in url_args.items(): + url = url.replace(f"/{key}/", f"/{value}/") + return url class SEEDBaseClient(JSONAPI): @@ -89,20 +134,20 @@ class SEEDBaseClient(JSONAPI): handling is done. This the responsibility of the caller. This should never be used directly, instead inherit from - SEEDReadOnlyClient or SEEDRecord. + one of the SEED Read or ReadWrite classes with mixins. 
Note subclasses of these should not themselves be inherited from due to the way error handling works, this should not be needed, other classes can inherit from them directly and overwrite methods/use mixins as appropriate. - endpoint referers to the endpoint name. This allow you to call an + endpoint refers to the endpoint name. This allow you to call an endpoint without having to know the full url. Endpoint names are set in config, and can be accessed as self.endpoints. data_name is set as an attribute on the view called. - This constains the actual response data. + This constrains the actual response data. If not set it is derived from the url (typically its the view name). In either case 'data' is used as a fallback, then detail. @@ -127,12 +172,13 @@ class SEEDBaseClient(JSONAPI): :param config_urls_key: key for urls in config object (default urls) :type config_urls_key: str """ + # pylint:disable=too-few-public-methods,too-many-arguments # pylint:disable=too-many-instance-attributes def __init__(self, org_id, username=None, password=None, access_token=None, - endpoint=None, data_name=None, use_ssl=None, - base_url=None, port=None, url_map=None, **kwargs): + endpoint=None, data_name=None, use_ssl=None, base_url=None, + port=None, url_map=None, version=None, **kwargs): use_ssl = use_ssl if use_ssl is not None else True super(SEEDBaseClient, self).__init__( username=username, password=password, use_ssl=use_ssl, @@ -140,7 +186,7 @@ def __init__(self, org_id, username=None, password=None, access_token=None, ) self.org_id = org_id self.token = access_token - # prevent overriding if set in sublcass as class attr + # prevent overriding if set in subclass as class attr if not getattr(self, 'endpoint', None): self.endpoint = endpoint if not getattr(self, 'data_name', None): @@ -153,7 +199,8 @@ def __init__(self, org_id, username=None, password=None, access_token=None, self.base_url = '{}:{}'.format(self.base_url, self.port) if not self.base_url.endswith('/'): self.base_url = self.base_url + '/' - self.urls = _get_urls(self.base_url, url_map) + self.username = username + self.urls = _get_urls(self.base_url, url_map=url_map, version=version) self.endpoints = self.urls.keys() def _check_response(self, response, *args, **kwargs): @@ -164,32 +211,64 @@ def _check_response(self, response, *args, **kwargs): be reported correctly. """ error = False - error_msg = "Unknown error from SEED API" - # OK, Created , Accepted + error_msg = 'Unknown error from SEED API' + # OK, Created, Accepted if response.status_code not in [200, 201, 202]: error = True - error_msg = "SEED returned status code: {}".format( - response.status_code - ) + error_msg = 'SEED returned status code: {}'.format(response.status_code) # SEED adds a status key to the response to indicate success/error # This is superfluous as status codes should be used to indicate an - # error, but theyt are not always set correctly. - elif response.json().get('status', None) == 'error': + # error, but they are not always set correctly. + elif isinstance(response.json(), dict): + status_field = response.json().get('status', None) + if status_field: + if status_field == 'error': + error = True + elif status_field == 'success': + # continue + error = False + elif 'success' in response.json().keys(): + success_flag = response.json().get('success', None) + # For file uploads the response key is 'success' + error = not success_flag + elif 'progress_data' in response.json().keys(): + # this is a system matching response, which is okay. 
return the success flag of this + status_flag = response.json()['progress_data'].get('status', None) + error = status_flag not in ['not-started', 'success', 'parsing'] + elif not any(key in ['results', 'readings', 'data', 'status', 'id', 'organizations', 'sha'] for key in response.json().keys()): + # In some cases there is not a 'status' field, so check if there are + # any other keys in the response that depict a success: + # readings - this comes from meters + # data - lots of responses just return the data flag + # status - sometimes the status comes back as complete + # id - For some object creates, the response is simply the object back in JSON format with an ID field. + # organizations - this is the only key when returning the list of orgs + # sha - When parsing the version of SEED + error = True + + elif not isinstance(response.json(), list): error = True + if error: if response.content: try: - error_msg = response.json().get( - 'message', 'Unknown SEED Error' - ) + if getattr(response.json(), "get", None): + error_msg = response.json().get( + 'message', f"Unknown SEED Error {response.status_code}: {response.json()}" + ) + else: + error_msg = f"Unknown SEED Error {response.status_code}: {response.json()}" except ValueError: - error_msg = 'Unknown SEED Error' + error_msg = 'Unknown SEED Error: No response returned' if args: kwargs['args'] = args self._raise_error(response, error_msg, stack_pos=1, **kwargs) def _get_result(self, response, data_name=None, **kwargs): - """Extract result data from response.""" + """Extract result data from response. If no data_name is given, then this method + tries to determine what the first element of the resulting JSON is which is then used as + the base for the rest of the response. This is not always desired, so pass data_name='all' if + you want to get the entire response back.""" if not data_name: url = response.request.url # take the last part of the url unless it's a digit @@ -200,17 +279,28 @@ def _get_result(self, response, data_name=None, **kwargs): else: data_name = durl[1] # actual results should be under data_name or the fallbacks - for dname in [data_name, 'data', 'detail']: - try: - result = response.json().get(dname) - if result is not None: - break - except KeyError: - pass + result = response.json() if result is None: - error_msg = "Could not find result using data_name {}.".format( - data_name - ) + error_msg = 'No results returned' + self._raise_error(response, error_msg, stack_pos=2, **kwargs) + + constrained_result = None + if data_name == 'all': + result = result + else: + for dname in [data_name, 'data', 'detail']: + try: + # allow a list to be valid (this is the case with labels) + if isinstance(result, dict): + constrained_result = result.get(dname) + if constrained_result is not None: + result = constrained_result + break + except KeyError: + pass + + if result is None: + error_msg = 'Could not find result using data_name {}.'.format(data_name) self._raise_error(response, error_msg, stack_pos=2, **kwargs) return result @@ -227,7 +317,7 @@ def _raise_error(self, response, error_msg, stack_pos=0, *args, **kwargs): Thus if the error occurs directly in the function calling _raise_error stack_pos=0, if that function is called by another function add 1 etc. - Note techically *this* method (_raise_error) is at the bottom of the + Note technically *this* method (_raise_error) is at the bottom of the stack, but we add 1 to stack_pos so counting starts at the method that calls this one. 
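As a reading aid for the response-handling hunks above, the sketch below shows roughly how a caller might exercise the rewritten `_check_response`/`_get_result` behavior, requesting the full payload with `data_name='all'` and filling URL placeholders via the `url_args` option introduced in the mixin changes that follow. The organization id, credentials, host, and property id are placeholders, and the exact keyword combination used for the `properties_meters` endpoint is an assumption based on the v3 URL map above, not a verbatim excerpt from the library.

.. code-block:: python

    from pyseed.seed_client_base import SEEDReadWriteClient

    # Placeholder connection details for a local SEED instance
    client = SEEDReadWriteClient(
        1,                                  # organization id
        username='user@seed-platform.org',  # placeholder credentials
        api_key='deadbeefdeadbeef',
        base_url='localhost',
        port=8000,
        use_ssl=False,
    )

    # GET /api/v3/properties/PK/meters/ -- the PK placeholder in the URL map is
    # filled from url_args, so no trailing primary key is appended
    # (required_pk=False); data_name='all' returns the full JSON payload
    # instead of a single sub-key.
    meters = client.get(
        None,
        required_pk=False,
        endpoint='properties_meters',
        url_args={'PK': 123},
        data_name='all',
    )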
@@ -241,7 +331,7 @@ def _raise_error(self, response, error_msg, stack_pos=0, *args, **kwargs): url = response.request.url verb = response.request.method # e.g. MyClass.method caller = '{}.{}'.format( self.__class__.__name__, inspect.stack()[stack_pos + 1][3] ) if args: @@ -253,7 +343,7 @@ def _raise_error(self, response, error_msg, stack_pos=0, *args, **kwargs): def _set_params(self, params): """Add org_id""" - params['org_id'] = self.org_id + params['organization_id'] = self.org_id return params @@ -263,6 +353,7 @@ def _set_params(self, params): class CreateMixin(object): """Add _post methods""" + # pylint:disable=too-few-public-methods def post(self, endpoint=None, data_name=None, **kwargs): @@ -275,12 +366,22 @@ def post(self, endpoint=None, data_name=None, **kwargs): :returns: dict (from response.json()[data_name]) """ + # for a post, if the user has sent some url args, then pop them for later + # parsing. + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) - url = self.urls[endpoint] + # check if the endpoint is to be looked up or is a fully qualified url + if '/' in endpoint: + url = endpoint + elif endpoint in self.urls: + url = self.urls[endpoint] + else: + raise Exception(f'Unknown endpoint: {endpoint}') if not url.endswith('/'): url = url + '/' + url = _replace_url_args(url, url_args) response = super(CreateMixin, self)._post(url=url, **kwargs) self._check_response(response, **kwargs) return self._get_result(response, data_name=data_name, **kwargs) @@ -288,6 +389,7 @@ def post(self, endpoint=None, data_name=None, **kwargs): class ReadMixin(object): """Add get & list method""" + # pylint:disable=too-few-public-methods def get(self, pk, endpoint=None, data_name=None, **kwargs): @@ -301,14 +403,15 @@ def get(self, pk, endpoint=None, data_name=None, **kwargs): :returns: dict (from response.json()[data_name]) """ + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) - url = add_pk(self.urls[endpoint], pk, required=True, slash=True) + url = add_pk(self.urls[endpoint], pk, required=kwargs.pop('required_pk', True), slash=True) + url = _replace_url_args(url, url_args) response = super(ReadMixin, self)._get(url=url, **kwargs) self._check_response(response, **kwargs) - result = self._get_result(response, data_name=data_name, **kwargs) - return result + return self._get_result(response, data_name=data_name, **kwargs) def list(self, endpoint=None, data_name=None, **kwargs): """ @@ -319,12 +422,14 @@ def list(self, endpoint=None, data_name=None, **kwargs): :returns: dict (from response.json()[data_name]) """ + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) url = self.urls[endpoint] if not url.endswith('/'): url = url + '/' + url = _replace_url_args(url, url_args) response = super(ReadMixin, self)._get(url=url, **kwargs) self._check_response(response, **kwargs) return self._get_result(response, data_name=data_name, **kwargs) @@ -332,6 +437,7 @@ def list(self, endpoint=None, data_name=None, **kwargs): class UpdateMixin(object): """Add _put & _patch methods""" + # pylint:disable=too-few-public-methods,redefined-builtin def
put(self, pk, endpoint=None, data_name=None, **kwargs): @@ -344,10 +450,12 @@ def put(self, pk, endpoint=None, data_name=None, **kwargs): :returns: dict (from response.json()[data_name]) """ + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) - url = add_pk(self.urls[endpoint], pk, required=True, slash=True) + url = add_pk(self.urls[endpoint], pk, required=kwargs.pop('required_pk', True), slash=True) + url = _replace_url_args(url, url_args) response = super(UpdateMixin, self)._put(url=url, **kwargs) self._check_response(response, **kwargs) return self._get_result(response, data_name=data_name, **kwargs) @@ -362,10 +470,12 @@ def patch(self, pk, endpoint=None, data_name=None, **kwargs): :returns: dict (from response.json()[data_name]) """ + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) - url = add_pk(self.urls[endpoint], pk, required=True, slash=True) + url = add_pk(self.urls[endpoint], pk, required=kwargs.pop('required_pk', True), slash=True) + url = _replace_url_args(url, url_args) response = super(UpdateMixin, self)._patch(url=url, **kwargs) self._check_response(response, **kwargs) return self._get_result(response, data_name=data_name, **kwargs) @@ -373,6 +483,7 @@ def patch(self, pk, endpoint=None, data_name=None, **kwargs): class DeleteMixin(object): """Add _delete methods""" + # pylint:disable=too-few-public-methods,redefined-builtin def delete(self, pk, endpoint=None, data_name=None, **kwargs): @@ -386,59 +497,37 @@ def delete(self, pk, endpoint=None, data_name=None, **kwargs): :returns: None """ # pylint:disable=no-member + url_args = kwargs.pop('url_args', None) kwargs = self._set_params(kwargs) endpoint = _set_default(self, 'endpoint', endpoint) data_name = _set_default(self, 'data_name', data_name, required=False) - url = add_pk(self.urls[endpoint], pk, required=True, slash=True) + url = add_pk(self.urls[endpoint], pk, required=kwargs.pop('required_pk', True), slash=True) + url = _replace_url_args(url, url_args) response = super(DeleteMixin, self)._delete(url=url, **kwargs) # delete should return 204 and no content if response.status_code != requests.codes.no_content: self._check_response(response, **kwargs) -class SEEDUserAuthBaseClient(UserAuthMixin, SEEDBaseClient): - """ - SEED base client using username and password(or api key) authentication - """ - pass - - -class SEEDOAuthBaseClient(OAuthMixin, SEEDBaseClient): - """SEED base client using JWT OAuth2 based authentication""" - - def __init__(self, oauth_client, org_id, username=None, password=None, - access_token=None, endpoint=None, data_name=None, - use_ssl=None, base_url=None, port=None, url_map=None, - **kwargs): - - self.oauth_client = oauth_client - super(SEEDOAuthBaseClient, self).__init__( - org_id, username=username, password=password, - access_token=access_token, endpoint=endpoint, data_name=data_name, - use_ssl=use_ssl, base_url=base_url, port=port, url_map=url_map, - **kwargs - ) - - -class SEEDReadOnlyClient(ReadMixin, SEEDUserAuthBaseClient): - """Read Ony Client""" +class SEEDReadOnlyClient(ReadMixin, UserAuthMixin, SEEDBaseClient): + """Read Only Client""" pass class SEEDReadWriteClient(CreateMixin, ReadMixin, UpdateMixin, DeleteMixin, - SEEDUserAuthBaseClient): + UserAuthMixin, SEEDBaseClient): """Client with full 
CRUD Methods""" # pylint:disable=too-many-ancestors pass -class SEEDOAuthReadOnlyClient(ReadMixin, SEEDOAuthBaseClient): +class SEEDOAuthReadOnlyClient(ReadMixin, OAuthMixin, SEEDBaseClient): """Read Ony Client""" pass class SEEDOAuthReadWriteClient(CreateMixin, ReadMixin, UpdateMixin, - DeleteMixin, SEEDOAuthBaseClient): + DeleteMixin, OAuthMixin, SEEDBaseClient): """Client with full CRUD Methods""" # pylint:disable=too-many-ancestors pass diff --git a/pyseed/utils.py b/pyseed/utils.py new file mode 100644 index 0000000..9a501ea --- /dev/null +++ b/pyseed/utils.py @@ -0,0 +1,125 @@ +# from __future__ import division + +# Imports from Third Party Modules +import csv +import json +from math import pi, sin +from pathlib import Path + +WGS84_RADIUS = 6378137 + + +def _rad(value): + return value * pi / 180 + + +def _ring_area(coordinates): + """ + Calculate the approximate total_area of the polygon were it projected onto + the earth. Note that this _area will be positive if ring is oriented + clockwise, otherwise it will be negative. + + Reference: + Robert. G. Chamberlain and William H. Duquette, "Some Algorithms for + Polygons on a Sphere", JPL Publication 07-03, Jet Propulsion + Laboratory, Pasadena, CA, June 2007 http://trs-new.jpl.nasa.gov/dspace/handle/2014/40409 + + @Returns + + {float} The approximate signed geodesic total_area of the polygon in square meters. + """ + + assert isinstance(coordinates, (list, tuple)) + + total_area = 0 + coordinates_length = len(coordinates) + + if coordinates_length > 2: + for i in range(0, coordinates_length): + if i == (coordinates_length - 2): + lower_index = coordinates_length - 2 + middle_index = coordinates_length - 1 + upper_index = 0 + elif i == (coordinates_length - 1): + lower_index = coordinates_length - 1 + middle_index = 0 + upper_index = 1 + else: + lower_index = i + middle_index = i + 1 + upper_index = i + 2 + + p1 = coordinates[lower_index] + p2 = coordinates[middle_index] + p3 = coordinates[upper_index] + + total_area += (_rad(p3[0]) - _rad(p1[0])) * sin(_rad(p2[1])) + + total_area = total_area * WGS84_RADIUS * WGS84_RADIUS / 2 + + return total_area + + +def _polygon_area(coordinates): + + assert isinstance(coordinates, (list, tuple)) + + total_area = 0 + if len(coordinates) > 0: + total_area += abs(_ring_area(coordinates[0])) + + for i in range(1, len(coordinates)): + total_area -= abs(_ring_area(coordinates[i])) + + return total_area + + +def geojson_area(geometry): + """Calculate the area of a GeoJSON feature. 
This method is taken from + a combination of ChatGPT conversion of: + https://github.com/mapbox/geojson-area/blob/master/index.js + and + https://github.com/scisco/area/blob/master/area/__init__.py""" + + if isinstance(geometry, str): + geometry = json.loads(geometry) + + assert isinstance(geometry, dict) + + total_area = 0 + + if geometry['type'] == 'Polygon': + return _polygon_area(geometry['coordinates']) + elif geometry['type'] == 'MultiPolygon': + for i in range(0, len(geometry['coordinates'])): + total_area += _polygon_area(geometry['coordinates'][i]) + elif geometry['type'] == 'GeometryCollection': + for i in range(0, len(geometry['geometries'])): + total_area += geojson_area(geometry['geometries'][i]) + + return total_area + + +def read_map_file(mapfile_path): + """Read in the mapping file""" + + mapfile_path = Path(mapfile_path) + assert mapfile_path.exists(), f"Cannot find file: {str(mapfile_path)}" + + map_reader = csv.reader(open(mapfile_path, 'r')) + map_reader.__next__() # Skip the header + + # Open the mapping file and fill list + maplist = list() + + for rowitem in map_reader: + maplist.append( + { + 'from_field': rowitem[0], + 'from_units': rowitem[1], + 'to_table_name': rowitem[2], + 'to_field': rowitem[3], + } + ) + + return maplist diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..56bd25b --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +markers = + integration: marks tests as integration (deselect with '-m "not integration"') diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..15cb708 --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1,11 @@ +-r requirements.txt +flake8==4.0.1 +mock==4.0.3 +mypy==0.910 +pre-commit==2.19.0 +pytest==7.1.2 +pytest-cov==3.0.0 +pytest-order==1.0.1 +pytest-xdist==2.5.0 +testfixtures>=5.1.1 +tox==3.25.0 diff --git a/requirements.txt b/requirements.txt index 6daa048..5b3eaca 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -requests==2.18.4 +requests>=2.28.0 typing==3.6.1 diff --git a/setup.cfg b/setup.cfg index 98c8a06..bb2fbf5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,27 +1,28 @@ [metadata] name=py-seed -version=0.1.0 +version=0.3.0 description=A Python API client for the SEED Platform -author=Paul Munday -author_email=paul@paulmunday.net -maintainer=GreenBuildingRegistry +author=Fable Turas, Paul Munday, Nicholas Long +author_email=fable@raintechpdx.com, paul@paulmunday.net, nicholas.long@nrel.gov +maintainer=GreenBuildingRegistry, NREL maintainer_email=admin@greenbuildingregistry.com keywords= seed, api -url=https://github.com/GreenBuildingRegistry/py-seed +url=https://github.com/seed-platform/py-seed classifiers = Development Status :: 4 - Beta Intended Audience :: Developers Operating System :: OS Independent - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 3.4 - Programming Language :: Python :: 3.5 - Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + [options] packages = find: include_package_data = True zip_safe = False install_requires = - requests==2.18.4 + requests>=2.28.0 typing==3.6.1 + [bdist_wheel] universal = 1 diff --git a/setup.py b/setup.py index dd4e63e..5a388ba 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python +# Imports from Third Party Modules from setuptools import setup - setup() diff --git a/pyseed/tests/__init__.py b/tests/__init__.py similarity index 100% 
rename from pyseed/tests/__init__.py rename to tests/__init__.py diff --git a/tests/data/seed-config.json b/tests/data/seed-config.json new file mode 100644 index 0000000..4630d17 --- /dev/null +++ b/tests/data/seed-config.json @@ -0,0 +1,7 @@ +{ + "username": "user@seed-platform.or", + "api_key": "1b5ee4ee210c9074589c61d66253d90398e6ad03", + "base_url": "http://localhost", + "port": 80, + "use_ssl": false +} diff --git a/tests/data/test-seed-data-mappings.csv b/tests/data/test-seed-data-mappings.csv new file mode 100644 index 0000000..9e17232 --- /dev/null +++ b/tests/data/test-seed-data-mappings.csv @@ -0,0 +1,15 @@ +Raw Columns,units,SEED Table,SEED Columns +PM Property ID,,PropertyState,pm_property_id +Building ID,,PropertyState,custom_id_1 +Property Name,,PropertyState,property_name +Property Type,,PropertyState,property_type +Address,,PropertyState,address_line_1 +Sq. Ft,ft**2,PropertyState,gross_floor_area +Total GHG Emissions Intensity,kgCO2e/ft**2/year,PropertyState,total_ghg_emissions_intensity +Site EUI,kBtu/ft**2/year,PropertyState,site_eui +PM Release Date,,PropertyState,release_date +Year Ending,,PropertyState,Year Ending Excel +GHGI Target,,PropertyState,GHGI Target +GHGI Target Year,,PropertyState,GHGI Target Year +EUI Target,,PropertyState,EUI Target +EUI Target Year,,PropertyState,EUI Target Year diff --git a/tests/data/test-seed-data-with-meters.xlsx b/tests/data/test-seed-data-with-meters.xlsx new file mode 100755 index 0000000..e5b5293 Binary files /dev/null and b/tests/data/test-seed-data-with-meters.xlsx differ diff --git a/tests/data/test-seed-data.xlsx b/tests/data/test-seed-data.xlsx new file mode 100755 index 0000000..8e5d52d Binary files /dev/null and b/tests/data/test-seed-data.xlsx differ diff --git a/tests/integration/docker-compose.yml b/tests/integration/docker-compose.yml new file mode 100644 index 0000000..3df3b41 --- /dev/null +++ b/tests/integration/docker-compose.yml @@ -0,0 +1,83 @@ +version: "3.4" +services: + db-postgres: + container_name: seed_postgres + image: timescale/timescaledb-postgis:latest-pg12 + environment: + - POSTGRES_DB=seed + - POSTGRES_USER=seed + - POSTGRES_PASSWORD=super-secret-password + ports: + - "5432:5432" + logging: + options: + max-size: 50m + max-file: "5" + db-redis: + container_name: seed_redis + image: redis:5.0.1 + web: + container_name: seed_web + image: seedplatform/seed:develop + environment: + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SES_REGION_NAME + - AWS_SES_REGION_ENDPOINT + - BSYNCR_SERVER_HOST + - BSYNCR_SERVER_PORT + - SERVER_EMAIL + - POSTGRES_DB=seed + - POSTGRES_PORT=5432 + - POSTGRES_USER=seed + - POSTGRES_PASSWORD=super-secret-password + - SEED_ADMIN_USER=user@seed-platform.org + - SEED_ADMIN_PASSWORD=super-secret-password + - SEED_ADMIN_ORG=default + - SECRET_KEY=ARQV8qGuJKH8sGnBf6ZeEdJQRKLTUhsvEcp8qG9X9sCPXvGLhdxqnNXpZcy6HEyf + - DJANGO_SETTINGS_MODULE=config.settings.docker + # Recaptcha for testing: + # https://developers.google.com/recaptcha/docs/faq#id-like-to-run-automated-tests-with-recaptcha.-what-should-i-do + - GOOGLE_RECAPTCHA_SITE_KEY=6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI + - GOOGLE_RECAPTCHA_SECRET_KEY=6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe + depends_on: + - db-redis + - db-postgres + volumes: + - pyseed_media:/seed/media + ports: + - "8000:80" + logging: + options: + max-size: 50m + max-file: "5" + web-celery: + container_name: seed_celery + image: seedplatform/seed:develop + build: . 
+ command: /seed/docker/start_celery_docker.sh + environment: + - BSYNCR_SERVER_HOST + - BSYNCR_SERVER_PORT + - POSTGRES_DB=seed + - POSTGRES_PORT=5432 + - POSTGRES_USER=seed + - POSTGRES_PASSWORD=super-secret-password + - SECRET_KEY=ARQV8qGuJKH8sGnBf6ZeEdJQRKLTUhsvEcp8qG9X9sCPXvGLhdxqnNXpZcy6HEyf + - DJANGO_SETTINGS_MODULE=config.settings.docker + # fix to only one worker for tests, otherwise jobs can clash -- at least we are seeing + # something strange + - NUMBER_OF_WORKERS=1 + depends_on: + - db-redis + - db-postgres + - web + volumes: + - pyseed_media:/seed/media + logging: + options: + max-size: 50m + max-file: "5" +volumes: + pyseed_media: + external: true diff --git a/pyseed/tests/test_apibase.py b/tests/test_apibase.py similarity index 99% rename from pyseed/tests/test_apibase.py rename to tests/test_apibase.py index 308d3dd..8a58b5d 100755 --- a/pyseed/tests/test_apibase.py +++ b/tests/test_apibase.py @@ -6,14 +6,14 @@ Unit tests for pyseed/apibase """ -# Imports from Standard Library +# Imports from Third Party Modules import sys import unittest # Local Imports from pyseed.apibase import JSONAPI, BaseAPI, add_pk from pyseed.exceptions import APIClientError -from pyseed.seedclient import _get_urls, _set_default +from pyseed.seed_client_base import _get_urls, _set_default NO_URL_ERROR = "APIClientError: No url set" SSL_ERROR = "APIClientError: use_ssl is true but url does not starts with https" @@ -27,8 +27,10 @@ PY3 = sys.version_info[0] == 3 if PY3: + # Imports from Third Party Modules from unittest import mock else: + # Imports from Third Party Modules import mock diff --git a/tests/test_seed_base.py b/tests/test_seed_base.py new file mode 100644 index 0000000..659ac3b --- /dev/null +++ b/tests/test_seed_base.py @@ -0,0 +1,224 @@ +""" +**************************************************************************************************** +:copyright (c) 2019-2022, Alliance for Sustainable Energy, LLC, and other contributors. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions +and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions +and the following disclaimer in the documentation and/or other materials provided with the +distribution. + +Neither the name of the copyright holder nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written permission. + +Redistribution of this software, without modification, must refer to the software by the same +designation. Redistribution of a modified version of this software (i) may not refer to the +modified version by the same designation, or by any confusingly similar designation, and +(ii) must refer to the underlying software originally provided by Alliance as “URBANopt”. Except +to comply with the foregoing, the term “URBANopt”, or any confusingly similar designation may +not be used to refer to any modified version of this software or any modified version of the +underlying software originally provided by Alliance without the prior written consent of Alliance. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +**************************************************************************************************** +""" + +# Imports from Third Party Modules +import pytest +import unittest +import uuid +from datetime import date +from pathlib import Path + +# Local Imports +from pyseed.seed_client import SeedClient + + +@pytest.mark.integration +class SeedBaseTest(unittest.TestCase): + @classmethod + def setup_class(cls): + """setup for all of the tests below""" + cls.organization_id = 1 + + # The seed-config.json file needs to be added to the project root directory + # If running SEED locally for testing, then you can run the following from your SEED root directory: + # ./manage.py create_test_user_json --username user@seed-platform.org --file ../py-seed/seed-config.json --pyseed + config_file = Path('seed-config.json') + cls.seed_client = SeedClient(cls.organization_id, connection_config_filepath=config_file) + + cls.organization_id = 1 + + @classmethod + def teardown_class(cls): + # remove all of the test buildings? 
+ pass + + def test_get_organizations(self): + organizations = self.seed_client.get_organizations() + assert len(organizations) > 0 + + def test_get_create_delete_cycle(self): + all_cycles = self.seed_client.get_cycles() + cycle_count = len(all_cycles) + assert cycle_count >= 1 + + # create a new unique cycle + unique_id = str(uuid.uuid4())[:8] + cycle = self.seed_client.get_or_create_cycle( + f'test cycle {unique_id}', date(2021, 1, 1), date(2022, 1, 1) + ) + assert cycle['name'] == f'test cycle {unique_id}' + cycle_id = cycle['id'] + all_cycles = self.seed_client.get_cycles() + assert len(all_cycles) == cycle_count + 1 + # verify that it won't be created again + cycle = self.seed_client.get_or_create_cycle( + f'test cycle {unique_id}', date(2021, 1, 1), date(2022, 1, 1) + ) + assert cycle_id == cycle['id'] + all_cycles = self.seed_client.get_cycles() + assert len(all_cycles) == cycle_count + 1 + + # now delete the new cycle + self.seed_client.delete_cycle(cycle_id) + all_cycles = self.seed_client.get_cycles() + assert len(all_cycles) == cycle_count + + def test_create_cycle(self): + new_cycle_name = 'test cycle for test_create_cycle' + cycle = self.seed_client.create_cycle(new_cycle_name, date(2021, 6, 1), date(2022, 6, 1)) + cycle_id = cycle['id'] + assert cycle is not None + + # verify that trying to create the same name will fail + with pytest.raises(Exception) as exc_info: + self.seed_client.create_cycle(new_cycle_name, date(2021, 6, 1), date(2022, 6, 1)) + assert exc_info.value.args[0] == f"A cycle with this name already exists: '{new_cycle_name}'" + + # test the setting of the ID + cycle = self.seed_client.get_or_create_cycle(new_cycle_name, None, None, set_cycle_id=True) + assert self.seed_client.cycle_id == cycle_id + + # clean up the cycle + self.seed_client.delete_cycle(cycle_id) + + def test_get_cycle_by_name(self): + cycle = self.seed_client.create_cycle('test cycle for test_get_cycle_by_name', date(2021, 6, 1), date(2022, 6, 1)) + cycle_id = cycle['id'] + assert cycle is not None + + cycle = self.seed_client.get_cycle_by_name('test cycle for test_get_cycle_by_name', set_cycle_id=True) + assert cycle is not None + assert cycle['name'] == 'test cycle for test_get_cycle_by_name' + assert self.seed_client.cycle_id == cycle_id + + # cleanup + self.seed_client.delete_cycle(cycle_id) + + def test_get_or_create_dataset(self): + dataset_name = 'seed-salesforce-test-data' + dataset = self.seed_client.get_or_create_dataset(dataset_name) + assert dataset['name'] == dataset_name + assert dataset['super_organization'] == self.seed_client.client.org_id + assert dataset is not None + + def test_get_column_mapping_profiles(self): + result = self.seed_client.get_column_mapping_profiles() + assert len(result) >= 1 + + # There should only be one default BuildingSync mapping profile + result = self.seed_client.get_column_mapping_profiles('BuildingSync Default') + assert len(result) == 1 + + def test_get_column_mapping_profile(self): + result = self.seed_client.get_column_mapping_profile('does not exist') + assert result is None + + # There should always be a portolio manager default unless the + # user removed it. 
+ result = self.seed_client.get_column_mapping_profile('Portfolio Manager Defaults') + assert isinstance(result, dict) + assert len(result['mappings']) > 0 + + def test_create_column_mapping_profile_with_file(self): + profile_name = 'new profile' + result = self.seed_client.create_or_update_column_mapping_profile_from_file( + profile_name, + 'tests/data/test-seed-data-mappings.csv' + ) + assert result is not None + assert len(result['mappings']) == 14 + + # delete some of the mappings and update + mappings = result['mappings'] + for index in range(5, 0, -1): + mappings.pop(index) + result = self.seed_client.create_or_update_column_mapping_profile( + profile_name, + mappings + ) + assert len(result['mappings']) == 9 + + # restore with the original call + result = self.seed_client.create_or_update_column_mapping_profile_from_file( + profile_name, + 'tests/data/test-seed-data-mappings.csv' + ) + assert len(result['mappings']) == 14 + + def test_get_labels(self): + result = self.seed_client.get_labels() + assert len(result) > 10 + + # find a set of two labels + result = self.seed_client.get_labels(filter_by_name=['Compliant', 'Violation']) + assert len(result) == 2 + + # find single field + result = self.seed_client.get_labels(filter_by_name=['Call']) + assert len(result) == 1 + assert result[0]['name'] == 'Call' + assert not result[0]['show_in_list'] + + # find nothing field + result = self.seed_client.get_labels(filter_by_name=['Does not Exist']) + assert len(result) == 0 + + def test_get_or_create_label(self): + label_name = 'something borrowed' + label = self.seed_client.get_or_create_label(label_name, 'green', show_in_list=True) + label_id = label['id'] + assert label is not None + assert label['name'] == label_name + + # try running it again and make sure it doesn't create a new label (ID should be the same0) + label = self.seed_client.get_or_create_label(label_name) + assert label_id == label['id'] + + # now update the color + label = self.seed_client.update_label(label_name, new_color='blue') + assert label['color'] == 'blue' + + # now update the name and show in list = False + new_label_name = 'something blue' + label = self.seed_client.update_label(label_name, new_label_name=new_label_name, new_show_in_list=False) + assert label['name'] == new_label_name + + # cleanup by deleting label + label = self.seed_client.delete_label(new_label_name) + # not the best response, but this means it passed + assert label is None diff --git a/tests/test_seed_client.py b/tests/test_seed_client.py new file mode 100644 index 0000000..a3855a6 --- /dev/null +++ b/tests/test_seed_client.py @@ -0,0 +1,293 @@ +""" +**************************************************************************************************** +:copyright (c) 2019-2022, Alliance for Sustainable Energy, LLC, and other contributors. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions +and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions +and the following disclaimer in the documentation and/or other materials provided with the +distribution. + +Neither the name of the copyright holder nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written permission. 
+ +Redistribution of this software, without modification, must refer to the software by the same +designation. Redistribution of a modified version of this software (i) may not refer to the +modified version by the same designation, or by any confusingly similar designation, and +(ii) must refer to the underlying software originally provided by Alliance as “URBANopt”. Except +to comply with the foregoing, the term “URBANopt”, or any confusingly similar designation may +not be used to refer to any modified version of this software or any modified version of the +underlying software originally provided by Alliance without the prior written consent of Alliance. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +**************************************************************************************************** +""" + +# Imports from Third Party Modules +import pytest +import unittest +from datetime import date +from pathlib import Path + +# Local Imports +from pyseed.seed_client import SeedClient + + +@pytest.mark.integration +class SeedClientTest(unittest.TestCase): + @classmethod + def setup_class(cls): + """setup for all of the tests below""" + cls.output_dir = Path("tests/output") + if not cls.output_dir.exists(): + cls.output_dir.mkdir() + + cls.organization_id = 1 + + # The seed-config.json file needs to be added to the project root directory + # If running SEED locally for testing, then you can run the following from your SEED root directory: + # ./manage.py create_test_user_json --username user@seed-platform.org --file ../py-seed/seed-config.json --pyseed + config_file = Path("seed-config.json") + cls.seed_client = SeedClient( + cls.organization_id, connection_config_filepath=config_file + ) + + # Get/create the new cycle and upload the data. Make sure to set the cycle ID so that the + # data end up in the correct cycle + cls.seed_client.get_or_create_cycle( + "pyseed-api-test", date(2021, 6, 1), date(2022, 6, 1), set_cycle_id=True + ) + + cls.seed_client.upload_and_match_datafile( + "pyseed-properties-test", + "tests/data/test-seed-data.xlsx", + "Single Step Column Mappings", + "tests/data/test-seed-data-mappings.csv", + ) + + @classmethod + def teardown_class(cls): + # remove all of the test buildings? 
+ pass + + def test_seed_orgs(self): + orgs = self.seed_client.get_organizations() + assert len(orgs) > 0 + + def test_seed_client_info(self): + info = self.seed_client.instance_information() + assert set(("version", "sha")).issubset(info.keys()) + + def test_seed_buildings(self): + buildings = self.seed_client.get_buildings() + assert len(buildings) == 10 + + def test_search_buildings(self): + properties = self.seed_client.search_buildings(identifier_exact="B-1") + assert len(properties) == 1 + + prop = self.seed_client.get_property(properties[0]["id"]) + assert prop["state"]["address_line_1"] == "111 Street Lane, Chicago, IL" + assert prop["state"]["extra_data"]["EUI Target"] == 120 + + # test the property view (same as previous, just less data). It + # is recommended to use `get_property` instead. + prop = self.seed_client.get_property_view(properties[0]["id"]) + print(prop) + assert prop["id"] == properties[0]["id"] + assert prop["cycle"]["name"] == "pyseed-api-test" + + # There are 2 if filtering, because B-1 and B-10 + properties = self.seed_client.search_buildings(identifier_filter="B-1") + assert len(properties) == 2 + + def test_add_label_to_buildings(self): + # get seed buildings + prop_ids = [] + for search in ["B-1", "B-3", "B-7"]: + properties = self.seed_client.search_buildings(identifier_exact=search) + assert len(properties) == 1 + prop_ids.append(properties[0]["id"]) + + result = self.seed_client.update_labels_of_buildings( + ["Violation"], [], prop_ids + ) + assert result["status"] == "success" + assert result["num_updated"] == 3 + # verify that the 3 buildings have the Violation label + properties = self.seed_client.get_view_ids_with_label(label_names=["Violation"]) + assert all(item in properties[0]["is_applied"] for item in prop_ids) + + # now remove the violation label and add compliant + result = self.seed_client.update_labels_of_buildings( + ["Compliant"], ["Violation"], prop_ids + ) + assert result["status"] == "success" + assert result["num_updated"] == 3 + properties = self.seed_client.get_view_ids_with_label(label_names=["Violation"]) + # should no longer have violation + assert not all(item in properties[0]["is_applied"] for item in prop_ids) + properties = self.seed_client.get_view_ids_with_label(label_names=["Compliant"]) + # should all have complied + assert all(item in properties[0]["is_applied"] for item in prop_ids) + + # now remove all + result = self.seed_client.update_labels_of_buildings( + [], ["Violation", "Compliant"], prop_ids + ) + assert result["status"] == "success" + assert result["num_updated"] == 3 + # no labels on the properties + properties = self.seed_client.get_view_ids_with_label( + label_names=["Compliant", "Violation"] + ) + assert not all(item in properties[0]["is_applied"] for item in prop_ids) + assert not all(item in properties[1]["is_applied"] for item in prop_ids) + + def test_upload_datafile(self): + # Get/create the new cycle and upload the data. Make sure to set the cycle ID so that the + # data end up in the correct cycle + self.seed_client.get_or_create_cycle( + "pyseed-api-integration-test", + date(2021, 6, 1), + date(2022, 6, 1), + set_cycle_id=True, + ) + + # Need to get the dataset id, again. Maybe need to clean up eventually. 
+ dataset = self.seed_client.get_or_create_dataset("pyseed-uploader-test-data") + + result = self.seed_client.upload_datafile( + dataset["id"], "tests/data/test-seed-data.xlsx", "Assessed Raw" + ) + import_file_id = result["import_file_id"] + assert result["success"] is True + assert import_file_id is not None + + # start processing + result = self.seed_client.start_save_data(result["import_file_id"]) + progress_key = result.get("progress_key", None) + assert result is not None + assert result["unique_id"] == import_file_id + assert progress_key == f":1:SEED:save_raw_data:PROG:{import_file_id}" + + # wait until upload is complete + result = self.seed_client.track_progress_result(progress_key) + assert result["status"] == "success" + assert result["progress"] == 100 + + # create/retrieve the column mappings + result = self.seed_client.create_or_update_column_mapping_profile_from_file( + "new profile", "tests/data/test-seed-data-mappings.csv" + ) + assert len(result["mappings"]) > 0 + + # set the column mappings for the dataset + result = self.seed_client.set_import_file_column_mappings( + import_file_id, result["mappings"] + ) + + # now start the mapping + result = self.seed_client.start_map_data(import_file_id) + progress_key = result.get("progress_key", None) + assert result is not None + assert result["status"] in ["not-started", "success"] + assert progress_key == f":1:SEED:map_data:PROG:{import_file_id}" + + # wait until upload is complete + result = self.seed_client.track_progress_result(progress_key) + assert result["status"] == "success" + assert result["progress"] == 100 + + # save the mappings, call system matching/geocoding + result = self.seed_client.start_system_matching_and_geocoding(import_file_id) + progress_data = result.get("progress_data", None) + assert progress_data is not None + assert progress_data["status"] in ["not-started", "success", "parsing"] + progress_key = progress_data.get("progress_key", None) + assert progress_key == f":1:SEED:match_buildings:PROG:{import_file_id}" + + # wait until upload is complete + result = self.seed_client.track_progress_result(progress_key) + assert result["status"] == "success" + assert result["progress"] == 100 + + # check if there are meter fields (which there are not in this file) + meters_exist = self.seed_client.check_meters_tab_exist(import_file_id) + assert not meters_exist + + def test_upload_single_method(self): + # Get/create the new cycle and upload the data. Make sure to set the cycle ID so that the + # data end up in the correct cycle + self.seed_client.get_or_create_cycle( + "pyseed-single-file-upload", + date(2021, 6, 1), + date(2022, 6, 1), + set_cycle_id=True, + ) + + result = self.seed_client.upload_and_match_datafile( + "pyseed-single-step-test", + "tests/data/test-seed-data.xlsx", + "Single Step Column Mappings", + "tests/data/test-seed-data-mappings.csv", + ) + + assert result is not None + + # test by listing all the buildings + buildings = self.seed_client.get_buildings() + assert len(buildings) == 10 + + def test_upload_single_method_with_meters(self): + # Get/create the new cycle and upload the data. 
Make sure to set the cycle ID so that the + # data end up in the correct cycle + self.seed_client.get_or_create_cycle( + "pyseed-single-file-upload", + date(2021, 6, 1), + date(2022, 6, 1), + set_cycle_id=True, + ) + + result = self.seed_client.upload_and_match_datafile( + "pyseed-single-step-test", + "tests/data/test-seed-data-with-meters.xlsx", + "Single Step Column Mappings", + "tests/data/test-seed-data-mappings.csv", + import_meters_if_exist=True, + ) + + assert result is not None + + # test by listing all the buildings + buildings = self.seed_client.get_buildings() + assert len(buildings) == 10 + + # look at the meters of a single building + building = self.seed_client.search_buildings(identifier_exact=11111) + assert len(building) == 1 + + meters = self.seed_client.get_meters(building[0]["id"]) + assert len(meters) == 4 # elec, elec cost, gas, gas cost + meter_data = self.seed_client.get_meter_data(building[0]["id"]) + assert len(meter_data['readings']) == 24 + + # def test_get_buildings_with_labels(self): + # buildings = self.seed_client.get_view_ids_with_label(['In Violation', 'Compliant', 'Email']) + # for building in buildings: + # print(building) + + # assert len(buildings) == 3 diff --git a/pyseed/tests/test_seedclient.py b/tests/test_seed_client_base.py similarity index 78% rename from pyseed/tests/test_seedclient.py rename to tests/test_seed_client_base.py index 8a5aca2..ac87dfe 100644 --- a/pyseed/tests/test_seedclient.py +++ b/tests/test_seed_client_base.py @@ -7,28 +7,21 @@ Tests for SEEDClient """ -# Imports from Standard Library -import json -import sys -import unittest - # Imports from Third Party Modules +import json import requests +import unittest +from unittest import mock # Local Imports from pyseed.exceptions import SEEDError -from pyseed.seedclient import ( # SEEDReadWriteClient, +from pyseed.seed_client_base import ( ReadMixin, SEEDBaseClient, SEEDOAuthReadWriteClient, + SEEDReadWriteClient, ) -PY3 = sys.version_info[0] == 3 -if PY3: - from unittest import mock -else: - import mock - # Constants URLS = { 'test1': 'api/v2/test', @@ -195,23 +188,6 @@ def test_check_response_inheritance(self, mock_requests): self.assertEqual(conm.exception.verb.upper(), 'GET') self.assertEqual(conm.exception.status_code, 404) - def test_get_result(self, mock_requests): - """Test errors raised in _get_result""" - url = 'http://example.org/api/v2/test/' - mock_requests.get.return_value = get_mock_response( - data="No llama!", data_name='bar', error=False, - ) - with self.assertRaises(SEEDError) as conm: - self.client.get(1) - - self.assertEqual( - conm.exception.error, 'Could not find result using data_name test.' 
- ) - self.assertEqual(conm.exception.service, 'SEED') - self.assertEqual(conm.exception.url, url) - self.assertEqual(conm.exception.verb.upper(), 'GET') - self.assertEqual(conm.exception.status_code, 200) - class SEEDClientMethodTests(unittest.TestCase): @@ -255,22 +231,21 @@ def test_get_result(self): @mock.patch('pyseed.apibase.requests') class MixinTests(unittest.TestCase): - """Test Mixins via SEEDReadWriteClient""" + """Test Mixins via SEEDOAuthReadWriteClient""" def setUp(self): self.port = 1337 self.urls_map = URLS self.base_url = 'example.org' self.client = SEEDOAuthReadWriteClient( - MockOAuthClient, 1, username='test@example.org', + 1, username='test@example.org', access_token='dfghjk', base_url=self.base_url, - port=self.port, url_map=self.urls_map + port=self.port, url_map=self.urls_map, oauth_client=MockOAuthClient ) self.call_dict = { 'headers': {'Authorization': 'Bearer dfghjk'}, 'params': { - 'org_id': 1, - 'headers': {'Authorization': 'Bearer dfghjk'} + 'organization_id': 1, }, 'timeout': None } @@ -302,32 +277,83 @@ def test_list(self, mock_requests): def test_patch(self, mock_requests): url = 'https://example.org:1337/api/v2/test/1/' mock_requests.patch.return_value = get_mock_response(data="Llama!") - result = self.client.patch(1, endpoint='test1', foo='bar') + result = self.client.patch(1, endpoint='test1', foo='bar', json={'more': 'data'}) self.assertEqual('Llama!', result) - call_dict = self.call_dict.copy() - call_dict['json'] = {'org_id': 1, 'foo': 'bar'} - del call_dict['params']['org_id'] - mock_requests.patch.assert_called_with(url, **call_dict) + expected = { + 'headers': {'Authorization': 'Bearer dfghjk'}, + 'params': { + 'organization_id': 1, + 'foo': 'bar', + }, + 'json': {'more': 'data'}, + 'timeout': None + } + mock_requests.patch.assert_called_with(url, **expected) def test_put(self, mock_requests): url = 'https://example.org:1337/api/v2/test/1/' mock_requests.put.return_value = get_mock_response(data="Llama!") - result = self.client.put(1, endpoint='test1', foo='bar') + result = self.client.put(1, endpoint='test1', foo='bar', json={'more': 'data'}) self.assertEqual('Llama!', result) - call_dict = self.call_dict.copy() - call_dict['json'] = {'org_id': 1, 'foo': 'bar'} - del call_dict['params']['org_id'] - mock_requests.put.assert_called_with(url, **call_dict) + expected = { + 'headers': {'Authorization': 'Bearer dfghjk'}, + 'params': { + 'organization_id': 1, + 'foo': 'bar', + }, + 'json': {'more': 'data'}, + 'timeout': None + } + mock_requests.put.assert_called_with(url, **expected) def test_post(self, mock_requests): url = 'https://example.org:1337/api/v2/test/' mock_requests.post.return_value = get_mock_response(data="Llama!") - result = self.client.post(endpoint='test1', foo='bar') + result = self.client.post(endpoint='test1', json={'foo': 'bar', 'not_org': 1}) self.assertEqual('Llama!', result) - call_dict = self.call_dict.copy() - call_dict['json'] = {'org_id': 1, 'foo': 'bar'} - del call_dict['params']['org_id'] - mock_requests.post.assert_called_with(url, **call_dict) + expected = { + 'headers': {'Authorization': 'Bearer dfghjk'}, + 'params': { + 'organization_id': 1, + }, + 'json': {'not_org': 1, 'foo': 'bar'}, + 'timeout': None + } + mock_requests.post.assert_called_with(url, **expected) + + +@mock.patch('pyseed.apibase.requests') +class SEEDReadWriteClientTests(unittest.TestCase): + """Test SEEDReadWriteClient""" + + def setUp(self): + self.port = 1337 + self.urls_map = URLS + self.base_url = 'example.org' + self.client = SEEDReadWriteClient( 
+ 1, username='test@example.org', + api_key='dfghjk', base_url=self.base_url, + port=self.port, url_map=self.urls_map + ) + self.call_dict = { + 'headers': {'Authorization': 'Basic dfghjk'}, + 'params': { + 'organization_id': 1, + }, + 'timeout': None + } + + def test_get(self, mock_requests): + # url = 'https://example.org:1337/api/v2/test/1/' + mock_requests.get.return_value = get_mock_response(data="Llama!") + result = self.client.get(1, endpoint='test1') + self.assertEqual('Llama!', result) + + def test_list(self, mock_requests): + # url = 'https://example.org:1337/api/v2/test/' + mock_requests.get.return_value = get_mock_response(data=["Llama!"]) + result = self.client.list(endpoint='test1') + self.assertEqual(['Llama!'], result) diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..370876c --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,58 @@ +""" +**************************************************************************************************** +:copyright (c) 2019-2022, Alliance for Sustainable Energy, LLC, and other contributors. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions +and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions +and the following disclaimer in the documentation and/or other materials provided with the +distribution. + +Neither the name of the copyright holder nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written permission. + +Redistribution of this software, without modification, must refer to the software by the same +designation. Redistribution of a modified version of this software (i) may not refer to the +modified version by the same designation, or by any confusingly similar designation, and +(ii) must refer to the underlying software originally provided by Alliance as “URBANopt”. Except +to comply with the foregoing, the term “URBANopt”, or any confusingly similar designation may +not be used to refer to any modified version of this software or any modified version of the +underlying software originally provided by Alliance without the prior written consent of Alliance. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+**************************************************************************************************** +""" + +# Imports from Third Party Modules +import unittest +from pathlib import Path + +# Local Imports +from pyseed.utils import read_map_file + + +class UtilsTest(unittest.TestCase): + def test_mapping_file(self): + mappings = read_map_file(Path("tests/data/test-seed-data-mappings.csv")) + assert len(mappings) == 14 + + expected = { + "from_field": "Sq. Ft", + "from_units": "ft**2", + "to_field": "gross_floor_area", + "to_table_name": "PropertyState", + } + assert mappings[5] == expected diff --git a/tox.ini b/tox.ini index d42e195..5116d63 100644 --- a/tox.ini +++ b/tox.ini @@ -1,13 +1,30 @@ +[flake8] +ignore=E402,E501,E731,W503,W504 +;exclude=... +max-line-length=100 + [tox] -envlist = py27, py34,py35,py36 +envlist= + python + precommit + mypy +recreate = True +skipsdist=True + +[testenv:python] +basepython=python +deps=-rrequirements-test.txt +commands=pytest --cov=. --cov-report= --cov-append -s -m 'not integration' + +[testenv:precommit] +basepython=python +deps= + -r{toxinidir}/requirements-test.txt +commands= + pre-commit run --all-files -[testenv] +[testenv:mypy] +basepython=python deps= - -rrequirements.txt - mock - pytest - pytest-cov - pytest-xdist - testfixtures>=5.1.1 - -commands = pytest --cov=. --cov-report= --cov-append -s + -r{toxinidir}/requirements-test.txt +commands=mypy --install-types --non-interactive --show-error-codes {toxinidir}/pyseed
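For orientation, the tox environments configured above are the same ones the CI workflow dispatches (`python`, `precommit`, `mypy`), and the `integration` marker from `pytest.ini` separates the Docker-backed tests from the unit suite. A typical local invocation, assuming the test requirements are installed and a running SEED stack where noted, might look like the following:

.. code-block:: bash

    # unit tests, pre-commit hooks, and type checks (mirrors the CI matrix)
    tox -e python
    tox -e precommit
    tox -e mypy

    # integration tests expect a running SEED stack (tests/integration/docker-compose.yml)
    # and a seed-config.json in the project root
    pytest -m "integration" -s

    # everything except the integration suite
    pytest -m "not integration"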