diff --git a/README.md b/README.md
index aeb3684c9..4f39cab28 100644
--- a/README.md
+++ b/README.md
@@ -163,7 +163,7 @@ You can create the `google-secret.json` file [here](https://console.cloud.google
 
 *Note: used to be called chunkedgraph-secret.json. This is still supported but deprecated.*
 
-If you have a token from Graphene/Chunkedgraph server, create the `cave-secret.json` file as shown in the example below. You may also pass the token to `CloudVolume(..., secrets=token)`.
+If you have a token from a Graphene/Chunkedgraph server, create the `cave-secret.json` file as shown in the example below. You may also pass the token to `CloudVolume(..., cave_secret=token)` as a bare token string, a JSON string, or a dict containing the "token" field.
 
 ```json
 {
diff --git a/cloudvolume/cloudvolume.py b/cloudvolume/cloudvolume.py
index 3094afc97..fab985d4e 100644
--- a/cloudvolume/cloudvolume.py
+++ b/cloudvolume/cloudvolume.py
@@ -72,7 +72,7 @@ def __new__(cls,
     background_color:int=0, green_threads:bool=False, use_https:bool=False, max_redirects:int=10,
     mesh_dir:Optional[str]=None, skel_dir:Optional[str]=None,
     agglomerate:bool=False, secrets:SecretsType=None,
-    spatial_index_db:Optional[str]=None
+    spatial_index_db:Optional[str]=None, cave_secret:SecretsType=None
   ):
     """
     A "serverless" Python client for reading and writing arbitrarily large
@@ -132,7 +132,8 @@ def __new__(cls,
 
       After initialization, you can adjust this setting via:
       `cv.cache.enabled = ...` which accepts the same values.
-
+    cave_secret: (json str, bare token str, dict, graphene only) Provides the
+      auth token needed to access a graphene server (same as used by the CAVE client).
     cdn_cache: (int, bool, or str) Sets Cache-Control HTTP header on uploaded
       image files. Most cloud providers perform some kind of caching. As of
       this writing, Google defaults to 3600 seconds. Most of the time you'll
@@ -200,8 +201,9 @@ def __new__(cls,
       Defaults to True in interactive python, False in script execution mode.
     provenance: (string, dict) In lieu of fetching a provenance
       file, use this one.
-    secrets: (dict) provide per-instance authorization tokens. If not provided,
-      defaults to looking in .cloudvolume/secrets for necessary tokens.
+    secrets: (dict) provide per-instance authorization tokens for accessing the
+      storage provider. If not provided, defaults to looking in .cloudvolume/secrets
+      for necessary tokens. For the graphene server, see cave_secret.
     skel_dir: (str) if not None, override the info['skeletons'] key before pulling
       the skeleton info file.
     spatial_index_db: (str) A path to an sqlite3 or mysql database that follows
diff --git a/cloudvolume/datasource/graphene/__init__.py b/cloudvolume/datasource/graphene/__init__.py
index cedf70d3c..29b92ede9 100644
--- a/cloudvolume/datasource/graphene/__init__.py
+++ b/cloudvolume/datasource/graphene/__init__.py
@@ -24,7 +24,9 @@ def create_graphene(
   delete_black_uploads:bool=False, background_color:int=0,
   green_threads:bool=False, use_https:bool=False,
   mesh_dir:Optional[str]=None, skel_dir:Optional[str]=None,
-  agglomerate:bool=False, secrets:SecretsType=None, spatial_index_db:Optional[str]=None, **kwargs
+  agglomerate:bool=False, secrets:SecretsType=None,
+  spatial_index_db:Optional[str]=None, cave_secret:SecretsType=None,
+  **kwargs
 ):
   from ...frontends import CloudVolumeGraphene
 
@@ -52,7 +54,7 @@ def mkcache(cloudpath):
     cloudpath, config=config, cache=mkcache(cloudpath),
     info=info, provenance=provenance,
     use_https=use_https, agglomerate=agglomerate,
-    auth_token=config.secrets,
+    auth_token=cave_secret,
   )
   # Resetting the cache is necessary because
   # graphene retrieves a data_dir from the info file
diff --git a/test/test_graphene.py b/test/test_graphene.py
index 1b080d816..255795953 100644
--- a/test/test_graphene.py
+++ b/test/test_graphene.py
@@ -25,16 +25,16 @@ def test_graphene_auth_token(graphene_vol):
  cloudpath = "graphene://" + posixpath.join(PCG_LOCATION,
    'segmentation', 'api/v1/', TEST_DATASET_NAME)
 
-  cloudvolume.CloudVolume(cloudpath, secrets=TEST_TOKEN)
-  cloudvolume.CloudVolume(cloudpath, secrets={ "token": TEST_TOKEN })
+  cloudvolume.CloudVolume(cloudpath, cave_secret=TEST_TOKEN)
+  cloudvolume.CloudVolume(cloudpath, cave_secret={ "token": TEST_TOKEN })
 
  try:
-    cloudvolume.CloudVolume(cloudpath, secrets=None)
+    cloudvolume.CloudVolume(cloudpath, cave_secret=None)
  except cloudvolume.exceptions.AuthenticationError:
    pass
 
  try:
-    cloudvolume.CloudVolume(cloudpath, secrets={ "token": "Z@(ASINAFSOFAFOSNS" })
+    cloudvolume.CloudVolume(cloudpath, cave_secret={ "token": "Z@(ASINAFSOFAFOSNS" })
    assert False
  except cloudvolume.exceptions.AuthenticationError:
    pass
@@ -115,7 +115,7 @@ def cv_graphene_mesh_precomputed(requests_mock):
  cloudpath = posixpath.join(PCG_LOCATION,
    'segmentation/table', PRECOMPUTED_MESH_TEST_DATASET_NAME)
 
-  yield cloudvolume.CloudVolume("graphene://" + cloudpath, secrets=TEST_TOKEN)
+  yield cloudvolume.CloudVolume("graphene://" + cloudpath, cave_secret=TEST_TOKEN)
 
@pytest.fixture()
def cv_graphene_mesh_draco(requests_mock):
@@ -198,7 +198,7 @@ def cv_graphene_mesh_draco(requests_mock):
  cloudpath = posixpath.join(PCG_LOCATION,
    'segmentation/table', DRACO_MESH_TEST_DATASET_NAME)
 
-  yield cloudvolume.CloudVolume('graphene://' + cloudpath, secrets=TEST_TOKEN)
+  yield cloudvolume.CloudVolume('graphene://' + cloudpath, cave_secret=TEST_TOKEN)
 
 
@pytest.fixture()
@@ -282,7 +282,7 @@ def cv_graphene_sharded(requests_mock):
  requests_mock.get(matcher,real_http=True)
 
  cloudpath = posixpath.join(PCG_LOCATION, 'segmentation/table/', GRAPHENE_SHARDED_MESH_TEST_DATASET_NAME)
-  yield cloudvolume.CloudVolume('graphene://' + cloudpath, use_https=True, secrets=TEST_TOKEN)
+  yield cloudvolume.CloudVolume('graphene://' + cloudpath, use_https=True, cave_secret=TEST_TOKEN)
 
 
@pytest.fixture(scope='session')
@@ -371,7 +371,7 @@ def mock_get_leaves(self, root_id, bbox, mip):
  cloudpath = "graphene://" + posixpath.join(PCG_LOCATION,
    'segmentation', 'api/v1/', TEST_DATASET_NAME)
 
-  gcv = cloudvolume.CloudVolume(cloudpath, secrets=TEST_TOKEN)
+  gcv = cloudvolume.CloudVolume(cloudpath, cave_secret=TEST_TOKEN)
  gcv.get_leaves = partial(mock_get_leaves, gcv)
 
  yield gcv
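
For reviewers trying the change out, here is a minimal usage sketch of the new `cave_secret` parameter, mirroring the call forms exercised in test_graphene.py and described in the README and docstring above. The cloudpath and token values are placeholders, not real endpoints or credentials.

```python
import cloudvolume

# Hypothetical graphene cloudpath and token, for illustration only.
cloudpath = "graphene://https://example.com/segmentation/api/v1/my_dataset"
token = "my-secret-token"

# The token may be passed as a bare string, a JSON string, or a dict with a "token" key.
cv = cloudvolume.CloudVolume(cloudpath, cave_secret=token)
cv = cloudvolume.CloudVolume(cloudpath, cave_secret={"token": token})

# `secrets=` still supplies storage-provider credentials as before;
# graphene server authentication now goes through `cave_secret`.
```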