feat: move cave secrets from secrets to cave_secrets #528

Open · wants to merge 1 commit into base: master
README.md: 1 addition & 1 deletion
@@ -163,7 +163,7 @@ You can create the `google-secret.json` file [here](https://console.cloud.google

*Note: used to be called chunkedgraph-secret.json. This is still supported but deprecated.*

-If you have a token from Graphene/Chunkedgraph server, create the `cave-secret.json` file as shown in the example below. You may also pass the token to `CloudVolume(..., secrets=token)`.
+If you have a token from Graphene/Chunkedgraph server, create the `cave-secret.json` file as shown in the example below. You may also pass the token to `CloudVolume(..., cave_secret=token)` as a bare token string, json, or a dict containing the "token" field.

```json
{
  "token": "<your cave token>"
}
```
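The README change above lists three accepted shapes for `cave_secret`. A minimal sketch of each, assuming a hypothetical graphene cloudpath and token; only the parameter shapes come from the diff:

```python
from cloudvolume import CloudVolume

# Hypothetical dataset path and token, for illustration only.
cloudpath = "graphene://https://example.com/segmentation/table/my_dataset"

cv = CloudVolume(cloudpath, cave_secret="abc123")                # bare token string
cv = CloudVolume(cloudpath, cave_secret='{"token": "abc123"}')   # json string
cv = CloudVolume(cloudpath, cave_secret={"token": "abc123"})     # dict with a "token" field
```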
cloudvolume/cloudvolume.py: 6 additions & 4 deletions
@@ -72,7 +72,7 @@ def __new__(cls,
background_color:int=0, green_threads:bool=False, use_https:bool=False,
max_redirects:int=10, mesh_dir:Optional[str]=None, skel_dir:Optional[str]=None,
agglomerate:bool=False, secrets:SecretsType=None,
-spatial_index_db:Optional[str]=None
+spatial_index_db:Optional[str]=None, cave_secret:SecretsType=None
):
"""
A "serverless" Python client for reading and writing arbitrarily large
@@ -132,7 +132,8 @@ def __new__(cls,

After initialization, you can adjust this setting via:
`cv.cache.enabled = ...` which accepts the same values.

+cave_secret: (json str, bare token str, dict, graphene only) Provides the
+auth token needed to access a graphene server (same as used by CAVE client).
cdn_cache: (int, bool, or str) Sets Cache-Control HTTP header on uploaded
image files. Most cloud providers perform some kind of caching. As of
this writing, Google defaults to 3600 seconds. Most of the time you'll
@@ -200,8 +201,9 @@ def __new__(cls,
Defaults to True in interactive python, False in script execution mode.
provenance: (string, dict) In lieu of fetching a provenance
file, use this one.
-secrets: (dict) provide per-instance authorization tokens. If not provided,
-defaults to looking in .cloudvolume/secrets for necessary tokens.
+secrets: (dict) provide per-instance authorization tokens for accessing the
+storage provider. If not provided, defaults to looking in .cloudvolume/secrets
+for necessary tokens. For the graphene server, see cave_secret.
skel_dir: (str) if not None, override the info['skeletons'] key before
pulling the skeleton info file.
spatial_index_db: (str) A path to an sqlite3 or mysql database that follows
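Per the revised docstring, `secrets` now covers storage-provider authorization while `cave_secret` carries the graphene/CAVE token. A hedged sketch of passing both on one instance; the path, token, and use of a `google-secret.json` service-account file are assumptions, not part of the diff:

```python
import json
from cloudvolume import CloudVolume

# Hypothetical values; the shape of the storage-provider secrets depends on
# the provider (see the README), while cave_secret is the graphene/CAVE token.
with open("google-secret.json") as f:
    storage_secrets = json.load(f)

cv = CloudVolume(
    "graphene://https://example.com/segmentation/table/my_dataset",
    secrets=storage_secrets,     # authorizes the underlying storage layer
    cave_secret="<cave-token>",  # authorizes requests to the graphene server
)
```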
cloudvolume/datasource/graphene/__init__.py: 4 additions & 2 deletions
@@ -24,7 +24,9 @@ def create_graphene(
delete_black_uploads:bool=False, background_color:int=0,
green_threads:bool=False, use_https:bool=False,
mesh_dir:Optional[str]=None, skel_dir:Optional[str]=None,
-agglomerate:bool=False, secrets:SecretsType=None, spatial_index_db:Optional[str]=None, **kwargs
+agglomerate:bool=False, secrets:SecretsType=None,
+spatial_index_db:Optional[str]=None, cave_secret:SecretsType=None,
+**kwargs
):
from ...frontends import CloudVolumeGraphene

@@ -52,7 +54,7 @@ def mkcache(cloudpath):
cloudpath, config=config, cache=mkcache(cloudpath),
info=info, provenance=provenance,
use_https=use_https, agglomerate=agglomerate,
-auth_token=config.secrets,
+auth_token=cave_secret,
)
# Resetting the cache is necessary because
# graphene retrieves a data_dir from the info file
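Because `auth_token` is now populated from `cave_secret` rather than `config.secrets`, callers that previously relied on `secrets=` for graphene authentication would move the token over. A hypothetical before/after sketch:

```python
import cloudvolume

path = "graphene://https://example.com/segmentation/api/v1/my_dataset"  # hypothetical
token = "abc123"  # hypothetical

# Previously, the graphene auth token was drawn from `secrets`:
# cv = cloudvolume.CloudVolume(path, secrets=token)

# With this change, the graphene token is passed explicitly:
cv = cloudvolume.CloudVolume(path, cave_secret=token)
```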
test/test_graphene.py: 8 additions & 8 deletions
@@ -25,16 +25,16 @@
def test_graphene_auth_token(graphene_vol):
cloudpath = "graphene://" + posixpath.join(PCG_LOCATION, 'segmentation', 'api/v1/', TEST_DATASET_NAME)

-cloudvolume.CloudVolume(cloudpath, secrets=TEST_TOKEN)
-cloudvolume.CloudVolume(cloudpath, secrets={ "token": TEST_TOKEN })
+cloudvolume.CloudVolume(cloudpath, cave_secret=TEST_TOKEN)
+cloudvolume.CloudVolume(cloudpath, cave_secret={ "token": TEST_TOKEN })

try:
-cloudvolume.CloudVolume(cloudpath, secrets=None)
+cloudvolume.CloudVolume(cloudpath, cave_secret=None)
except cloudvolume.exceptions.AuthenticationError:
pass

try:
-cloudvolume.CloudVolume(cloudpath, secrets={ "token": "Z@(ASINAFSOFAFOSNS" })
+cloudvolume.CloudVolume(cloudpath, cave_secret={ "token": "Z@(ASINAFSOFAFOSNS" })
assert False
except cloudvolume.exceptions.AuthenticationError:
pass
@@ -115,7 +115,7 @@ def cv_graphene_mesh_precomputed(requests_mock):
cloudpath = posixpath.join(PCG_LOCATION,
'segmentation/table',
PRECOMPUTED_MESH_TEST_DATASET_NAME)
-yield cloudvolume.CloudVolume("graphene://" + cloudpath, secrets=TEST_TOKEN)
+yield cloudvolume.CloudVolume("graphene://" + cloudpath, cave_secret=TEST_TOKEN)

@pytest.fixture()
def cv_graphene_mesh_draco(requests_mock):
@@ -198,7 +198,7 @@ def cv_graphene_mesh_draco(requests_mock):
cloudpath = posixpath.join(PCG_LOCATION,
'segmentation/table',
DRACO_MESH_TEST_DATASET_NAME)
-yield cloudvolume.CloudVolume('graphene://' + cloudpath, secrets=TEST_TOKEN)
+yield cloudvolume.CloudVolume('graphene://' + cloudpath, cave_secret=TEST_TOKEN)


@pytest.fixture()
@@ -282,7 +282,7 @@ def cv_graphene_sharded(requests_mock):

requests_mock.get(matcher,real_http=True)
cloudpath = posixpath.join(PCG_LOCATION, 'segmentation/table/', GRAPHENE_SHARDED_MESH_TEST_DATASET_NAME)
-yield cloudvolume.CloudVolume('graphene://' + cloudpath, use_https=True, secrets=TEST_TOKEN)
+yield cloudvolume.CloudVolume('graphene://' + cloudpath, use_https=True, cave_secret=TEST_TOKEN)


@pytest.fixture(scope='session')
@@ -371,7 +371,7 @@ def mock_get_leaves(self, root_id, bbox, mip):

cloudpath = "graphene://" + posixpath.join(PCG_LOCATION, 'segmentation', 'api/v1/', TEST_DATASET_NAME)

-gcv = cloudvolume.CloudVolume(cloudpath, secrets=TEST_TOKEN)
+gcv = cloudvolume.CloudVolume(cloudpath, cave_secret=TEST_TOKEN)
gcv.get_leaves = partial(mock_get_leaves, gcv)
yield gcv
