
Commit 319b872

Fixed supported type check in decode_method=data
1 parent 8142f0d commit 319b872

File tree

3 files changed: +34 −17 lines changed


deeplake/enterprise/convert_to_libdeeplake.py

+25 −15

@@ -169,10 +169,17 @@ def dataset_to_libdeeplake(hub2_dataset: Dataset):
     path: str = hub2_dataset.path

     token = (
-        hub2_dataset.client.get_token()
-        if (hub2_dataset.token is None or hub2_dataset._token == "")
-        and hub2_dataset.client
-        else hub2_dataset.token
+        hub2_dataset.token
+        if hasattr(hub2_dataset, "token") and hub2_dataset.token is not None
+        else (
+            getattr(hub2_dataset, "_token", None)
+            if hasattr(hub2_dataset, "_token") and hub2_dataset._token != ""
+            else (
+                hub2_dataset.client.get_token()
+                if hasattr(hub2_dataset, "client") and hub2_dataset.client
+                else ""
+            )
+        )
     )

     if hub2_dataset.libdeeplake_dataset is not None:
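Note on the hunk above: the token is now resolved defensively in three steps: the public token attribute, then the private _token, then the client. A minimal standalone sketch of that order (resolve_token and its argument are hypothetical names, not part of the commit):

def resolve_token(ds) -> str:
    # 1) Prefer the public token attribute when it is set.
    if getattr(ds, "token", None) is not None:
        return ds.token
    # 2) Fall back to a non-empty private _token.
    if getattr(ds, "_token", ""):
        return ds._token
    # 3) Finally ask the client, if one is attached; else empty string.
    client = getattr(ds, "client", None)
    return client.get_token() if client else ""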
@@ -239,15 +246,18 @@ def dataset_to_libdeeplake(hub2_dataset: Dataset):
     hub2_dataset.libdeeplake_dataset = libdeeplake_dataset

     assert libdeeplake_dataset is not None
-    if hasattr(hub2_dataset.storage, "cache_size"):
-        libdeeplake_dataset._max_cache_size = max(
-            hub2_dataset.storage.cache_size, libdeeplake_dataset._max_cache_size
-        )
-    commit_id = hub2_dataset.pending_commit_id
-    libdeeplake_dataset.checkout(commit_id)
-    slice_ = hub2_dataset.index.values[0].value
-    if slice_ != slice(None):
-        if isinstance(slice_, tuple):
-            slice_ = list(slice_)
-        libdeeplake_dataset = libdeeplake_dataset[slice_]
+    try:
+        if hasattr(hub2_dataset.storage, "cache_size"):
+            libdeeplake_dataset._max_cache_size = max(
+                hub2_dataset.storage.cache_size, libdeeplake_dataset._max_cache_size
+            )
+        commit_id = hub2_dataset.pending_commit_id
+        libdeeplake_dataset.checkout(commit_id)
+        slice_ = hub2_dataset.index.values[0].value
+        if slice_ != slice(None):
+            if isinstance(slice_, tuple):
+                slice_ = list(slice_)
+            libdeeplake_dataset = libdeeplake_dataset[slice_]
+    except INDRA_API.api.NoStorageDatasetViewError:  # type: ignore
+        pass
     return libdeeplake_dataset
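Note: with the try/except added above, a NoStorageDatasetViewError raised anywhere in the cache-size/checkout/slicing block is silently swallowed and the dataset is returned untouched. A hedged sketch of that control flow, using a stand-in exception class (the real one lives under INDRA_API.api):

class NoStorageDatasetViewError(Exception):
    """Stand-in for INDRA_API.api.NoStorageDatasetViewError."""

def checkout_and_slice(ds, commit_id, slice_):
    try:
        ds.checkout(commit_id)
        if slice_ != slice(None):
            if isinstance(slice_, tuple):
                slice_ = list(slice_)
            ds = ds[slice_]
    except NoStorageDatasetViewError:
        pass  # view not materialized in storage: return ds as-is
    return ds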

deeplake/integrations/pytorch/common.py

+7 −2

@@ -102,7 +102,8 @@ def check_tensors(dataset, tensors, verbose=True):
                 f" tensor."
             )
         meta = tensor.meta
-        if meta.sample_compression in supported_image_compressions:
+        comp = meta.sample_compression
+        if comp and meta.sample_compression in supported_image_compressions:
             jpeg_png_compressed_tensors.append(tensor_name)
         elif meta.htype == "json":
             json_tensors.append(tensor_name)
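Note: the old check classified a tensor as JPEG/PNG-compressed purely by membership, so a falsy sample_compression (e.g. None) could slip through if the supported list ever contained such an entry; the added comp guard rejects falsy values first. A quick illustration (the list contents here are an assumption for demonstration, not the library's actual values):

supported_image_compressions = ["jpeg", "png", None]  # hypothetical contents

comp = None
print(comp in supported_image_compressions)                 # True: old check matches
print(bool(comp and comp in supported_image_compressions))  # False: new check rejects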
@@ -218,7 +219,11 @@ def get_htype_ndim_tensor_info_dicts(dataset, data_tensors, tensor_info_tensors)
         htype_dict[tensor_name] = tensor.htype
         ndim_dict[tensor_name] = tensor.ndim
         if tensor_name in tensor_info_tensors:
-            tensor_info_dict[tensor_name] = tensor.info._info
+            if "_info" in tensor.info:
+                tensor_info_dict[tensor_name] = tensor.info._info
+            else:
+                tensor_info_dict[tensor_name] = dict(tensor.info)
+

     return htype_dict, ndim_dict, tensor_info_dict
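Note: the branch added above keeps the old behavior when tensor.info carries a raw _info mapping and otherwise copies the mapping itself. A close standalone variant (tensor_info_payload is a hypothetical name; getattr replaces the commit's membership test so the sketch also runs on a plain dict):

def tensor_info_payload(info) -> dict:
    # Prefer the raw backing mapping when the Info object exposes one,
    # mirroring the commit's tensor.info._info access.
    raw = getattr(info, "_info", None)
    if raw is not None:
        return raw
    # Otherwise copy the mapping-like object into a plain dict.
    return dict(info)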
deeplake/util/dataset.py

+2 −0

@@ -7,6 +7,8 @@ def try_flushing(ds):
         ds.storage.flush()
     except ReadOnlyModeError:
         pass
+    except:
+        pass


 def map_tensor_keys(dataset, tensor_keys: Optional[Sequence[str]] = None) -> List[str]:
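Note: the bare except: added above catches BaseException, so try_flushing now also swallows KeyboardInterrupt and SystemExit. If the intent is only to ignore unexpected flush failures, a narrower clause is the usual alternative; a sketch, not what the commit does (ReadOnlyModeError here stands in for the class deeplake/util/dataset.py imports):

class ReadOnlyModeError(Exception):
    """Stand-in for the exception imported in deeplake/util/dataset.py."""

def try_flushing_narrow(ds):
    try:
        ds.storage.flush()
    except ReadOnlyModeError:
        pass  # expected when the dataset is read-only
    except Exception:
        # Still ignores unexpected flush errors, but lets
        # KeyboardInterrupt and SystemExit propagate.
        pass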
