import pickle
from copy import deepcopy
from numbers import Number
from typing import Any, Dict, Optional, Union

import h5py
import numpy as np
import torch

from tianshou.data.batch import Batch, _parse_value


def to_numpy(x: Any) -> Union[Batch, np.ndarray]:
    """Return an object without torch.Tensor."""
    if isinstance(x, torch.Tensor):  # most often case
        return x.detach().cpu().numpy()
    elif isinstance(x, np.ndarray):  # second often case
        return x
    elif isinstance(x, (np.number, np.bool_, Number)):
        return np.asanyarray(x)
    elif x is None:
        return np.array(None, dtype=object)
    elif isinstance(x, (dict, Batch)):
        x = Batch(x) if isinstance(x, dict) else deepcopy(x)
        x.to_numpy()
        return x
    elif isinstance(x, (list, tuple)):
        return to_numpy(_parse_value(x))
    else:  # fallback
        return np.asanyarray(x)
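
# Illustrative conversions (a sketch of the rules above, not additional API):
#     to_numpy(torch.zeros(2, 3))       # np.ndarray of shape (2, 3)
#     to_numpy({"obs": torch.ones(4)})  # Batch whose "obs" is an np.ndarray
#     to_numpy(None)                    # np.array(None, dtype=object)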


def to_torch(
    x: Any,
    dtype: Optional[torch.dtype] = None,
    device: Union[str, int, torch.device] = "cpu",
) -> Union[Batch, torch.Tensor]:
    """Return an object without np.ndarray."""
    if isinstance(x, np.ndarray) and issubclass(
        x.dtype.type, (np.bool_, np.number)
    ):  # most often case
        x = torch.from_numpy(x).to(device)  # type: ignore
        if dtype is not None:
            x = x.type(dtype)
        return x
    elif isinstance(x, torch.Tensor):  # second often case
        if dtype is not None:
            x = x.type(dtype)
        return x.to(device)  # type: ignore
    elif isinstance(x, (np.number, np.bool_, Number)):
        return to_torch(np.asanyarray(x), dtype, device)
    elif isinstance(x, (dict, Batch)):
        x = Batch(x, copy=True) if isinstance(x, dict) else deepcopy(x)
        x.to_torch(dtype, device)
        return x
    elif isinstance(x, (list, tuple)):
        return to_torch(_parse_value(x), dtype, device)
    else:  # fallback
        raise TypeError(f"object {x} cannot be converted to torch.")
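
# Illustrative conversions (a sketch; device placement assumes the hardware is
# actually available):
#     to_torch(np.zeros((2, 3)))                  # torch.Tensor on "cpu"
#     to_torch(np.zeros(3), dtype=torch.float32)  # cast to float32
#     to_torch(Batch(obs=np.ones(4)), device="cuda:0")  # only if a GPU exists
#     to_torch("not convertible")                 # raises TypeError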


def to_torch_as(x: Any, y: torch.Tensor) -> Union[Batch, torch.Tensor]:
    """Return an object without np.ndarray.

    Same as ``to_torch(x, dtype=y.dtype, device=y.device)``.
    """
    assert isinstance(y, torch.Tensor)
    return to_torch(x, dtype=y.dtype, device=y.device)
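
# Example: match an existing tensor's dtype and device, e.g. when aligning a
# numpy array with a network output:
#     to_torch_as(np.zeros(4), torch.zeros(4))  # float32 tensor on "cpu"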


# Note: object is used as a proxy for objects that can be pickled
# Note: mypy does not support cyclic definition currently
Hdf5ConvertibleValues = Union[  # type: ignore
    int, float, Batch, np.ndarray, torch.Tensor, object,
    'Hdf5ConvertibleType',  # type: ignore
]

Hdf5ConvertibleType = Dict[str, Hdf5ConvertibleValues]  # type: ignore
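
# For example, the following nested mapping is a valid Hdf5ConvertibleType:
#     {"obs": np.zeros((3, 4)), "act": torch.zeros(3), "info": {"id": 1}}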


def to_hdf5(x: Hdf5ConvertibleType, y: h5py.Group) -> None:
    """Copy object into HDF5 group."""

    def to_hdf5_via_pickle(x: object, y: h5py.Group, key: str) -> None:
        """Pickle, convert to numpy array and write to HDF5 dataset."""
        data = np.frombuffer(pickle.dumps(x), dtype=np.byte)
        y.create_dataset(key, data=data)

    for k, v in x.items():
        if isinstance(v, (Batch, dict)):
            # dicts and batches are both represented by groups
            subgrp = y.create_group(k)
            if isinstance(v, Batch):
                subgrp_data = v.__getstate__()
                subgrp.attrs["__data_type__"] = "Batch"
            else:
                subgrp_data = v
            to_hdf5(subgrp_data, subgrp)
        elif isinstance(v, torch.Tensor):
            # PyTorch tensors are written to datasets
            y.create_dataset(k, data=to_numpy(v))
            y[k].attrs["__data_type__"] = "Tensor"
        elif isinstance(v, np.ndarray):
            try:
                # NumPy arrays are written to datasets
                y.create_dataset(k, data=v)
                y[k].attrs["__data_type__"] = "ndarray"
            except TypeError:
                # If data type is not supported by HDF5 fall back to pickle.
                # This happens if dtype=object (e.g. due to entries being None)
                # and possibly in other cases like structured arrays.
                try:
                    to_hdf5_via_pickle(v, y, k)
                except Exception as exception:
                    raise RuntimeError(
                        f"Attempted to pickle {v.__class__.__name__} due to "
                        "data type not supported by HDF5 and failed."
                    ) from exception
                y[k].attrs["__data_type__"] = "pickled_ndarray"
        elif isinstance(v, (int, float)):
            # ints and floats are stored as attributes of groups
            y.attrs[k] = v
        else:  # resort to pickle for any other type of object
            try:
                to_hdf5_via_pickle(v, y, k)
            except Exception as exception:
                raise NotImplementedError(
                    f"No conversion to HDF5 for object of type '{type(v)}' "
                    "implemented and fallback to pickle failed."
                ) from exception
            y[k].attrs["__data_type__"] = v.__class__.__name__
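
# Resulting layout (sketch): dicts/Batches become HDF5 groups, arrays and
# tensors become datasets, ints/floats become group attributes, and anything
# else is pickled into a byte dataset; the "__data_type__" attribute records
# how each entry should be restored by ``from_hdf5``.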


def from_hdf5(x: h5py.Group, device: Optional[str] = None) -> Hdf5ConvertibleValues:
    """Restore object from HDF5 group."""
    if isinstance(x, h5py.Dataset):
        # handle datasets
        if x.attrs["__data_type__"] == "ndarray":
            return np.array(x)
        elif x.attrs["__data_type__"] == "Tensor":
            return torch.tensor(x, device=device)
        else:
            return pickle.loads(x[()])
    else:
        # handle groups representing a dict or a Batch
        y = dict(x.attrs.items())
        data_type = y.pop("__data_type__", None)
        for k, v in x.items():
            y[k] = from_hdf5(v, device)
        return Batch(y) if data_type == "Batch" else y
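

# A minimal, hypothetical round-trip sketch (not part of the library API).
# It assumes write access to "demo.h5" in the working directory and only
# exercises the converters defined above.
if __name__ == "__main__":
    data = {
        "obs": np.arange(4.0),
        "act": torch.zeros(3),
        "info": Batch(done=np.zeros(3, dtype=bool)),
    }
    with h5py.File("demo.h5", "w") as f:
        to_hdf5(data, f)  # groups for dict/Batch, datasets for array/tensor
    with h5py.File("demo.h5", "r") as f:
        restored = from_hdf5(f)  # top level comes back as a plain dict
    print(restored)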