| | import abc |
| | import logging |
| | import json |
| | import os |
| | from packaging import version |
| | import pathlib |
| | import re |
| | import tempfile |
| | from types import MappingProxyType |
| | from typing import Any, Collection, Dict, List, Optional, Tuple, Union |
| |
|
| | import pyarrow.fs |
| |
|
| | import ray |
| | import ray.cloudpickle as pickle |
| | from ray.rllib.core import ( |
| | COMPONENT_LEARNER, |
| | COMPONENT_LEARNER_GROUP, |
| | COMPONENT_RL_MODULE, |
| | ) |
| | from ray.rllib.utils import force_list |
| | from ray.rllib.utils.actor_manager import FaultTolerantActorManager |
| | from ray.rllib.utils.annotations import ( |
| | OverrideToImplementCustomLogic_CallToSuperRecommended, |
| | ) |
| | from ray.rllib.utils.serialization import NOT_SERIALIZABLE, serialize_type |
| | from ray.rllib.utils.typing import StateDict |
| | from ray.train import Checkpoint |
| | from ray.tune.utils.file_transfer import sync_dir_between_nodes |
| | from ray.util import log_once |
| | from ray.util.annotations import PublicAPI |
| |
|
| | logger = logging.getLogger(__name__) |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
| | |
| |
|
| | |
| |
|
| | |
| | |
| |
|
# Checkpoint format version written by the old (pre-Learner) checkpoint stack.
CHECKPOINT_VERSION = version.Version("1.1")
# Checkpoint format version for checkpoints that include Learner state.
CHECKPOINT_VERSION_LEARNER = version.Version("1.2")
# Most recent checkpoint format version (Learner- and EnvRunner state); this is the
# default version written into the metadata file by `Checkpointable.save_to_path()`.
CHECKPOINT_VERSION_LEARNER_AND_ENV_RUNNER = version.Version("2.0")
| |
|
| |
|
@PublicAPI(stability="alpha")
class Checkpointable(abc.ABC):
    """Abstract base class for a component of RLlib that can be checkpointed to disk.

    Subclasses must implement the following APIs:
    - save_to_path()
    - restore_from_path()
    - from_checkpoint()
    - get_state()
    - set_state()
    - get_ctor_args_and_kwargs()
    - get_metadata()
    - get_checkpointable_components()
    """

    # Name of the pickle file (within the checkpoint dir) that holds this component's
    # own (non-subcomponent) state, as written by `save_to_path()`.
    STATE_FILE_NAME = "state.pkl"

    # Name of the pickle file that holds this component's class plus the args/kwargs
    # needed to re-construct an instance of it (used by `from_checkpoint()`).
    CLASS_AND_CTOR_ARGS_FILE_NAME = "class_and_ctor_args.pkl"

    # Name of the (human-readable) JSON metadata file. Purely informational; NOT
    # required to restore the state (see `restore_from_path()`).
    METADATA_FILE_NAME = "metadata.json"

    def save_to_path(
        self,
        path: Optional[Union[str, pathlib.Path]] = None,
        *,
        state: Optional[StateDict] = None,
        filesystem: Optional["pyarrow.fs.FileSystem"] = None,
    ) -> str:
        """Saves the state of the implementing class (or `state`) to `path`.

        The state of the implementing class is always saved in the following format:

        .. testcode::
            :skipif: True

            path/
                [component1]/
                    [component1 subcomponentA]/
                        ...
                    [component1 subcomponentB]/
                        ...
                [component2]/
                        ...
                [cls.METADATA_FILE_NAME] (json)
                [cls.STATE_FILE_NAME] (pkl)

        The main logic is to loop through all subcomponents of this Checkpointable
        and call their respective `save_to_path` methods. Then save the remaining
        (non subcomponent) state to this Checkpointable's STATE_FILE_NAME.
        In the exception that a component is a FaultTolerantActorManager instance,
        instead of calling `save_to_path` directly on that manager, the first healthy
        actor is interpreted as the component and its `save_to_path` method is called.
        Even if that actor is located on another node, the created file is automatically
        synced to the local node.

        Args:
            path: The path to the directory to save the state of the implementing class
                to. If `path` doesn't exist or is None, then a new directory will be
                created (and returned).
            state: An optional state dict to be used instead of getting a new state of
                the implementing class through `self.get_state()`.
            filesystem: PyArrow FileSystem to use to access data at the `path`.
                If not specified, this is inferred from the URI scheme of `path`.

        Returns:
            The path (str) where the state has been saved.
        """
        # No path given -> Pick a unique directory under the system temp dir.
        if path is None:
            import uuid

            tmp_dir = pathlib.Path(tempfile.gettempdir())
            # Use a random UUID so concurrent saves cannot collide.
            random_dir_name = str(uuid.uuid4())
            path = path or tmp_dir / random_dir_name

        # `pyarrow.fs.FileSystem.from_uri` requires a string (URI or local path).
        path = path if isinstance(path, str) else path.as_posix()

        # Infer the filesystem from the path's URI scheme, if not explicitly provided.
        if path and not filesystem:
            filesystem, path = pyarrow.fs.FileSystem.from_uri(path)

        # Make sure the checkpoint directory exists.
        filesystem.create_dir(path, recursive=True)

        # From here on, treat `path` as a pathlib.Path for convenient `/` joins.
        path = pathlib.Path(path)

        # Write the metadata file (JSON), defaulting the checkpoint version to the
        # most recent one if the subclass' metadata does not provide it.
        metadata = self.get_metadata()
        if "checkpoint_version" not in metadata:
            metadata["checkpoint_version"] = str(
                CHECKPOINT_VERSION_LEARNER_AND_ENV_RUNNER
            )
        with filesystem.open_output_stream(
            (path / self.METADATA_FILE_NAME).as_posix()
        ) as f:
            f.write(json.dumps(metadata).encode("utf-8"))

        # Write class and constructor args information (pkl), so `from_checkpoint()`
        # can re-instantiate this exact class later.
        with filesystem.open_output_stream(
            (path / self.CLASS_AND_CTOR_ARGS_FILE_NAME).as_posix()
        ) as f:
            pickle.dump(
                {
                    "class": type(self),
                    "ctor_args_and_kwargs": self.get_ctor_args_and_kwargs(),
                },
                f,
            )

        # If no `state` was provided, get the state, but w/o the subcomponents'
        # states (those are saved into their own subdirectories below).
        _state_provided = state is not None
        state = state or self.get_state(
            not_components=[c[0] for c in self.get_checkpointable_components()]
        )

        # Write all subcomponents of `self` into their own subdirectories.
        for comp_name, comp in self.get_checkpointable_components():
            # If a `state` was provided and a subcomponent's name is missing from it,
            # skip saving this subcomponent entirely.
            if _state_provided and comp_name not in state:
                continue
            comp_path = path / comp_name

            # If a component is a FaultTolerantActorManager, save the manager's first
            # healthy actor's state to disk (even if it's on another node, in which
            # case the files are synced to this node).
            if isinstance(comp, FaultTolerantActorManager):
                actor_to_use = comp.healthy_actor_ids()[0]

                def _get_ip(_=None):
                    import ray

                    return ray.util.get_node_ip_address()

                # Determine on which node the chosen actor lives.
                _result = next(
                    iter(
                        comp.foreach_actor(
                            _get_ip,
                            remote_actor_ids=[actor_to_use],
                        )
                    )
                )
                if not _result.ok:
                    raise _result.get()
                worker_ip_addr = _result.get()
                self_ip_addr = _get_ip()

                # If a subcomponent state was provided, `ray.put()` it once into the
                # object store so the remote actor can fetch it cheaply.
                comp_state_ref = None
                if _state_provided:
                    comp_state_ref = ray.put(state.pop(comp_name))

                if worker_ip_addr == self_ip_addr:
                    # Actor is on this node -> Save directly into `comp_path`.
                    comp.foreach_actor(
                        lambda w, _path=comp_path, _state=comp_state_ref: (
                            w.save_to_path(
                                _path,
                                state=(
                                    ray.get(_state)
                                    if _state is not None
                                    else w.get_state()
                                ),
                            )
                        ),
                        remote_actor_ids=[actor_to_use],
                    )
                else:
                    # Actor lives on another node -> Save to a temp dir there, then
                    # sync that dir over to this node and clean up the temp dir.
                    def _save(w, _state=comp_state_ref):
                        import tempfile

                        # Create a temporary directory on the remote node.
                        tmpdir = tempfile.mkdtemp()
                        w.save_to_path(
                            tmpdir,
                            state=(
                                ray.get(_state) if _state is not None else w.get_state()
                            ),
                        )
                        return tmpdir

                    _result = next(
                        iter(comp.foreach_actor(_save, remote_actor_ids=[actor_to_use]))
                    )
                    if not _result.ok:
                        raise _result.get()
                    worker_temp_dir = _result.get()

                    # Copy the remote temp dir's contents to this node's `comp_path`.
                    sync_dir_between_nodes(
                        worker_ip_addr,
                        worker_temp_dir,
                        self_ip_addr,
                        str(comp_path),
                    )

                    # Remove the temporary directory on the remote node again.
                    def _rmdir(_, _dir=worker_temp_dir):
                        import shutil

                        shutil.rmtree(_dir)

                    comp.foreach_actor(_rmdir, remote_actor_ids=[actor_to_use])

            # Local, plain Checkpointable subcomponent -> Call its `save_to_path`.
            else:
                if _state_provided:
                    comp_state = state.pop(comp_name)
                else:
                    comp_state = self.get_state(components=comp_name)[comp_name]
                # Pass the already-extracted state to avoid a second `get_state()`
                # call inside the subcomponent's own `save_to_path`.
                comp.save_to_path(comp_path, filesystem=filesystem, state=comp_state)

        # Write the remaining (non-subcomponent) state to disk.
        with filesystem.open_output_stream(
            (path / self.STATE_FILE_NAME).as_posix()
        ) as f:
            pickle.dump(state, f)

        return str(path)

    def restore_from_path(
        self,
        path: Union[str, pathlib.Path],
        *,
        component: Optional[str] = None,
        filesystem: Optional["pyarrow.fs.FileSystem"] = None,
        **kwargs,
    ) -> None:
        """Restores the state of the implementing class from the given path.

        If the `component` arg is provided, `path` refers to a checkpoint of a
        subcomponent of `self`, thus allowing the user to load only the subcomponent's
        state into `self` without affecting any of the other state information (for
        example, loading only the NN state into a Checkpointable, which contains such
        an NN, but also has other state information that should NOT be changed by
        calling this method).

        The given `path` should have the following structure and contain the following
        files:

        .. testcode::
            :skipif: True

            path/
                [component1]/
                    [component1 subcomponentA]/
                        ...
                    [component1 subcomponentB]/
                        ...
                [component2]/
                        ...
                [cls.METADATA_FILE_NAME] (json)
                [cls.STATE_FILE_NAME] (pkl)

        Note that the self.METADATA_FILE_NAME file is not required to restore the state.

        Args:
            path: The path to load the implementing class' state from or to load the
                state of only one subcomponent's state of the implementing class (if
                `component` is provided).
            component: If provided, `path` is interpreted as the checkpoint path of only
                the subcomponent and thus, only that subcomponent's state is
                restored/loaded. All other state of `self` remains unchanged in this
                case.
            filesystem: PyArrow FileSystem to use to access data at the `path`. If not
                specified, this is inferred from the URI scheme of `path`.
            **kwargs: Forward compatibility kwargs.
        """
        path = path if isinstance(path, str) else path.as_posix()

        if path and not filesystem:
            # Infer the filesystem from the URI scheme of `path`.
            filesystem, path = pyarrow.fs.FileSystem.from_uri(path)
        # From here on, handle `path` as a pathlib.Path.
        path = pathlib.Path(path)

        if not _exists_at_fs_path(filesystem, path.as_posix()):
            raise FileNotFoundError(f"`path` ({path}) not found!")

        # Restore all subcomponents of `self` first.
        for comp_name, comp in self.get_checkpointable_components():
            # `component` arg (to be passed into the subcomponent's own
            # `restore_from_path`); None means "restore everything of this comp".
            comp_arg = None

            if component is None:
                comp_dir = path / comp_name
                # If subcomponent's dir is not in path, ignore it and don't restore
                # this subcomponent's state from disk.
                if not _exists_at_fs_path(filesystem, comp_dir.as_posix()):
                    continue
            else:
                comp_dir = path

                # `component` refers to a sub-subcomponent of `comp` (e.g.
                # "learner_group/learner") -> Strip the prefix and pass the remainder
                # down as this comp's `component` arg.
                if component.startswith(comp_name + "/"):
                    comp_arg = component[len(comp_name) + 1 :]
                # `component` refers to a different subcomponent -> skip this one.
                elif component != comp_name:
                    continue

            # If a component is a FaultTolerantActorManager, restore ALL healthy
            # actors from the checkpoint dir (syncing the dir to each remote node
            # first, if necessary).
            if isinstance(comp, FaultTolerantActorManager):
                head_node_ip = ray.util.get_node_ip_address()
                all_healthy_actors = comp.healthy_actor_ids()

                def _restore(
                    w,
                    # MappingProxyType makes the captured kwargs read-only.
                    _kwargs=MappingProxyType(kwargs),
                    _path=comp_dir,
                    _head_ip=head_node_ip,
                    _comp_arg=comp_arg,
                ):
                    import ray
                    import tempfile

                    worker_node_ip = ray.util.get_node_ip_address()
                    # If the worker is on the same node as the head node, load the
                    # checkpoint directly from `_path`. Otherwise, sync the checkpoint
                    # dir from the head node to a temp dir on the worker node first.
                    if worker_node_ip == _head_ip:
                        w.restore_from_path(_path, component=_comp_arg, **_kwargs)
                    else:
                        with tempfile.TemporaryDirectory() as temp_dir:
                            sync_dir_between_nodes(
                                _head_ip, _path, worker_node_ip, temp_dir
                            )
                            w.restore_from_path(
                                temp_dir, component=_comp_arg, **_kwargs
                            )

                comp.foreach_actor(_restore, remote_actor_ids=all_healthy_actors)

            # Local, plain Checkpointable subcomponent -> Delegate to its own
            # `restore_from_path` method.
            else:
                comp.restore_from_path(
                    comp_dir, filesystem=filesystem, component=comp_arg, **kwargs
                )

        # Restore the rest of the state (not based on subcomponents), but only when
        # restoring the entire Checkpointable (not just one subcomponent).
        if component is None:
            with filesystem.open_input_stream(
                (path / self.STATE_FILE_NAME).as_posix()
            ) as f:
                state = pickle.load(f)
            self.set_state(state)

    @classmethod
    def from_checkpoint(
        cls,
        path: Union[str, pathlib.Path],
        filesystem: Optional["pyarrow.fs.FileSystem"] = None,
        **kwargs,
    ) -> "Checkpointable":
        """Creates a new Checkpointable instance from the given location and returns it.

        Args:
            path: The checkpoint path to load (a) the information on how to construct
                a new instance of the implementing class and (b) the state to restore
                the created instance to.
            filesystem: PyArrow FileSystem to use to access data at the `path`. If not
                specified, this is inferred from the URI scheme of `path`.
            kwargs: Forward compatibility kwargs. Note that these kwargs are sent to
                each subcomponent's `from_checkpoint()` call.

        Returns:
             A new instance of the implementing class, already set to the state stored
             under `path`.
        """
        # pyarrow expects a string path (URI or local path).
        path = path if isinstance(path, str) else path.as_posix()

        # Infer the filesystem from the URI scheme, if not explicitly given.
        if path and not filesystem:
            # NOTE(review): presumably local paths (no scheme) resolve to a
            # LocalFileSystem here — confirm against pyarrow docs.
            filesystem, path = pyarrow.fs.FileSystem.from_uri(path)
        # From here on, handle `path` as a pathlib.Path.
        path = pathlib.Path(path)

        # Get the class constructor to call and its args/kwargs.
        with filesystem.open_input_stream(
            (path / cls.CLASS_AND_CTOR_ARGS_FILE_NAME).as_posix()
        ) as f:
            ctor_info = pickle.load(f)
        ctor = ctor_info["class"]

        # Sanity-check the class stored in the checkpoint.
        if not issubclass(ctor, cls):
            raise ValueError(
                f"The class ({ctor}) stored in checkpoint ({path}) does not seem to be "
                f"a subclass of `cls` ({cls})!"
            )
        elif not issubclass(ctor, Checkpointable):
            raise ValueError(
                f"The class ({ctor}) stored in checkpoint ({path}) does not seem to be "
                "an implementer of the `Checkpointable` API!"
            )

        # Construct a fresh instance, then load the checkpointed state into it.
        obj = ctor(
            *ctor_info["ctor_args_and_kwargs"][0],
            **ctor_info["ctor_args_and_kwargs"][1],
        )
        # Restore the state of the constructed object.
        obj.restore_from_path(path, filesystem=filesystem, **kwargs)
        # Return the new object.
        return obj

    @abc.abstractmethod
    def get_state(
        self,
        components: Optional[Union[str, Collection[str]]] = None,
        *,
        not_components: Optional[Union[str, Collection[str]]] = None,
        **kwargs,
    ) -> StateDict:
        """Returns the implementing class's current state as a dict.

        Args:
            components: An optional collection of string keys to be included in the
                returned state. This might be useful, if getting certain components
                of the state is expensive (e.g. reading/compiling the weights of a large
                NN) and at the same time, these components are not required by the
                caller.
            not_components: An optional list of string keys to be excluded in the
                returned state, even if the same string is part of `components`.
                This is useful to get the complete state of the class, except
                one or a few components.
            kwargs: Forward-compatibility kwargs.

        Returns:
            The current state of the implementing class (or only the `components`
            specified, w/o those in `not_components`).
        """

    @abc.abstractmethod
    def set_state(self, state: StateDict) -> None:
        """Sets the implementing class' state to the given state dict.

        If component keys are missing in `state`, these components of the implementing
        class will not be updated/set.

        Args:
            state: The state dict to restore the state from. Maps component keys
                to the corresponding subcomponent's own state.
        """

    @abc.abstractmethod
    def get_ctor_args_and_kwargs(self) -> Tuple[Tuple, Dict[str, Any]]:
        """Returns the args/kwargs used to create `self` from its constructor.

        Returns:
            A tuple of the args (as a tuple) and kwargs (as a Dict[str, Any]) used to
            construct `self` from its class constructor.
        """

    @OverrideToImplementCustomLogic_CallToSuperRecommended
    def get_metadata(self) -> Dict:
        """Returns JSON writable metadata further describing the implementing class.

        Note that this metadata is NOT part of any state and is thus NOT needed to
        restore the state of a Checkpointable instance from a directory. Rather, the
        metadata will be written into `self.METADATA_FILE_NAME` when calling
        `self.save_to_path()` for the user's convenience.

        Returns:
            A JSON-encodable dict of metadata information.
        """
        return {
            "class_and_ctor_args_file": self.CLASS_AND_CTOR_ARGS_FILE_NAME,
            "state_file": self.STATE_FILE_NAME,
            "ray_version": ray.__version__,
            "ray_commit": ray.__commit__,
        }

    def get_checkpointable_components(self) -> List[Tuple[str, "Checkpointable"]]:
        """Returns the implementing class's own Checkpointable subcomponents.

        Returns:
            A list of 2-tuples (name, subcomponent) describing the implementing class'
            subcomponents, all of which have to be `Checkpointable` themselves and
            whose state is therefore written into subdirectories (rather than the main
            state file (self.STATE_FILE_NAME) when calling `self.save_to_path()`).
        """
        return []

    def _check_component(self, name, components, not_components) -> bool:
        # Return True if component `name` should be included, given the `components`
        # allow-list and `not_components` deny-list (both may be None, a str, or a
        # collection of strs). A "name/..." prefix in `components` also counts as a
        # match (it selects a subcomponent of `name`).
        comp_list = force_list(components)
        not_comp_list = force_list(not_components)
        if (
            components is None
            or any(c.startswith(name + "/") for c in comp_list)
            or name in comp_list
        ) and (not_components is None or name not in not_comp_list):
            return True
        return False

    def _get_subcomponents(self, name, components):
        # Given a component `name` and a `components` spec (None, str, or collection
        # of strs), return the list of sub-specs addressed to `name` (i.e. entries of
        # the form "name/rest" with the "name/" prefix stripped), or None if there
        # are none (meaning: the entire subcomponent is addressed).
        if components is None:
            return None

        components = force_list(components)
        subcomponents = []
        for comp in components:
            if comp.startswith(name + "/"):
                subcomponents.append(comp[len(name) + 1 :])

        return None if not subcomponents else subcomponents
| |
|
| |
|
def _exists_at_fs_path(fs: pyarrow.fs.FileSystem, path: str) -> bool:
    """Returns `True` if the path can be found in the filesystem."""
    # A non-existent path yields a FileInfo whose type is `NotFound`.
    return fs.get_file_info(path).type != pyarrow.fs.FileType.NotFound
| |
|
| |
|
def _is_dir(file_info: pyarrow.fs.FileInfo) -> bool:
    """Returns `True`, if the file info is from a directory."""
    is_directory = file_info.type == pyarrow.fs.FileType.Directory
    return is_directory
| |
|
| |
|
@PublicAPI(stability="alpha")
def get_checkpoint_info(
    checkpoint: Union[str, Checkpoint],
    filesystem: Optional["pyarrow.fs.FileSystem"] = None,
) -> Dict[str, Any]:
    """Returns a dict with information about an Algorithm/Policy checkpoint.

    If the given checkpoint is a >=v1.0 checkpoint directory, try reading all
    information from the contained `rllib_checkpoint.json` file.

    Args:
        checkpoint: The checkpoint directory (str) or an AIR Checkpoint object.
        filesystem: PyArrow FileSystem to use to access data at the `checkpoint`. If not
            specified, this is inferred from the URI scheme provided by `checkpoint`.

    Returns:
        A dict containing the keys:
        "type": One of "Policy" or "Algorithm".
        "checkpoint_version": A version tuple, e.g. v1.0, indicating the checkpoint
        version. This will help RLlib to remain backward compatible wrt. future
        Ray and checkpoint versions.
        "checkpoint_dir": The directory with all the checkpoint files in it. This might
        be the same as the incoming `checkpoint` arg.
        "state_file": The main file with the Algorithm/Policy's state information in it.
        This is usually a pickle-encoded file.
        "policy_ids": An optional set of PolicyIDs in case we are dealing with an
        Algorithm checkpoint. None if `checkpoint` is a Policy checkpoint.
    """
    # Default checkpoint info (overridden below based on what we find on disk).
    info = {
        "type": "Algorithm",
        "format": "cloudpickle",
        "checkpoint_version": CHECKPOINT_VERSION,
        "checkpoint_dir": None,
        "state_file": None,
        "policy_ids": None,
        "module_ids": None,
    }

    # `checkpoint` is a Checkpoint instance: Translate to directory and continue.
    if isinstance(checkpoint, Checkpoint):
        checkpoint = checkpoint.to_directory()

    if checkpoint and not filesystem:
        # Infer the filesystem from the URI scheme of `checkpoint`.
        filesystem, checkpoint = pyarrow.fs.FileSystem.from_uri(checkpoint)
    # From here on, handle `checkpoint` as a pathlib.Path.
    checkpoint = pathlib.Path(checkpoint)

    # Checkpoint is dir.
    if _exists_at_fs_path(filesystem, checkpoint.as_posix()) and _is_dir(
        filesystem.get_file_info(checkpoint.as_posix())
    ):
        info.update({"checkpoint_dir": str(checkpoint)})

        # Figure out whether this is an older checkpoint format
        # (with a `checkpoint-\d+` file in it).
        file_info_list = filesystem.get_file_info(
            pyarrow.fs.FileSelector(checkpoint.as_posix(), recursive=False)
        )
        for file_info in file_info_list:
            if file_info.is_file:
                if re.match("checkpoint-\\d+", file_info.base_name):
                    info.update(
                        {
                            "checkpoint_version": version.Version("0.1"),
                            "state_file": str(file_info.base_name),
                        }
                    )
                    return info

        # If rllib_checkpoint.json file present, read available information from it
        # and then continue with the checkpoint analysis (possibly overriding further
        # information).
        if _exists_at_fs_path(
            filesystem, (checkpoint / "rllib_checkpoint.json").as_posix()
        ):
            with filesystem.open_input_stream(
                (checkpoint / "rllib_checkpoint.json").as_posix()
            ) as f:
                rllib_checkpoint_info = json.load(fp=f)
            # Parse the version string into a comparable `version.Version` object.
            if "checkpoint_version" in rllib_checkpoint_info:
                rllib_checkpoint_info["checkpoint_version"] = version.Version(
                    rllib_checkpoint_info["checkpoint_version"]
                )
            info.update(rllib_checkpoint_info)
        else:
            # No rllib_checkpoint.json file present: Warn (once) and continue trying
            # to figure out checkpoint info ourselves.
            if log_once("no_rllib_checkpoint_json_file"):
                logger.warning(
                    "No `rllib_checkpoint.json` file found in checkpoint directory "
                    f"{checkpoint}! Trying to extract checkpoint info from other files "
                    f"found in that dir."
                )

        # Policy checkpoint file found -> This is a Policy (not Algorithm) checkpoint.
        for extension in ["pkl", "msgpck"]:
            if _exists_at_fs_path(
                filesystem, (checkpoint / ("policy_state." + extension)).as_posix()
            ):
                info.update(
                    {
                        "type": "Policy",
                        "format": "cloudpickle" if extension == "pkl" else "msgpack",
                        "checkpoint_version": CHECKPOINT_VERSION,
                        "state_file": str(checkpoint / f"policy_state.{extension}"),
                    }
                )
                return info

    # Valid Algorithm checkpoint >v0 file found? Determine its (de)serialization
        # format from the state file's extension.
        format = None
        for extension in ["pkl", "msgpck"]:
            state_file = checkpoint / f"algorithm_state.{extension}"
            if (
                _exists_at_fs_path(filesystem, state_file.as_posix())
                and filesystem.get_file_info(state_file.as_posix()).is_file
            ):
                format = "cloudpickle" if extension == "pkl" else "msgpack"
                break
        if format is None:
            raise ValueError(
                "Given checkpoint does not seem to be valid! No file with the name "
                "`algorithm_state.[pkl|msgpck]` (or `checkpoint-[0-9]+`) found."
            )

        info.update(
            {
                "format": format,
                "state_file": str(state_file),
            }
        )

        # Collect all policy IDs from the "policies/" sub-dir (if present).
        policies_dir = checkpoint / "policies"
        if _exists_at_fs_path(filesystem, policies_dir.as_posix()) and _is_dir(
            filesystem.get_file_info(policies_dir.as_posix())
        ):
            policy_ids = set()
            file_info_list = filesystem.get_file_info(
                pyarrow.fs.FileSelector(policies_dir.as_posix(), recursive=False)
            )
            for file_info in file_info_list:
                policy_ids.add(file_info.base_name)
            info.update({"policy_ids": policy_ids})

        # Collect all module IDs from the learner-group's RLModule sub-dir (if
        # present).
        modules_dir = (
            checkpoint
            / COMPONENT_LEARNER_GROUP
            / COMPONENT_LEARNER
            / COMPONENT_RL_MODULE
        )
        # Fixed: Check existence of `modules_dir` itself (the original checked
        # `checkpoint`, which is always True inside this branch), mirroring the
        # `policies_dir` check above.
        if _exists_at_fs_path(filesystem, modules_dir.as_posix()) and _is_dir(
            filesystem.get_file_info(modules_dir.as_posix())
        ):
            module_ids = set()
            file_info_list = filesystem.get_file_info(
                pyarrow.fs.FileSelector(modules_dir.as_posix(), recursive=False)
            )
            for file_info in file_info_list:
                # Only add subdirs (those are the modules); skip loose files in the
                # RLModule directory.
                module_dir = modules_dir / file_info.base_name
                if _is_dir(filesystem.get_file_info(module_dir.as_posix())):
                    module_ids.add(file_info.base_name)
            info.update({"module_ids": module_ids})

    # Checkpoint is a file: Use as-is (interpreting it as old checkpoint version,
    # whose state file lives directly inside its parent dir).
    elif (
        _exists_at_fs_path(filesystem, checkpoint.as_posix())
        and filesystem.get_file_info(checkpoint.as_posix()).is_file
    ):
        info.update(
            {
                "checkpoint_version": version.Version("0.1"),
                "checkpoint_dir": str(checkpoint.parent),
                "state_file": str(checkpoint),
            }
        )

    else:
        raise ValueError(
            f"Given checkpoint ({str(checkpoint)}) not found! Must be a "
            "checkpoint directory (or a file for older checkpoint versions)."
        )

    return info
| |
|
| |
|
@PublicAPI(stability="beta")
def convert_to_msgpack_checkpoint(
    checkpoint: Union[str, Checkpoint],
    msgpack_checkpoint_dir: str,
) -> str:
    """Converts an Algorithm checkpoint (pickle based) to a msgpack based one.

    Msgpack has the advantage of being python version independent.

    Args:
        checkpoint: The directory, in which to find the Algorithm checkpoint (pickle
            based).
        msgpack_checkpoint_dir: The directory, in which to create the new msgpack
            based checkpoint.

    Returns:
        The directory in which the msgpack checkpoint has been created. Note that
        this is the same as `msgpack_checkpoint_dir`.
    """
    from ray.rllib.algorithms import Algorithm
    from ray.rllib.algorithms.algorithm_config import AlgorithmConfig
    from ray.rllib.core.rl_module import validate_module_id

    # Try to import msgpack and msgpack_numpy (raise an error, if not installed).
    msgpack = try_import_msgpack(error=True)

    # Restore the Algorithm from the (python version dependent) pickle checkpoint,
    # then grab its full state dict.
    algo = Algorithm.from_checkpoint(checkpoint)
    state = algo.__getstate__()

    # Convert all code/class references in the state into msgpack-serializable data:
    # Serialize the algorithm class itself as a type descriptor.
    state["algorithm_class"] = serialize_type(state["algorithm_class"])
    # Serialize the config (object -> dict; already-dict configs use the static
    # serializer instead).
    if not isinstance(state["config"], dict):
        state["config"] = state["config"].serialize()
    else:
        state["config"] = AlgorithmConfig._serialize_dict(state["config"])

    # Extract policy states from the worker state (Policies get their own
    # checkpoint sub-dirs, written below).
    policy_states = {}
    if "worker" in state and "policy_states" in state["worker"]:
        policy_states = state["worker"].pop("policy_states", {})

    # The policy mapping fn is a callable and thus not msgpack-serializable.
    state["worker"]["policy_mapping_fn"] = NOT_SERIALIZABLE
    # Same for the is-policy-to-train function/flag.
    state["worker"]["is_policy_to_train"] = NOT_SERIALIZABLE

    # Add RLlib checkpoint version (as string) depending on which API stack is used.
    if state["config"]["enable_rl_module_and_learner"]:
        state["checkpoint_version"] = str(CHECKPOINT_VERSION_LEARNER)
    else:
        state["checkpoint_version"] = str(CHECKPOINT_VERSION)

    # Write state (w/o policies) to disk in msgpack format.
    state_file = os.path.join(msgpack_checkpoint_dir, "algorithm_state.msgpck")
    with open(state_file, "wb") as f:
        msgpack.dump(state, f)

    # Write rllib_checkpoint.json (checkpoint metadata read by
    # `get_checkpoint_info()`).
    with open(os.path.join(msgpack_checkpoint_dir, "rllib_checkpoint.json"), "w") as f:
        json.dump(
            {
                "type": "Algorithm",
                "checkpoint_version": state["checkpoint_version"],
                "format": "msgpack",
                "state_file": state_file,
                "policy_ids": list(policy_states.keys()),
                "ray_version": ray.__version__,
                "ray_commit": ray.__commit__,
            },
            f,
        )

    # Write individual policies to disk, each in their own sub-directory under
    # "policies/".
    for pid, policy_state in policy_states.items():
        # From here on, disallow policy IDs that would not work as directory names
        # (raises on invalid IDs).
        validate_module_id(pid, error=True)
        policy_dir = os.path.join(msgpack_checkpoint_dir, "policies", pid)
        os.makedirs(policy_dir, exist_ok=True)
        policy = algo.get_policy(pid)
        policy.export_checkpoint(
            policy_dir,
            policy_state=policy_state,
            checkpoint_format="msgpack",
        )

    # Release the Algorithm's resources (remote workers etc.).
    algo.stop()

    return msgpack_checkpoint_dir
| |
|
| |
|
@PublicAPI(stability="beta")
def convert_to_msgpack_policy_checkpoint(
    policy_checkpoint: Union[str, Checkpoint],
    msgpack_checkpoint_dir: str,
) -> str:
    """Converts a Policy checkpoint (pickle based) to a msgpack based one.

    Msgpack has the advantage of being python version independent.

    Args:
        policy_checkpoint: The directory, in which to find the Policy checkpoint (pickle
            based).
        msgpack_checkpoint_dir: The directory, in which to create the new msgpack
            based checkpoint.

    Returns:
        The directory in which the msgpack checkpoint has been created. Note that
        this is the same as `msgpack_checkpoint_dir`.
    """
    from ray.rllib.policy.policy import Policy

    # Load the (pickle-based) Policy checkpoint into a live Policy object.
    restored_policy = Policy.from_checkpoint(policy_checkpoint)

    # Make sure the target directory exists, then re-export the Policy's state
    # there using the msgpack format.
    os.makedirs(msgpack_checkpoint_dir, exist_ok=True)
    restored_policy.export_checkpoint(
        msgpack_checkpoint_dir,
        policy_state=restored_policy.get_state(),
        checkpoint_format="msgpack",
    )

    # Drop the restored Policy to free its resources.
    del restored_policy

    return msgpack_checkpoint_dir
| |
|
| |
|
@PublicAPI
def try_import_msgpack(error: bool = False):
    """Tries importing msgpack and msgpack_numpy and returns the patched msgpack module.

    Returns None if error is False and msgpack or msgpack_numpy is not installed.
    Raises an error, if error is True and the modules could not be imported.

    Args:
        error: Whether to raise an error if msgpack/msgpack_numpy cannot be imported.

    Returns:
        The `msgpack` module.

    Raises:
        ImportError: If error=True and msgpack/msgpack_numpy is not installed.
    """
    try:
        import msgpack
        import msgpack_numpy

        # Patch msgpack to be able to (de)serialize numpy arrays transparently.
        msgpack_numpy.patch()

        return msgpack

    except Exception as e:
        if error:
            # Chain the original exception (`from e`) so the root cause (e.g. a
            # broken msgpack_numpy install, not just a missing package) stays
            # visible in the traceback.
            raise ImportError(
                "Could not import or setup msgpack and msgpack_numpy! "
                "Try running `pip install msgpack msgpack_numpy` first."
            ) from e
        # Soft-fail mode: explicitly return None (was previously implicit).
        return None
|