Commit: amend
vmoens committed Sep 10, 2024
1 parent 6aa4b53 commit fdb7a75
Showing 3 changed files with 6 additions and 4 deletions.
1 change: 0 additions & 1 deletion .github/unittest/linux_libs/scripts_gym/environment.yml
@@ -7,7 +7,6 @@ dependencies:
 - pip:
   # Initial version is required to install Atari ROMS in setup_env.sh
   - gym[atari]==0.13
-  - minigrid
   - hypothesis
   - future
   - cloudpickle
2 changes: 2 additions & 0 deletions .github/unittest/linux_olddeps/scripts_gym_0_13/install.sh
@@ -44,6 +44,8 @@
 fi
 
 # Solving circular import: https://stackoverflow.com/questions/75501048/how-to-fix-attributeerror-partially-initialized-module-charset-normalizer-has
 pip install -U charset-normalizer
+# numpy upgrade can be a pain
+pip install "numpy<2.0"
 
 # install tensordict
 if [[ "$RELEASE" == 0 ]]; then
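For context on the pin above: packages compiled against numpy 1.x commonly fail to import under numpy 2.0, so an unpinned upgrade can break this CI environment. A minimal sketch of a runtime guard expressing the same constraint in Python (illustrative only, not part of the commit):

import numpy as np

# Fail fast if the environment drifted past the numpy<2.0 pin from install.sh.
major = int(np.__version__.split(".")[0])
if major >= 2:
    raise RuntimeError(
        f"Expected numpy<2.0 in this CI environment, found {np.__version__}"
    )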
7 changes: 4 additions & 3 deletions torchrl/collectors/collectors.py
@@ -507,7 +507,8 @@ def __init__(
         # Cuda handles sync
         if torch.cuda.is_available():
             self._sync_storage = torch.cuda.synchronize
-        elif torch.backends.mps.is_available():
+        elif torch.backends.mps.is_available() and hasattr(torch, "mps"):
+            # Will break for older PT versions which don't have torch.mps
             self._sync_storage = torch.mps.synchronize
         elif self.storing_device.type == "cpu":
             self._sync_storage = _do_nothing
@@ -521,7 +522,7 @@ def __init__(
         # Cuda handles sync
         if torch.cuda.is_available():
             self._sync_env = torch.cuda.synchronize
-        elif torch.backends.mps.is_available():
+        elif torch.backends.mps.is_available() and hasattr(torch, "mps"):
             self._sync_env = torch.mps.synchronize
         elif self.env_device.type == "cpu":
             self._sync_env = _do_nothing
@@ -534,7 +535,7 @@ def __init__(
         # Cuda handles sync
         if torch.cuda.is_available():
             self._sync_policy = torch.cuda.synchronize
-        elif torch.backends.mps.is_available():
+        elif torch.backends.mps.is_available() and hasattr(torch, "mps"):
             self._sync_policy = torch.mps.synchronize
         elif self.policy_device.type == "cpu":
             self._sync_policy = _do_nothing
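The same hasattr guard is applied to all three sync hooks (_sync_storage, _sync_env, _sync_policy). A self-contained sketch of the selection pattern, assuming a free function pick_sync_fn and a no-op fallback _do_nothing in place of the collector's internals (only the hasattr(torch, "mps") guard comes from this commit):

import torch

def _do_nothing() -> None:
    # Fallback when the target device needs no explicit synchronization (CPU).
    pass

def pick_sync_fn():
    # CUDA exposes synchronize() to block until all queued kernels complete.
    if torch.cuda.is_available():
        return torch.cuda.synchronize
    # torch.mps only exists in newer PyTorch releases, hence the hasattr
    # guard from this commit; older builds would raise AttributeError here.
    if torch.backends.mps.is_available() and hasattr(torch, "mps"):
        return torch.mps.synchronize
    return _do_nothing

sync = pick_sync_fn()
sync()  # device barrier on CUDA/MPS, no-op on CPU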
