Quentin Gallouédec committed
Commit • 0ba907f
1 Parent(s): 7aaecd0
Initial commit
Browse files
- .gitattributes +1 -0
- README.md +71 -0
- args.yml +83 -0
- config.yml +11 -0
- ddpg-Ant-v3.zip +3 -0
- ddpg-Ant-v3/_stable_baselines3_version +1 -0
- ddpg-Ant-v3/actor.optimizer.pth +3 -0
- ddpg-Ant-v3/critic.optimizer.pth +3 -0
- ddpg-Ant-v3/data +120 -0
- ddpg-Ant-v3/policy.pth +3 -0
- ddpg-Ant-v3/pytorch_variables.pth +3 -0
- ddpg-Ant-v3/system_info.txt +7 -0
- env_kwargs.yml +1 -0
- replay.mp4 +3 -0
- results.json +1 -0
- train_eval_metrics.zip +3 -0
.gitattributes
CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.mp4 filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,71 @@
+---
+library_name: stable-baselines3
+tags:
+- Ant-v3
+- deep-reinforcement-learning
+- reinforcement-learning
+- stable-baselines3
+model-index:
+- name: DDPG
+  results:
+  - task:
+      type: reinforcement-learning
+      name: reinforcement-learning
+    dataset:
+      name: Ant-v3
+      type: Ant-v3
+    metrics:
+    - type: mean_reward
+      value: -1574.13 +/- 1098.44
+      name: mean_reward
+      verified: false
+---
+
+# **DDPG** Agent playing **Ant-v3**
+This is a trained model of a **DDPG** agent playing **Ant-v3**
+using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3)
+and the [RL Zoo](https://github.com/DLR-RM/rl-baselines3-zoo).
+
+The RL Zoo is a training framework for Stable Baselines3
+reinforcement learning agents,
+with hyperparameter optimization and pre-trained agents included.
+
+## Usage (with SB3 RL Zoo)
+
+RL Zoo: https://github.com/DLR-RM/rl-baselines3-zoo<br/>
+SB3: https://github.com/DLR-RM/stable-baselines3<br/>
+SB3 Contrib: https://github.com/Stable-Baselines-Team/stable-baselines3-contrib
+
+Install the RL Zoo (with SB3 and SB3-Contrib):
+```bash
+pip install rl_zoo3
+```
+
+```
+# Download model and save it into the logs/ folder
+python -m rl_zoo3.load_from_hub --algo ddpg --env Ant-v3 -orga qgallouedec -f logs/
+python -m rl_zoo3.enjoy --algo ddpg --env Ant-v3 -f logs/
+```
+
+If you installed the RL Zoo3 via pip (`pip install rl_zoo3`), from anywhere you can do:
+```
+python -m rl_zoo3.load_from_hub --algo ddpg --env Ant-v3 -orga qgallouedec -f logs/
+python -m rl_zoo3.enjoy --algo ddpg --env Ant-v3 -f logs/
+```
+
+## Training (with the RL Zoo)
+```
+python -m rl_zoo3.train --algo ddpg --env Ant-v3 -f logs/
+# Upload the model and generate video (when possible)
+python -m rl_zoo3.push_to_hub --algo ddpg --env Ant-v3 -f logs/ -orga qgallouedec
+```
+
+## Hyperparameters
+```python
+OrderedDict([('learning_starts', 10000),
+             ('n_timesteps', 1000000.0),
+             ('noise_std', 0.1),
+             ('noise_type', 'normal'),
+             ('policy', 'MlpPolicy'),
+             ('normalize', False)])
+```
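The RL Zoo commands above are the intended workflow; the checkpoint can also be loaded directly with stable-baselines3. A minimal sketch, assuming `ddpg-Ant-v3.zip` from this repository has been downloaded locally and a MuJoCo-enabled Gym (0.21, per system_info.txt below) is installed; the local path is an assumption, not part of this commit:

```python
import gym
from stable_baselines3 import DDPG

# Local path is an assumption: wherever ddpg-Ant-v3.zip was downloaded to.
model = DDPG.load("ddpg-Ant-v3.zip")

# Ant-v3 requires a working MuJoCo setup.
env = gym.make("Ant-v3")

obs = env.reset()
done = False
total_reward = 0.0
while not done:
    # Deterministic actions, matching the evaluation in results.json.
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
    total_reward += reward
print(f"episode return: {total_reward:.1f}")
```
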
args.yml
ADDED
@@ -0,0 +1,83 @@
+!!python/object/apply:collections.OrderedDict
+- - - algo
+    - ddpg
+  - - conf_file
+    - null
+  - - device
+    - auto
+  - - env
+    - Ant-v3
+  - - env_kwargs
+    - null
+  - - eval_episodes
+    - 20
+  - - eval_freq
+    - 25000
+  - - gym_packages
+    - []
+  - - hyperparams
+    - null
+  - - log_folder
+    - logs
+  - - log_interval
+    - -1
+  - - max_total_trials
+    - null
+  - - n_eval_envs
+    - 5
+  - - n_evaluations
+    - null
+  - - n_jobs
+    - 1
+  - - n_startup_trials
+    - 10
+  - - n_timesteps
+    - -1
+  - - n_trials
+    - 500
+  - - no_optim_plots
+    - false
+  - - num_threads
+    - -1
+  - - optimization_log_path
+    - null
+  - - optimize_hyperparameters
+    - false
+  - - progress
+    - false
+  - - pruner
+    - median
+  - - sampler
+    - tpe
+  - - save_freq
+    - -1
+  - - save_replay_buffer
+    - false
+  - - seed
+    - 3493092442
+  - - storage
+    - null
+  - - study_name
+    - null
+  - - tensorboard_log
+    - runs/Ant-v3__ddpg__3493092442__1676842626
+  - - track
+    - true
+  - - trained_agent
+    - ''
+  - - truncate_last_trajectory
+    - true
+  - - uuid
+    - false
+  - - vec_env
+    - dummy
+  - - verbose
+    - 1
+  - - wandb_entity
+    - openrlbenchmark
+  - - wandb_project_name
+    - sb3
+  - - wandb_tags
+    - []
+  - - yaml_file
+    - null
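Note: args.yml (and config.yml below) are dumped with a Python-specific tag (`!!python/object/apply:collections.OrderedDict`), so `yaml.safe_load` will refuse to parse them. A minimal sketch of reading the file back with PyYAML's unsafe loader (only appropriate for files you trust; the local path is an assumption):

```python
import yaml

# The !!python/object/apply tag requires the unsafe loader,
# which is allowed to call Python constructors while parsing.
with open("args.yml") as f:
    args = yaml.unsafe_load(f)

# Values below are the ones recorded in this file.
print(args["algo"], args["env"], args["seed"])  # ddpg Ant-v3 3493092442
```
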
config.yml
ADDED
@@ -0,0 +1,11 @@
+!!python/object/apply:collections.OrderedDict
+- - - learning_starts
+    - 10000
+  - - n_timesteps
+    - 1000000.0
+  - - noise_std
+    - 0.1
+  - - noise_type
+    - normal
+  - - policy
+    - MlpPolicy
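For reference, a hand-written sketch of how these hyperparameters map onto a stable-baselines3 `DDPG` constructor; this is not the RL Zoo's actual code, and the environment setup is an assumption. `noise_type: normal` with `noise_std: 0.1` becomes a `NormalActionNoise` over the 8-dimensional Ant-v3 action space, and `n_timesteps` is the budget passed to `learn()`:

```python
import gym
import numpy as np
from stable_baselines3 import DDPG
from stable_baselines3.common.noise import NormalActionNoise

env = gym.make("Ant-v3")
n_actions = env.action_space.shape[0]  # 8 for Ant-v3

# noise_type: normal, noise_std: 0.1  ->  Gaussian exploration noise
action_noise = NormalActionNoise(mean=np.zeros(n_actions),
                                 sigma=0.1 * np.ones(n_actions))

model = DDPG(
    "MlpPolicy",             # policy: MlpPolicy
    env,
    learning_starts=10_000,  # learning_starts: 10000
    action_noise=action_noise,
    verbose=1,
)
# n_timesteps: 1000000.0 is the training budget
model.learn(total_timesteps=1_000_000)
```
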
ddpg-Ant-v3.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5380c4f79347c0b030b6f6b2996f6db0dc04b7653e4ac2c805eb8d617e683418
+size 5421823
ddpg-Ant-v3/_stable_baselines3_version
ADDED
@@ -0,0 +1 @@
+1.8.0a6
ddpg-Ant-v3/actor.optimizer.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:218004ba612f067aa381f1aef82c7637b62df9de2a31d6e97b4487ccfbfbdbdd
+size 1344943
ddpg-Ant-v3/critic.optimizer.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2509d15ad8b54053f74c3d38582a13e43fca5d331f515d62a20bb7386550830
+size 1353647
ddpg-Ant-v3/data
ADDED
@@ -0,0 +1,120 @@
1 |
+
{
|
2 |
+
"policy_class": {
|
3 |
+
":type:": "<class 'abc.ABCMeta'>",
|
4 |
+
":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLnRkMy5wb2xpY2llc5SMCVREM1BvbGljeZSTlC4=",
|
5 |
+
"__module__": "stable_baselines3.td3.policies",
|
6 |
+
"__doc__": "\n Policy class (with both actor and critic) for TD3.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n :param n_critics: Number of critic networks to create.\n :param share_features_extractor: Whether to share or not the features extractor\n between the actor and the critic (this saves computation time)\n ",
|
7 |
+
"__init__": "<function TD3Policy.__init__ at 0x7f76a5730af0>",
|
8 |
+
"_build": "<function TD3Policy._build at 0x7f76a5730b80>",
|
9 |
+
"_get_constructor_parameters": "<function TD3Policy._get_constructor_parameters at 0x7f76a5730c10>",
|
10 |
+
"make_actor": "<function TD3Policy.make_actor at 0x7f76a5730ca0>",
|
11 |
+
"make_critic": "<function TD3Policy.make_critic at 0x7f76a5730d30>",
|
12 |
+
"forward": "<function TD3Policy.forward at 0x7f76a5730dc0>",
|
13 |
+
"_predict": "<function TD3Policy._predict at 0x7f76a5730e50>",
|
14 |
+
"set_training_mode": "<function TD3Policy.set_training_mode at 0x7f76a5730ee0>",
|
15 |
+
"__abstractmethods__": "frozenset()",
|
16 |
+
"_abc_impl": "<_abc._abc_data object at 0x7f76a5b49d40>"
|
17 |
+
},
|
18 |
+
"verbose": 1,
|
19 |
+
"policy_kwargs": {
|
20 |
+
"n_critics": 1
|
21 |
+
},
|
22 |
+
"observation_space": {
|
23 |
+
":type:": "<class 'gym.spaces.box.Box'>",
|
24 |
+
":serialized:": "gAWVHQkAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLb4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWeAMAAAAAAAAAAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P+UaApLb4WUjAFDlHSUUpSMBGhpZ2iUaBIolngDAAAAAAAAAAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/lGgKS2+FlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWbwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLb4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJZvAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJRoIUtvhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==",
|
25 |
+
"dtype": "float64",
|
26 |
+
"_shape": [
|
27 |
+
111
|
28 |
+
],
|
29 |
+
"low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]",
|
30 |
+
"high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf]",
|
31 |
+
"bounded_below": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False]",
|
32 |
+
"bounded_above": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False]",
|
33 |
+
"_np_random": null
|
34 |
+
},
|
35 |
+
"action_space": {
|
36 |
+
":type:": "<class 'gym.spaces.box.Box'>",
|
37 |
+
":serialized:": "gAWVSgwAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL8AAIC/AACAv5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIA/AACAPwAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAQEBAQEBAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAEBAQEBAQEBlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZSMFG51bXB5LnJhbmRvbS5fcGlja2xllIwSX19yYW5kb21zdGF0ZV9jdG9ylJOUjAdNVDE5OTM3lGgtjBRfX2JpdF9nZW5lcmF0b3JfY3RvcpSTlIaUUpR9lCiMDWJpdF9nZW5lcmF0b3KUjAdNVDE5OTM3lIwFc3RhdGWUfZQojANrZXmUaBIolsAJAAAAAAAAAAAAgFPCs5yHA7WTcuyrW+jmsvLPtoHa1QbvYaExTaBrtczZE+YYn3SceS/IpRuAxHkBSZ4PQ+Rd4BiTkhNVzNRKKWTSCJW+NNCeRHJ6A/Ctvwpzm3s+6WCxBtp1A0ULbn3WFmrrBDRmg7fz9aUvae9CY0O1XPfCd1LMRkQ3LQiJbtCOrnf7GLaAT3ue+U8y7BLYuY5ehakZyq49di1nK0KAnsXuxx/1IgFdS88kD8wZUmREV5hwnQr1Ehe16VreO8T/Qc86sV+2h90z2FiJqqLNf3s/yZS3bA9DHzGZfRtgkKu3Bx0ZTN7I+466APXPqRreJf4gBqUW/NB248FO9cpD0wFaed9QV63NqpiFOs/RYeEwD8e32dZFRfi9SK5aLvuApJMu2LOfZweypHIkyPjeY5W+tsm2bdPmVoCAOiyi73cf5k0LQeJNWqZU/wuy/f8myghZ9qrjf+2JSJMaB9VNMXWmxuq4Dq0fkhzJr1ML7SgftfSG29O5koFUMozQL58gAzyX96ZMcpWbZ+3/zlaVhGln5egXC8MtIK6xIFCvh/vD/F1jLgYIp14MM597MuPmTpa+OaKek7bql9Cp8/0skhg5QSCvCaijm8wenxrfqLyRxDPCpS+L3isJC5LrjFgWnsdxQXVrJK8uaobJcTIJ5NrYYfA2l5gH27iPI9EqhzFtZJXiE4vXpH18f3kouYV9RowPzOtmYsbmstR/Mx/VY7E0XBmnMidL4dYTUXgxaDbFxWy3y6miL2yw0I2O09vPWV7LqbwMbthlU26lrLfnJDz88B+7y4pFCwvsHgCsMWq6pvroAF4Ms+++JnhzrL0GLrtfJ7667p42Vg78GirmKIRMFH0p6aLRPV4V/fclE3PLBj0InU315M5v7fDFj/IreJeFUhIAy5/BlvLdgwIfBMBWvyjhqGRBo2KmdiDAO9BPhdi6oGmZUCxTDjUyMd8rjeRdntTE+L9sHQUSvUfpRjKcSIjZXcjsMMSwwU0QzIXewD4nZg8EAZ72iHnChWveW7cB5EQRFE+YPvvyNAw4OvORF+DjDtmLUz6jNz0JbkuKbhIQEEVbpDRkoeIGA4HATymZeE4TX/hvhOxJfX5liXD9/Uon8OjJ/hhIrDNUYR2NOV2R1L+VuK/JA9o0izGPiCC9vi7UK1u4re8tvPrBInC6BH2DpMvWSRepdH0frFxGsH/kRB/S77USeMs38a+yorDaa9Wsb9WnuQg10vl087jhvwSZx77fGq52NV1h/UnZAjnqJOjCPBgQrY0wsSeSW6SOSdItAOupiWkVvSzHz+gopd/3FUUPbjdik7Vd6O6ycGOFwp4+wyZqq1MKHww5exJND6De8NB4fGBzsvKoT8O1fAC3Y2Z+3mLYwkCJXULx2zNSO90bQi0L0BhTF1AidNtqRkak72V+V1kos6m4F+kyZiOiwxfcHKWHmpQe3A8HpX6nacx0Zso0WyF/W/sOLo//2g0YD9koIjgKvw//f9Em4CvxNBUMnJYTVQJGysF4QMShnM6byaMNJhRVALf+X0+SgLJkEwLfKdBbKxjA035OEc2YKmUpvooVUUt+9U8d7cKRgNYKKPwCHyYfLJSQt4ZEdThgjeywDxgsGzPax5SklOLroSYn39feofatZDzJXevfPTHyi1ZLwpe6Hwkbqz1FuOnHiShPedbEA9b/HCtBytSZrgUwlwHpQlAiBTSxpN7TFzVZrrhRd6N8W+FeE9SAGCHwXchFR4SCGThPyRGO/XWkMPJ42BLUOmMGepDQgWH646tjoJSE3EXcA5iDS+Nq6Oh521oC2UPAnvxIj1QbVh8IbxlBytRTTjc0c14E9cyhIPlgIoHriuEFSMXSEzOGL1MmL6UCbiXfsRg9Z6OwWCCl3VeGg5bEZ1kjJkvs08k7wtPk4ATAjaTL3QoY2gf106zFbJtL4D5gmLMJ9OuzE2Fn5uaAqqpjXIqqEXxS9jtpsRU9VTHCg68RwXQVIUhuVJgHq8fOigBMrW7Am5+jjo/GNNlcFcp813dFiXy4qHhjGSEjNXp3ln03NZkOgqXQ8SalJlPOvyrAS9wW9EtjQKhcrBSWSsQ8C3o/Mc/sR/CMRB85ZIojR/tiCKtOutxQMIusIOnYHK8g6kPpTQ8J/PfJ8pa3GEoYoA67axQTXsysd2Y6ZDwpz1HkAeISVK2AlCcuQssrQv8dVLAcins/2kjRFp5Vp82HSX9j6Ci9GH5mkdyqV84vWsdwRz4JNXHZoHVZKnrxSdA1HUwRUI/5oWiqnGk9KyiS4Mv9dQIVluJ1+/pAHYEdG9YgLHiNE2zA7aIQbqqlGX6jH1CrHSPL9mnlHdPiKjozwRXu02UQuzlGJn+/PUkU6cPYLLeLc8e7S2qfCZxbdpHioand7wYKqb5bMb8dA3Dwvm6P6iJXSogJ+Q+0z43li8ydYJqZNZlSjsljr/2c5UU33vMhLDLEXIHT8WzrsS45TsKNbhYfTYx6Ds/8W5yOtiOPWc0+fRlAjbQC++FKo5UaMl2eRCxI4U5/heX2HxJGNk50rICjuFsG/8Q+NUuJgS+y3FMhe+sm1e2MdC+ldkBqEn8oRxajECbP4Wizz1tfJliW/1A5fdGTfUxM3HV72bJgnqswmoAumAx6d36KfuZwEEp0/wcrdo+8/unJ5f2mYeqCrOcaxDJrs6SxW1zVaH/YTZl+RNA0NjPLgaqnlveaes/MkpzsVEQDtvKGFrG1cnmGjZVi2azrSDGQ0Y423nEksDC5awYcOJmVYbeA4DkMSNfj+7Dx2S
zH/PVPuXLX9aw9K9QF4Ml48zsSrwsVjIa8+gIdffs2pf2wCKcPtFez6vOT0UDuFHknJjMDg0fI3DnyC7jJqO8V4XpmPyarTp3JJRGhmqTHhpZInn70JMfS+RFry5+rLSOM0T+KWV8fYjs9eyCiZijlR4AiADooXm9G8JIzZCLZX2Dty83iyz7gQzSxYO7ULuTT1stvGuJwbBP4LMhLXkbxdhAmBSDiYNOnc3O+yFsO6Ps9UOQD8S4Pbr8hZ4mFjbicpO635SwpmHINYDeuewln3/GHz69LpCjmpnKPeF9ZxXcq6MR4kJUV2j/dQzqjLniNaQmrMkULdI7W1sMXRFcsz9xs1GVwVqmtMVws8HtvXMYNmosCrrgAFX2ghPz7dXCV6vML5YhfNbDAzzG6MHffrslrhMav3vtlt8Fnld4VaH6IhMkowayT1lSVvfvlKHCWwtKaTcOZrR5LZGalJOpFbVIFUOAo+LnY/25bmc3KloyLzgiTudjPsXEGPNPBIvE/5cMEvU4Lrs0N3tCke4abYDXF9f14QrwLlGgHjAJ1NJSJiIeUUpQoSwNoC05OTkr/////Sv////9LAHSUYk1wAoWUaBV0lFKUjANwb3OUTXACdYwJaGFzX2dhdXNzlEsAjAVnYXVzc5RHAAAAAAAAAAB1YnViLg==",
|
38 |
+
"dtype": "float32",
|
39 |
+
"_shape": [
|
40 |
+
8
|
41 |
+
],
|
42 |
+
"low": "[-1. -1. -1. -1. -1. -1. -1. -1.]",
|
43 |
+
"high": "[1. 1. 1. 1. 1. 1. 1. 1.]",
|
44 |
+
"bounded_below": "[ True True True True True True True True]",
|
45 |
+
"bounded_above": "[ True True True True True True True True]",
|
46 |
+
"_np_random": "RandomState(MT19937)"
|
47 |
+
},
|
48 |
+
"n_envs": 1,
|
49 |
+
"num_timesteps": 1000219,
|
50 |
+
"_total_timesteps": 1000000,
|
51 |
+
"_num_timesteps_at_start": 0,
|
52 |
+
"seed": 0,
|
53 |
+
"action_noise": {
|
54 |
+
":type:": "<class 'stable_baselines3.common.noise.NormalActionNoise'>",
|
55 |
+
":serialized:": "gAWVWgEAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5ub2lzZZSMEU5vcm1hbEFjdGlvbk5vaXNllJOUKYGUfZQojANfbXWUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwGX3NpZ21hlGgIKJZAAAAAAAAAAJqZmZmZmbk/mpmZmZmZuT+amZmZmZm5P5qZmZmZmbk/mpmZmZmZuT+amZmZmZm5P5qZmZmZmbk/mpmZmZmZuT+UaA9LCIWUaBN0lFKUdWIu",
|
56 |
+
"_mu": "[0. 0. 0. 0. 0. 0. 0. 0.]",
|
57 |
+
"_sigma": "[0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1]"
|
58 |
+
},
|
59 |
+
"start_time": 1676842629622587369,
|
60 |
+
"learning_rate": 0.001,
|
61 |
+
"tensorboard_log": "runs/Ant-v3__ddpg__3493092442__1676842626/Ant-v3",
|
62 |
+
"lr_schedule": {
|
63 |
+
":type:": "<class 'function'>",
|
64 |
+
":serialized:": "gAWVvQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMRS9ob21lL3FnYWxsb3VlZGVjL3N0YWJsZS1iYXNlbGluZXMzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMRS9ob21lL3FnYWxsb3VlZGVjL3N0YWJsZS1iYXNlbGluZXMzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/UGJN0vGp/IWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
|
65 |
+
},
|
66 |
+
"_last_obs": null,
|
67 |
+
"_last_episode_starts": {
|
68 |
+
":type:": "<class 'numpy.ndarray'>",
|
69 |
+
":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
|
70 |
+
},
|
71 |
+
"_last_original_obs": {
|
72 |
+
":type:": "<class 'numpy.ndarray'>",
|
73 |
+
":serialized:": "gAWV7QMAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJZ4AwAAAAAAAFf+70PJX+A/1BvE+uPkyz84p5Yz9DS1vxLNeWe2Gq2/WcymqaMQ778VwZ22KNPgv46S0GrikvM/YLzGatLW4D8CD9V5mKvgv/aWmWAu2eA/Fl8gu+qp4L9mzwaTMNjgP+kAHfsnkPM/OkSw4s5fgz8AAqaQG7aAPzXnqXT46mM/5OqgfjNijj8uQEqa/pmDv3DQuYpk91e/k1e8ZcEJYb+8HcXYVGJ5PzA1iiFmlFe/E+eIlZhsZ79di4GcVmVVP2yf0owNA3I/EvvZMOp9Y7+eGsyJ7sKUvwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJSMBW51bXB5lIwFZHR5cGWUk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGJLAUtvhpSMAUOUdJRSlC4="
|
74 |
+
},
|
75 |
+
"_episode_num": 2010,
|
76 |
+
"use_sde": false,
|
77 |
+
"sde_sample_freq": -1,
|
78 |
+
"_current_progress_remaining": -0.00021899999999996922,
|
79 |
+
"ep_info_buffer": {
|
80 |
+
":type:": "<class 'collections.deque'>",
|
81 |
+
":serialized:": "gAWVahAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIfevDeqNESMCUhpRSlIwBbJRLHYwBdJRHQLNuicmBvrJ1fZQoaAZoCWgPQwhK7rCJzHtiwJSGlFKUaBVLOWgWR0Czbra0QbuMdX2UKGgGaAloD0MItp4hHNPHmcCUhpRSlGgVTegDaBZHQLNv4Hs1KoR1fZQoaAZoCWgPQwiU3czoB4+dwJSGlFKUaBVN6ANoFkdAs3UJJDmbLHV9lChoBmgJaA9DCB8sY0M3q0HAlIaUUpRoFUsRaBZHQLN5RlS0jTt1fZQoaAZoCWgPQwjA0CNGb9uPwJSGlFKUaBVN6ANoFkdAs4NYoqkM1HV9lChoBmgJaA9DCH4CKEaGw57AlIaUUpRoFU3oA2gWR0CziH0jopx4dX2UKGgGaAloD0MI0NVW7C9gYMCUhpRSlGgVS0hoFkdAs4zBK02LpHV9lChoBmgJaA9DCHE486vZbqHAlIaUUpRoFU3oA2gWR0CzjfLZezD5dX2UKGgGaAloD0MIa/C+KhetUcCUhpRSlGgVS0doFkdAs5JAqoZQ53V9lChoBmgJaA9DCBedLLVuiaTAlIaUUpRoFU3oA2gWR0Czk32fK6nSdX2UKGgGaAloD0MIIXNlUG2ul8CUhpRSlGgVTegDaBZHQLOY0704BFN1fZQoaAZoCWgPQwggJAuYIMSYwJSGlFKUaBVN6ANoFkdAs54J2A5Jb3V9lChoBmgJaA9DCN7oYz4A56HAlIaUUpRoFU3oA2gWR0Czo0xkupS8dX2UKGgGaAloD0MIlrGhm/2xbsCUhpRSlGgVS2xoFkdAs6eewt8NQXV9lChoBmgJaA9DCD+p9un4ZW3AlIaUUpRoFUtiaBZHQLOoK3Zf2K51fZQoaAZoCWgPQwgiVKnZUx6RwJSGlFKUaBVN6ANoFkdAs6l7Ggi/wnV9lChoBmgJaA9DCCC4yhNoeJnAlIaUUpRoFU3oA2gWR0Czrq86aLGadX2UKGgGaAloD0MIsARSYlfjk8CUhpRSlGgVTegDaBZHQLOz42oNutR1fZQoaAZoCWgPQwgk7rH0MYudwJSGlFKUaBVN6ANoFkdAs7kZ5a/yoXV9lChoBmgJaA9DCKTk1TkmZ5LAlIaUUpRoFU3oA2gWR0CzvkE2tMfzdX2UKGgGaAloD0MID0bsE6BansCUhpRSlGgVTegDaBZHQLPDeOd5IH11fZQoaAZoCWgPQwhq2VpfJI19wJSGlFKUaBVNfAFoFkdAs8gb1wo9cXV9lChoBmgJaA9DCAd7E0NSPpzAlIaUUpRoFU3oA2gWR0Czyqt4zJp4dX2UKGgGaAloD0MID+1jBd9wmMCUhpRSlGgVTegDaBZHQLPP29G7SRd1fZQoaAZoCWgPQwhGlPYGH8WIwJSGlFKUaBVN6ANoFkdAs9UTVc2R73V9lChoBmgJaA9DCL03hgAgBXXAlIaUUpRoFU3oA2gWR0Cz2jv3N9pidX2UKGgGaAloD0MIwZFAg6VhosCUhpRSlGgVTegDaBZHQLPfejWkJrt1fZQoaAZoCWgPQwhfmiLAaetiwJSGlFKUaBVLd2gWR0Cz49qAWi1zdX2UKGgGaAloD0MIGOyGbfv6kMCUhpRSlGgVTegDaBZHQLPlTmPYFq11fZQoaAZoCWgPQwjpK0gzRoGXwJSGlFKUaBVN6ANoFkdAs+p4F8ohIXV9lChoBmgJaA9DCH7k1qQ7eqPAlIaUUpRoFU3oA2gWR0Cz76ZlrdnCdX2UKGgGaAloD0MIwXKEDBRTlMCUhpRSlGgVTegDaBZHQLP03/T9bX91fZQoaAZoCWgPQwj4bB0cnOyXwJSGlFKUaBVN6ANoFkdAs/oaGWUr1HV9lChoBmgJaA9DCBL3WPpw/47AlIaUUpRoFU3oA2gWR0Cz/0tM0xdqdX2UKGgGaAloD0MI8Ui8PP3sjsCUhpRSlGgVTegDaBZHQLQOBzXz19R1fZQoaAZoCWgPQwh/aVGf5L5RwJSGlFKUaBVLMmgWR0C0EltpVS4wdX2UKGgGaAloD0MIC9C2mnXGPcCUhpRSlGgVSxJoFkdAtBKV35eqrHV9lChoBmgJaA9DCLr4256gO2XAlIaUUpRoFUtdaBZHQLQSvvYvnKZ1fZQoaAZoCWgPQwi29GiqB0WXwJSGlFKUaBVN6ANoFkdAtBQQosqaw3V9lChoBmgJaA9DCLRWtDl+DpPAlIaUUpRoFU3oA2gWR0C0GTVNg0CSdX2UKGgGaAloD0MIzO80maFynMCUhpRSlGgVTegDaBZHQLQea2lVLjB1fZQoaAZoCWgPQwjY1eQpy8KbwJSGlFKUaBVN6ANoFkdAtCOUzyjHn3V9lChoBmgJaA9DCI/66xX2aZDAlIaUUpRoFU3oA2gWR0C0KLMERraedX2UKGgGaAloD0MIA9L+B8ifocCUhpRSlGgVTegDaBZHQLQt4WFN+LF1fZQoaAZoCWgPQwhhi90+qzZbwJSGlFKUaBVLRmgWR0C0MlRk3CKrdX2UKGgGaAloD0MIIQa69jV+l8CUhpRSlGgVTegDaBZHQLQzhgeii7F1fZQoaAZoCWgPQwjGouns5Bl1wJSGlFKUaBVN6ANoFkdAtDi+wJPZZnV9lChoBmgJaA9DCLE1W3kpn5DAlIaUUpRoFU3oA2gWR0C0Pe6WcBludX2UKGgGaAloD0MIntMs0O4cVcCUhpRSlGgVSy1oFkdAtEJAMoc7yXV9lChoBmgJaA9DCMxh9x3DklbAlIaUUpRoFUsyaBZHQLRCfZi/fwZ1fZQoaAZoCWgPQwhTymslhLWewJSGlFKUaBVN6ANoFkdAtEOcfwI+n3V9lChoBmgJaA9DCEfmkT/IKJ7AlIaUUpRoFU3oA2gWR0C0SMpXIU8FdX2UKGgGaAloD0MIEwoRcOgzncCUhpRSlGgVTegDaBZHQLRN+ckdFOR1fZQoaAZoCWgPQwi2SxsOw06gwJSGlFKUaBVN6ANoFkdAtFMSLehwl3V9lChoBmgJaA9DCBqH+l1YbG7AlIaUUpRoFUtraBZHQLRXa6jWTX91fZQoaAZoCWgPQwgVG/M6ooycwJSGlFKUaBVN6ANoFkdAtFjDq4YrKHV9lChoBmgJaA9DCFwhrMYSSWLAlIaUUpRoFUtHaBZHQLRdANZvDP51fZQoaAZoCWgPQwhnLJrOTj40wJSGlFKUaBVLEWgWR0C0XVM8La24dX2UKGgGaAloD0MI7Sx6p6Kcl8CUhpRSlGgVTegDaBZHQLReT2mHgxd1fZQoaAZoCWgPQwgZOKCl+w+dwJSGlFKUaBVN6ANoFkdAtGNxyYG+snV9lChoBmgJaA9DCJ2ed2PB+2bAlIaUUpRoFUtNaBZHQLRnyp
lz2ex1fZQoaAZoCWgPQwj68gLsgymVwJSGlFKUaBVN6ANoFkdAtGkBXq7iAHV9lChoBmgJaA9DCKK1os0RNJnAlIaUUpRoFU3oA2gWR0C0bj9X1anrdX2UKGgGaAloD0MIzc03oosqpcCUhpRSlGgVTegDaBZHQLRzYVpKzzF1fZQoaAZoCWgPQwiQEVDhCEJjwJSGlFKUaBVLQGgWR0C0d6MhC+lCdX2UKGgGaAloD0MIHCjwTj6TTMCUhpRSlGgVSyBoFkdAtHfvF85S33V9lChoBmgJaA9DCHnqkQa39FHAlIaUUpRoFUszaBZHQLR4HORDCxh1fZQoaAZoCWgPQwgRGVbxRtxHwJSGlFKUaBVLFGgWR0C0eFoixFAndX2UKGgGaAloD0MIKowtBIkXl8CUhpRSlGgVTegDaBZHQLR5VXCj1wp1fZQoaAZoCWgPQwg7ONib2HSIwJSGlFKUaBVN6ANoFkdAtH56bwz+FXV9lChoBmgJaA9DCE/KpIY2cJ7AlIaUUpRoFU3oA2gWR0C0g7L+tKZldX2UKGgGaAloD0MIg4dp37wQg8CUhpRSlGgVTegDaBZHQLSJCTmGM4t1fZQoaAZoCWgPQwhd3hyuNWGPwJSGlFKUaBVN6ANoFkdAtJMHQY1pCnV9lChoBmgJaA9DCOs2qP3WRFTAlIaUUpRoFUsbaBZHQLSXchRIjGF1fZQoaAZoCWgPQwgd6QyM3O6NwJSGlFKUaBVN6ANoFkdAtJh9GUfPonV9lChoBmgJaA9DCAqgGFnijpzAlIaUUpRoFU3oA2gWR0C0ncNOqNp/dX2UKGgGaAloD0MIG0mCcHVzmsCUhpRSlGgVTegDaBZHQLSi7DpTuOV1fZQoaAZoCWgPQwiK6NfWV0yhwJSGlFKUaBVN6ANoFkdAtKgSLWI42nV9lChoBmgJaA9DCKabxCAoDaPAlIaUUpRoFU3oA2gWR0C0rTnC4z7/dX2UKGgGaAloD0MIjX3JxrteocCUhpRSlGgVTegDaBZHQLSyZOavzOJ1fZQoaAZoCWgPQwh4feasT+eawJSGlFKUaBVN6ANoFkdAtLeWDsdDIHV9lChoBmgJaA9DCIm3zr/9Z6DAlIaUUpRoFU3oA2gWR0C0vLxQN0/4dX2UKGgGaAloD0MIBtodUnSqpMCUhpRSlGgVTegDaBZHQLTCGAcDKYB1fZQoaAZoCWgPQwgK8rORK6+iwJSGlFKUaBVN6ANoFkdAtMc6qsEJSnV9lChoBmgJaA9DCI6vPbO8k6TAlIaUUpRoFU3oA2gWR0C0zF6uW8h+dX2UKGgGaAloD0MI86ykFd9ZoMCUhpRSlGgVTegDaBZHQLTRjWtU4rB1fZQoaAZoCWgPQwiasWg6a4ykwJSGlFKUaBVN6ANoFkdAtNaui1y/9HV9lChoBmgJaA9DCHwnZr1o56LAlIaUUpRoFU3oA2gWR0C02+L9ycTbdX2UKGgGaAloD0MIXDgQknWCm8CUhpRSlGgVTegDaBZHQLThENS619h1fZQoaAZoCWgPQwhSJ6CJUECiwJSGlFKUaBVN6ANoFkdAtOY7GEPDpHV9lChoBmgJaA9DCDVEFf6smaPAlIaUUpRoFU3oA2gWR0C0629FfAsTdX2UKGgGaAloD0MIPnsuU7sJpMCUhpRSlGgVTegDaBZHQLTwlbrC3w11fZQoaAZoCWgPQwjBrFCks2ehwJSGlFKUaBVN6ANoFkdAtPXE0Mw1znV9lChoBmgJaA9DCHr83qbPG53AlIaUUpRoFU3oA2gWR0C0+vH9rGipdX2UKGgGaAloD0MIED6UaIE3pMCUhpRSlGgVTegDaBZHQLUAJOI68xt1fZQoaAZoCWgPQwid9/9xIkmZwJSGlFKUaBVN6ANoFkdAtQVj08NhE3V9lChoBmgJaA9DCBRBnIdjs5jAlIaUUpRoFU3oA2gWR0C1Cp41pCa7dX2UKGgGaAloD0MIa0jcY0kwlMCUhpRSlGgVTegDaBZHQLUPwSX+l0p1fZQoaAZoCWgPQwhlVu9we0aYwJSGlFKUaBVN6ANoFkdAtR8Nm/WUbHVlLg=="
|
82 |
+
},
|
83 |
+
"ep_success_buffer": {
|
84 |
+
":type:": "<class 'collections.deque'>",
|
85 |
+
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
|
86 |
+
},
|
87 |
+
"_n_updates": 990260,
|
88 |
+
"buffer_size": 1,
|
89 |
+
"batch_size": 100,
|
90 |
+
"learning_starts": 10000,
|
91 |
+
"tau": 0.005,
|
92 |
+
"gamma": 0.99,
|
93 |
+
"gradient_steps": -1,
|
94 |
+
"optimize_memory_usage": false,
|
95 |
+
"replay_buffer_class": {
|
96 |
+
":type:": "<class 'abc.ABCMeta'>",
|
97 |
+
":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==",
|
98 |
+
"__module__": "stable_baselines3.common.buffers",
|
99 |
+
"__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n Cannot be used in combination with handle_timeout_termination.\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ",
|
100 |
+
"__init__": "<function ReplayBuffer.__init__ at 0x7f76a572d5e0>",
|
101 |
+
"add": "<function ReplayBuffer.add at 0x7f76a572d670>",
|
102 |
+
"sample": "<function ReplayBuffer.sample at 0x7f76a572d700>",
|
103 |
+
"_get_samples": "<function ReplayBuffer._get_samples at 0x7f76a572d790>",
|
104 |
+
"__abstractmethods__": "frozenset()",
|
105 |
+
"_abc_impl": "<_abc._abc_data object at 0x7f76a5724f40>"
|
106 |
+
},
|
107 |
+
"replay_buffer_kwargs": {},
|
108 |
+
"train_freq": {
|
109 |
+
":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>",
|
110 |
+
":serialized:": "gAWVZAAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLAWgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMB2VwaXNvZGWUhZRSlIaUgZQu"
|
111 |
+
},
|
112 |
+
"use_sde_at_warmup": false,
|
113 |
+
"policy_delay": 1,
|
114 |
+
"target_noise_clip": 0.0,
|
115 |
+
"target_policy_noise": 0.1,
|
116 |
+
"actor_batch_norm_stats": [],
|
117 |
+
"critic_batch_norm_stats": [],
|
118 |
+
"actor_batch_norm_stats_target": [],
|
119 |
+
"critic_batch_norm_stats_target": []
|
120 |
+
}
|
ddpg-Ant-v3/policy.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98d947055b174b7e90145b8ed4efcef568f2b34194d968ce499c68437125f6c3
+size 2696797
ddpg-Ant-v3/pytorch_variables.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d030ad8db708280fcae77d87e973102039acd23a11bdecc3db8eb6c0ac940ee1
+size 431
ddpg-Ant-v3/system_info.txt
ADDED
@@ -0,0 +1,7 @@
+- OS: Linux-5.19.0-32-generic-x86_64-with-glibc2.35 # 33~22.04.1-Ubuntu SMP PREEMPT_DYNAMIC Mon Jan 30 17:03:34 UTC 2
+- Python: 3.9.12
+- Stable-Baselines3: 1.8.0a6
+- PyTorch: 1.13.1+cu117
+- GPU Enabled: True
+- Numpy: 1.24.1
+- Gym: 0.21.0
env_kwargs.yml
ADDED
@@ -0,0 +1 @@
+{}
replay.mp4
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f63836cd61649cc539bf88f8f12f76d8f0a16f071c695015c9aa0c12c840f41f
+size 173900
results.json
ADDED
@@ -0,0 +1 @@
+{"mean_reward": -1574.1279051000001, "std_reward": 1098.4375547270602, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-02-28T18:14:38.239287"}
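The figures in results.json (mean and standard deviation of the return over 10 deterministic episodes) can be re-computed with SB3's evaluation helper. A minimal sketch, assuming the checkpoint and a MuJoCo-enabled Gym are available locally; exact numbers will differ unless the evaluation seeding matches:

```python
import gym
from stable_baselines3 import DDPG
from stable_baselines3.common.evaluation import evaluate_policy

model = DDPG.load("ddpg-Ant-v3.zip")  # local path is an assumption
env = gym.make("Ant-v3")

mean_reward, std_reward = evaluate_policy(
    model, env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```
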
train_eval_metrics.zip
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f46bffb83dd479cfb750fa0754a07cb9dea2f3500e90d84f38f305732c1feed
+size 70869
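train_eval_metrics.zip holds the training-time logs pushed by the RL Zoo. A minimal sketch of inspecting it; the presence of an `evaluations.npz` with `timesteps`/`results` keys is an assumption based on the usual SB3 EvalCallback output, not verified from this commit:

```python
import io
import zipfile
import numpy as np

with zipfile.ZipFile("train_eval_metrics.zip") as archive:
    print(archive.namelist())  # see what the RL Zoo actually logged
    # "evaluations.npz" and its keys are assumptions; adjust to
    # whatever namelist() shows.
    data = np.load(io.BytesIO(archive.read("evaluations.npz")))
    # results has shape (n_evaluations, n_eval_episodes)
    print(data["timesteps"][-1], data["results"].mean(axis=1)[-1])
```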