IlluminatiPudding committed

Commit 3ef21b6
Parent: 7ecdd49

Initial commit
README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - PandaPickAndPlaceDense-v3
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: A2C
+   results:
+   - task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: PandaPickAndPlaceDense-v3
+       type: PandaPickAndPlaceDense-v3
+     metrics:
+     - type: mean_reward
+       value: -50.00 +/- 0.00
+       name: mean_reward
+       verified: false
+ ---
+
+ # **A2C** Agent playing **PandaPickAndPlaceDense-v3**
+ This is a trained model of an **A2C** agent playing **PandaPickAndPlaceDense-v3**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-baselines3)
+ TODO: Add your code
+
+
+ ```python
+ from stable_baselines3 import ...
+ from huggingface_sb3 import load_from_hub
+
+ ...
+ ```
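The Usage section above is still the card template's TODO. A minimal sketch of what that code could look like, assuming the repo id is `IlluminatiPudding/a2c-PandaPickAndPlaceDense-v3` and that `panda_gym` is installed to register the environment (neither is stated in this commit):

```python
import gymnasium as gym
import panda_gym  # noqa: F401  (assumed dependency; registers PandaPickAndPlaceDense-v3)

from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C

# Download the checkpoint shipped in this commit (the repo id is an assumption).
checkpoint = load_from_hub(
    repo_id="IlluminatiPudding/a2c-PandaPickAndPlaceDense-v3",
    filename="a2c-PandaPickAndPlaceDense-v3.zip",
)
model = A2C.load(checkpoint)

# Roll out one episode with the loaded policy.
env = gym.make("PandaPickAndPlaceDense-v3", render_mode="human")
obs, info = env.reset()
done = False
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, terminated, truncated, info = env.step(action)
    done = terminated or truncated
env.close()
```

Note that the commit also ships a `vec_normalize.pkl`; for a faithful rollout the observation statistics stored there would normally be loaded (for example with `VecNormalize.load`) and applied before calling `predict`.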
a2c-PandaPickAndPlaceDense-v3.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca346d52a478e660d1f99bca839d744de9f258977fd10d0c421e4bfccd5ef0f6
+ size 4464450
a2c-PandaPickAndPlaceDense-v3/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 2.1.0
a2c-PandaPickAndPlaceDense-v3/data ADDED
@@ -0,0 +1,101 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
+ "__module__": "stable_baselines3.common.policies",
+ "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7cad8f027b50>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7cad8ee2db40>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {
+ ":type:": "<class 'dict'>",
+ ":serialized:": "gAWVlgAAAAAAAAB9lCiMCG5ldF9hcmNolF2UKE0AAk0AAmWMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=",
+ "net_arch": [
+ 512,
+ 512
+ ],
+ "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
+ "optimizer_kwargs": {
+ "alpha": 0.99,
+ "eps": 1e-05,
+ "weight_decay": 0
+ }
+ },
+ "num_timesteps": 100000,
+ "_total_timesteps": 100000,
+ "_num_timesteps_at_start": 0,
+ "seed": null,
+ "action_noise": null,
+ "start_time": 1700039537148432949,
+ "learning_rate": 0.01,
+ "tensorboard_log": null,
+ "_last_obs": {
+ ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAHlxvP5pOwL+FV4M9co+oviL5QbxGaYM9eTKLPrZvrr+Fa4M9DBGmPxUIr79OZ4M9lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAFqe1vroNYr+Lh9+98wVwvx/Syj/cY7M9LF+WP9Rajz8XPso/tEKCPTrmOj9phWi/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAACeIey/bK+2PxSygb+omKW/dM6jv52l9L6/moS/HlxvP5pOwL+FV4M9GNGgvCZIMb13Iaq7JnJ+PGZG4ryMp9w8MQeBPNFRh7xETa+7k5U1P2VobL/3h48/Lf6aPusXnL5FvZ+9wH59P3KPqL4i+UG8RmmDPTt7o7ymfzG9gHFlu1oKfDyHSdq8r1jZPBbSIjzHgpy8PnGlu1skX77hu7e+7aR+vzki8T5GAj+9DYqtvWyrhb95Mos+tm+uv4Vrgz0YxKS8IRgxvQ23f7tXMII8/l/ivIyn3DwxB4E80VGHvEl6pbtYO4O/9K4LP7URgr9pffY+OkI0vSgP873kqYS/DBGmPxUIr79OZ4M9OlClvEEPMb3rTcy6s7GGPPqc3byMp9w8RweBPMNRh7xsBJW7lGgOSwRLE4aUaBJ0lFKUdS4=",
+ "achieved_goal": "[[ 0.93499935 -1.5023987 0.06413177]\n [-0.3292194 -0.01183918 0.06416564]\n [ 0.27186945 -1.3627841 0.06416992]\n [ 1.2973952 -1.3674341 0.06416188]]",
+ "desired_goal": "[[-0.3547904 -0.88302195 -0.10914525]\n [-0.9375908 1.5845374 0.08759281]\n [ 1.1747794 1.1199594 1.5800198 ]\n [ 0.06360379 0.7300755 -0.9082857 ]]",
+ "observation": "[[-1.8447759e+00 1.4272285e+00 -1.0132470e+00 -1.2937212e+00\n -1.2797379e+00 -4.7782603e-01 -1.0359725e+00 9.3499935e-01\n -1.5023987e+00 6.4131774e-02 -1.9630954e-02 -4.3281697e-02\n -5.1919776e-03 1.5530145e-02 -2.7621459e-02 2.6935361e-02\n 1.5750499e-02 -1.6518505e-02 -5.3497870e-03]\n [ 7.0931357e-01 -9.2346793e-01 1.1213368e+00 3.0272046e-01\n -3.0486998e-01 -7.7997722e-02 9.9021530e-01 -3.2921940e-01\n -1.1839183e-02 6.4165637e-02 -1.9956222e-02 -4.3334626e-02\n -3.5010278e-03 1.5383327e-02 -2.6646389e-02 2.6531545e-02\n 9.9377837e-03 -1.9105328e-02 -5.0488999e-03]\n [-2.1791212e-01 -3.5885528e-01 -9.9470407e-01 4.7096422e-01\n -4.6633027e-02 -8.4735967e-02 -1.0442939e+00 2.7186945e-01\n -1.3627841e+00 6.4169921e-02 -2.0113036e-02 -4.3235902e-02\n -3.9019019e-03 1.5892191e-02 -2.7633663e-02 2.6935361e-02\n 1.5750499e-02 -1.6518505e-02 -5.0499779e-03]\n [-1.0252485e+00 5.4563832e-01 -1.0161654e+00 4.8142555e-01\n -4.4008471e-02 -1.1868125e-01 -1.0364347e+00 1.2973952e+00\n -1.3674341e+00 6.4161882e-02 -2.0179857e-02 -4.3227438e-02\n -1.5587186e-03 1.6442155e-02 -2.7052391e-02 2.6935361e-02\n 1.5750540e-02 -1.6518479e-02 -4.5476463e-03]]"
+ },
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": {
+ ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAvk1HPVluYr0K16M8VcOKPQ8eu7sK16M81loavDDEyr0K16M8hZGHvXM0Kz0K16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA3mmTPSsr0z16Xxc+HVqWvf0pUD2fMCk+mZ3+vSLPmDvjLXE9ulsWvkXxfTwK16M8lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAvk1HPVluYr0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAAFXDij0PHru7CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAADWWhq8MMTKvQrXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAhZGHvXM0Kz0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=",
+ "achieved_goal": "[[ 0.04865813 -0.05528102 0.02 ]\n [ 0.06775538 -0.00571037 0.02 ]\n [-0.00942107 -0.09900701 0.02 ]\n [-0.06619553 0.04179807 0.02 ]]",
+ "desired_goal": "[[ 0.07197927 0.10310968 0.14782515]\n [-0.07341406 0.05082129 0.16522454]\n [-0.12432403 0.00466336 0.05888165]\n [-0.14683428 0.01549942 0.02 ]]",
+ "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 4.8658125e-02\n -5.5281017e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 6.7755379e-02\n -5.7103704e-03 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -9.4210710e-03\n -9.9007010e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -6.6195525e-02\n 4.1798066e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]]"
+ },
+ "_episode_num": 0,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": 0.0,
+ "_stats_window_size": 100,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwA2z0HyEtd2MAWyUSzKMAXSUR0B2srZOBUaRdX2UKGgGR8ApgWIoE0SAaAdLMmgIR0B2xAzzmOlwdX2UKGgGR8AjYyWRigCfaAdLMmgIR0B2uJ89fTkRdX2UKGgGR8AQUrxy4nWraAdLMmgIR0B2wSt0V8CxdX2UKGgGR8AXcSElE7W/aAdLMmgIR0B2vZ10T101dX2UKGgGR8AexfzBhx5taAdLMmgIR0B2znuJDVpcdX2UKGgGR8ArlDNQj2SMaAdLMmgIR0B2wxIvrWy1dX2UKGgGR8Ag9Ip6QeV+aAdLMmgIR0B2y54/u9eydX2UKGgGR8AjuD0163RYaAdLMmgIR0B2yA4HX2/SdX2UKGgGR8AoUlgtvn8saAdLMmgIR0B22Vvgm7aqdX2UKGgGR8Au/mTTvy9VaAdLMmgIR0B2zfMTviLmdX2UKGgGR8AKlfNRm9QGaAdLMmgIR0B21nin5zo2dX2UKGgGR8AojaTwDvE1aAdLMmgIR0B20u6XjU/fdX2UKGgGR8AmIJ+DvmYCaAdLMmgIR0B25NuHerMldX2UKGgGR8AbcedTYNAkaAdLMmgIR0B22Xps41gqdX2UKGgGR8AE+cOLBKtgaAdLMmgIR0B24gb3oLXudX2UKGgGR8AtjfJFLFn7aAdLMmgIR0B23nvNNahYdX2UKGgGR8AlCNtqHoHLaAdLMmgIR0B28CIuXeFddX2UKGgGR8AXsWKuSwGGaAdLMmgIR0B25LfO2RaHdX2UKGgGR8AfWONo8IRiaAdLMmgIR0B27UXMyJsPdX2UKGgGR7+kSPEKmbb2aAdLAWgIR0B27XvoePq+dX2UKGgGR8AhP/82rGR3aAdLMmgIR0B26cBtDUmVdX2UKGgGR8AqxUnXumaZaAdLMmgIR0B2+oCDEm6YdX2UKGgGR8AlHdX1anrIaAdLMmgIR0B27xsi0OVgdX2UKGgGR8AkfS2H+IdmaAdLMmgIR0B299lOGj9GdX2UKGgGR8A6zYsd1dPdaAdLMmgIR0B29CLzf779dX2UKGgGR8AfEQf6oESvaAdLMmgIR0B3Ba40/GEPdX2UKGgGR8AbO0KJEYwZaAdLMmgIR0B2+kMfA9FGdX2UKGgGR8Am1bGm1pj+aAdLMmgIR0B3AwqkM1CPdX2UKGgGR8AcNkH2RJVbaAdLMmgIR0B2/1AMUh3adX2UKGgGR8Af5YDDCP6saAdLMmgIR0B3EF/BnBcidX2UKGgGR8Am4B2fTTfBaAdLMmgIR0B3BPgHeJpGdX2UKGgGR8AdAtDlYEGJaAdLMmgIR0B3Da/BWPtEdX2UKGgGR8Ao+YRdyDIzaAdLMmgIR0B3CfEOy3TedX2UKGgGR8AjCrIYFaB7aAdLMmgIR0B3GuDVYp2EdX2UKGgGR8AnpRXwLE1maAdLMmgIR0B3D3uRcNYsdX2UKGgGR8AlhaEi+tbLaAdLMmgIR0B3GDFn7HhkdX2UKGgGR8ATLOxB3RoiaAdLMmgIR0B3FHWcz67/dX2UKGgGR8AfCBxxT850aAdLMmgIR0B3JPpOerdWdX2UKGgGR8AqNU7Sy+pPaAdLMmgIR0B3GZCqp97XdX2UKGgGR8AIyHmA9V3maAdLMmgIR0B3IkcFQl8gdX2UKGgGR8As1A/LTx5LaAdLMmgIR0B3HowmE5AAdX2UKGgGR8Ak+oJiRW92aAdLMmgIR0B3L2IGhVU/dX2UKGgGR8ARxl18stkGaAdLMmgIR0B3I/p7kXDWdX2UKGgGR8AUtHJ9y926aAdLMmgIR0B3LNgPVd5ZdX2UKGgGR8AdfK4hEBsAaAdLMmgIR0B3KSZof0VadX2UKGgGR8At/oBaLXMAaAdLMmgIR0B3OTn/1g6VdX2UKGgGR8AVWeyzHCGfaAdLMmgIR0B3LdVvMr3CdX2UKGgGR8AlaQf6oESvaAdLMmgIR0B3Npbor4FidX2UKGgGR8AzaVcUuctoaAdLMmgIR0B3MtpPAO8TdX2UKGgGR8AUKij+JgstaAdLMmgIR0B3Q9ikO7QLdX2UKGgGR8AoLo7FKkEcaAdLMmgIR0B3OGwRoRI0dX2UKGgGR7+iJAMUh3aBaAdLAWgIR0B3OKDpTuOTdX2UKGgGR8AkuA4GUwBYaAdLMmgIR0B3QSS5iExqdX2UKGgGR8AUlYigTRICaAdLMmgIR0B3PXdbgTAWdX2UKGgGR8AlrilzltCRaAdLMmgIR0B3T2XfIjnndX2UKGgGR8AUE4+8oQWfaAdLMmgIR0B3RHI6r/83dX2UKGgGR8ArXbDdgv12aAdLMmgIR0B3TVRm9QGfdX2UKGgGR8AocmTkhib2aAdLMmgIR0B3SdrZamoBdX2UKGgGR8AM0kY4yXUpaAdLMmgIR0B3YF5+pfhNdX2UKGgGR8AWQZbY9Pk8aAdLMmgIR0B3VWPPszEadX2UKGgGR8ARTeP7vXsgaAdLMmgIR0B3Xhnh86V/dX2UKGgGR8ATrRKHwgDBaAdLMmgIR0B3Wos052hadX2UKGgGR8AV+DWbwz+FaAdLMmgIR0B3cK7g88s+dX2UKGgGR8AOKWHDaXa8aAdLMmgIR0B3ZbLkjopydX2UKGgGR8AgdB55Z8rqaAdLMmgIR0B3bm4G2TgVdX2UKGgGR8Ap6nNxEORUaAdLMmgIR0B3atZ/0/W2dX2UKGgGR8Ac8z0pVjqfaAdLMmgIR0B3gZQ53kgfdX2UKGgGR8AV4PjGT9sKaAdLMmgIR0B3dpKyv9tNdX2UKGgGR8Ai302cawUyaAdLMmgIR0B3f0CxNZeSdX2UKGgGR8AhjMeOn2qUaAdLMmgIR0B3e7NiYsundX2UKGgGR8AsyjD8+A3DaAdLMmgIR0B3k0lRgqmTdX2UKGgGR8AlFhKlHjIaaAdLMmgIR0B3iGB5HEuQdX2UKGgGR8Akc5mRNh3JaAdLMmgIR0B3kSnR9gF5dX2UKGgGR8AklqagElmfaAdLMmgIR0B3jabutwJgdX2UKGgGR8AjGGTs6aLGaAdLMmgIR0B3omTMaCL/dX2UKGgGR8AY35XU6PsBaAdLMmgIR0B3lyqvNeMRdX2UKGgGR8AjUnUDuBtlaAdLMmgIR0B3n67UXpGGdX2UKGgGR8Aq2AIY3vQXaAdLMmgIR0B3m/mdRR/FdX2UKGgGR8Ah2bx3FDOUaAdLMmgIR0B3rHksBhhIdX2UKGgGR8AowKb8WKuTaAdLMmgIR0B3oUDnvDxcdX2UKGgGR8AjP71qWTouaAdLMmgIR0B3qcQnQY1pdX2UKGgGR8AjRlVcUucuaAdLMmgIR0B3phDzAeq8dX2UKGgGR8AheUahpQDWaAdLMmgIR0B3t0mZ3LV4dX2UKGgGR8Ad8JTl1bJPaAdLMmgIR0B3rA/s3Q2NdX2UKGgGR8AiqozeoDPoaAdLMmgIR0B3tJQ66reZdX2UKGgGR8AjILDQ7cO9aAdLMmgIR0B3
sNYmsvIwdX2UKGgGR8AmwiPhhpg1aAdLMmgIR0B3wQUJv5xjdX2UKGgGR8ARmlfqoqCpaAdLMmgIR0B3tdKIznA7dX2UKGgGR8AtsBGQSzw+aAdLMmgIR0B3vliMHbAUdX2UKGgGR7+lEy+HrQgLaAdLAWgIR0B3voglnh86dX2UKGgGR8AlUlme18b8aAdLMmgIR0B3upmGucMFdX2UKGgGR8AhgGlANXo1aAdLMmgIR0B3y9FiKBNFdX2UKGgGR8AqZyp71Iy1aAdLMmgIR0B3wJg8bJfZdX2UKGgGR8AmRMbm2b5NaAdLMmgIR0B3yUtQKrq/dX2UKGgGR8Ak85o4+8oQaAdLMmgIR0B3xV5a/yoXdX2UKGgGR8AY6e6I3zczaAdLMmgIR0B31ZWdVea8dX2UKGgGR8AeQvboKUmlaAdLMmgIR0B3ym2fChvjdX2UKGgGR8Ar8fOlfqoqaAdLMmgIR0B30zJaJQ+EdX2UKGgGR8Aj5RTCLuQZaAdLMmgIR0B3z0WsRxtIdWUu"
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 250,
+ "n_steps": 100,
+ "gamma": 0.99,
+ "gae_lambda": 0.95,
+ "ent_coef": 0.1,
+ "vf_coef": 0.5,
+ "max_grad_norm": 0.5,
+ "normalize_advantage": true,
+ "observation_space": {
+ ":type:": "<class 'gymnasium.spaces.dict.Dict'>",
+ ":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=",
+ "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])",
+ "_shape": null,
+ "dtype": null,
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gymnasium.spaces.box.Box'>",
+ ":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=",
+ "dtype": "float32",
+ "bounded_below": "[ True True True True]",
+ "bounded_above": "[ True True True True]",
+ "_shape": [
+ 4
+ ],
+ "low": "[-1. -1. -1. -1.]",
+ "high": "[1. 1. 1. 1.]",
+ "low_repr": "-1.0",
+ "high_repr": "1.0",
+ "_np_random": null
+ },
+ "n_envs": 4,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz+EeuFHrhR7hZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
+ }
+ }
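The serialized `data` blob above records the run's hyperparameters (net_arch [512, 512], learning_rate 0.01, n_steps 100, gamma 0.99, gae_lambda 0.95, ent_coef 0.1, vf_coef 0.5, max_grad_norm 0.5, normalize_advantage true, 4 envs, 100000 timesteps, RMSprop optimizer). Below is a rough sketch of a training script consistent with those values; the `panda_gym`/`make_vec_env` environment setup and the `VecNormalize` wrapper are assumptions, only the keyword arguments come from the dump:

```python
import panda_gym  # noqa: F401  (assumed dependency; registers PandaPickAndPlaceDense-v3)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaPickAndPlaceDense-v3", n_envs=4)  # "n_envs": 4
env = VecNormalize(env)  # assumed; a vec_normalize.pkl is shipped with this model

model = A2C(
    "MultiInputPolicy",                       # MultiInputActorCriticPolicy (Dict observations)
    env,
    policy_kwargs=dict(net_arch=[512, 512]),  # "net_arch": [512, 512]
    learning_rate=0.01,
    n_steps=100,
    gamma=0.99,
    gae_lambda=0.95,
    ent_coef=0.1,
    vf_coef=0.5,
    max_grad_norm=0.5,
    normalize_advantage=True,
    verbose=1,
    # The stored optimizer (RMSprop, alpha=0.99, eps=1e-05, weight_decay=0)
    # matches A2C's defaults, so no optimizer kwargs are passed here.
)
model.learn(total_timesteps=100_000)          # "_total_timesteps": 100000
model.save("a2c-PandaPickAndPlaceDense-v3")
```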
a2c-PandaPickAndPlaceDense-v3/policy.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43493cc132c6173297e2dc39fcbaa12d76b7a19a7a4ad08e368cc4a25abfda1b
+ size 2222063
a2c-PandaPickAndPlaceDense-v3/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e47b47d31b4b7c38a9a1af14c989b3e0225e88c1079726f650975dc31695f85
+ size 2223343
a2c-PandaPickAndPlaceDense-v3/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c35cea3b2e60fb5e7e162d3592df775cd400e575a31c72f359fb9e654ab00c5
+ size 864
a2c-PandaPickAndPlaceDense-v3/system_info.txt ADDED
@@ -0,0 +1,9 @@
+ - OS: Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023
+ - Python: 3.10.12
+ - Stable-Baselines3: 2.1.0
+ - PyTorch: 2.1.0+cu118
+ - GPU Enabled: False
+ - Numpy: 1.23.5
+ - Cloudpickle: 2.2.1
+ - Gymnasium: 0.29.1
+ - OpenAI Gym: 0.25.2
config.json ADDED
@@ -0,0 +1 @@
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7cad8f027b50>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7cad8ee2db40>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVlgAAAAAAAAB9lCiMCG5ldF9hcmNolF2UKE0AAk0AAmWMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "net_arch": [512, 512], "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 100000, "_total_timesteps": 100000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1700039537148432949, "learning_rate": 0.01, "tensorboard_log": null, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAHlxvP5pOwL+FV4M9co+oviL5QbxGaYM9eTKLPrZvrr+Fa4M9DBGmPxUIr79OZ4M9lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAFqe1vroNYr+Lh9+98wVwvx/Syj/cY7M9LF+WP9Rajz8XPso/tEKCPTrmOj9phWi/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAACeIey/bK+2PxSygb+omKW/dM6jv52l9L6/moS/HlxvP5pOwL+FV4M9GNGgvCZIMb13Iaq7JnJ+PGZG4ryMp9w8MQeBPNFRh7xETa+7k5U1P2VobL/3h48/Lf6aPusXnL5FvZ+9wH59P3KPqL4i+UG8RmmDPTt7o7ymfzG9gHFlu1oKfDyHSdq8r1jZPBbSIjzHgpy8PnGlu1skX77hu7e+7aR+vzki8T5GAj+9DYqtvWyrhb95Mos+tm+uv4Vrgz0YxKS8IRgxvQ23f7tXMII8/l/ivIyn3DwxB4E80VGHvEl6pbtYO4O/9K4LP7URgr9pffY+OkI0vSgP873kqYS/DBGmPxUIr79OZ4M9OlClvEEPMb3rTcy6s7GGPPqc3byMp9w8RweBPMNRh7xsBJW7lGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[ 0.93499935 -1.5023987 0.06413177]\n [-0.3292194 -0.01183918 0.06416564]\n [ 0.27186945 -1.3627841 0.06416992]\n [ 1.2973952 -1.3674341 0.06416188]]", "desired_goal": "[[-0.3547904 -0.88302195 -0.10914525]\n [-0.9375908 1.5845374 0.08759281]\n [ 1.1747794 1.1199594 1.5800198 ]\n [ 0.06360379 0.7300755 -0.9082857 ]]", "observation": "[[-1.8447759e+00 1.4272285e+00 -1.0132470e+00 -1.2937212e+00\n -1.2797379e+00 -4.7782603e-01 -1.0359725e+00 9.3499935e-01\n -1.5023987e+00 6.4131774e-02 -1.9630954e-02 -4.3281697e-02\n -5.1919776e-03 1.5530145e-02 -2.7621459e-02 2.6935361e-02\n 1.5750499e-02 -1.6518505e-02 -5.3497870e-03]\n [ 7.0931357e-01 -9.2346793e-01 1.1213368e+00 3.0272046e-01\n -3.0486998e-01 -7.7997722e-02 9.9021530e-01 -3.2921940e-01\n -1.1839183e-02 6.4165637e-02 -1.9956222e-02 -4.3334626e-02\n -3.5010278e-03 1.5383327e-02 -2.6646389e-02 2.6531545e-02\n 9.9377837e-03 -1.9105328e-02 -5.0488999e-03]\n [-2.1791212e-01 -3.5885528e-01 -9.9470407e-01 4.7096422e-01\n -4.6633027e-02 -8.4735967e-02 -1.0442939e+00 2.7186945e-01\n -1.3627841e+00 6.4169921e-02 -2.0113036e-02 -4.3235902e-02\n -3.9019019e-03 1.5892191e-02 -2.7633663e-02 2.6935361e-02\n 1.5750499e-02 -1.6518505e-02 -5.0499779e-03]\n [-1.0252485e+00 5.4563832e-01 -1.0161654e+00 4.8142555e-01\n -4.4008471e-02 -1.1868125e-01 -1.0364347e+00 1.2973952e+00\n -1.3674341e+00 6.4161882e-02 -2.0179857e-02 -4.3227438e-02\n -1.5587186e-03 1.6442155e-02 -2.7052391e-02 2.6935361e-02\n 1.5750540e-02 -1.6518479e-02 -4.5476463e-03]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAvk1HPVluYr0K16M8VcOKPQ8eu7sK16M81loavDDEyr0K16M8hZGHvXM0Kz0K16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA3mmTPSsr0z16Xxc+HVqWvf0pUD2fMCk+mZ3+vSLPmDvjLXE9ulsWvkXxfTwK16M8lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAvk1HPVluYr0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAAFXDij0PHru7CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAADWWhq8MMTKvQrXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAhZGHvXM0Kz0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[ 0.04865813 -0.05528102 0.02 ]\n [ 0.06775538 -0.00571037 0.02 ]\n [-0.00942107 -0.09900701 0.02 ]\n [-0.06619553 0.04179807 0.02 ]]", "desired_goal": "[[ 0.07197927 0.10310968 0.14782515]\n [-0.07341406 0.05082129 0.16522454]\n [-0.12432403 0.00466336 0.05888165]\n [-0.14683428 0.01549942 0.02 ]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 4.8658125e-02\n -5.5281017e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 6.7755379e-02\n -5.7103704e-03 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -9.4210710e-03\n -9.9007010e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -6.6195525e-02\n 4.1798066e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwA2z0HyEtd2MAWyUSzKMAXSUR0B2srZOBUaRdX2UKGgGR8ApgWIoE0SAaAdLMmgIR0B2xAzzmOlwdX2UKGgGR8AjYyWRigCfaAdLMmgIR0B2uJ89fTkRdX2UKGgGR8AQUrxy4nWraAdLMmgIR0B2wSt0V8CxdX2UKGgGR8AXcSElE7W/aAdLMmgIR0B2vZ10T101dX2UKGgGR8AexfzBhx5taAdLMmgIR0B2znuJDVpcdX2UKGgGR8ArlDNQj2SMaAdLMmgIR0B2wxIvrWy1dX2UKGgGR8Ag9Ip6QeV+aAdLMmgIR0B2y54/u9eydX2UKGgGR8AjuD0163RYaAdLMmgIR0B2yA4HX2/SdX2UKGgGR8AoUlgtvn8saAdLMmgIR0B22Vvgm7aqdX2UKGgGR8Au/mTTvy9VaAdLMmgIR0B2zfMTviLmdX2UKGgGR8AKlfNRm9QGaAdLMmgIR0B21nin5zo2dX2UKGgGR8AojaTwDvE1aAdLMmgIR0B20u6XjU/fdX2UKGgGR8AmIJ+DvmYCaAdLMmgIR0B25NuHerMldX2UKGgGR8AbcedTYNAkaAdLMmgIR0B22Xps41gqdX2UKGgGR8AE+cOLBKtgaAdLMmgIR0B24gb3oLXudX2UKGgGR8AtjfJFLFn7aAdLMmgIR0B23nvNNahYdX2UKGgGR8AlCNtqHoHLaAdLMmgIR0B28CIuXeFddX2UKGgGR8AXsWKuSwGGaAdLMmgIR0B25LfO2RaHdX2UKGgGR8AfWONo8IRiaAdLMmgIR0B27UXMyJsPdX2UKGgGR7+kSPEKmbb2aAdLAWgIR0B27XvoePq+dX2UKGgGR8AhP/82rGR3aAdLMmgIR0B26cBtDUmVdX2UKGgGR8AqxUnXumaZaAdLMmgIR0B2+oCDEm6YdX2UKGgGR8AlHdX1anrIaAdLMmgIR0B27xsi0OVgdX2UKGgGR8AkfS2H+IdmaAdLMmgIR0B299lOGj9GdX2UKGgGR8A6zYsd1dPdaAdLMmgIR0B29CLzf779dX2UKGgGR8AfEQf6oESvaAdLMmgIR0B3Ba40/GEPdX2UKGgGR8AbO0KJEYwZaAdLMmgIR0B2+kMfA9FGdX2UKGgGR8Am1bGm1pj+aAdLMmgIR0B3AwqkM1CPdX2UKGgGR8AcNkH2RJVbaAdLMmgIR0B2/1AMUh3adX2UKGgGR8Af5YDDCP6saAdLMmgIR0B3EF/BnBcidX2UKGgGR8Am4B2fTTfBaAdLMmgIR0B3BPgHeJpGdX2UKGgGR8AdAtDlYEGJaAdLMmgIR0B3Da/BWPtEdX2UKGgGR8Ao+YRdyDIzaAdLMmgIR0B3CfEOy3TedX2UKGgGR8AjCrIYFaB7aAdLMmgIR0B3GuDVYp2EdX2UKGgGR8AnpRXwLE1maAdLMmgIR0B3D3uRcNYsdX2UKGgGR8AlhaEi+tbLaAdLMmgIR0B3GDFn7HhkdX2UKGgGR8ATLOxB3RoiaAdLMmgIR0B3FHWcz67/dX2UKGgGR8AfCBxxT850aAdLMmgIR0B3JPpOerdWdX2UKGgGR8AqNU7Sy+pPaAdLMmgIR0B3GZCqp97XdX2UKGgGR8AIyHmA9V3maAdLMmgIR0B3IkcFQl8gdX2UKGgGR8As1A/LTx5LaAdLMmgIR0B3HowmE5AAdX2UKGgGR8Ak+oJiRW92aAdLMmgIR0B3L2IGhVU/dX2UKGgGR8ARxl18stkGaAdLMmgIR0B3I/p7kXDWdX2UKGgGR8AUtHJ9y926aAdLMmgIR0B3LNgPVd5ZdX2UKGgGR8AdfK4hEBsAaAdLMmgIR0B3KSZof0VadX2UKGgGR8At/oBaLXMAaAdLMmgIR0B3OTn/1g6VdX2UKGgGR8AVWeyzHCGfaAdLMmgIR0B3LdVvMr3CdX2UKGgGR8AlaQf6oESvaAdLMmgIR0B3Npbor4FidX2UKGgGR8AzaVcUuctoaAdLMmgIR0B3MtpPAO8TdX2UKGgGR8AUKij+JgstaAdLMmgIR0B3Q9ikO7QLdX2UKGgGR8AoLo7FKkEcaAdLMmgIR0B3OGwRoRI0dX2UKGgGR7+iJAMUh3aBaAdLAWgIR0B3OKDpTuOTdX2UKGgGR8AkuA4GUwBYaAdLMmgIR0B3QSS5iExqdX2UKGgGR8AUlYigTRICaAdLMmgIR0B3PXdbgTAWdX2UKGgGR8AlrilzltCRaAdLMmgIR0B3T2XfIjnndX2UKGgGR8AUE4+8oQWfaAdLMmgIR0B3RHI6r/83dX2UKGgGR8ArXbDdgv12aAdLMmgIR0B3TVRm9QGfdX2UKGgGR8AocmTkhib2aAdLMmgIR0B3SdrZamoBdX2UKGgGR8AM0kY4yXUpaAdLMmgIR0B3YF5+pfhNdX2UKGgGR8AWQZbY9Pk8aAdLMmgIR0B3VWPPszEadX2UKGgGR8ARTeP7vXsgaAdLMmgIR0B3Xhnh86V/dX2UKGgGR8ATrRKHwgDBaAdLMmgIR0B3Wos052hadX2UKGgGR8AV+DWbwz+FaAdLMmgIR0B3cK7g88s+dX2UKGgGR8AOKWHDaXa8aAdLMmgIR0B3ZbLkjopydX2UKGgGR8AgdB55Z8rqaAdLMmgIR0B3bm4G2TgVdX2UKGgGR8Ap6nNxEORUaAdLMmgIR0B3atZ/0/W2dX2UKGgGR8Ac8z0pVjqfaAdLMmgIR0B3gZQ53kgfdX2UKGgGR8AV4PjGT9sKaAdLMmgIR0B3dpKyv9tNdX2UKGgGR8Ai302cawUyaAdLMmgIR0B3f0CxNZeSdX2UKGgGR8AhjMeOn2qUaAdLMmgIR0B3e7NiYsundX2UKGgGR8AsyjD8+A3DaAdLMmgIR0B3k0lRgqmTdX2UKGgGR8AlFhKlHjIaaAdLMmgIR0B3iGB5HEuQdX2UKGgGR8Akc5mRNh3JaAdLMmgIR0B3kSnR9gF5dX2UKGgGR8AklqagElmfaAdLMmgIR0B3jabutwJgdX2UKGgGR8AjGGTs6aLGaAdLMmgIR0B3omTMaCL/dX2UKGgGR8AY35XU6PsBaAdLMmgIR0B3lyqvNeMRdX2UKGgGR8AjUnUDuBtlaAdLMmgIR0B3n67UXpGGdX2UKGgGR8Aq2AIY3vQXaAdLMmgIR0B3m/mdRR/FdX2UKGgGR8Ah2bx3FDOUaAdLMmgIR0B3rHksBhhIdX2UKGgGR8AowKb8WKuTaAdLMmgIR0B3oUDnvDxcdX2UKGgGR8AjP71qWTouaAdLMmgIR0B3qcQnQY1pdX2UKGgGR8AjRlVcUucuaAdLMmgIR0B3phDzAeq8dX2UKGgGR8AheUahpQDWaAdLMmgIR0B3t0mZ3LV4dX2UKGgGR8Ad8JTl1bJPaAdLMmgIR0B3rA/s3Q2NdX2UKGgGR8AiqozeoDPoaAdLMmgIR0B3tJQ66reZdX2UKGgGR8AjILDQ7cO9aAdLMmgIR0B3sNYmsvIwdX2UKGgGR8
AmwiPhhpg1aAdLMmgIR0B3wQUJv5xjdX2UKGgGR8ARmlfqoqCpaAdLMmgIR0B3tdKIznA7dX2UKGgGR8AtsBGQSzw+aAdLMmgIR0B3vliMHbAUdX2UKGgGR7+lEy+HrQgLaAdLAWgIR0B3voglnh86dX2UKGgGR8AlUlme18b8aAdLMmgIR0B3upmGucMFdX2UKGgGR8AhgGlANXo1aAdLMmgIR0B3y9FiKBNFdX2UKGgGR8AqZyp71Iy1aAdLMmgIR0B3wJg8bJfZdX2UKGgGR8AmRMbm2b5NaAdLMmgIR0B3yUtQKrq/dX2UKGgGR8Ak85o4+8oQaAdLMmgIR0B3xV5a/yoXdX2UKGgGR8AY6e6I3zczaAdLMmgIR0B31ZWdVea8dX2UKGgGR8AeQvboKUmlaAdLMmgIR0B3ym2fChvjdX2UKGgGR8Ar8fOlfqoqaAdLMmgIR0B30zJaJQ+EdX2UKGgGR8Aj5RTCLuQZaAdLMmgIR0B3z0WsRxtIdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 250, "n_steps": 100, "gamma": 0.99, "gae_lambda": 0.95, "ent_coef": 0.1, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": true, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True]", "bounded_above": "[ True True True True]", "_shape": [4], "low": "[-1. -1. -1. -1.]", "high": "[1. 1. 1. 
1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": null}, "n_envs": 4, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz+EeuFHrhR7hZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.1.0", "PyTorch": "2.1.0+cu118", "GPU Enabled": "False", "Numpy": "1.23.5", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.1", "OpenAI Gym": "0.25.2"}}
replay.mp4 ADDED
Binary file (655 kB).
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": -50.0, "std_reward": 0.0, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-11-15T09:19:04.865455"}
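The numbers above (10 deterministic evaluation episodes, mean reward -50.0 +/- 0.0) are in the format produced by stable-baselines3's `evaluate_policy`. A sketch of how such an evaluation could be rerun, under the same repo-id and environment assumptions as the usage example earlier:

```python
import gymnasium as gym
import panda_gym  # noqa: F401  (assumed dependency; registers PandaPickAndPlaceDense-v3)

from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

checkpoint = load_from_hub(
    repo_id="IlluminatiPudding/a2c-PandaPickAndPlaceDense-v3",  # assumed repo id
    filename="a2c-PandaPickAndPlaceDense-v3.zip",
)
model = A2C.load(checkpoint)

# Monitor wrapper lets evaluate_policy read true episode returns.
eval_env = Monitor(gym.make("PandaPickAndPlaceDense-v3"))
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```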
vec_normalize.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d377796867cf5aca4d8310ceff4e65106769757daaf50799ff9f3e7f066c0697
+ size 3013