Gabcsor committed on
Commit 1f9d295
1 Parent(s): 7aae06c

Model trained using Optuna for hyperparameter tuning

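The commit message says the model was tuned with Optuna, but the tuning script itself is not part of this commit. The sketch below only illustrates what such a study could look like with Optuna and Stable-Baselines3; the search space, per-trial budget and objective are assumptions for illustration, not the settings used for this agent.

```python
# Hypothetical Optuna study for PPO on LunarLander-v2 (illustrative only).
import optuna
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy


def objective(trial: optuna.Trial) -> float:
    # Assumed search space; the real study's ranges are not recorded in this commit.
    params = {
        "learning_rate": trial.suggest_float("learning_rate", 1e-5, 1e-3, log=True),
        "n_steps": trial.suggest_categorical("n_steps", [256, 512, 1024]),
        "gamma": trial.suggest_float("gamma", 0.99, 0.999),
        "gae_lambda": trial.suggest_float("gae_lambda", 0.9, 1.0),
        "ent_coef": trial.suggest_float("ent_coef", 1e-7, 1e-2, log=True),
        "max_grad_norm": trial.suggest_float("max_grad_norm", 0.3, 1.0),
    }
    env = make_vec_env("LunarLander-v2", n_envs=16)
    model = PPO("MlpPolicy", env, verbose=0, **params)
    model.learn(total_timesteps=100_000)  # short budget per trial
    mean_reward, _ = evaluate_policy(model, model.get_env(), n_eval_episodes=10)
    return mean_reward


study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=20)
print(study.best_params)
```

In a study like this, the best trial's parameters would then be reused for a longer final training run, which is presumably how the lander_optuna_2_222 agent added below was produced.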
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
   type: LunarLander-v2
   metrics:
   - type: mean_reward
-  value: 302.36 +/- 15.24
+  value: 296.17 +/- 23.52
   name: mean_reward
   verified: false
  ---
lander_optuna_2_222.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6aee69444afc82b7d82150b7335f69d5fc107cde93fb4f5071d8a43f380391e1
+ size 147427
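lander_optuna_2_222.zip is the standard Stable-Baselines3 save archive (its contents are listed file by file below). A minimal sketch of loading it and running one episode, assuming the archive has been downloaded to the working directory and the Gym 0.21 step API recorded in system_info.txt:

```python
# Minimal sketch: load the saved PPO agent and roll out one deterministic episode.
import gym
from stable_baselines3 import PPO

model = PPO.load("lander_optuna_2_222.zip")
env = gym.make("LunarLander-v2")

obs = env.reset()
done, total_reward = False, 0.0
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)  # gym 0.21 four-tuple API
    total_reward += reward
print(f"episode return: {total_reward:.2f}")
```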
lander_optuna_2_222/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 1.7.0
lander_optuna_2_222/data ADDED
@@ -0,0 +1,95 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
+ "__module__": "stable_baselines3.common.policies",
+ "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x7fa2e4f6a790>",
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7fa2e4f6a820>",
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7fa2e4f6a8b0>",
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7fa2e4f6a940>",
+ "_build": "<function ActorCriticPolicy._build at 0x7fa2e4f6a9d0>",
+ "forward": "<function ActorCriticPolicy.forward at 0x7fa2e4f6aa60>",
+ "extract_features": "<function ActorCriticPolicy.extract_features at 0x7fa2e4f6aaf0>",
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7fa2e4f6ab80>",
+ "_predict": "<function ActorCriticPolicy._predict at 0x7fa2e4f6ac10>",
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7fa2e4f6aca0>",
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7fa2e4f6ad30>",
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x7fa2e4f6adc0>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc_data object at 0x7fa2e4f67360>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {},
+ "observation_space": {
+ ":type:": "<class 'gym.spaces.box.Box'>",
+ ":serialized:": "gAWVnwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAf5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAAAAAAAAAAAlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZROdWIu",
+ "dtype": "float32",
+ "_shape": [
+ 8
+ ],
+ "low": "[-inf -inf -inf -inf -inf -inf -inf -inf]",
+ "high": "[inf inf inf inf inf inf inf inf]",
+ "bounded_below": "[False False False False False False False False]",
+ "bounded_above": "[False False False False False False False False]",
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gym.spaces.discrete.Discrete'>",
+ ":serialized:": "gAWVggAAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLBIwGX3NoYXBllCmMBWR0eXBllIwFbnVtcHmUaAeTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu",
+ "n": 4,
+ "_shape": [],
+ "dtype": "int64",
+ "_np_random": null
+ },
+ "n_envs": 16,
+ "num_timesteps": 507904,
+ "_total_timesteps": 500000,
+ "_num_timesteps_at_start": 0,
+ "seed": null,
+ "action_noise": null,
+ "start_time": 1678125256029095466,
+ "learning_rate": 0.0001,
+ "tensorboard_log": null,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/Gjbi6xxDLYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
+ },
+ "_last_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAGbhQT3w+G4/o3QvPmzwg78JfVI+Fan6PAAAAAAAAAAAzUpnvFy3Bro2ASg8/McjOafFKrsNHSY4AACAPwAAgD9mDsE7UqCXuRtgHjWtoRsw1hE5OnJ9ZLQAAIA/AACAP+rfa76egAA/4zliPgm5Mr+i0oW+UGRUPgAAAAAAAAAApg8jvqFLij5ow9I+1eohvzrdt7079ls+AAAAAAAAAAAAfLo8+Fa/P76RNj51ICg+5wg7PJ5FeT0AAAAAAAAAABqQ0j2Q1oc/hxKnPjBSP799Hpk+1kmOPgAAAAAAAAAAM8v6OxRYmrr8oQI0o6uir4bY4bkZK6qzAACAPwAAgD8zQTq9TG9WPoCe1j1vnQS/9ZyovWDwtD0AAAAAAAAAADN5cbw2i1O8nZs2Pq9UiTxDArI9e+JgvQAAgD8AAIA/Ws8yvlWQEz8fJAg+hmw4v22+Zb5tVRM+AAAAAAAAAABmuYG9Kc5DPpF0Sz7ayCG/h6WBvQLchz0AAAAAAAAAAIB9aL17BJO6YKT/O1010LGQkgQ7anmbMwAAgD8AAIA/5hi5vbgm5rn+9zK9TGTyuAB6erpa8V44AAAAAAAAgD86w06+IkGLPv14lT7GJQq/tuxZvkYxjz4AAAAAAAAAAGYDxrzLpp4/EmINvlUcJr/UDlK9siXyvAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
+ },
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": null,
+ "_episode_num": 0,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": -0.015808000000000044,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVHxAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMI0Qg2rj9ycUCUhpRSlIwBbJRLmIwBdJRHQJtg69cry2B1fZQoaAZoCWgPQwjl0CLbecByQJSGlFKUaBVLyGgWR0CbYP6oESuhdX2UKGgGaAloD0MI29/ZHj3icECUhpRSlGgVS6BoFkdAm2ENcry1/nV9lChoBmgJaA9DCHfYRGau2HNAlIaUUpRoFU26AWgWR0CbYWdjG1hLdX2UKGgGaAloD0MIZFsGnCUidECUhpRSlGgVS7VoFkdAm2Gci0OVgXV9lChoBmgJaA9DCBJpG38iN3NAlIaUUpRoFUutaBZHQJthmxhUipx1fZQoaAZoCWgPQwg9DoP568ZzQJSGlFKUaBVLtmgWR0CbYaR64UeudX2UKGgGaAloD0MIcO8a9KUTckCUhpRSlGgVS7JoFkdAm3vi6cy31HV9lChoBmgJaA9DCDV9dsC1BXFAlIaUUpRoFUu1aBZHQJt8D7CSA6N1fZQoaAZoCWgPQwgX1/hMNmJyQJSGlFKUaBVLs2gWR0CbfFYoRZlndX2UKGgGaAloD0MIuQA0SpdjckCUhpRSlGgVS7hoFkdAm3zhmoR7JHV9lChoBmgJaA9DCELNkCqKT3NAlIaUUpRoFUuwaBZHQJt85uEVWS51fZQoaAZoCWgPQwiCWDZzSCRxQJSGlFKUaBVLrGgWR0CbfTpHI6sAdX2UKGgGaAloD0MIZCKl2TwiNkCUhpRSlGgVS1loFkdAm33PUe+23XV9lChoBmgJaA9DCI5aYfre4m5AlIaUUpRoFUugaBZHQJt+cUqQRwt1fZQoaAZoCWgPQwhRhxVu+RBxQJSGlFKUaBVLo2gWR0Cbft7YChexdX2UKGgGaAloD0MIVrd6TjpdcECUhpRSlGgVS6BoFkdAm37wQDmr83V9lChoBmgJaA9DCB1WuOXjanJAlIaUUpRoFUu2aBZHQJt/Asyzoll1fZQoaAZoCWgPQwjN5QZDHQtzQJSGlFKUaBVLumgWR0Cbf76tknTidX2UKGgGaAloD0MIiULLuj8Ic0CUhpRSlGgVS7toFkdAm4BXMMZxaXV9lChoBmgJaA9DCGSyuP/IU3NAlIaUUpRoFUu/aBZHQJuAu8Yht+F1fZQoaAZoCWgPQwjfwyXHXSZ0QJSGlFKUaBVLv2gWR0CbgM1a4c3mdX2UKGgGaAloD0MIXvbrTrcecUCUhpRSlGgVS4ZoFkdAm4EFejVQRHV9lChoBmgJaA9DCKT6zi+KPHRAlIaUUpRoFUvOaBZHQJuBPRUm2LJ1fZQoaAZoCWgPQwjf+UUJOt5zQJSGlFKUaBVLwmgWR0CbgawXZXdTdX2UKGgGaAloD0MITfVk/lF7ckCUhpRSlGgVS55oFkdAm4HUxmCiAXV9lChoBmgJaA9DCNyAzw9jV3FAlIaUUpRoFUu8aBZHQJuB9JBgNPR1fZQoaAZoCWgPQwgFFOrpowJwQJSGlFKUaBVLs2gWR0CbgwC6pYLcdX2UKGgGaAloD0MIOWItPoUZcECUhpRSlGgVS5loFkdAm4OHAM2FWXV9lChoBmgJaA9DCHpVZ7XAk29AlIaUUpRoFUuSaBZHQJuDtE0BOpN1fZQoaAZoCWgPQwgsSgnBalFzQJSGlFKUaBVLkWgWR0Cbg72Pkq+bdX2UKGgGaAloD0MIaxDmdm8mc0CUhpRSlGgVS7doFkdAm4Px0ZFXrHV9lChoBmgJaA9DCKetEcF4wnJAlIaUUpRoFUuXaBZHQJuEvTOPeYV1fZQoaAZoCWgPQwhcc0f/i51yQJSGlFKUaBVLrmgWR0CbhM4xk/bCdX2UKGgGaAloD0MIbtqM0xDncECUhpRSlGgVS41oFkdAm4VyQLeANHV9lChoBmgJaA9DCH1bsFQXB3JAlIaUUpRoFUunaBZHQJuG5WPtD2J1fZQoaAZoCWgPQwhqTIi5pFJzQJSGlFKUaBVLyGgWR0CbhxVmSQo1dX2UKGgGaAloD0MIRBmqYupfc0CUhpRSlGgVS79oFkdAm4c7/GVAzHV9lChoBmgJaA9DCB7C+Gmc83FAlIaUUpRoFUucaBZHQJuHWmZVn291fZQoaAZoCWgPQwgqcoi4+SV0QJSGlFKUaBVLwGgWR0Cbh5+7Dl5odX2UKGgGaAloD0MISwSqfxA+ckCUhpRSlGgVS7BoFkdAm4fCEtdzGXV9lChoBmgJaA9DCF8mipA6G3RAlIaUUpRoFUvEaBZHQJuIthsqJ/J1fZQoaAZoCWgPQwhqpnud1CRxQJSGlFKUaBVLlGgWR0CbiUwUg0TDdX2UKGgGaAloD0MIGCXoLzTtckCUhpRSlGgVS5toFkdAm4lUXLvCuXV9lChoBmgJaA9DCKBOeXRj33FAlIaUUpRoFUuzaBZHQJuJZ6By0a91fZQoaAZoCWgPQwjyQ6URswlvQJSGlFKUaBVLmmgWR0CbilxWkrPMdX2UKGgGaAloD0MID+7O2m3Wb0CUhpRSlGgVS5loFkdAm4pi4nWrfnV9lChoBmgJaA9DCLtFYKwvEXNAlIaUUpRoFUvEaBZHQJuKmiRGMGZ1fZQoaAZoCWgPQwinWDUI8/FyQJSGlFKUaBVLzmgWR0CbiyE5hjOLdX2UKGgGaAloD0MI1h2LbdLjb0CUhpRSlGgVS6RoFkdAm4t7CrLhaXV9lChoBmgJaA9DCIem7PQD1G9AlIaUUpRoFUuWaBZHQJuok9gWrOt1fZQoaAZoCWgPQwiPiv87Ig1yQJSGlFKUaBVLhGgWR0CbqK6+36RAdX2UKGgGaAloD0MI3LjF/Nx1cECUhpRSlGgVS5doFkdAm6i4ACGN73V9lChoBmgJaA9DCB8r+G0I/W9AlIaUUpRoFUuWaBZHQJuoxmapgkV1fZQoaAZoCWgPQwiXNhyWRrxwQJSGlFKUaBVLm2gWR0CbqPNKh+OPdX2UKGgGaAloD0MI/BcIAuTXZkCUhpRSlGgVTegDaBZHQJupNAv+OwR1fZQoaAZoCWgPQwhmEB/YcZNyQJSGlFKUaBVLiGgWR0CbqVWaMJhOdX2UKGgGaAloD0MIpics8cCdckCUhpRSlGgVS8FoFkdAm6nhMrVe8nV9lChoBmgJaA9DCPsEUIzsu3BAlIaUUpRoFUuSaBZHQJup5mBe5Wl1fZQoaAZoCWgPQwhjDKzj+LxwQJSGlFKUaBVLoWgWR0CbqiXPqs2fdX2UKGgGaAloD0MIou4DkBrPcUCUhpRSlGgVS3xoFkdAm6pwiu+yq3V9lChoBmgJaA9DCL5p+uzA8HBAlIaUUpRoFUuVaBZHQJuqhqubI911fZQoaAZoCWgPQwjThy6ob/1vQJSGlFKUaBVLkGgWR0Cbqoxj
J+2FdX2UKGgGaAloD0MIVwbVBmetdECUhpRSlGgVS8JoFkdAm6rlejVQRHV9lChoBmgJaA9DCP8kPndCqnJAlIaUUpRoFUvBaBZHQJurbhybQTp1fZQoaAZoCWgPQwiXdJSD2TxwQJSGlFKUaBVLiGgWR0Cbq4RPoFFEdX2UKGgGaAloD0MI/reSHVsCc0CUhpRSlGgVS7loFkdAm6vqH446wXV9lChoBmgJaA9DCNo6ONib33JAlIaUUpRoFUufaBZHQJusB7RfF751fZQoaAZoCWgPQwgWNC2xsj1xQJSGlFKUaBVLn2gWR0CbrBRwZOzqdX2UKGgGaAloD0MImfBL/TwXc0CUhpRSlGgVS7VoFkdAm6xZiqhlDnV9lChoBmgJaA9DCHKlngUhD3FAlIaUUpRoFUuraBZHQJuseP7vXsh1fZQoaAZoCWgPQwgrw7gbhEJyQJSGlFKUaBVLomgWR0CbrH93r2QGdX2UKGgGaAloD0MIBK4rZoTRcUCUhpRSlGgVS55oFkdAm6yMZ1mrbXV9lChoBmgJaA9DCCMWMeywbHBAlIaUUpRoFUuJaBZHQJusrwkPczt1fZQoaAZoCWgPQwgwgVt3M+NxQJSGlFKUaBVLqGgWR0CbrVQhOgxrdX2UKGgGaAloD0MIPpP983Ryc0CUhpRSlGgVS7hoFkdAm63zH0btJHV9lChoBmgJaA9DCF7XL9iNdXBAlIaUUpRoFUudaBZHQJuuF5ooNNJ1fZQoaAZoCWgPQwgKn62DA6pyQJSGlFKUaBVLs2gWR0CbriVwPy08dX2UKGgGaAloD0MILdLEO0Ckc0CUhpRSlGgVS7BoFkdAm64qc7Qsw3V9lChoBmgJaA9DCGUZ4lhXhHNAlIaUUpRoFUu/aBZHQJuuedXko4N1fZQoaAZoCWgPQwhjesISD5NxQJSGlFKUaBVLqGgWR0Cbrv+MqBmPdX2UKGgGaAloD0MIH0yKjw8pcUCUhpRSlGgVS4poFkdAm684FaB7NXV9lChoBmgJaA9DCEazsn2IV3JAlIaUUpRoFUupaBZHQJuvaesgdOt1fZQoaAZoCWgPQwjAIOnTKphzQJSGlFKUaBVLomgWR0Cbr3I3R5TqdX2UKGgGaAloD0MIA7aDEfsIc0CUhpRSlGgVS8xoFkdAm6+rwe/5+HV9lChoBmgJaA9DCKjhW1j3+HFAlIaUUpRoFUuaaBZHQJuvteMQ2/B1fZQoaAZoCWgPQwgz/RLxVpVwQJSGlFKUaBVLn2gWR0Cbr8hmoR7JdX2UKGgGaAloD0MIi/87osISc0CUhpRSlGgVS7xoFkdAm6/rSNOuaHV9lChoBmgJaA9DCEERixj223NAlIaUUpRoFUuyaBZHQJuwNmBe5Wl1fZQoaAZoCWgPQwjYgt4bQ6JxQJSGlFKUaBVLuWgWR0CbsHo0ygwodX2UKGgGaAloD0MIRnh7EMJVcUCUhpRSlGgVS5hoFkdAm7EGq94/vHV9lChoBmgJaA9DCOQxA5UxDXJAlIaUUpRoFUu6aBZHQJuxIp4KQaJ1fZQoaAZoCWgPQwiFI0il2AlJQJSGlFKUaBVLaGgWR0CbsUmA9V3mdX2UKGgGaAloD0MIXRq/8IplcUCUhpRSlGgVS6RoFkdAm7FuHSF493V9lChoBmgJaA9DCM9qgT3mC3FAlIaUUpRoFUuVaBZHQJuxhG2Culp1fZQoaAZoCWgPQwjYYyKl2YxxQJSGlFKUaBVLrGgWR0CbsakhRqGldX2UKGgGaAloD0MIRrbz/VTYcUCUhpRSlGgVS6xoFkdAm7J8PrfLtHV9lChoBmgJaA9DCKadmsvNy3NAlIaUUpRoFUvXaBZHQJuyiNyYG+t1fZQoaAZoCWgPQwjReY1dostyQJSGlFKUaBVLo2gWR0Cbsrl+EytWdX2UKGgGaAloD0MI5Lz/j1PRc0CUhpRSlGgVS6hoFkdAm7LbTH80lHVlLg=="
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 3748,
+ "n_steps": 512,
+ "gamma": 0.997,
+ "gae_lambda": 0.95,
+ "ent_coef": 6.38e-06,
+ "vf_coef": 0.5,
+ "max_grad_norm": 0.31,
+ "batch_size": 32,
+ "n_epochs": 5,
+ "clip_range": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
+ },
+ "clip_range_vf": null,
+ "normalize_advantage": true,
+ "target_kl": null
+ }
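The data file above records the training configuration this agent was saved with: 16 parallel environments, 512 steps per rollout, batch size 32, 5 epochs per update, gamma 0.997, gae_lambda 0.95, ent_coef 6.38e-06, vf_coef 0.5, max_grad_norm 0.31, learning rate 1e-4, and roughly 500k total timesteps. A sketch of re-creating a comparable run from these values; clip_range is stored only in serialized form above, so the 0.2 used below is an assumption:

```python
# Sketch: re-create a PPO run with the hyperparameters recorded in the data file.
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

env = make_vec_env("LunarLander-v2", n_envs=16)
model = PPO(
    "MlpPolicy",
    env,
    learning_rate=1e-4,
    n_steps=512,
    batch_size=32,
    n_epochs=5,
    gamma=0.997,
    gae_lambda=0.95,
    clip_range=0.2,      # assumed; the saved value is only available serialized
    ent_coef=6.38e-06,
    vf_coef=0.5,
    max_grad_norm=0.31,
    verbose=1,
)
model.learn(total_timesteps=500_000)
model.save("lander_optuna_2_222")
```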
lander_optuna_2_222/policy.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53dcc1deed4aa3e6aece5561d47fd24c49006c595de1486d93b832a109aa26a2
+ size 88057
lander_optuna_2_222/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3054c649ce71f559fae6d302cb492ca6df455a0dd3ddbb6b62c9cb8c840edcfa
+ size 43393
lander_optuna_2_222/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d030ad8db708280fcae77d87e973102039acd23a11bdecc3db8eb6c0ac940ee1
+ size 431
lander_optuna_2_222/system_info.txt ADDED
@@ -0,0 +1,7 @@
+ - OS: Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022
+ - Python: 3.8.10
+ - Stable-Baselines3: 1.7.0
+ - PyTorch: 1.13.1+cu116
+ - GPU Enabled: True
+ - Numpy: 1.22.4
+ - Gym: 0.21.0
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
 
results.json CHANGED
@@ -1 +1 @@
- {"mean_reward": 302.3566343278907, "std_reward": 15.242004149179634, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-06T18:05:17.000919"}
+ {"mean_reward": 296.1685639619452, "std_reward": 23.522701108429178, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-06T18:05:42.653491"}
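results.json above comes from a deterministic 10-episode evaluation. The exact evaluation script is not part of this commit; a sketch of how such numbers are typically obtained with Stable-Baselines3:

```python
# Sketch: reproduce a results.json-style evaluation (10 deterministic episodes).
import json

import gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

model = PPO.load("lander_optuna_2_222.zip")
eval_env = Monitor(gym.make("LunarLander-v2"))  # Monitor records episode returns
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(json.dumps({"mean_reward": mean_reward, "std_reward": std_reward}))
```

Results will differ slightly between runs and machines, which is consistent with the shift from 302.36 +/- 15.24 to 296.17 +/- 23.52 recorded in this commit.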