Source: __init__.py
from gym.envs.registration import registry, register, make, spec

# Algorithmic
# ----------------------------------------
register(
    id='Copy-v0',
    entry_point='gym.envs.algorithmic:CopyEnv',
    max_episode_steps=200,
    reward_threshold=25.0,
)
register(
    id='RepeatCopy-v0',
    entry_point='gym.envs.algorithmic:RepeatCopyEnv',
    max_episode_steps=200,
    reward_threshold=75.0,
)
register(
    id='ReversedAddition-v0',
    entry_point='gym.envs.algorithmic:ReversedAdditionEnv',
    kwargs={'rows' : 2},
    max_episode_steps=200,
    reward_threshold=25.0,
)
register(
    id='ReversedAddition3-v0',
    entry_point='gym.envs.algorithmic:ReversedAdditionEnv',
    kwargs={'rows' : 3},
    max_episode_steps=200,
    reward_threshold=25.0,
)
register(
    id='DuplicatedInput-v0',
    entry_point='gym.envs.algorithmic:DuplicatedInputEnv',
    max_episode_steps=200,
    reward_threshold=9.0,
)
register(
    id='Reverse-v0',
    entry_point='gym.envs.algorithmic:ReverseEnv',
    max_episode_steps=200,
    reward_threshold=25.0,
)

# Classic
# ----------------------------------------
register(
    id='CartPole-v0',
    entry_point='gym.envs.classic_control:CartPoleEnv',
    max_episode_steps=200,
    reward_threshold=195.0,
)
register(
    id='CartPole-v1',
    entry_point='gym.envs.classic_control:CartPoleEnv',
    max_episode_steps=500,
    reward_threshold=475.0,
)
register(
    id='MountainCar-v0',
    entry_point='gym.envs.classic_control:MountainCarEnv',
    max_episode_steps=200,
    reward_threshold=-110.0,
)
register(
    id='MountainCarContinuous-v0',
    entry_point='gym.envs.classic_control:Continuous_MountainCarEnv',
    max_episode_steps=999,
    reward_threshold=90.0,
)
register(
    id='Pendulum-v0',
    entry_point='gym.envs.classic_control:PendulumEnv',
    max_episode_steps=200,
)
register(
    id='Acrobot-v1',
    entry_point='gym.envs.classic_control:AcrobotEnv',
    max_episode_steps=500,
)

# Box2d
# ----------------------------------------
register(
    id='LunarLander-v2',
    entry_point='gym.envs.box2d:LunarLander',
    max_episode_steps=1000,
    reward_threshold=200,
)
register(
    id='LunarLanderContinuous-v2',
    entry_point='gym.envs.box2d:LunarLanderContinuous',
    max_episode_steps=1000,
    reward_threshold=200,
)
register(
    id='BipedalWalker-v2',
    entry_point='gym.envs.box2d:BipedalWalker',
    max_episode_steps=1600,
    reward_threshold=300,
)
register(
    id='BipedalWalkerHardcore-v2',
    entry_point='gym.envs.box2d:BipedalWalkerHardcore',
    max_episode_steps=2000,
    reward_threshold=300,
)
register(
    id='CarRacing-v0',
    entry_point='gym.envs.box2d:CarRacing',
    max_episode_steps=1000,
    reward_threshold=900,
)

# Toy Text
# ----------------------------------------
register(
    id='Blackjack-v0',
    entry_point='gym.envs.toy_text:BlackjackEnv',
)
register(
    id='KellyCoinflip-v0',
    entry_point='gym.envs.toy_text:KellyCoinflipEnv',
    reward_threshold=246.61,
)
register(
    id='KellyCoinflipGeneralized-v0',
    entry_point='gym.envs.toy_text:KellyCoinflipGeneralizedEnv',
)
register(
    id='FrozenLake-v0',
    entry_point='gym.envs.toy_text:FrozenLakeEnv',
    kwargs={'map_name' : '4x4'},
    max_episode_steps=100,
    reward_threshold=0.78, # optimum = .8196
)
register(
    id='FrozenLake8x8-v0',
    entry_point='gym.envs.toy_text:FrozenLakeEnv',
    kwargs={'map_name' : '8x8'},
    max_episode_steps=200,
    reward_threshold=0.99, # optimum = 1
)
register(
    id='CliffWalking-v0',
    entry_point='gym.envs.toy_text:CliffWalkingEnv',
)
register(
    id='NChain-v0',
    entry_point='gym.envs.toy_text:NChainEnv',
    max_episode_steps=1000,
)
register(
    id='Roulette-v0',
    entry_point='gym.envs.toy_text:RouletteEnv',
    max_episode_steps=100,
)
register(
    id='Taxi-v2',
    entry_point='gym.envs.toy_text.taxi:TaxiEnv',
    reward_threshold=8, # optimum = 8.46
    max_episode_steps=200,
)
register(
    id='GuessingGame-v0',
    entry_point='gym.envs.toy_text.guessing_game:GuessingGame',
    max_episode_steps=200,
)
register(
    id='HotterColder-v0',
    entry_point='gym.envs.toy_text.hotter_colder:HotterColder',
    max_episode_steps=200,
)

# Mujoco
# ----------------------------------------
# 2D
register(
    id='Reacher-v2',
    entry_point='gym.envs.mujoco:ReacherEnv',
    max_episode_steps=50,
    reward_threshold=-3.75,
)
register(
    id='Pusher-v2',
    entry_point='gym.envs.mujoco:PusherEnv',
    max_episode_steps=100,
    reward_threshold=0.0,
)
register(
    id='Thrower-v2',
    entry_point='gym.envs.mujoco:ThrowerEnv',
    max_episode_steps=100,
    reward_threshold=0.0,
)
register(
    id='Striker-v2',
    entry_point='gym.envs.mujoco:StrikerEnv',
    max_episode_steps=100,
    reward_threshold=0.0,
)
register(
    id='InvertedPendulum-v2',
    entry_point='gym.envs.mujoco:InvertedPendulumEnv',
    max_episode_steps=1000,
    reward_threshold=950.0,
)
register(
    id='InvertedDoublePendulum-v2',
    entry_point='gym.envs.mujoco:InvertedDoublePendulumEnv',
    max_episode_steps=1000,
    reward_threshold=9100.0,
)
register(
    id='HalfCheetah-v2',
    entry_point='gym.envs.mujoco:HalfCheetahEnv',
    max_episode_steps=1000,
    reward_threshold=4800.0,
)
register(
    id='Hopper-v2',
    entry_point='gym.envs.mujoco:HopperEnv',
    max_episode_steps=1000,
    reward_threshold=3800.0,
)
register(
    id='Swimmer-v2',
    entry_point='gym.envs.mujoco:SwimmerEnv',
    max_episode_steps=1000,
    reward_threshold=360.0,
)
register(
    id='Walker2d-v2',
    max_episode_steps=1000,
    entry_point='gym.envs.mujoco:Walker2dEnv',
)
register(
    id='Ant-v2',
    entry_point='gym.envs.mujoco:AntEnv',
    max_episode_steps=1000,
    reward_threshold=6000.0,
)
register(
    id='Humanoid-v2',
    entry_point='gym.envs.mujoco:HumanoidEnv',
    max_episode_steps=1000,
)
register(
    id='HumanoidStandup-v2',
    entry_point='gym.envs.mujoco:HumanoidStandupEnv',
    max_episode_steps=1000,
)

# Robotics
# ----------------------------------------
def _merge(a, b):
    a.update(b)
    return a

for reward_type in ['sparse', 'dense']:
    suffix = 'Dense' if reward_type == 'dense' else ''
    kwargs = {
        'reward_type': reward_type,
    }
    # Fetch
    register(
        id='FetchSlide{}-v1'.format(suffix),
        entry_point='gym.envs.robotics:FetchSlideEnv',
        kwargs=kwargs,
        max_episode_steps=50,
    )
    register(
        id='FetchPickAndPlace{}-v1'.format(suffix),
        entry_point='gym.envs.robotics:FetchPickAndPlaceEnv',
        kwargs=kwargs,
        max_episode_steps=50,
    )
    register(
        id='FetchReach{}-v1'.format(suffix),
        entry_point='gym.envs.robotics:FetchReachEnv',
        kwargs=kwargs,
        max_episode_steps=50,
    )
    register(
        id='FetchPush{}-v1'.format(suffix),
        entry_point='gym.envs.robotics:FetchPushEnv',
        kwargs=kwargs,
        max_episode_steps=50,
    )
    # Hand
    register(
        id='HandReach{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandReachEnv',
        kwargs=kwargs,
        max_episode_steps=50,
    )
    register(
        id='HandManipulateBlockRotateZ{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandBlockEnv',
        kwargs=_merge({'target_position': 'ignore', 'target_rotation': 'z'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulateBlockRotateParallel{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandBlockEnv',
        kwargs=_merge({'target_position': 'ignore', 'target_rotation': 'parallel'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulateBlockRotateXYZ{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandBlockEnv',
        kwargs=_merge({'target_position': 'ignore', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulateBlockFull{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandBlockEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    # Alias for "Full"
    register(
        id='HandManipulateBlock{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandBlockEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulateEggRotate{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandEggEnv',
        kwargs=_merge({'target_position': 'ignore', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulateEggFull{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandEggEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    # Alias for "Full"
    register(
        id='HandManipulateEgg{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandEggEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulatePenRotate{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandPenEnv',
        kwargs=_merge({'target_position': 'ignore', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    register(
        id='HandManipulatePenFull{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandPenEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )
    # Alias for "Full"
    register(
        id='HandManipulatePen{}-v0'.format(suffix),
        entry_point='gym.envs.robotics:HandPenEnv',
        kwargs=_merge({'target_position': 'random', 'target_rotation': 'xyz'}, kwargs),
        max_episode_steps=100,
    )

# Atari
# ----------------------------------------
# # print ', '.join(["'{}'".format(name.split('.')[0]) for name in atari_py.list_games()])
for game in ['air_raid', 'alien', 'amidar', 'assault', 'asterix', 'asteroids', 'atlantis',
    'bank_heist', 'battle_zone', 'beam_rider', 'berzerk', 'bowling', 'boxing', 'breakout', 'carnival',
    'centipede', 'chopper_command', 'crazy_climber', 'demon_attack', 'double_dunk',
    'elevator_action', 'enduro', 'fishing_derby', 'freeway', 'frostbite', 'gopher', 'gravitar',
    'hero', 'ice_hockey', 'jamesbond', 'journey_escape', 'kangaroo', 'krull', 'kung_fu_master',
    'montezuma_revenge', 'ms_pacman', 'name_this_game', 'phoenix', 'pitfall', 'pong', 'pooyan',
    'private_eye', 'qbert', 'riverraid', 'road_runner', 'robotank', 'seaquest', 'skiing',
    'solaris', 'space_invaders', 'star_gunner', 'tennis', 'time_pilot', 'tutankham', 'up_n_down',
    'venture', 'video_pinball', 'wizard_of_wor', 'yars_revenge', 'zaxxon']:
    for obs_type in ['image', 'ram']:
        # space_invaders should yield SpaceInvaders-v0 and SpaceInvaders-ram-v0
        name = ''.join([g.capitalize() for g in game.split('_')])
        if obs_type == 'ram':
            name = '{}-ram'.format(name)
        nondeterministic = False
        if game == 'elevator_action' and obs_type == 'ram':
            # ElevatorAction-ram-v0 seems to yield slightly
            # non-deterministic observations about 10% of the time. We
            # should track this down eventually, but for now we just
            # mark it as nondeterministic.
            nondeterministic = True
        register(
            id='{}-v0'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type, 'repeat_action_probability': 0.25},
            max_episode_steps=10000,
            nondeterministic=nondeterministic,
        )
        register(
            id='{}-v4'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type},
            max_episode_steps=100000,
            nondeterministic=nondeterministic,
        )
        # Standard Deterministic (as in the original DeepMind paper)
        if game == 'space_invaders':
            frameskip = 3
        else:
            frameskip = 4
        # Use a deterministic frame skip.
        register(
            id='{}Deterministic-v0'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type, 'frameskip': frameskip, 'repeat_action_probability': 0.25},
            max_episode_steps=100000,
            nondeterministic=nondeterministic,
        )
        register(
            id='{}Deterministic-v4'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type, 'frameskip': frameskip},
            max_episode_steps=100000,
            nondeterministic=nondeterministic,
        )
        register(
            id='{}NoFrameskip-v0'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type, 'frameskip': 1, 'repeat_action_probability': 0.25}, # A frameskip of 1 means we get every frame
            max_episode_steps=frameskip * 100000,
            nondeterministic=nondeterministic,
        )
        # No frameskip. (Atari has no entropy source, so these are
        # deterministic environments.)
        register(
            id='{}NoFrameskip-v4'.format(name),
            entry_point='gym.envs.atari:AtariEnv',
            kwargs={'game': game, 'obs_type': obs_type, 'frameskip': 1}, # A frameskip of 1 means we get every frame
            max_episode_steps=frameskip * 100000,
            nondeterministic=nondeterministic,
        )

# Unit test
# ---------
register(
    id='CubeCrash-v0',
    entry_point='gym.envs.unittest:CubeCrash',
    reward_threshold=0.9,
    )
register(
    id='CubeCrashSparse-v0',
    entry_point='gym.envs.unittest:CubeCrashSparse',
    reward_threshold=0.9,
    )
register(
    id='CubeCrashScreenBecomesBlack-v0',
    entry_point='gym.envs.unittest:CubeCrashScreenBecomesBlack',
    reward_threshold=0.9,
    )
register(
    id='MemorizeDigits-v0',
    entry_point='gym.envs.unittest:MemorizeDigits',
    reward_threshold=20,
...
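For context, here is a minimal usage sketch (not part of __init__.py). It assumes a classic Gym release with the pre-0.26 step API and the relevant extras installed: once gym.envs is imported, every register() call above has added an entry to the global registry, so the ids can be resolved with gym.make() and their registration metadata inspected with gym.spec().

import gym

# Resolve an id that was registered above.
env = gym.make('CartPole-v0')

# max_episode_steps and reward_threshold live on the EnvSpec.
spec = gym.spec('FrozenLake8x8-v0')
print(spec.max_episode_steps, spec.reward_threshold)  # 200 0.99

# Roll out one episode with a random policy (classic 4-tuple step API).
obs = env.reset()
done, total_reward = False, 0.0
while not done:
    action = env.action_space.sample()
    obs, reward, done, info = env.step(action)
    total_reward += reward
env.close()
print(total_reward)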