How to use frame method in Puppeteer

Best Python code snippets from open-source projects

gui.py

Source:gui.py Github

copy

Full Screen

...329 instrText = 'Use the drop-down menus under the "To" column to choose where the Pok\u00E9mon in the corresponding "From" box will be sent. Boxes marked with "None" will not be transferred. Click the name of the From box to see its contents in the display below.'330 instrLabel = ttk.Label(displayFrame, text=instrText,331 wraplength=DISP_WIDTH)332 instrLabel.grid(row=0, column=0)333 boxDisplay = ttk.Labelframe(displayFrame)334 boxDisplay.grid(row=1, column=0, pady=20)335 336 #todo: make warning red?337 warning = "WARNING! Any Pok\u00E9mon that previously inhabited chosen boxes in the Gen III save file will be ERASED. It is recommended that you only select empty Gen III boxes."338 warningLabel = ttk.Label(displayFrame, text=warning,339 wraplength=DISP_WIDTH)340 warningLabel.grid(row=2, column=0)341 navFrame = Nav(baseFrame, root.prevPage, nextPage)342 navFrame.grid(row=1, column=0, sticky='we')343 return (baseFrame, 'Box Selection', {'sticky':'nsew'})344def overwriteWindow(root):345 '''Creates the window for deciding to overwrite or not.'''346 def toggle(*args): 347 setState(choice.get(), 'disabled', [gen2Label, gen3Label,...

Full Screen

Full Screen

talib_indicators.py

Source:talib_indicators.py Github

copy

Full Screen

# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2021 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Thin TA-Lib wrappers for QUANTAXIS.

Every function takes a pandas DataFrame with lowercase OHLCV columns
(``open``/``high``/``low``/``close``/``volume``) and returns a new
DataFrame, indexed like the input, whose column(s) are named after the
indicator.
"""
import pandas as pd

try:
    import talib
except ImportError:
    # TA-Lib is an optional compiled dependency: importing this module
    # without it is allowed; calling any wrapper is not.
    pass
    # print('PLEASE install TALIB to call these methods')


def _frame(DataFrame, res, name):
    """Wrap a raw talib result array as a single-column DataFrame."""
    return pd.DataFrame({name: res}, index=DataFrame.index)


def _ohlc(DataFrame):
    """Return the (open, high, low, close) value arrays of *DataFrame*."""
    return (DataFrame.open.values, DataFrame.high.values,
            DataFrame.low.values, DataFrame.close.values)


def AD(DataFrame):
    """Chaikin A/D Line (volume indicator)."""
    res = talib.AD(DataFrame.high.values, DataFrame.low.values,
                   DataFrame.close.values, DataFrame.volume.values)
    return _frame(DataFrame, res, 'AD')


def ADOSC(DataFrame, N1=3, N2=10):
    """Chaikin A/D Oscillator with fast period *N1* and slow period *N2*."""
    res = talib.ADOSC(DataFrame.high.values, DataFrame.low.values,
                      DataFrame.close.values, DataFrame.volume.values, N1, N2)
    return _frame(DataFrame, res, 'ADOSC')


def ADX(DataFrame, N=14):
    """Average Directional Movement Index over *N* periods."""
    res = talib.ADX(DataFrame.high.values, DataFrame.low.values,
                    DataFrame.close.values, N)
    return _frame(DataFrame, res, 'ADX')


def ADXR(DataFrame, N=14):
    """Average Directional Movement Index Rating over *N* periods."""
    res = talib.ADXR(DataFrame.high.values, DataFrame.low.values,
                     DataFrame.close.values, N)
    return _frame(DataFrame, res, 'ADXR')


def AROON(DataFrame, N=14):
    """Aroon indicator over *N* periods.

    Returns:
        DataFrame with ``AROON_UP`` and ``AROON_DOWN`` columns.
    """
    ar_up, ar_down = talib.AROON(DataFrame.high.values, DataFrame.low.values, N)
    return pd.DataFrame({'AROON_UP': ar_up, 'AROON_DOWN': ar_down},
                        index=DataFrame.index)


def AROONOSC(DataFrame, N=14):
    """Aroon Oscillator over *N* periods."""
    res = talib.AROONOSC(DataFrame.high.values, DataFrame.low.values, N)
    return _frame(DataFrame, res, 'AROONOSC')


def ATR(DataFrame, N=14):
    """Average True Range over *N* periods."""
    res = talib.ATR(DataFrame.high.values, DataFrame.low.values,
                    DataFrame.close.values, N)
    return _frame(DataFrame, res, 'ATR')


def AVGPRICE(DataFrame):
    """Average Price (mean of open, high, low, close)."""
    return _frame(DataFrame, talib.AVGPRICE(*_ohlc(DataFrame)), 'AVGPRICE')


def BOP(DataFrame):
    """Balance Of Power."""
    return _frame(DataFrame, talib.BOP(*_ohlc(DataFrame)), 'BOP')


def CCI(DataFrame, N=14):
    """Commodity Channel Index over *N* periods."""
    res = talib.CCI(DataFrame.high.values, DataFrame.low.values,
                    DataFrame.close.values, N)
    return _frame(DataFrame, res, 'CCI')


# Candlestick pattern-recognition wrappers.  They all share one shape —
# feed the OHLC arrays to the talib function of the same name and label
# the result column with that name — so they are generated in a loop
# instead of being written out sixty-one times.
_CDL_NAMES = (
    'CDL2CROWS', 'CDL3BLACKCROWS', 'CDL3INSIDE', 'CDL3LINESTRIKE',
    'CDL3OUTSIDE', 'CDL3STARSINSOUTH', 'CDL3WHITESOLDIERS',
    'CDLABANDONEDBABY', 'CDLADVANCEBLOCK', 'CDLBELTHOLD', 'CDLBREAKAWAY',
    'CDLCLOSINGMARUBOZU', 'CDLCONCEALBABYSWALL', 'CDLCOUNTERATTACK',
    'CDLDARKCLOUDCOVER', 'CDLDOJI', 'CDLDOJISTAR', 'CDLDRAGONFLYDOJI',
    'CDLENGULFING', 'CDLEVENINGDOJISTAR', 'CDLEVENINGSTAR',
    'CDLGAPSIDESIDEWHITE', 'CDLGRAVESTONEDOJI', 'CDLHAMMER',
    'CDLHANGINGMAN', 'CDLHARAMI', 'CDLHARAMICROSS', 'CDLHIGHWAVE',
    'CDLHIKKAKE', 'CDLHIKKAKEMOD', 'CDLHOMINGPIGEON', 'CDLIDENTICAL3CROWS',
    'CDLINNECK', 'CDLINVERTEDHAMMER', 'CDLKICKING', 'CDLKICKINGBYLENGTH',
    'CDLLADDERBOTTOM', 'CDLLONGLEGGEDDOJI', 'CDLLONGLINE', 'CDLMARUBOZU',
    'CDLMATCHINGLOW', 'CDLMATHOLD', 'CDLMORNINGDOJISTAR', 'CDLMORNINGSTAR',
    'CDLONNECK', 'CDLPIERCING', 'CDLRICKSHAWMAN', 'CDLRISEFALL3METHODS',
    'CDLSEPARATINGLINES', 'CDLSHOOTINGSTAR', 'CDLSHORTLINE',
    'CDLSPINNINGTOP', 'CDLSTALLEDPATTERN', 'CDLSTICKSANDWICH', 'CDLTAKURI',
    'CDLTASUKIGAP', 'CDLTHRUSTING', 'CDLTRISTAR', 'CDLUNIQUE3RIVER',
    'CDLUPSIDEGAP2CROWS', 'CDLXSIDEGAP3METHODS',
)


def _make_pattern(name):
    """Build one candlestick pattern wrapper named *name*."""
    def pattern(DataFrame):
        res = getattr(talib, name)(*_ohlc(DataFrame))
        return _frame(DataFrame, res, name)
    pattern.__name__ = pattern.__qualname__ = name
    pattern.__doc__ = '%s candlestick pattern recognition.' % name
    return pattern


for _name in _CDL_NAMES:
    globals()[_name] = _make_pattern(_name)
del _name


def DX(DataFrame, N=14):
    """Directional Movement Index over *N* periods."""
    res = talib.DX(DataFrame.high.values, DataFrame.low.values,
                   DataFrame.close.values, N)
    return _frame(DataFrame, res, 'DX')


# SAR - Parabolic SAR
def SAR(DataFrame, acceleration=0, maximum=0):
    """Parabolic SAR.

    Defaults of 0 are kept for backward compatibility with existing
    callers (talib's own defaults are 0.02 / 0.2).
    """
    res = talib.SAR(DataFrame.high.values, DataFrame.low.values,
                    acceleration, maximum)
    return _frame(DataFrame, res, 'SAR')


def SAREXT(DataFrame, startvalue=0, offsetonreverse=0, accelerationinitlong=0,
           accelerationlong=0, accelerationmaxlong=0, accelerationinitshort=0,
           accelerationshort=0, accelerationmaxshort=0):
    """Parabolic SAR - Extended."""
    res = talib.SAREXT(DataFrame.high.values, DataFrame.low.values,
                       startvalue, offsetonreverse, accelerationinitlong,
                       accelerationlong, accelerationmaxlong,
                       accelerationinitshort, accelerationshort,
                       accelerationmaxshort)
    return _frame(DataFrame, res, 'SAREXT')


def STOCH(DataFrame, fastk_period=5, slowk_period=3, slowk_matype=0,
          slowd_period=3, slowd_matype=0):
    """Stochastic oscillator.

    Returns:
        DataFrame with ``STOCH_SLOWK`` and ``STOCH_SLOWD`` columns.
    """
    slowk, slowd = talib.STOCH(DataFrame.high.values, DataFrame.low.values,
                               DataFrame.close.values, fastk_period,
                               slowk_period, slowk_matype, slowd_period,
                               slowd_matype)
    return pd.DataFrame({'STOCH_SLOWK': slowk, 'STOCH_SLOWD': slowd},
                        index=DataFrame.index)


def STOCHF(DataFrame, fastk_period=5, fastd_period=3, fastd_matype=0):
    """Stochastic Fast.

    Returns:
        DataFrame with ``STOCHF_FASTK`` and ``STOCHF_FASTD`` columns.

    NOTE(review): the tail of the original excerpt was truncated after the
    talib.STOCHF call; the return shape here follows the STOCH pattern —
    confirm against the upstream QUANTAXIS source.
    """
    fastk, fastd = talib.STOCHF(DataFrame.high.values, DataFrame.low.values,
                                DataFrame.close.values, fastk_period,
                                fastd_period, fastd_matype)
    return pd.DataFrame({'STOCHF_FASTK': fastk, 'STOCHF_FASTD': fastd},
                        index=DataFrame.index)

Full Screen

Full Screen

animation_file_conversion_helpers.py

Source:animation_file_conversion_helpers.py Github

copy

Full Screen

# Copyright (c) 2022 Boston Dynamics, Inc. All rights reserved.
#
# Downloading, reproducing, distributing or otherwise using the SDK Software
# is subject to the terms and conditions of the Boston Dynamics Software
# Development Kit License (20191101-BDSDK-SL).
"""A set helpers which convert specific lines from an animation
file into the animation-specific protobuf messages.
NOTE: All of these helpers are to convert specific values read from a `cha`
file into fields within the choreography_sequence_pb2.Animation protobuf
message. They are used by the animation_file_to_proto.py file.
"""
from bosdyn.api.spot import (choreography_sequence_pb2, choreography_service_pb2,
                             choreography_service_pb2_grpc)


def _walk(animation_frame, parents):
    """Follow *parents* attribute names down from *animation_frame*."""
    obj = animation_frame
    for attr in parents:
        obj = getattr(obj, attr)
    return obj


def _scalar_handler(name, path):
    """Build a ``handler(val, animation_frame)`` storing *val* at *path*."""
    parts = path.split('.')
    parents, leaf = parts[:-1], parts[-1]

    def handler(val, animation_frame):
        setattr(_walk(animation_frame, parents), leaf, val)
        return animation_frame

    handler.__name__ = handler.__qualname__ = name
    handler.__doc__ = 'Set %s on the animation frame from a single value.' % path
    return handler


def _vector_handler(name, assignments):
    """Build a ``handler(vals, animation_frame)`` storing ``vals[i]`` at each path."""
    parsed = [(path.split('.'), idx) for path, idx in assignments]

    def handler(vals, animation_frame):
        for parts, idx in parsed:
            setattr(_walk(animation_frame, parts[:-1]), parts[-1], vals[idx])
        return animation_frame

    handler.__name__ = handler.__qualname__ = name
    handler.__doc__ = 'Set %s on the animation frame from an indexed value list.' % (
        ', '.join(path for path, _ in assignments))
    return handler


# Single-value handlers: handler name -> dotted field path on the
# choreography_sequence_pb2 AnimationKeyframe message.  Paths ending in
# ``.value`` target wrapped (DoubleValue-style) fields; the others are
# plain scalar proto fields — both spellings are copied verbatim from the
# original hand-written helpers.
_SCALAR_FIELDS = {
    'start_time_handler': 'time',
    'gripper_handler': 'gripper.gripper_angle.value',
    'fl_contact_handler': 'legs.fl.stance.value',
    'fr_contact_handler': 'legs.fr.stance.value',
    'hl_contact_handler': 'legs.hl.stance.value',
    'hr_contact_handler': 'legs.hr.stance.value',
    'sh0_handler': 'arm.joint_angles.shoulder_0.value',
    'sh1_handler': 'arm.joint_angles.shoulder_1.value',
    'el0_handler': 'arm.joint_angles.elbow_0.value',
    'el1_handler': 'arm.joint_angles.elbow_1.value',
    'wr0_handler': 'arm.joint_angles.wrist_0.value',
    'wr1_handler': 'arm.joint_angles.wrist_1.value',
    'fl_hx_handler': 'legs.fl.joint_angles.hip_x',
    'fl_hy_handler': 'legs.fl.joint_angles.hip_y',
    'fl_kn_handler': 'legs.fl.joint_angles.knee',
    'fr_hx_handler': 'legs.fr.joint_angles.hip_x',
    'fr_hy_handler': 'legs.fr.joint_angles.hip_y',
    'fr_kn_handler': 'legs.fr.joint_angles.knee',
    'hl_hx_handler': 'legs.hl.joint_angles.hip_x',
    'hl_hy_handler': 'legs.hl.joint_angles.hip_y',
    'hl_kn_handler': 'legs.hl.joint_angles.knee',
    'hr_hx_handler': 'legs.hr.joint_angles.hip_x',
    'hr_hy_handler': 'legs.hr.joint_angles.hip_y',
    'hr_kn_handler': 'legs.hr.joint_angles.knee',
    'fl_x_handler': 'legs.fl.foot_pos.x.value',
    'fl_y_handler': 'legs.fl.foot_pos.y.value',
    'fl_z_handler': 'legs.fl.foot_pos.z.value',
    'fr_x_handler': 'legs.fr.foot_pos.x.value',
    'fr_y_handler': 'legs.fr.foot_pos.y.value',
    'fr_z_handler': 'legs.fr.foot_pos.z.value',
    'hl_x_handler': 'legs.hl.foot_pos.x.value',
    'hl_y_handler': 'legs.hl.foot_pos.y.value',
    'hl_z_handler': 'legs.hl.foot_pos.z.value',
    'hr_x_handler': 'legs.hr.foot_pos.x.value',
    'hr_y_handler': 'legs.hr.foot_pos.y.value',
    'hr_z_handler': 'legs.hr.foot_pos.z.value',
    'body_x_handler': 'body.body_pos.x.value',
    'body_y_handler': 'body.body_pos.y.value',
    'body_z_handler': 'body.body_pos.z.value',
    'com_x_handler': 'body.com_pos.x.value',
    'com_y_handler': 'body.com_pos.y.value',
    'com_z_handler': 'body.com_pos.z.value',
    'body_quat_x_handler': 'body.quaternion.x',
    'body_quat_y_handler': 'body.quaternion.y',
    'body_quat_z_handler': 'body.quaternion.z',
    'body_quat_w_handler': 'body.quaternion.w',
    'body_roll_handler': 'body.euler_angles.roll.value',
    'body_pitch_handler': 'body.euler_angles.pitch.value',
    'body_yaw_handler': 'body.euler_angles.yaw.value',
    'hand_quat_x_handler': 'arm.hand_pose.quaternion.x',
    'hand_quat_y_handler': 'arm.hand_pose.quaternion.y',
    'hand_quat_z_handler': 'arm.hand_pose.quaternion.z',
    'hand_quat_w_handler': 'arm.hand_pose.quaternion.w',
    'hand_roll_handler': 'arm.hand_pose.euler_angles.roll.value',
    'hand_pitch_handler': 'arm.hand_pose.euler_angles.pitch.value',
    'hand_yaw_handler': 'arm.hand_pose.euler_angles.yaw.value',
}

# Multi-value handlers: handler name -> ((field path, index into vals), ...).
# NOTE(review): hand_x/y/z_handler take a list and write vals[0] to
# ``position.x`` WITHOUT ``.value``, unlike hand_pos_handler which writes
# ``position.x.value`` — reproduced exactly as the original; confirm
# against the proto definition whether the bare assignment is intended.
_VECTOR_FIELDS = {
    'fl_angles_handler': (('legs.fl.joint_angles.hip_x', 0),
                          ('legs.fl.joint_angles.hip_y', 1),
                          ('legs.fl.joint_angles.knee', 2)),
    'fr_angles_handler': (('legs.fr.joint_angles.hip_x', 0),
                          ('legs.fr.joint_angles.hip_y', 1),
                          ('legs.fr.joint_angles.knee', 2)),
    'hl_angles_handler': (('legs.hl.joint_angles.hip_x', 0),
                          ('legs.hl.joint_angles.hip_y', 1),
                          ('legs.hl.joint_angles.knee', 2)),
    'hr_angles_handler': (('legs.hr.joint_angles.hip_x', 0),
                          ('legs.hr.joint_angles.hip_y', 1),
                          ('legs.hr.joint_angles.knee', 2)),
    'fl_pos_handler': (('legs.fl.foot_pos.x.value', 0),
                       ('legs.fl.foot_pos.y.value', 1),
                       ('legs.fl.foot_pos.z.value', 2)),
    'fr_pos_handler': (('legs.fr.foot_pos.x.value', 0),
                       ('legs.fr.foot_pos.y.value', 1),
                       ('legs.fr.foot_pos.z.value', 2)),
    'hl_pos_handler': (('legs.hl.foot_pos.x.value', 0),
                       ('legs.hl.foot_pos.y.value', 1),
                       ('legs.hl.foot_pos.z.value', 2)),
    'hr_pos_handler': (('legs.hr.foot_pos.x.value', 0),
                       ('legs.hr.foot_pos.y.value', 1),
                       ('legs.hr.foot_pos.z.value', 2)),
    'hand_x_handler': (('arm.hand_pose.position.x', 0),),
    'hand_y_handler': (('arm.hand_pose.position.y', 0),),
    'hand_z_handler': (('arm.hand_pose.position.z', 0),),
    'body_pos_handler': (('body.body_pos.x.value', 0),
                         ('body.body_pos.y.value', 1),
                         ('body.body_pos.z.value', 2)),
    'com_pos_handler': (('body.com_pos.x.value', 0),
                        ('body.com_pos.y.value', 1),
                        ('body.com_pos.z.value', 2)),
    'body_euler_rpy_angles_handler': (('body.euler_angles.roll.value', 0),
                                      ('body.euler_angles.pitch.value', 1),
                                      ('body.euler_angles.yaw.value', 2)),
    'body_quaternion_xyzw_handler': (('body.quaternion.x', 0),
                                     ('body.quaternion.y', 1),
                                     ('body.quaternion.z', 2),
                                     ('body.quaternion.w', 3)),
    'body_quaternion_wxyz_handler': (('body.quaternion.x', 1),
                                     ('body.quaternion.y', 2),
                                     ('body.quaternion.z', 3),
                                     ('body.quaternion.w', 0)),
    'leg_angles_handler': (('legs.fl.joint_angles.hip_x', 0),
                           ('legs.fl.joint_angles.hip_y', 1),
                           ('legs.fl.joint_angles.knee', 2),
                           ('legs.fr.joint_angles.hip_x', 3),
                           ('legs.fr.joint_angles.hip_y', 4),
                           ('legs.fr.joint_angles.knee', 5),
                           ('legs.hl.joint_angles.hip_x', 6),
                           ('legs.hl.joint_angles.hip_y', 7),
                           ('legs.hl.joint_angles.knee', 8),
                           ('legs.hr.joint_angles.hip_x', 9),
                           ('legs.hr.joint_angles.hip_y', 10),
                           ('legs.hr.joint_angles.knee', 11)),
    'foot_pos_handler': (('legs.fl.foot_pos.x.value', 0),
                         ('legs.fl.foot_pos.y.value', 1),
                         ('legs.fl.foot_pos.z.value', 2),
                         ('legs.fr.foot_pos.x.value', 3),
                         ('legs.fr.foot_pos.y.value', 4),
                         ('legs.fr.foot_pos.z.value', 5),
                         ('legs.hl.foot_pos.x.value', 6),
                         ('legs.hl.foot_pos.y.value', 7),
                         ('legs.hl.foot_pos.z.value', 8),
                         ('legs.hr.foot_pos.x.value', 9),
                         ('legs.hr.foot_pos.y.value', 10),
                         ('legs.hr.foot_pos.z.value', 11)),
    'contact_handler': (('legs.fl.stance.value', 0),
                        ('legs.fr.stance.value', 1),
                        ('legs.hl.stance.value', 2),
                        ('legs.hr.stance.value', 3)),
    'arm_joints_handler': (('arm.joint_angles.shoulder_0.value', 0),
                           ('arm.joint_angles.shoulder_1.value', 1),
                           ('arm.joint_angles.elbow_0.value', 2),
                           ('arm.joint_angles.elbow_1.value', 3),
                           ('arm.joint_angles.wrist_0.value', 4),
                           ('arm.joint_angles.wrist_1.value', 5)),
    'hand_pos_handler': (('arm.hand_pose.position.x.value', 0),
                         ('arm.hand_pose.position.y.value', 1),
                         ('arm.hand_pose.position.z.value', 2)),
    'hand_euler_rpy_angles_handler': (('arm.hand_pose.euler_angles.roll.value', 0),
                                      ('arm.hand_pose.euler_angles.pitch.value', 1),
                                      ('arm.hand_pose.euler_angles.yaw.value', 2)),
    'hand_quaternion_xyzw_handler': (('arm.hand_pose.quaternion.x', 0),
                                     ('arm.hand_pose.quaternion.y', 1),
                                     ('arm.hand_pose.quaternion.z', 2),
                                     ('arm.hand_pose.quaternion.w', 3)),
    'hand_quaternion_wxyz_handler': (('arm.hand_pose.quaternion.x', 1),
                                     ('arm.hand_pose.quaternion.y', 2),
                                     ('arm.hand_pose.quaternion.z', 3),
                                     ('arm.hand_pose.quaternion.w', 0)),
}

# Generate the public handler functions from the tables above; each entry
# replaces a hand-written helper with identical name, signature and effect.
for _handler_name, _field_path in _SCALAR_FIELDS.items():
    globals()[_handler_name] = _scalar_handler(_handler_name, _field_path)
for _handler_name, _field_assignments in _VECTOR_FIELDS.items():
    globals()[_handler_name] = _vector_handler(_handler_name, _field_assignments)
del _handler_name, _field_path, _field_assignments


def controls_option(file_line_split, animation):
    """Mark which tracks (legs/arm/body/gripper) the animation controls."""
    for track in file_line_split:
        if track == "legs":
            animation.proto.controls_legs = True
        elif track == "arm":
            animation.proto.controls_arm = True
        elif track == "body":
            animation.proto.controls_body = True
        elif track == "gripper":
            animation.proto.controls_gripper = True
        elif track == "controls":
            # The keyword itself, not a track name.
            continue
        else:
            print("Unknown track name %s" % track)
    return animation


def bpm_option(file_line_split, animation):
    """Parse the beats-per-minute value from the option line."""
    bpm = file_line_split[1]
    animation.bpm = int(bpm)
    return animation


def extendable_option(file_line_split, animation):
    """Flag the animation as extendable."""
    animation.proto.extendable = True
    return animation


def truncatable_option(file_line_split, animation):
    """Flag the animation as truncatable."""
    animation.proto.truncatable = True
    return animation


def neutral_start_option(file_line_split, animation):
    """Flag the animation as starting from a neutral pose."""
    animation.proto.neutral_start = True
    return animation


def precise_steps_option(file_line_split, animation):
    """Flag the animation as requiring precise steps.

    Fixed: the original fell through without returning, unlike every
    other *_option helper; callers chaining the result got None.
    """
    animation.proto.precise_steps = True
    return animation


def precise_timing_option(file_line_split, animation):
    """Flag the animation as requiring precise timing.

    Fixed: the original fell through without returning (see
    precise_steps_option).
    """
    animation.proto.precise_timing = True
    return animation


def no_looping_option(file_line_split, animation):
    """Flag the animation as non-looping.

    Fixed: the original fell through without returning (see
    precise_steps_option).
    """
    animation.proto.no_looping = True
    return animation


def arm_required_option(file_line_split, animation):
    """Flag the animation as requiring an arm.

    Fixed: the original fell through without returning (see
    precise_steps_option).
    """
    animation.proto.arm_required = True
    return animation


def arm_prohibited_option(file_line_split, animation):
    """Flag the animation as prohibiting an arm.

    Fixed: the original fell through without returning (see
    precise_steps_option).
    """
    animation.proto.arm_prohibited = True
    return animation


def track_swing_trajectories_option(file_line_split, animation):
    """Flag the animation as tracking swing trajectories."""
    animation.proto.track_swing_trajectories = True
    return animation


def assume_zero_roll_and_pitch_option(file_line_split, animation):
    """Flag the animation as assuming zero body roll and pitch.

    NOTE(review): the original excerpt was truncated right after
    ``return``; returning the animation follows every sibling helper —
    confirm against the upstream SDK source.
    """
    animation.proto.assume_zero_roll_and_pitch = True
    return animation
animation366def track_hand_rt_body_option(file_line_split, animation):367 animation.proto.track_hand_rt_body = True368 return animation369def track_hand_rt_feet_option(file_line_split, animation):370 animation.proto.track_hand_rt_feet = True371 return animation372def arm_playback_option(file_line_split, animation):373 playback = file_line_split[1]374 if playback == "jointspace":375 animation.proto.arm_playback = choreography_sequence_pb2.Animation.ARM_PLAYBACK_JOINTSPACE376 elif playback == "workspace":377 animation.proto.arm_playback = choreography_sequence_pb2.Animation.ARM_PLAYBACK_WORKSPACE378 elif playback == "workspace_dance_frame":379 animation.proto.arm_playback = choreography_sequence_pb2.Animation.ARM_PLAYBACK_WORKSPACE_DANCE_FRAME380 else:381 animation.proto.arm_playback = choreography_sequence_pb2.Animation.ARM_PLAYBACK_DEFAULT382 print("Unknown arm playback option %s" % playback)383 return animation384def display_rgb_option(file_line_split, animation):385 for i in range(1, 3):386 animation.rgb[i - 1] = int(file_line_split[i])387 return animation388def frequency_option(file_line_split, animation):389 freq = file_line_split[1]390 animation.frequency = float(freq)391 return animation392def retime_to_integer_slices_option(file_line_split, animation):393 animation.proto.retime_to_integer_slices = True394 return animation395def description_option(file_line_split, animation):396 description = " ".join(file_line_split[1:])397 description = description.replace('"', '') # remove any quotation marks398 animation.description = description...

Full Screen

Full Screen

frame_helpers.py

Source:frame_helpers.py Github

copy

Full Screen

...136 if se3_a_tform_b is None:137 # Failed to find the transformation between frames a and b in the frame tree snapshot.138 return None139 return se3_a_tform_b.get_closest_se2_transform()140def express_se2_velocity_in_new_frame(frame_tree_snapshot, frame_b, frame_c, vel_of_a_in_b,141 validate=True):142 """Convert the SE2 Velocity in frame b to a SE2 Velocity in frame c using143 the frame tree snapshot.144 Args:145 frame_tree_snapshot (dict) dictionary representing the child_to_parent_edge_map146 frame_b (string)147 frame_c (string)148 vel_of_a_in_b (SE2Velocity proto) SE2 Velocity in frame_b149 validate (bool) if the FrameTreeSnapshot should be checked for a valid tree structure150 Returns:151 math_helpers.SE2Velocity velocity_of_a_in_c in frame_c if the frames exist in the tree. None otherwise.152 """153 # Find the SE(3) pose in the frame tree snapshot that represents c_tform_b.154 se3_c_tform_b = get_a_tform_b(frame_tree_snapshot, frame_c, frame_b, validate)155 if se3_c_tform_b is None:156 # If the SE3Pose for c_tform_b does not exist in the frame tree snapshot,157 # then we cannot transform the velocity.158 return None159 # Check that the frame name of frame_c is considered to be a gravity aligned frame.160 if not is_gravity_aligned_frame_name(frame_c):161 # Frame C is not gravity aligned, and therefore c_tform_b cannot be converted into162 # an SE(2) pose because it will lose height information.163 return None164 # Find the closest SE(2) pose for the c_tform_b SE(3) pose found from the snapshot.165 se2_c_tform_b = se3_c_tform_b.get_closest_se2_transform()166 # Transform the velocity into the new frame to get vel_of_a_in_c.167 c_adjoint_b = se2_c_tform_b.to_adjoint_matrix()168 vel_of_a_in_c = math_helpers.transform_se2velocity(c_adjoint_b, vel_of_a_in_b)169 return vel_of_a_in_c170def express_se3_velocity_in_new_frame(frame_tree_snapshot, frame_b, frame_c, vel_of_a_in_b,171 validate=True):172 """Convert the SE(3) Velocity in frame b to an SE(3) Velocity in 
frame c using173 the frame tree snapshot.174 Args:175 frame_tree_snapshot (dict) dictionary representing the child_to_parent_edge_map176 frame_b (string)177 frame_c (string)178 vel_of_a_in_b (SE3Velocity proto) SE(3) Velocity in frame_b179 validate (bool) if the FrameTreeSnapshot should be checked for a valid tree structure180 Returns:181 math_helpers.SE3Velocity velocity_of_a_in_c in frame_c if the frames exist in the tree. None otherwise.182 """183 # Find the SE(3) pose in the frame tree snapshot that represents c_tform_b.184 se3_c_tform_b = get_a_tform_b(frame_tree_snapshot, frame_c, frame_b, validate)...

Full Screen

Full Screen

sigproc.py

Source:sigproc.py Github

copy

Full Screen

1# This file includes routines for basic signal processing including framing and computing power spectra.2# Author: James Lyons 20123import numpy4import math5def framesig(sig,frame_len,frame_step,winfunc=lambda x:numpy.ones((x,))):6 """Frame a signal into overlapping frames.7 :param sig: the audio signal to frame.8 :param frame_len: length of each frame measured in samples.9 :param frame_step: number of samples after the start of the previous frame that the next frame should begin.10 :param winfunc: the analysis window to apply to each frame. By default no window is applied. 11 :returns: an array of frames. Size is NUMFRAMES by frame_len.12 """13 slen = len(sig)14 frame_len = int(round(frame_len))15 frame_step = int(round(frame_step))16 if slen <= frame_len: 17 numframes = 118 else:19 numframes = 1 + int(math.ceil((1.0*slen - frame_len)/frame_step))20 21 padlen = int((numframes-1)*frame_step + frame_len)22 23 zeros = numpy.zeros((padlen - slen,))24 padsignal = numpy.concatenate((sig,zeros))25 26 indices = numpy.tile(numpy.arange(0,frame_len),(numframes,1)) + numpy.tile(numpy.arange(0,numframes*frame_step,frame_step),(frame_len,1)).T27 indices = numpy.array(indices,dtype=numpy.int32)28 frames = padsignal[indices]29 win = numpy.tile(winfunc(frame_len),(numframes,1))30 return frames*win31 32 33def deframesig(frames,siglen,frame_len,frame_step,winfunc=lambda x:numpy.ones((x,))):34 """Does overlap-add procedure to undo the action of framesig. 35 :param frames: the array of frames.36 :param siglen: the length of the desired signal, use 0 if unknown. Output will be truncated to siglen samples. 37 :param frame_len: length of each frame measured in samples.38 :param frame_step: number of samples after the start of the previous frame that the next frame should begin.39 :param winfunc: the analysis window to apply to each frame. By default no window is applied. 
40 :returns: a 1-D signal.41 """42 frame_len = round(frame_len)43 frame_step = round(frame_step)44 numframes = numpy.shape(frames)[0]45 assert numpy.shape(frames)[1] == frame_len, '"frames" matrix is wrong size, 2nd dim is not equal to frame_len'46 47 indices = numpy.tile(numpy.arange(0,frame_len),(numframes,1)) + numpy.tile(numpy.arange(0,numframes*frame_step,frame_step),(frame_len,1)).T48 indices = numpy.array(indices,dtype=numpy.int32)49 padlen = (numframes-1)*frame_step + frame_len 50 51 if siglen <= 0: siglen = padlen52 53 rec_signal = numpy.zeros((padlen,))54 window_correction = numpy.zeros((padlen,))55 win = winfunc(frame_len)56 57 for i in range(0,numframes):58 window_correction[indices[i,:]] = window_correction[indices[i,:]] + win + 1e-15 #add a little bit so it is never zero59 rec_signal[indices[i,:]] = rec_signal[indices[i,:]] + frames[i,:]60 61 rec_signal = rec_signal/window_correction62 return rec_signal[0:siglen]63 64def magspec(frames,NFFT):65 """Compute the magnitude spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. 66 :param frames: the array of frames. Each row is a frame.67 :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. 68 :returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the magnitude spectrum of the corresponding frame.69 """ 70 complex_spec = numpy.fft.rfft(frames,NFFT)71 return numpy.absolute(complex_spec)72 73def powspec(frames,NFFT):74 """Compute the power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. 75 :param frames: the array of frames. Each row is a frame.76 :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. 77 :returns: If frames is an NxD matrix, output will be NxNFFT. 
Each row will be the power spectrum of the corresponding frame.78 """ 79 return 1.0/NFFT * numpy.square(magspec(frames,NFFT))80 81def logpowspec(frames,NFFT,norm=1):82 """Compute the log power spectrum of each frame in frames. If frames is an NxD matrix, output will be NxNFFT. 83 :param frames: the array of frames. Each row is a frame.84 :param NFFT: the FFT length to use. If NFFT > frame_len, the frames are zero-padded. 85 :param norm: If norm=1, the log power spectrum is normalised so that the max value (across all frames) is 1.86 :returns: If frames is an NxD matrix, output will be NxNFFT. Each row will be the log power spectrum of the corresponding frame.87 """ 88 ps = powspec(frames,NFFT);89 ps[ps<=1e-30] = 1e-3090 lps = 10*numpy.log10(ps)91 if norm:92 return lps - numpy.max(lps)93 else:94 return lps95 96def preemphasis(signal,coeff=0.95):97 """perform preemphasis on the input signal.98 99 :param signal: The signal to filter.100 :param coeff: The preemphasis coefficient. 0 is no filter, default is 0.95.101 :returns: the filtered signal.102 """ ...

Full Screen

Full Screen

picklebag.py

Source:picklebag.py Github

copy

Full Screen

1import os2import pickle3import traindata4class FramePickle:5 def __init__(self, pickle_filename):6 self.pickle_filename = pickle_filename7 def generate(self):8 with open(self.pickle_filename, 'rb') as f:9 frames = pickle.load(f)10 for frame in frames:11 yield frame12 def dump(self, frames):13 print('DEBUG: Dumping frames to pickle. ', self.pickle_filename)14 with open(self.pickle_filename, 'wb') as f:15 pickle.dump(frames, f)16def get_pickle_folder(bag_file):17 return bag_file + '_pickle/'18def get_pickle_filename(bag_file, pickle_id):19 return os.path.join(get_pickle_folder(bag_file), str(pickle_id) + '.p')20HEADER_ID = 'header'21FRAME_COUNT = 'frame_count'22FRAME_FILENAMES = 'frame_filenames'23class PickleAdapter:24 def __init__(self, frames_per_pickle = 50):25 self.frames_per_pickle = frames_per_pickle26 self.frame_count = None27 self.generator = None28 self.next_frame = None29 # tracklet_file is allowed to be None30 def start_read(self, bag_file, tracklet_file):31 header_file = get_pickle_filename(bag_file, HEADER_ID)32 if os.path.exists(header_file):33 print('DEBUG: header found')34 else:35 print('DEBUG: header not found. 
Dicing pickles.')36 split_into_pickles(bag_file, tracklet_file, self.frames_per_pickle)37 with open(header_file, 'rb') as f:38 header = pickle.load(f)39 self.frame_count = header[FRAME_COUNT]40 self.generator = self.generate(header)41 try:42 self.next_frame = next(self.generator)43 except StopIteration:44 self.next_frame = None45 self.generator = None46 def count(self):47 return self.frame_count48 def empty(self):49 return self.next_frame is None50 def next(self):51 if self.next_frame is None:52 return None53 else:54 current_frame = self.next_frame55 try:56 self.next_frame = next(self.generator)57 except StopIteration:58 self.next_frame = None59 self.generator = None60 return current_frame61 def generate(self, header):62 frame_filenames = header[FRAME_FILENAMES]63 frame_pickles = []64 for frame_filename in frame_filenames:65 assert(os.path.exists(frame_filename))66 frame_pickles.append(FramePickle(frame_filename))67 for frame_pickle in frame_pickles:68 generator = frame_pickle.generate()69 empty = False70 while not empty:71 try:72 yield next(generator)73 except StopIteration:74 empty = True75def make_pickle_folder(bag_file):76 folder = get_pickle_folder(bag_file)77 if not os.path.exists(folder):78 os.makedirs(folder)79def split_into_pickles(bag_file, tracklet_file, frames_per_pickle):80 make_pickle_folder(bag_file)81 frame_count = 082 frames = []83 pickles = []84 datastream = traindata.TrainDataStream()85 datastream.start_read(bag_file, tracklet_file)86 while not datastream.empty():87 train_data_frame = datastream.next()88 frames.append(train_data_frame)89 frame_count += 190 if (frame_count % frames_per_pickle) == 0:91 # Dump pickle.92 frame_pickle = FramePickle(get_pickle_filename(bag_file, len(pickles)))93 frame_pickle.dump(frames)94 frames = []95 pickles.append(frame_pickle)96 # Dump rest of frames to pickle.97 frame_pickle = FramePickle(get_pickle_filename(bag_file, len(pickles)))98 frame_pickle.dump(frames)99 frames = []100 pickles.append(frame_pickle)101 # 
Pickle header102 frame_filenames = []103 for frame_pickle in pickles:104 frame_filenames.append(frame_pickle.pickle_filename)105 header = dict()106 header[FRAME_COUNT] = frame_count107 header[FRAME_FILENAMES] = frame_filenames108 header_file = get_pickle_filename(bag_file, HEADER_ID)109 with open(header_file, 'wb') as f:110 pickle.dump(header, f)111def pre_pickle(bag_tracklets, frames_per_pickle):112 import picklebag113 for bt in bag_tracklets:114 header_file = get_pickle_filename(bt.bag, HEADER_ID)115 if os.path.exists(header_file):116 print('DEBUG: header found', header_file)117 else:118 print('DEBUG: header not found.', header_file, 'Dicing pickles.')119 split_into_pickles(bt.bag, bt.tracklet, frames_per_pickle = frames_per_pickle)120if __name__ == '__main__':121 print('Warning: This will write as much as 20G of data to disk.')122 count = 0123 pickle_adapter = PickleAdapter()124 for i in range(2):125 pickle_adapter.start_read('/data/Didi-Release-2/Data/1/14_f.bag', '/data/output/tracklet/1/14_f/tracklet_labels.xml')126 while not pickle_adapter.empty():127 td = pickle_adapter.next()128 count += 1129 print(count)...

Full Screen

Full Screen

Slam.py

Source:Slam.py Github

copy

Full Screen

1__author__ = 'tom1231'2from PyQt4.QtGui import *3from BAL.Interface.DeviceFrame import DeviceFrame, EX_DEV, SLAM4from lxml.etree import Element, SubElement, XML5class Slam(DeviceFrame):6 def __init__(self, frame, data):7 DeviceFrame.__init__(self, EX_DEV, frame, data)8 self._tf_map_scanmatch_transform_frame_name = 'scanmatcher_frame'9 self._base_frame = 'base_link'10 self._odom_frame = 'odom_link'11 self._map_frame = 'map'12 self._scan_topic = 'scan'13 def toDict(self):14 data = dict()15 data['type'] = SLAM16 data['tf'] = self._tf_map_scanmatch_transform_frame_name17 data['base'] = self._base_frame18 data['odom'] = self._odom_frame19 data['map'] = self._map_frame20 data['scan'] = self._scan_topic21 return data22 def showDetails(self, items=None):23 self.tf_map_scanmatch_transform_frame_name = QLineEdit(self._tf_map_scanmatch_transform_frame_name)24 self.base_frame = QLineEdit(self._base_frame)25 self.odom_frame = QLineEdit(self._odom_frame)26 self.map_frame = QLineEdit(self._map_frame)27 self.scan_topic = QLineEdit(self._scan_topic)28 self._frame.layout().addRow(QLabel('Tf map scan match: '), self.tf_map_scanmatch_transform_frame_name)29 self._frame.layout().addRow(QLabel('Base frame: '), self.base_frame)30 self._frame.layout().addRow(QLabel('Odometry frame: '), self.odom_frame)31 self._frame.layout().addRow(QLabel('Map frame: '), self.map_frame)32 self._frame.layout().addRow(QLabel('Scan topic: '), self.scan_topic)33 def printDetails(self):34 self._frame.layout().addRow(QLabel('Tf map scan match: '), QLabel(self._tf_map_scanmatch_transform_frame_name))35 self._frame.layout().addRow(QLabel('Base frame: '), QLabel(self._base_frame))36 self._frame.layout().addRow(QLabel('Odometry frame: '), QLabel(self._odom_frame))37 self._frame.layout().addRow(QLabel('Map frame: '), QLabel(self._map_frame))38 self._frame.layout().addRow(QLabel('Scan topic: '), QLabel(self._scan_topic))39 def getName(self):40 return "SLAM"41 def saveToFile(self, parent):42 element = 
SubElement(parent, 'include', {43 'file': '$(find ric_board)/launch/hector_slam.launch'44 })45 SubElement(element, 'arg', {46 'name': 'tf_map_scanmatch_transform_frame_name',47 'value': self._tf_map_scanmatch_transform_frame_name48 })49 SubElement(element, 'arg', {50 'name': 'base_frame',51 'value': self._base_frame52 })53 SubElement(element, 'arg', {54 'name': 'odom_frame',55 'value': self._odom_frame56 })57 SubElement(element, 'arg', {58 'name': 'map_frame',59 'value': self._map_frame60 })61 SubElement(element, 'arg', {62 'name': 'pub_map_odom_transform',63 'default': 'true'64 })65 SubElement(element, 'arg', {66 'name': 'scan_subscriber_queue_size',67 'default': '5'68 })69 SubElement(element, 'arg', {70 'name': 'scan_topic',71 'value': self._scan_topic72 })73 SubElement(element, 'arg', {74 'name': 'map_size',75 'default': '2048'76 })77 def add(self):78 if not self.nameIsValid():79 QMessageBox.critical(self._frame, "Error", "Name already taken.")80 self._isValid = False81 return82 self._isValid = True83 self._tf_map_scanmatch_transform_frame_name = str(self.tf_map_scanmatch_transform_frame_name.text())84 self._base_frame = str(self.base_frame.text())85 self._odom_frame = str(self.odom_frame.text())86 self._map_frame = str(self.map_frame.text())87 self._scan_topic = str(self.scan_topic.text())88 def fromDict(self, data):89 self._tf_map_scanmatch_transform_frame_name = data['tf']90 self._base_frame = data['base']91 self._odom_frame = data['odom']92 self._map_frame = data['map']...

Full Screen

Full Screen

frame_diff.py

Source:frame_diff.py Github

copy

Full Screen

...7 # Difference between the current frame and the previous frame8 diff_frames_2 = cv2.absdiff(cur_frame, prev_frame)9 return cv2.bitwise_and(diff_frames_1, diff_frames_2)10# Define a function to get the current frame from the webcam11def get_frame(cap, scaling_factor):12 # Read the current frame from the video capture object13 _, frame = cap.read()14 # Resize the image15 frame = cv2.resize(frame, None, fx=scaling_factor, 16 fy=scaling_factor, interpolation=cv2.INTER_AREA)17 # Convert to grayscale18 gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)19 return gray 20if __name__=='__main__':21 # Define the video capture object22 cap = cv2.VideoCapture(0)23 # Define the scaling factor for the images24 scaling_factor = 0.525 26 # Grab the current frame27 prev_frame = get_frame(cap, scaling_factor) 28 # Grab the next frame29 cur_frame = get_frame(cap, scaling_factor) 30 # Grab the frame after that31 next_frame = get_frame(cap, scaling_factor) 32 # Keep reading the frames from the webcam 33 # until the user hits the 'Esc' key34 while True:35 # Display the frame difference36 cv2.imshow('Object Movement', frame_diff(prev_frame, 37 cur_frame, next_frame))38 # Update the variables39 prev_frame = cur_frame40 cur_frame = next_frame 41 # Grab the next frame42 next_frame = get_frame(cap, scaling_factor)43 # Check if the user hit the 'Esc' key44 key = cv2.waitKey(10)45 if key == 27:46 break47 # Close all the windows...

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2(async () => {3 const browser = await puppeteer.launch();4 const page = await browser.newPage();5 await page.screenshot({path: 'example.png'});6 await browser.close();7})();8const puppeteer = require('puppeteer');9(async () => {10 const browser = await puppeteer.launch();11 const page = await browser.newPage();12 await page.screenshot({path: 'example.png'});13 await browser.close();14})();15const puppeteer = require('puppeteer');16(async () => {17 const browser = await puppeteer.launch();18 const page = await browser.newPage();19 await page.screenshot({path: 'example.png'});20 await browser.close();21})();22const puppeteer = require('puppeteer');23(async () => {24 const browser = await puppeteer.launch();25 const page = await browser.newPage();26 await page.screenshot({path: 'example.png'});27 await browser.close();28})();29const puppeteer = require('puppeteer');30(async () => {31 const browser = await puppeteer.launch();32 const page = await browser.newPage();33 await page.screenshot({path: 'example.png'});34 await browser.close();35})();36const puppeteer = require('puppeteer');37(async () => {38 const browser = await puppeteer.launch();39 const page = await browser.newPage();40 await page.screenshot({path: 'example.png'});41 await browser.close();42})();43const puppeteer = require('puppeteer');44(async () => {45 const browser = await puppeteer.launch();46 const page = await browser.newPage();47 await page.goto('

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2(async () => {3 const browser = await puppeteer.launch({headless: false});4 const page = await browser.newPage();5 await page.waitFor(1000);6 await page.screenshot({path: 'example.png'});7 await browser.close();8})();9const puppeteer = require('puppeteer');10(async () => {11 const browser = await puppeteer.launch({headless: false});12 const page = await browser.newPage();13 await page.waitFor(1000);14 await page.screenshot({path: 'example.png'});15 await browser.close();16})();17const puppeteer = require('puppeteer');18(async () => {19 const browser = await puppeteer.launch({headless: false});20 const page = await browser.newPage();21 await page.waitFor(1000);22 await page.screenshot({path: 'example.png'});23 await browser.close();24})();25const puppeteer = require('puppeteer');26(async () => {27 const browser = await puppeteer.launch({headless: false});28 const page = await browser.newPage();29 await page.waitFor(1000);30 await page.screenshot({path: 'example.png'});31 await browser.close();32})();33const puppeteer = require('puppeteer');34(async () => {35 const browser = await puppeteer.launch({headless: false});36 const page = await browser.newPage();37 await page.waitFor(1000);38 await page.screenshot({path: 'example.png'});39 await browser.close();40})();41const puppeteer = require('puppeteer');42(async () => {43 const browser = await puppeteer.launch({headless: false});44 const page = await browser.newPage();

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2(async () => {3 const browser = await puppeteer.launch({4 });5 const page = await browser.newPage();6 await page.waitForSelector('input[title="Search"]');7 await page.type('input[title="Search"]', 'puppeteer');8 await page.waitForSelector('input[value="Google Search"]');9 await page.click('input[value="Google Search"]');10 await page.waitForSelector('input[value="I\'m Feeling Lucky"]');11 await page.click('input[value="I\'m Feeling Lucky"]');12 await page.waitForSelector('h1');13 await page.screenshot({path: 'test.png'});14 await browser.close();15})();

Full Screen

Using AI Code Generation

copy

Full Screen

1(async () => {2 const browser = await puppeteer.launch();3 const page = await browser.newPage();4 await page.screenshot({path: 'google.png'});5 await browser.close();6})();7const puppeteer = require('puppeteer');8(async () => {9 const browser = await puppeteer.launch();10 const page = await browser.newPage();11 await page.screenshot({path: 'google.png'});12 await browser.close();13})();14const puppeteer = require('puppeteer');15(async () => {16 const browser = await puppeteer.launch();17 const page = await browser.newPage();18 await page.screenshot({path: 'google.png'});19 await browser.close();20})();21const puppeteer = require('puppeteer');22(async () => {23 const browser = await puppeteer.launch();24 const page = await browser.newPage();25 await page.screenshot({path: 'google.png'});26 await browser.close();27})();28const puppeteer = require('puppeteer');29(async () => {30 const browser = await puppeteer.launch();31 const page = await browser.newPage();32 await page.screenshot({path: 'google.png'});33 await browser.close();34})();35const puppeteer = require('puppeteer');36(async () => {37 const browser = await puppeteer.launch();38 const page = await browser.newPage();39 await page.screenshot({path: 'google.png'});40 await browser.close();41})();42const puppeteer = require('puppeteer');43(async () => {44 const browser = await puppeteer.launch();45 const page = await browser.newPage();

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2(async () => {3 const browser = await puppeteer.launch({headless: false});4 const page = await browser.newPage();5 await page.waitForSelector('#frame1');6 const frame1 = await page.frames().find(f => f.name() === 'frame1');7 await frame1.waitForSelector('#frame1Input');8 await frame1.type('#frame1Input', 'Hello World');9 const frame2 = await page.frames().find(f => f.name() === 'frame2');10 await frame2.waitForSelector('#frame2Input');11 await frame2.type('#frame2Input', 'Hello World');12 await page.waitFor(1000);13 await browser.close();14})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2const fs = require('fs');3const path = require('path');4(async () => {5 const browser = await puppeteer.launch({ headless: false });6 const page = await browser.newPage();7 await page.screenshot({ path: 'google.png' });8 await browser.close();9})();10const puppeteer = require('puppeteer');11const fs = require('fs');12const path = require('path');13(async () => {14 const browser = await puppeteer.launch({ headless: false });15 const page = await browser.newPage();16 await page.screenshot({ path: 'example.png' });17 const frame = await page.frames().find(f => f.name() === 'myFrame');18 await frame.screenshot({ path: 'frame.png' });19 await browser.close();20})();

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2const fs = require('fs');3const path = require('path');4const express = require('express');5const app = express();6const port = 3000;7const bodyParser = require('body-parser');8const { exec } = require('child_process');9const { Console } = require('console');10var urlencodedParser = bodyParser.urlencoded({ extended: false });11app.use(bodyParser.json());12app.use(bodyParser.urlencoded({ extended: true }));

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2const fs = require('fs');3const path = require('path');4const axios = require('axios');5const cheerio = require('cheerio');6const { promisify } = require('util');7const { URL } = require('url');8const { createCanvas, loadImage } = require('canvas');9const { createCanvas: createCanvas2, loadImage: loadImage2 } = require('canvas');10const { createCanvas: createCanvas3, loadImage: loadImage3 } = require('canvas');11const { createCanvas: createCanvas4, loadImage: loadImage4 } = require('canvas');12const { createCanvas: createCanvas5, loadImage: loadImage5 } = require('canvas');13const { createCanvas: createCanvas6, loadImage: loadImage6 } = require('canvas');14const { createCanvas: createCanvas7, loadImage: loadImage7 } = require('canvas');15const { createCanvas: createCanvas8, loadImage: loadImage8 } = require('canvas');16const { createCanvas: createCanvas9, loadImage: loadImage9 } = require('canvas');17const { createCanvas: createCanvas10, loadImage: loadImage10 } = require('canvas');18const { createCanvas: createCanvas11, loadImage: loadImage11 } = require('canvas');19const { createCanvas: createCanvas12, loadImage: loadImage12 } = require('canvas');20const { createCanvas: createCanvas13, loadImage: loadImage13 } = require('canvas');21const { createCanvas: createCanvas14, loadImage: loadImage14 } = require('canvas');22const { createCanvas: createCanvas15, loadImage: loadImage15 } = require('canvas');23const { createCanvas: createCanvas16, loadImage: loadImage16 } = require('canvas');24const { createCanvas: createCanvas17, loadImage: loadImage17 } = require('canvas');25const { createCanvas: createCanvas18, loadImage: loadImage18 } = require('canvas');26const { createCanvas: createCanvas19, loadImage: loadImage19 } = require('canvas');27const { createCanvas: createCanvas20, loadImage: loadImage20 } = require('canvas');28const { createCanvas: createCanvas21, loadImage: loadImage21 } = require('canvas');29const { createCanvas: 
createCanvas22, loadImage: loadImage22 } = require('canvas');30const { createCanvas: createCanvas23, loadImage: loadImage23 } = require('canvas');31const { createCanvas: createCanvas24,

Full Screen

Using AI Code Generation

copy

Full Screen

1const puppeteer = require('puppeteer');2(async () => {3 const page = await browser.newPage();4 await page.waitForSelector('.wt-input-btn-group__input');5 await page.type('.wt-input-btn-group__input', 'shoes');6 await page.click('.wt-input-btn-group__btn');7 await page.waitForSelector('.wt-grid__item-xs-12.wt-grid__item-sm-6.wt-grid__item-md-4.wt-grid__item-lg-3.wt-grid__item-xl-3.wt-grid__item-xxl-3.wt-display-block-xs.wt-display-block-sm.wt-display-block-md.wt-display-block-lg.wt-display-block-xl.wt-display-block-xxl.wt-mr-xs-0.wt-mr-sm-0.wt-mr-md-0.wt-mr-lg-0.wt-mr-xl-0.wt-mr-xxl-0.wt-mb-xs-0.wt-mb-sm-0.wt-mb-md-0.wt-mb-lg-0.wt-mb-xl-0.wt-mb-xxl-0');8 await page.click('.wt-grid__item-xs-12.wt-grid__item-sm-6.wt-grid__item-md-4.wt-grid__item-lg-3.wt-grid__item-xl-3.wt-grid__item-xxl-3.wt-display-block-xs.wt-display-block-sm.wt-display-block-md.wt-display-block-lg.wt-display-block-xl.wt-display-block-xxl.wt-mr-xs-0.wt-mr-sm-0.wt-mr-md-0.wt-mr-lg-0.wt-mr-xl-0.wt-mr-xxl-0.wt-mb-xs-0.wt-mb-sm-0.wt-mb-md-0.wt-mb-lg-0.wt-mb-xl-0.wt-mb-xxl-0');9 await page.waitForSelector('.wt-grid__item-xs-12.wt-grid__item-sm-6.wt-grid__item-md-4.wt-grid__item-lg-3.wt-grid__item-xl-3.wt-grid

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run Puppeteer automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful