How to use the format_number method in LocalStack

Best Python code snippet using localstack_python
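In the snippets below, format_number appears in two different roles: the first snippet (categoryQueries.py) is PySpark code and calls what is evidently PySpark's built-in format_number(col, d), which rounds a numeric column to d decimal places and returns it as a formatted string column, while the second (keil_link.py) defines its own small format_number helper for zero-padded hexadecimal output. As a minimal, self-contained sketch of the PySpark usage (the DataFrame and column names here are illustrative and not taken from the snippets):

# Minimal sketch of pyspark.sql.functions.format_number (illustrative data).
from pyspark.sql import SparkSession
from pyspark.sql.functions import format_number

spark = SparkSession.builder.appName("format_number_demo").getOrCreate()
df = spark.createDataFrame([("CA", 0.393456), ("TX", 0.281234)], ["State", "Hispanic"])
# Round the ratio column to 4 decimal places; the result is a string column.
df = df.withColumn("Hispanic", format_number("Hispanic", 4))
df.show()
# +-----+--------+
# |State|Hispanic|
# +-----+--------+
# |   CA|  0.3935|
# |   TX|  0.2812|
# +-----+--------+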

categoryQueries.py

Source: categoryQueries.py (GitHub)


...
                              "Hispanic/Total AS Hispanic, NonHispanic/Total AS NonHispanic FROM cat2000_1 ORDER BY Total DESC")
hispPercDF2000 = spark.sql("SELECT State, Total, Hispanic/Total AS Hispanic, White/Total AS White, Black/Total AS Black, NativeAm/Total AS NativeAm, "
                           "Asian/Total AS Asian, PacIslander/Total AS PacIslander, Other/Total AS Other, TwoOrMore/Total AS TwoOrMore "
                           "FROM cat2000_2 ORDER BY Total DESC")
catPercDF2000 = catPercDF2000.withColumn("OneRace", format_number("OneRace", 4)) \
    .withColumn("White", format_number("White", 4)) \
    .withColumn("Black", format_number("Black", 4)) \
    .withColumn("NativeAm", format_number("NativeAM", 4)) \
    .withColumn("Asian", format_number("Asian", 4)) \
    .withColumn("PacIslander", format_number("PacIslander", 4)) \
    .withColumn("Other", format_number("Other", 4)) \
    .withColumn("TwoOrMore", format_number("TwoOrMore", 4)) \
    .withColumn("Hispanic", format_number("Hispanic", 4)) \
    .withColumn("NonHispanic", format_number("NonHispanic", 4))
hispPercDF2000 = hispPercDF2000.withColumn("Hispanic", format_number("Hispanic", 4)) \
    .withColumn("White", format_number("White", 4)) \
    .withColumn("Black", format_number("Black", 4)) \
    .withColumn("NativeAm", format_number("NativeAM", 4)) \
    .withColumn("Asian", format_number("Asian", 4)) \
    .withColumn("PacIslander", format_number("PacIslander", 4)) \
    .withColumn("Other", format_number("Other", 4)) \
    .withColumn("TwoOrMore", format_number("TwoOrMore", 4))
catPercDF2000.show(52)
hispPercDF2000.show(52)'''
# spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, "
#           "SUM(Asian) AS Asian, SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore, "
#           "SUM(Hispanic) AS Hispanic, SUM(NonHispanic) AS NonHispanic FROM cat2020_1").createOrReplaceTempView("usData2020_1")
# usData2020_1 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAM, NativeAm/Total AS N, "
#                          "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T, Hispanic, "
#                          "Hispanic/Total AS H, NonHispanic, NonHispanic/Total AS NH FROM usData2020_1")
# spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(Hispanic) AS Hispanic, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAM, SUM(Asian) AS Asian, "
#           "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM cat2020_2").createOrReplaceTempView("usData2020_2")
# usData2020_2 = spark.sql("SELECT Year, Total, Hispanic, Hispanic/Total AS H, White, White/Total AS W, Black, Black/Total AS B, NativeAM, NativeAm/Total AS N, "
#                          "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
#                          "FROM usData2020_2")
# usData2020_1 = usData2020_1.withColumn("S", format_number("S", 4)) \
#     .withColumn("W", format_number("W", 4)) \
#     .withColumn("B", format_number("B", 4)) \
#     .withColumn("N", format_number("N", 4)) \
#     .withColumn("A", format_number("A", 4)) \
#     .withColumn("P", format_number("P", 4)) \
#     .withColumn("O", format_number("O", 4)) \
#     .withColumn("T", format_number("T", 4)) \
#     .withColumn("H", format_number("H", 4)) \
#     .withColumn("NH", format_number("NH", 4))
# usData2020_2 = usData2020_2.withColumn("H", format_number("H", 4)) \
#     .withColumn("W", format_number("W", 4)) \
#     .withColumn("B", format_number("B", 4)) \
#     .withColumn("N", format_number("N", 4)) \
#     .withColumn("A", format_number("A", 4)) \
#     .withColumn("P", format_number("P", 4)) \
#     .withColumn("O", format_number("O", 4)) \
#     .withColumn("T", format_number("T", 4))
# hispUS2020 = hispUS2020.withColumn("S", format_number("S", 4)) \
#     .withColumn("W", format_number("W", 4)) \
#     .withColumn("B", format_number("B", 4)) \
#     .withColumn("N", format_number("N", 4)) \
#     .withColumn("A", format_number("A", 4)) \
#     .withColumn("P", format_number("P", 4)) \
#     .withColumn("O", format_number("O", 4)) \
#     .withColumn("T", format_number("T", 4))
# nonhispUS2020 = nonhispUS2020.withColumn("S", format_number("S", 4)) \
#     .withColumn("W", format_number("W", 4)) \
#     .withColumn("B", format_number("B", 4)) \
#     .withColumn("N", format_number("N", 4)) \
#     .withColumn("A", format_number("A", 4)) \
#     .withColumn("P", format_number("P", 4)) \
#     .withColumn("O", format_number("O", 4)) \
#     .withColumn("T", format_number("T", 4))
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, "
          "SUM(Asian) AS Asian, SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore, "
          "SUM(Hispanic) AS Hispanic, SUM(NonHispanic) AS NonHispanic FROM cat2010_1").createOrReplaceTempView("usData2010_1")
usData2010_1 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAM, NativeAm/Total AS N, "
                         "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T, Hispanic, "
                         "Hispanic/Total AS H, NonHispanic, NonHispanic/Total AS NH FROM usData2010_1")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(Hispanic) AS Hispanic, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAM, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM cat2010_2").createOrReplaceTempView("usData2010_2")
usData2010_2 = spark.sql("SELECT Year, Total, Hispanic, Hispanic/Total AS H, White, White/Total AS W, Black, Black/Total AS B, NativeAM, NativeAm/Total AS N, "
                         "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                         "FROM usData2010_2")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM hisp2010").createOrReplaceTempView("hispUS2010")
hispUS2010 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                       "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                       "FROM hispUS2010")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM nonhisp2010").createOrReplaceTempView("nonhispUS2010")
nonhispUS2010 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                          "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                          "FROM nonhispUS2010")
usData2010_1 = usData2010_1.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4)) \
    .withColumn("H", format_number("H", 4)) \
    .withColumn("NH", format_number("NH", 4))
usData2010_2 = usData2010_2.withColumn("H", format_number("H", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
hispUS2010 = hispUS2010.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
nonhispUS2010 = nonhispUS2010.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, "
          "SUM(Asian) AS Asian, SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore, "
          "SUM(Hispanic) AS Hispanic, SUM(NonHispanic) AS NonHispanic FROM cat2000_1").createOrReplaceTempView("usData2000_1")
usData2000_1 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                         "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T, Hispanic, "
                         "Hispanic/Total AS H, NonHispanic, NonHispanic/Total AS NH FROM usData2000_1")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(Hispanic) AS Hispanic, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM cat2000_2").createOrReplaceTempView("usData2000_2")
usData2000_2 = spark.sql("SELECT Year, Total, Hispanic, Hispanic/Total AS H, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                         "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                         "FROM usData2000_2")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM hisp2000").createOrReplaceTempView("hispUS2000")
hispUS2000 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                       "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                       "FROM hispUS2000")
spark.sql("SELECT MIN(Year) AS Year, SUM(Total) AS Total, SUM(OneRace) AS OneRace, SUM(White) AS White, SUM(Black) AS Black, SUM(NativeAm) AS NativeAm, SUM(Asian) AS Asian, "
          "SUM(PacIslander) AS PacIslander, SUM(Other) AS Other, SUM(TwoOrMore) AS TwoOrMore FROM nonhisp2000").createOrReplaceTempView("nonhispUS2000")
nonhispUS2000 = spark.sql("SELECT Year, Total, OneRace, OneRace/Total AS S, White, White/Total AS W, Black, Black/Total AS B, NativeAm, NativeAm/Total AS N, "
                          "Asian, Asian/Total AS A, PacIslander, PacIslander/Total AS P, Other, Other/Total AS O, TwoOrMore, TwoOrMore/Total AS T "
                          "FROM nonhispUS2000")
usData2000_1 = usData2000_1.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4)) \
    .withColumn("H", format_number("H", 4)) \
    .withColumn("NH", format_number("NH", 4))
usData2000_2 = usData2000_2.withColumn("H", format_number("H", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
hispUS2000 = hispUS2000.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
nonhispUS2000 = nonhispUS2000.withColumn("S", format_number("S", 4)) \
    .withColumn("W", format_number("W", 4)) \
    .withColumn("B", format_number("B", 4)) \
    .withColumn("N", format_number("N", 4)) \
    .withColumn("A", format_number("A", 4)) \
    .withColumn("P", format_number("P", 4)) \
    .withColumn("O", format_number("O", 4)) \
    .withColumn("T", format_number("T", 4))
usData_1 = usData2000_1.union(usData2010_1)  #.union(usData2020_1)
usData_2 = usData2000_2.union(usData2010_2)  #.union(usData2020_2)
hispUS = hispUS2000.union(hispUS2010)  #.union(hispUS2020)
nonhispUS = nonhispUS2000.union(nonhispUS2010)  #.union(nonhispUS2020)
usData_1.show()
usData_2.show()
print("Hispanic Population")
hispUS.show()
print("Non-Hispanic Population")
nonhispUS.show()
# SAVE FILES
# savepath = path + "query_data/byCategory/"
# usData_1.write.csv(savepath + "usData_1")
# usData_2.write.csv(savepath + "usData_2")
...
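A possible compaction of the repeated .withColumn chains above, not part of the original script: loop over the ratio columns and apply format_number once per column. The DataFrame and column names mirror the snippet; the import is assumed to exist in the original file.

# Hypothetical refactor: apply format_number to each ratio column in a loop
# instead of chaining withColumn calls by hand.
# Assumes: from pyspark.sql.functions import format_number
ratio_cols = ["S", "W", "B", "N", "A", "P", "O", "T"]
for c in ratio_cols:
    hispUS2010 = hispUS2010.withColumn(c, format_number(c, 4))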


keil_link.py

Source: keil_link.py (GitHub)


from string import Template
import textwrap

def generate_link_script(config):
    mapping = dict()
    mapping['ro_base'] = format_number(config['MCU_MRAM']['start'])
    mapping['rw_base'] = format_number(config['MCU_TCM']['start'])
    mapping['sram_base'] = format_number(config['MCU_SRAM']['start'])
    mapping['shared_base'] = format_number(config['SHARED_SRAM']['start'])
    mapping['ro_size'] = format_number(config['MCU_MRAM']['length'])
    mapping['rw_size'] = format_number(config['MCU_TCM']['length'])
    mapping['sram_size'] = format_number(config['MCU_SRAM']['length'])
    mapping['shared_size'] = format_number(config['SHARED_SRAM']['length'])
    mapping['additional_sections'] = generate_sections(config)
    return link_script_template.substitute(**mapping)

def generate_sections(config):
    # If there aren't any custom sections in the config file, we don't need to
    # add anything to the linker scripts.
    if 'custom_sections' not in config:
        return ''
    elif not config['custom_sections']:
        return ''
    L = []
    for mem_section in config['custom_sections']:
        D = dict()
        D['name'] = mem_section['blockname']
        D['start'] = format_number(mem_section['start'])
        D['length'] = format_number(mem_section['length'])
        D['sections'] = '\n'.join(' * ({})'.format(x) for x in mem_section['sections'])
        S = extra_section_template.substitute(**D)
        L.append(textwrap.indent(S, 4 * ' '))
    return '\n' + '\n'.join(L)

def format_number(n):
    return '0x{:08X}'.format(n)

link_script_template = Template('''\
;******************************************************************************
;
; Scatter file for Keil linker configuration.
;
;******************************************************************************
LR_1 ${ro_base}
{
    MCU_MRAM ${ro_base} ${ro_size}
    {
        *.o (RESET, +First)
        * (+RO)
    }
...
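In this snippet, format_number is not the PySpark function at all but a local helper that renders an integer as a zero-padded, 8-digit hexadecimal value for the scatter-file template. For example (values are illustrative):

# The helper pads integers to 8 hex digits, e.g. for linker base addresses and sizes.
>>> format_number(0x1000)
'0x00001000'
>>> format_number(64 * 1024)
'0x00010000'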


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run LocalStack automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!!

Get 100 minutes of automation testing FREE!!

