How to use the fsExtra.pathExists method in Cypress

Best JavaScript code snippets showing fsExtra.pathExists in use, drawn from open-source projects and Cypress tests.
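fs-extra is a Node library and Cypress specs run in the browser, so fsExtra.pathExists cannot be called from a spec directly. The usual pattern is to register a task in the Node-side configuration and call it with cy.task. Below is a minimal sketch for Cypress 10+ (the task name and fixture path are only illustrative); the examples further down use the older cypress/plugins/index.js layout, but the idea is the same.

// cypress.config.js — setupNodeEvents runs in Node, so fs-extra is available here
const { defineConfig } = require('cypress')
const fsExtra = require('fs-extra')

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      on('task', {
        // pathExists resolves to a boolean and does not reject for missing paths
        pathExists(path) {
          return fsExtra.pathExists(path)
        }
      })
    }
  }
})

// example spec — the browser-side test asks the Node process to do the check
describe('fixture file', () => {
  it('exists on disk', () => {
    cy.task('pathExists', 'cypress/fixtures/data.json').should('be.true')
  })
})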

pantiler.js

Source: pantiler.js (GitHub)


import Util from 'util'
import FSExtra from 'fs-extra'
import * as Zod from 'zod'
import Scramjet from 'scramjet'
import Axios from 'axios'
import Unzipper from 'unzipper'
import Fontnik from 'fontnik'
import Gdal from 'gdal-next'
import Spritezero from '@mapbox/spritezero'
import Tippecanoe from './tippecanoe.js'

function setup(directory, cache = '.pantiler-cache', clearCache = false, bounds = null, alert = () => {}) {
    function validate(tiledata) {
        const inputSchema = Zod.object({
            name: Zod.string(),
            url: Zod.string().url().optional(), // otherwise use path
            path: Zod.string().optional(), // otherwise use url
            format: Zod.string().optional(),
            matching: Zod.string().optional()
        })
        const schema = Zod.object({
            host: Zod.string().url(),
            zoomFrom: Zod.number().positive(),
            zoomTo: Zod.number().positive(),
            fonts: Zod.array(inputSchema).optional(),
            sprites: Zod.array(inputSchema).optional(),
            sources: Zod.array(Zod.object({
                name: Zod.string(),
                system: Zod.string(),
                fieldLongitude: Zod.string().optional(),
                fieldLatitude: Zod.string().optional(),
                inputs: Zod.array(inputSchema),
                outputs: Zod.array(Zod.object({
                    name: Zod.string(),
                    layer: Zod.string().optional(),
                    filter: Zod.string().optional(),
                    fields: Zod.object({}).optional(),
                    zoomMin: Zod.number().optional(),
                    zoomMax: Zod.number().optional()
                }))
            })),
            styling: Zod.object({}) // not attempting to validate this
        })
        schema.parse(tiledata)
    }
    async function englyph(fonts) {
        await fonts.reduce(async (a, font) => {
            await a
            alert({
                process: 'Englyphing',
                input: font.name,
                message: 'in progress...'
            })
            const location = `${directory}/glyphs/${font.name}`
            await FSExtra.ensureDir(location)
            const data = font.path
                ? await FSExtra.readFile(font.path)
                : await (await Axios({ url: font.url, responseType: 'arraybuffer' })).data
            let ranges = []
            for (let i = 0; i < 65536; (i = i + 256)) {
                ranges.push({ start: i, end: Math.min(i + 255, 65535) })
            }
            const conversions = ranges.map(async range => {
                const result = await Util.promisify(Fontnik.range)({
                    font: data,
                    start: range.start,
                    end: range.end
                })
                return FSExtra.writeFile(`${location}/${range.start}-${range.end}.pbf`, result)
            })
            await Promise.all(conversions)
            alert({
                process: 'Englyphing',
                input: font.name,
                message: 'done'
            })
        }, Promise.resolve())
    }
    async function ensprite(sprites) {
        const ratios = [1, 2]
        await ratios.reduce(async (a, ratio) => {
            await a
            alert({
                process: 'Enspriting',
                input: `@${ratio}x`,
                message: 'in progress...'
            })
            const ratioAt = ratio > 1 ? `@${ratio}x` : ''
            const images = sprites.map(async sprite => {
                const data = sprite.path
                    ? await FSExtra.readFile(sprite.path)
                    : await (await Axios({ url: sprite.url, responseType: 'arraybuffer' })).data
                return {
                    id: sprite.name,
                    svg: data
                }
            })
            const config = {
                imgs: await Promise.all(images),
                pixelRatio: ratio
            }
            const manifest = await Util.promisify(Spritezero.generateLayout)({ ...config, format: true })
            await FSExtra.writeJson(`${directory}/sprites${ratioAt}.json`, manifest)
            const layout = await Util.promisify(Spritezero.generateLayout)({ ...config, format: false })
            const image = await Util.promisify(Spritezero.generateImage)(layout)
            await FSExtra.writeFile(`${directory}/sprites${ratioAt}.png`, image)
            alert({
                process: 'Enspriting',
                input: `@${ratio}x`,
                message: 'done'
            })
        }, Promise.resolve())
    }
    async function fetch(name, inputs) {
        const downloads = inputs.map(async input => {
            const inputSpecifier = inputs.length > 1 ? `-${input.name}` : ''
            if (!input.url && !input.path) throw new Error(`${name}${inputSpecifier}: need to specify either url or path`)
            const extension = input.format || (input.path || input.url).split('.').pop()
            const file = `${cache}/${name}${inputSpecifier}.${extension}`
            const fileExists = await FSExtra.pathExists(file)
            if (fileExists) {
                alert({
                    process: 'Fetching',
                    input: name + inputSpecifier,
                    message: 'using cache'
                })
                return {
                    name: input.name,
                    path: file,
                    ...(input.matching ? { matching: input.matching } : {})
                }
            }
            if (input.path) {
                await FSExtra.ensureSymlink(input.path, file)
                alert({
                    process: 'Fetching',
                    input: name + inputSpecifier,
                    message: 'linked'
                })
                return {
                    name: input.name,
                    path: file,
                    ...(input.matching ? { matching: input.matching } : {})
                }
            }
            alert({
                process: 'Fetching',
                input: name + inputSpecifier,
                message: 'in progress...'
            })
            const response = await Axios({
                url: input.url,
                responseType: 'stream'
            })
            const writer = response.data.pipe(FSExtra.createWriteStream(file))
            await new Promise((resolve, reject) => {
                writer.on('error', reject)
                writer.on('close', resolve)
            })
            alert({
                process: 'Fetching',
                input: name + inputSpecifier,
                message: 'done'
            })
            return {
                name: input.name,
                path: file,
                ...(input.matching ? { matching: input.matching } : {})
            }
        })
        return Promise.all(downloads)
    }
    async function extract(name, archives) {
        const extractions = archives.map(async archive => {
            if (!archive.path.endsWith('zip')) {
                return archive // no extraction needed
            }
            const archiveSpecifier = archives.length > 1 ? `-${archive.name}` : ''
            const zip = await Unzipper.Open.file(archive.path)
            const entries = zip.files.filter(entry => {
                if (entry.type !== 'File') return false
                if (entry.path.match(/\.(pdf|txt)$/)) return false
                if (archive.matching && !entry.path.match(new RegExp(archive.matching))) return false
                return true
            })
            const extractions = entries.map(async entry => {
                const extension = entry.path.split('.').pop()
                const file = `${cache}/${name}${archiveSpecifier}.${extension}`
                const fileExists = await FSExtra.pathExists(file)
                if (fileExists) {
                    alert({
                        process: 'Extracting',
                        input: name + archiveSpecifier,
                        ...(entries.length > 1 ? { output: name + (entries.length > 1 ? `/${extension}` : '') } : {}),
                        message: 'using cache'
                    })
                    return { name: archive.name, path: file }
                }
                alert({
                    process: 'Extracting',
                    input: name + archiveSpecifier,
                    ...(entries.length > 1 ? { output: name + (entries.length > 1 ? `/${extension}` : '') } : {}),
                    message: 'in progress...'
                })
                const writer = entry.stream().pipe(FSExtra.createWriteStream(file))
                await new Promise((resolve, reject) => {
                    writer.on('error', reject)
                    writer.on('finish', resolve)
                })
                alert({
                    process: 'Extracting',
                    input: name + archiveSpecifier,
                    ...(entries.length > 1 ? { output: name + (entries.length > 1 ? `/${extension}` : '') } : {}),
                    message: 'done'
                })
                return { name: archive.name, path: file }
            })
            const extracted = await Promise.all(extractions)
            if (extracted.length === 1) return extracted[0]
            else if (extracted.find(file => file.path.endsWith('shp'))) return extracted.find(file => file.path.endsWith('shp'))
            else throw new Error(`${name}${archiveSpecifier}: archive has multiple files, you need to specify a input matching`)
        })
        return Promise.all(extractions)
    }
    async function convert(name, system, fieldLongitude, fieldLatitude, inputs, outputs) {
        const reprojection = new Gdal.CoordinateTransformation(Gdal.SpatialReference.fromProj4(system), Gdal.SpatialReference.fromProj4('+init=epsg:4326'))
        return outputs.reduce(async (previousOutput, output) => {
            await previousOutput
            const outputSpecifier = outputs.length > 1 ? `-${output.name}` : ''
            const file = `${cache}/${name}${outputSpecifier}.geo.json`
            const fileExists = await FSExtra.pathExists(file)
            if (fileExists) {
                alert({
                    process: 'Converting',
                    input: name,
                    ...(outputs.length > 1 || inputs.length > 1 ? { output: name + outputSpecifier } : {}),
                    message: 'using cache'
                })
                return
            }
            const outputData = Gdal.open(file, 'w', 'GeoJSON')
            const outputLayer = outputData.layers.create(`${name}${outputSpecifier}`, null, Gdal.wkbUnknown)
            const outputFieldDefinitions = Object.keys(output.fields || {}).map(key => {
                return new Gdal.FieldDefn(key, Gdal.OFTString)
            })
            outputLayer.fields.add(outputFieldDefinitions)
            inputs.forEach(input => {
                alert({
                    process: 'Converting',
                    input: name + (inputs.length > 1 ? `-${input.name}` : ''),
                    ...(outputs.length > 1 || inputs.length > 1 ? { output: name + outputSpecifier } : {}),
                    message: 'in progress...'
                })
                const inputData = Gdal.open(input.path)
                const inputLayer = output.filter
                    ? inputData.executeSQL(`select * from "${output.layer || inputData.layers.get(0).name}" where ${output.filter}`) // produces correct results, unlike using setAttributeFilter()
                    : inputData.layers.get(output.layer || 0)
                if (bounds) {
                    const reprojectionReverse = new Gdal.CoordinateTransformation(Gdal.SpatialReference.fromProj4('+init=epsg:4326'), Gdal.SpatialReference.fromProj4(system))
                    const min = reprojectionReverse.transformPoint(bounds[0], bounds[1])
                    const max = reprojectionReverse.transformPoint(bounds[2], bounds[3])
                    inputLayer.setSpatialFilter(min.x, min.y, max.x, max.y)
                }
                inputLayer.features.forEach(feature => {
                    const outputFeature = new Gdal.Feature(outputLayer)
                    const outputFields = Object.entries(output.fields || {}).map(([key, value]) => {
                        try {
                            return [key, feature.fields.get(value)]
                        }
                        catch (e) {
                            return [key, null]
                        }
                    })
                    outputFeature.fields.set(Object.fromEntries(outputFields))
                    const outputGeometry = (fieldLongitude && fieldLatitude)
                        ? Gdal.Geometry.fromGeoJson({ type: 'Point', coordinates: [Number(feature.fields.get(fieldLongitude)), Number(feature.fields.get(fieldLatitude))] })
                        : feature.getGeometry()
                    outputGeometry.transform(reprojection) // mutates in-place
                    outputFeature.setGeometry(outputGeometry)
                    outputLayer.features.add(outputFeature)
                })
                alert({
                    process: 'Converting',
                    input: name + (inputs.length > 1 ? `-${input.name}` : ''),
                    ...(outputs.length > 1 || inputs.length > 1 ? { output: name + outputSpecifier } : {}),
                    message: 'done'
                })
            })
            outputData.close()
            if (output.zoomMin || output.zoomMax) {
                const collection = await FSExtra.readJson(file)
                const tippecanoe = {
                    ...(output.zoomMin ? { minzoom: output.zoomMin } : {}),
                    ...(output.zoomMax ? { maxzoom: output.zoomMax } : {})
                }
                const features = collection.features.map(feature => ({ ...feature, tippecanoe }))
                await FSExtra.writeJson(file, { ...collection, features })
            }
        }, Promise.resolve())
    }
    async function tile(sources, zoomFrom, zoomTo) {
        alert({ process: 'Tiling', message: 'in progress...' })
        const sourcelist = sources.flatMap(source => {
            return source.outputs.map(output => {
                const outputSpecifier = source.outputs.length > 1 ? `-${output.name}` : ''
                return `named-layer=${output.name}:${cache}/${source.name}${outputSpecifier}.geo.json`
            })
        })
        const options = [
            `minimum-zoom=${zoomFrom}`,
            `maximum-zoom=${zoomTo}`,
            `output-to-directory=${directory}`,
            ...(bounds ? [`clip-bounding-box=${bounds.join(',')}`] : []),
            'generate-ids',
            'no-tile-compression',
            ...sourcelist
        ]
        await Tippecanoe(options, status => {
            alert({ process: 'Tiling', message: `in progress... ${status}` })
        })
        const metadata = await FSExtra.readJson(`${directory}/metadata.json`)
        await FSExtra.remove(`${directory}/metadata.json`)
        alert({ process: 'Tiling', message: 'done' })
        return metadata
    }
    async function style(metadata, styling, host, hasFonts, hasSprites) {
        const styles = {
            version: 8,
            metadata: {
                date: new Date().toISOString()
            },
            ...(hasFonts ? { glyphs: `${host}/glyphs/{fontstack}/{range}.pbf` } : {}),
            ...(hasSprites ? { sprite: `${host}/sprites` } : {}),
            sources: {
                primary: {
                    type: 'vector',
                    tiles: [`${host}/{z}/{x}/{y}.pbf`],
                    bounds: metadata.bounds.split(',').map(Number),
                    minzoom: Number(metadata.minzoom),
                    maxzoom: Number(metadata.maxzoom)
                }
            },
            ...styling
        }
        alert({ process: 'Styling', message: 'done' })
        return FSExtra.writeJson(`${directory}/style.json`, styles)
    }
    async function cleanup(clearCache) {
        if (!clearCache) return
        await FSExtra.remove(cache)
        alert({ process: 'Cleaning up', message: 'done' })
    }
    async function run(tiledata) {
        const directoryExists = await FSExtra.exists(directory)
        if (directoryExists) throw new Error('directory already exists')
        validate(tiledata)
        await FSExtra.ensureDir(directory)
        await FSExtra.ensureDir(cache)
        if (tiledata.fonts) await englyph(tiledata.fonts)
        if (tiledata.sprites) await ensprite(tiledata.sprites)
        await Scramjet.DataStream.from(tiledata.sources).each(async source => {
            const archives = await fetch(source.name, source.inputs)
            const inputs = await extract(source.name, archives)
            return convert(source.name, source.system, source.fieldLongitude, source.fieldLatitude, inputs, source.outputs)
        }).whenEnd()
        const metadata = await tile(tiledata.sources, tiledata.zoomFrom, tiledata.zoomTo)
        await style(metadata, tiledata.styling, tiledata.host, tiledata.fonts?.length > 0, tiledata.sprites?.length > 0)
        await cleanup(clearCache)
        alert({ message: 'Done!' })
    }
    return run
}
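In pantiler.js, pathExists acts as a build-cache check: before fetching, extracting, or converting, the code looks for the expected output file in the cache directory and reuses it when present. Distilled to its core, the pattern looks roughly like the sketch below (the cachedFetch helper and file layout are illustrative, not part of pantiler):

import FSExtra from 'fs-extra'
import Axios from 'axios'

// illustrative helper: download a URL into a cache directory unless it is already there
async function cachedFetch(cache, name, url) {
    const file = `${cache}/${name}`
    if (await FSExtra.pathExists(file)) return file // reuse the cached copy
    await FSExtra.ensureDir(cache)
    const response = await Axios({ url, responseType: 'stream' })
    await new Promise((resolve, reject) => {
        response.data.pipe(FSExtra.createWriteStream(file))
            .on('close', resolve)
            .on('error', reject)
    })
    return file
}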


index.js

Source: index.js (GitHub)


#!/usr/bin/env node
const updateChk = require('../lib/update')
const downloadTpl = require('../lib/download')
const compileStr = require('../lib/compile')
const program = require('commander')
const inquirer = require('inquirer')
const fsextra = require('fs-extra')
const path = require('path')
const ora = require('ora')
const chalk = require('chalk')
const symbols = require('log-symbols')
const shell = require('child_process').exec

// version
program
  .version(require('../package.json').version, '-v, --version')
// upgrade
program
  .command('upgrade')
  .description("Check the VuePress-Creator's version.")
  .option('-t, --templete', 'Upgrade the VuePress templete version.')
  .action(() => {
    let _length = process.argv.slice(2).length
    if (_length > 2) {
      program.outputHelp()
    } else {
      switch (_length) {
        case 1:
          if (process.argv[2] == 'upgrade') {
            updateChk()
          }
          break
        case 2:
          if (process.argv[3] == '-t') {
            const templatePath = path.resolve(__dirname, '../template')
            fsextra.emptyDir(templatePath).then(() => {
              downloadTpl(templatePath)
            }).catch(err => {
              console.error(err)
            })
          }
          break
        default:
          program.outputHelp()
      }
    }
  })
// default templete
program
  .command('def <project>')
  .description('Initialize the default template for VuePress.')
  .action(project => {
    fsextra
      .pathExists(project)
      .then(exists => {
        if (!exists) {
          const templatePath = path.resolve(__dirname, '../template')
          const processPath = process.cwd()
          const targetPath = `${processPath}/${project}`
          fsextra.pathExists(templatePath).then(exists => {
            if (exists) {
              compileAction(project, templatePath, targetPath, { theme: null }).then(() => {
                showInfoAction(project)
              })
            } else {
              fsextra.emptyDir(templatePath).then(() => {
                downloadTpl(templatePath).then(() => {
                  compileAction(project, templatePath, targetPath, { theme: null }).then(() => {
                    showInfoAction(project)
                  })
                })
              }).catch(err => {
                console.error(err)
              })
            }
          })
        } else {
          console.log(symbols.error, chalk.red('The project already exists.'))
        }
      }).catch(err => {
        console.error(err)
      })
  })
// antdocs theme
program
  .command('ads <project>')
  .description('Initialize the template with AntDocs theme for VuePress.')
  .action(project => {
    fsextra
      .pathExists(project)
      .then(exists => {
        if (!exists) {
          const templatePath = path.resolve(__dirname, '../template')
          const processPath = process.cwd()
          const targetPath = `${processPath}/${project}`
          const stylePath = `${processPath}/${project}/docs/.vuepress/styles`
          fsextra.pathExists(templatePath).then(exists => {
            if (exists) {
              compileAction(project, templatePath, targetPath, { theme: 'antdocs' }).then(() => {
                installThemeAction(project).then(() => {
                  fsextra.emptyDir(stylePath)
                  showInfoAction(project)
                })
              })
            } else {
              fsextra.emptyDir(templatePath).then(() => {
                downloadTpl(templatePath).then(() => {
                  compileAction(project, templatePath, targetPath, { theme: 'antdocs' }).then(() => {
                    installThemeAction(project).then(() => {
                      fsextra.emptyDir(stylePath)
                      showInfoAction(project)
                    })
                  })
                })
              }).catch(err => {
                console.error(err)
              })
            }
          })
        } else {
          console.log(symbols.error, chalk.red('The project already exists.'))
        }
      }).catch(err => {
        console.error(err)
      })
  })
// init
program
  .name('vuepress-creator')
  .usage('<commands> [options]')
  .command('init <project>')
  .description('Create a VuePress project.')
  .action(project => {
    fsextra.pathExists(project).then(exists => {
      if (!exists) {
        inquirer.prompt([{
          type: 'list',
          name: 'theme',
          message: 'Pick a theme for VuePress?',
          choices: [
            'Default',
            'AntDocs (Ant Design style)'
          ],
          default: 'Default'
        }]).then(answers => {
          let _theme = answers.theme == 'Default' ? null : 'antdocs'
          const templatePath = path.resolve(__dirname, '../template')
          const processPath = process.cwd()
          const targetPath = `${processPath}/${project}`
          const stylePath = `${processPath}/${project}/docs/.vuepress/styles`
          switch (_theme) {
            case null:
              fsextra.pathExists(templatePath).then(exists => {
                if (exists) {
                  compileAction(project, templatePath, targetPath, { theme: null }).then(() => {
                    showInfoAction(project)
                  })
                } else {
                  fsextra.emptyDir(templatePath).then(() => {
                    downloadTpl(templatePath).then(() => {
                      compileAction(project, templatePath, targetPath, { theme: null }).then(() => {
                        showInfoAction(project)
                      })
                    })
                  }).catch(err => {
                    console.error(err)
                  })
                }
              })
              break
            case 'antdocs':
              fsextra.pathExists(templatePath).then(exists => {
                if (exists) {
                  compileAction(project, templatePath, targetPath, { theme: 'antdocs' }).then(() => {
                    installThemeAction(project).then(() => {
                      fsextra.emptyDir(stylePath)
                      showInfoAction(project)
                    })
                  })
                } else {
                  fsextra.emptyDir(templatePath).then(() => {
                    downloadTpl(templatePath).then(() => {
                      compileAction(project, templatePath, targetPath, { theme: 'antdocs' }).then(() => {
                        installThemeAction(project).then(() => {
                          fsextra.emptyDir(stylePath)
                          showInfoAction(project)
                        })
                      })
                    })
                  }).catch(err => {
                    console.error(err)
                  })
                }
              })
              break
          }
        })
      } else {
        console.log(symbols.error, chalk.red('The project already exists.'))
      }
    })
  })
program.on('--help', function() {
  console.log('')
  console.log('Examples:')
  console.log('  $ vuepress-creator init project')
  console.log('  $ vuepress-creator upgrade -t')
  console.log('')
})
program.parse(process.argv)
function compileAction(prjName, tplPath, tgrPath, cfgGather) {
  return new Promise(function(resolve, reject) {
    fsextra.copy(tplPath, tgrPath).then(() => {
      const configPath = `${prjName}/docs/.vuepress/config.js`
      const configGather = cfgGather
      compileStr(configPath, configGather).then(() => {
        resolve()
      }).catch(err => {
        return err
      })
    }).catch(err => {
      return reject(err)
    })
  })
}
function installThemeAction(prjName) {
  return new Promise(function(resolve, reject) {
    const installSpinner = ora(chalk.cyan('Install AntDocs theme...'))
    installSpinner.start()
    shell(`cd ${prjName} && npm i vuepress-theme-antdocs`, (err, stdout, stderr) => {
      if (err) {
        installSpinner.text = 'Install AntDocs theme failed.'
        installSpinner.fail()
        return reject(err)
      }
      installSpinner.text = 'Install AntDocs theme successful.'
      installSpinner.succeed()
      resolve()
    })
  })
}
function showInfoAction(prjName) {
  console.log('')
  console.log('    To get started:')
  console.log('')
  console.log(chalk.yellow(`      cd ${prjName}`))
  console.log(`      ${chalk.yellow('npm install')} or ${chalk.yellow('yarn install')}`)
  console.log(`      ${chalk.yellow('npm run dev')} or ${chalk.yellow('yarn run dev')}`)
  console.log('')
}
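index.js calls pathExists twice for each command: once to refuse to scaffold into a project directory that already exists, and once to decide whether the local template still needs to be downloaded. A stripped-down version of that guard might look like this (createProject is a hypothetical stand-in, and the template downloader is passed in as a function rather than required as in the original):

const fsextra = require('fs-extra')

// hypothetical guard: only scaffold when the target directory is absent
async function createProject(project, templatePath, downloadTpl) {
  if (await fsextra.pathExists(project)) {
    throw new Error('The project already exists.')
  }
  const templateReady = await fsextra.pathExists(templatePath)
  if (!templateReady) {
    await downloadTpl(templatePath) // fetch the template first, as index.js does
  }
  await fsextra.copy(templatePath, project)
}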


reconcile.js

Source: reconcile.js (GitHub)


import Crypto from 'crypto'
import FSExtra from 'fs-extra'
import Scramjet from 'scramjet'
import Axios from 'axios'
import AxiosRetry from 'axios-retry'
import AxiosRateLimit from 'axios-rate-limit'
import FormData from 'form-data'
import Querystring from 'querystring'

function request(retries, cache, verbose, alert, limit, messages) {
    const cacheDirectory = '.reconcile-cache'
    const timeout = 45 * 1000
    const toUrl = location => typeof location === 'string' ? location : location.url
    const toLocationName = location => {
        if (!location) throw new Error('Request location is blank')
        const method = location.method ? location.method.toUpperCase() : 'GET'
        const stringifyObject = object => Object.entries(object).map(([key, value]) => `${key}=${JSON.stringify(value)}`).join(' ')
        return `${method} ${toUrl(location)}`
            + (location.params ? '?' + Querystring.stringify(location.params) : '')
            + (location.dataQuery ? ' ' + Querystring.stringify(location.dataQuery) : '')
            + (location.dataForm ? ' <' + stringifyObject(location.dataForm) + '>' : '')
            + (location.data && !location.dataQuery && !location.dataForm ? ' [' + stringifyObject(location.data) + ']' : '')
    }
    const toErrorMessage = e => {
        const reconcilerError = e.response && messages(e)
        if (reconcilerError) return reconcilerError // look for reconciler-specific errors first
        const locationName = toLocationName(e.config)
        if (e.response) return `Received code ${e.response.status}: ${locationName}` // response recieved, but non-2xx
        if (e.code === 'ECONNABORTED') return `Timed out after ${timeout / 1000}ms: ${locationName}` // request timed out
        if (e.code) return `Error ${e.code}: ${locationName}` // request failed, with error code
        return e.message // request not made
    }
    const instance = Axios.create({ timeout })
    AxiosRetry(instance, {
        retries,
        shouldResetTimeout: true,
        retryCondition: e => {
            return !e.response || e.response.status >= 500 || e.response.status === 429 // no response, server error, or hit rate limit
        },
        retryDelay: (number, e) => {
            const message = toErrorMessage(e)
            const attempt = number > 0 && number <= retries && retries > 1 ? ' (retrying' + (number > 1 ? `, attempt ${number}` : '') + '...)' : ''
            if (number === 1) alert(`${message}${attempt}`)
            else alert(`  → ${message}${attempt}`)
            return 5 * 1000
        }
    })
    AxiosRateLimit(instance, {
        maxRequests: limit, // so limit is number of requests per second
        perMilliseconds: 1 * 1000
    })
    let cacheChecked = false
    return async location => {
        const hash = Crypto.createHash('sha1').update(JSON.stringify(typeof location === 'string' ? location : { ...location, auth: null })).digest('hex')
        if (location.dataQuery) {
            location.data = Querystring.stringify(location.dataQuery)
        }
        if (location.dataForm) {
            const form = new FormData()
            location.headers = form.getHeaders()
            Object.entries(location.dataForm).forEach(([key, value]) => form.append(key, JSON.stringify(value)))
            location.data = form
        }
        const locationName = toLocationName(location)
        if (cache) {
            if (!cacheChecked) {
                const cacheExists = await FSExtra.pathExists(cacheDirectory)
                if (cacheExists) alert('Cached data found!')
                else alert('No existing cached data found')
                cacheChecked = true
            }
            const isCached = await FSExtra.pathExists(`${cacheDirectory}/${hash}`)
            if (isCached) {
                if (verbose) alert(`Cached [${hash}]: ${locationName}`)
                const cacheData = await FSExtra.readJson(`${cacheDirectory}/${hash}`)
                return {
                    url: toUrl(location),
                    data: cacheData,
                    passthrough: location.passthrough
                }
            }
        }
        try {
            if (verbose) alert(`Requesting: ${locationName}`)
            const response = await instance(location)
            if (cache) {
                await FSExtra.ensureDir(cacheDirectory)
                await FSExtra.writeJson(`${cacheDirectory}/${hash}`, response.data)
            }
            return {
                url: toUrl(location),
                data: response.data,
                passthrough: location.passthrough
            }
        }
        catch (e) {
            alert(toErrorMessage(e))
        }
    }
}
async function load(command, filename, parameters = {}, retries = 5, cache = false, join = 'inner', verbose = false, alert = () => {}) {
    const die = message => {
        throw new Error(message)
    }
    const requestor = request.bind(null, retries, cache, verbose, alert)
    const { default: reconciler } = await import(`./reconcilers/${command}.js`)
    Object.keys(parameters).forEach(parameter => {
        if (!reconciler.details.parameters.find(p => p.name === parameter)) alert(`Ignoring unexpected parameter '${parameter}'`)
    })
    const batch = reconciler.details.batch || 1
    const execute = reconciler.initialise(parameters, requestor, die)
    const source = () => Scramjet.StringStream.from(FSExtra.createReadStream(filename)).CSVParse({ header: true })
    const columnsReconciler = reconciler.details.columns.map(column => column.name)
    const columnsSource = Object.keys((await source().slice(0, 1).toArray())[0])
    const columnMapEntries = columnsReconciler.map(column => {
        const columnUnique = (i = '') => {
            const attempt = `${column}${i}`
            if (columnsSource.find(name => name === attempt)) return columnUnique(Number(i) + 1)
            if (i) alert(`Column '${column}' from the reconciler has been renamed '${attempt}' so it does not overwrite the source`)
            return attempt
        }
        return [column, columnUnique()]
    })
    const columnMap = Object.fromEntries(columnMapEntries)
    const blank = Object.fromEntries(Object.values(columnMap).map(key => [key]))
    const length = async () => {
        const entries = await source().reduce(a => a + 1, 0)
        return Math.ceil(entries / batch)
    }
    const run = async () => source().batch(batch).setOptions({ maxParallel: 1 }).map(async items => {
        try {
            const executed = await execute(batch === 1 ? items[0] : items)
            return items.flatMap((item, i) => {
                const results = batch > 1 && Array.isArray(executed[i]) ? executed[i] // batch mode, reconciler is one-to-many
                    : batch > 1 ? [executed[i]] // batch mode, reconciler is one-to-one
                    : Array.isArray(executed) ? executed // reconciler is one-to-many
                    : [executed] // reconciler is one-to-one
                if (join === 'outer' && results.length === 0) return [{ ...item, ...blank }]
                return results.map(result => {
                    const resultRemapped = result
                        ? Object.fromEntries(Object.entries(result).map(([column, value]) => [columnMap[column], value]))
                        : Object.fromEntries(Object.entries(columnMap).map(([column]) => [column, ''])) // if there is no result (eg. not found)
                    return { ...item, ...resultRemapped }
                })
            })
        }
        catch (e) {
            alert(verbose ? e.stack : e.message)
            if (join === 'outer') {
                return items.map(item => ({ ...item, ...blank }))
            }
            else return []
        }
    })
    return { run, length }
}
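reconcile.js keys its on-disk cache on a SHA-1 hash of the request, checking pathExists once for the cache directory and once per hashed entry before hitting the network. The idea, cut down to a few lines (the cachedRequest wrapper is illustrative, not the module's actual API):

import Crypto from 'crypto'
import FSExtra from 'fs-extra'
import Axios from 'axios'

const cacheDirectory = '.reconcile-cache'

// illustrative wrapper: answer repeated requests from a hash-keyed JSON cache
async function cachedRequest(location) {
    const hash = Crypto.createHash('sha1').update(JSON.stringify(location)).digest('hex')
    const file = `${cacheDirectory}/${hash}`
    if (await FSExtra.pathExists(file)) return FSExtra.readJson(file)
    const response = await Axios(location)
    await FSExtra.ensureDir(cacheDirectory)
    await FSExtra.writeJson(file, response.data)
    return response.data
}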


executor.js

Source: executor.js (GitHub)


import Process from 'process'
import WorkerThreads from 'worker_threads'
import ObjectHash from 'object-hash'
import FSExtra from 'fs-extra'

async function sourcing() {
    const { method, ...settings } = watch.source // omit method name
    const { default: f } = await import(`./../methods/source-${watch.source.method}.js`)
    const items = await f(settings)
    return items.map(content => {
        const id = ObjectHash(content)
        return { id, content }
    })
}
async function diffing(items) {
    const key = ObjectHash(watch.source)
    const path = `.newsagent-cache/${key}`
    const hash = ObjectHash(items)
    const hashset = items.map(item => item.id)
    const cachefileExists = await FSExtra.pathExists(path)
    if (!cachefileExists) {
        await FSExtra.writeJson(path, { hash, hashset, amalgam: items })
        return []
    }
    const cachefile = await FSExtra.readJson(path)
    if (cachefile.hash === hash) return [] // whole-data hash is the same, so we can avoid looking at individual records
    const additions = () => {
        return items.filter(item => !cachefile.hashset.includes(item.id)).map(item => {
            return { ...item, difference: 'addition' }
        })
    }
    const removals = () => {
        return cachefile.hashset.filter(hash => !hashset.includes(hash)).map(hash => {
            const item = cachefile.amalgam.find(item => item.id === hash)
            return { ...item, difference: 'removal' }
        })
    }
    const changesAdditions = watch.monitor === 'additions-and-removals' || watch.monitor === 'additions-only' ? additions() : []
    const changesRemovals = watch.monitor === 'additions-and-removals' || watch.monitor === 'removals-only' ? removals() : []
    const amalgam = cachefile.amalgam
        .filter(item => !changesRemovals.find(change => change.id === item.id))
        .concat(changesAdditions.map(change => ({ id: change.id, content: change.content }))) // so omit difference field
    await FSExtra.writeJson(path, { hash, hashset, amalgam })
    return [...changesAdditions, ...changesRemovals]
}
async function processing(changes) {
    if (!watch.processes) return changes
    return watch.processes.reduce(async (a, process) => {
        const { method, ...settings } = process // omit method name
        const { default: f } = await import(`./../methods/process-${process.method}.js`)
        const all = await Promise.all((await a).map(async change => {
            const contentNew = await f(change.content, change.difference, settings)
            return { ...change, content: contentNew }
        }))
        return all.filter(x => x.content)
    }, changes)
}
async function alerting(results) {
    const firings = watch.alerts.map(async alert => {
        const { method, ...settings } = alert // omit method name
        const { default: f } = await import(`./../methods/alert-${alert.method}.js`)
        results.forEach(result => f(watch.name, result.content, settings))
    })
    await Promise.all(firings)
}
async function execute() {
    try {
        const items = await sourcing()
        const changes = await diffing(items)
        const processed = await processing(changes)
        await alerting(processed)
    }
    catch (e) {
        if (e.constructor.name === 'ZodError') {
            const error = e.errors[0]
            WorkerThreads.parentPort.postMessage({
                event: 'execution-invalid',
                data: { error: `${error.message}: ${error.path.join('.')} wanted ${error.expected} but got ${error.received}` }
            })
        }
        else {
            WorkerThreads.parentPort.postMessage({
                event: 'execution-failure',
                data: { error: e.message }
            })
        }
        Process.exit(1)
    }
}


csv-fetch.js

Source: csv-fetch.js (GitHub)


import FSExtra from 'fs-extra'
import Scramjet from 'scramjet'
import Axios from 'axios'
import AxiosRetry from 'axios-retry'
import AxiosRateLimit from 'axios-rate-limit'

function fetcher(urlColumn, nameColumn, depository, suffix, headers, limit, retries, check, verbose, alert) {
    const timeout = 45 * 1000
    const toErrorMessage = e => {
        const locationName = e.config.url
        if (e.response) return `Received code ${e.response.status}: ${locationName}` // response recieved, but non-2xx
        if (e.code === 'ECONNABORTED') return `Timed out after ${timeout / 1000}ms: ${locationName}` // request timed out
        if (e.code) return `Error ${e.code}: ${locationName}` // request failed, with error code
        return e.message // request not made
    }
    const instance = Axios.create({ timeout })
    AxiosRetry(instance, {
        retries,
        shouldResetTimeout: true,
        retryCondition: e => {
            return !e.response || e.response.status >= 500 || e.response.status === 429 // no response, server error, or hit rate limit
        },
        retryDelay: (number, e) => {
            const message = toErrorMessage(e)
            const attempt = number > 0 && number <= retries && retries > 1 ? ' (retrying' + (number > 1 ? `, attempt ${number}` : '') + '...)' : ''
            if (number === 1) alert(`${message}${attempt}`)
            else alert(`  → ${message}${attempt}`)
            return 5 * 1000
        }
    })
    AxiosRateLimit(instance, {
        maxRequests: limit, // so limit is number of requests per second
        perMilliseconds: 1 * 1000
    })
    return async row => {
        const key = row[nameColumn]
        if (!key) {
            alert('Key column is empty!')
            return
        }
        const url = row[urlColumn]
        if (!url) {
            alert('URL column is empty!')
            return
        }
        const filename = key + (suffix || '')
        if (check) {
            const exists = await FSExtra.pathExists(`${depository}/${filename}`)
            if (exists && verbose) {
                alert(`Exists [${filename}]: ${url}`)
                return
            }
        }
        try {
            const headersValues = headers ? Object.fromEntries(headers.map(header => header.split(/: ?/))) : {}
            if (verbose) alert(`Requesting: ${url}` + (headersValues ? ' ' + JSON.stringify(headersValues, null, 2) : ''))
            const response = await instance({
                url,
                headers: headersValues,
                responseType: 'arraybuffer'
            })
            await FSExtra.writeFile(`${depository}/${filename}`, response.data)
        }
        catch (e) {
            alert(toErrorMessage(e))
        }
    }
}
function source(filename) {
    return Scramjet.StringStream.from(FSExtra.createReadStream(filename)).CSVParse({ header: true })
}
function length(filename) {
    return source(filename).reduce(a => a + 1, 0)
}
async function run(filename, urlColumn, nameColumn, depository, suffix, headers, limit, retries, check, verbose, alert) {
    await FSExtra.ensureDir(depository)
    const fetch = fetcher(urlColumn, nameColumn, depository, suffix, headers, limit, retries, check, verbose, alert)
    return source(filename).each(fetch)
}
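csv-fetch.js uses pathExists as an idempotency check: with the check flag set, a row is skipped when its target file has already been downloaded. Reduced to the essentials (the downloadIfMissing helper is illustrative, not part of the module):

import FSExtra from 'fs-extra'
import Axios from 'axios'

// illustrative helper: skip the download when the target file already exists
async function downloadIfMissing(url, depository, filename) {
    const target = `${depository}/${filename}`
    if (await FSExtra.pathExists(target)) return false // already fetched, nothing to do
    const response = await Axios({ url, responseType: 'arraybuffer' })
    await FSExtra.ensureDir(depository)
    await FSExtra.writeFile(target, response.data)
    return true
}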


cli.js

Source: cli.js (GitHub)


import Readline from 'readline'
import FSExtra from 'fs-extra'
import Process from 'process'
import Yargs from 'yargs'
import Progress from 'progress'
import csvFetch from './csv-fetch.js'

function alert(message) {
    Readline.clearLine(process.stderr)
    Readline.cursorTo(process.stderr, 0)
    console.error(message)
}
function ticker(text, total) {
    const progress = new Progress(text + ' |:bar| :percent / :etas left', {
        total,
        width: Infinity,
        complete: '█',
        incomplete: ' '
    })
    return () => progress.tick()
}
async function setup() {
    const instructions = Yargs(Process.argv.slice(2))
        .usage('Usage: csv-fetch <url-column> <name-column> <depository> <filename>')
        .wrap(null)
        .option('s', { alias: 'suffix', type: 'string', describe: 'A suffix to add to the name of each file, such as an extension' })
        .option('h', { alias: 'header', type: 'string', array: true, nargs: 1, describe: 'Set a header to be sent with the request' })
        .option('l', { alias: 'limit', type: 'number', nargs: 1, describe: 'Limit the number of requests made per second' })
        .option('r', { alias: 'retries', type: 'number', nargs: 1, describe: 'Number of times a request should be retried', default: 5 })
        .option('c', { alias: 'check', type: 'boolean', describe: 'Check whether file has already been downloaded, and skip if so', default: false })
        .option('V', { alias: 'verbose', type: 'boolean', describe: 'Print every request made', default: false })
        .help('?').alias('?', 'help')
        .version().alias('v', 'version')
    if (instructions.argv._.length === 0) instructions.showHelp().exit(0)
    try {
        const {
            _: [urlColumn, nameColumn, depository, filename],
            suffix,
            header: headers,
            limit,
            retries,
            check,
            verbose
        } = instructions.argv
        if (filename === '-') throw new Error('reading from standard input not supported')
        const exists = await FSExtra.pathExists(filename)
        if (!exists) throw new Error(`${filename}: could not find file`)
        if (headers) headers.forEach(header => {
            if (!header.includes(':')) throw new Error(`"${header}" header is not valid`)
        })
        const total = await csvFetch.length(filename)
        console.error('Starting up...')
        const process = await csvFetch.run(filename, urlColumn, nameColumn, depository, suffix, headers, limit, retries, check, verbose, alert)
        await process
            .each(ticker('Working...', total))
            .whenEnd()
        console.error('Done!')
    }
    catch (e) {
        console.error(e.message)
        Process.exit(1)
    }
}
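cli.js uses pathExists defensively, failing fast with a readable error when the CSV named on the command line does not exist. That guard on its own, as a hypothetical helper:

import FSExtra from 'fs-extra'

// fail fast if the input file is missing, rather than erroring later mid-stream
async function assertFileExists(filename) {
    const exists = await FSExtra.pathExists(filename)
    if (!exists) throw new Error(`${filename}: could not find file`)
}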


compile.js

Source: compile.js (GitHub)


const chalk = require('chalk')
const fsextra = require('fs-extra')
const handlebars = require('handlebars')
const symbols = require('log-symbols')

function compileStr(filePath, answersContent) {
  return new Promise(function(resolve, reject) {
    fsextra.pathExists(filePath).then(exists => {
      if (exists) {
        const _filePath = filePath
        const _Content = fsextra.readFileSync(_filePath).toString()
        const _Result = handlebars.compile(_Content)(answersContent)
        fsextra.writeFileSync(_filePath, _Result)
        console.log(symbols.success, `${chalk.cyan(filePath)} configured successful.`)
        resolve()
      } else {
        console.log(symbols.error, `${chalk.red(filePath)} not found.`)
        return reject()
      }
    }).catch(err => {
      console.log(symbols.error, chalk.red(err))
    })
  })
}


Using AI Code Generation


// cypress/plugins/index.js — register a task so fs-extra runs in the Node process
const fsExtra = require('fs-extra')

module.exports = (on, config) => {
  on('task', {
    pathExists(path) {
      return fsExtra.pathExists(path)
    }
  })
}

// cypress/support/commands.js — optionally wrap the task in a custom command
Cypress.Commands.add('pathExists', (path) => {
  return cy.task('pathExists', path)
})

// cypress/support/index.js
require('./commands')

// spec file
describe('test', () => {
  it('test', () => {
    cy.pathExists('test')
  })
})


Using AI Code Generation


// fs-extra cannot run inside the browser-based spec, so call it through a task
// (the 'pathExists' task is registered in the plugins file, as in the previous example)
describe('fsExtra.pathExists', () => {
  it('should return true for an existing file', () => {
    cy.task('pathExists', 'cypress.json').should('be.true');
  });
  it('should return false for a non-existing file', () => {
    cy.task('pathExists', 'non-existing-file.json').should('be.false');
  });
});


Using AI Code Generation


// a plain Node script (run outside the browser), checking whether a spec file exists
const fsExtra = require('fs-extra');
const path = require('path');

const pathToTest = path.resolve(__dirname, './cypress/integration/MyTest.spec.js');
fsExtra.pathExists(pathToTest).then((exists) => {
  console.log(exists ? 'Path exists' : 'Path does not exist');
});


Using AI Code Generation


// spec file — call the task directly
describe('Cypress fsExtra', function() {
  it('fsExtra.pathExists', function() {
    cy.task('pathExists', 'path/to/file')
      .then((exists) => {
        // exists is true if the path is present on disk
      })
  })
})

// cypress/plugins/index.js — register the task
const fsExtra = require('fs-extra')

module.exports = (on) => {
  on('task', {
    pathExists(path) {
      return fsExtra.pathExists(path)
    },
  })
}

// cypress/support/commands.js — or wrap the task in a custom command
Cypress.Commands.add('pathExists', function(path) {
  return cy.task('pathExists', path)
})

// spec file — call the custom command instead
describe('Cypress fsExtra', function() {
  it('fsExtra.pathExists', function() {
    cy.pathExists('path/to/file')
      .then((exists) => {
        // exists is true if the path is present on disk
      })
  })
})


Using AI Code Generation


// a plain Node script: check that a fixture file is on disk before the run
const fsExtra = require('fs-extra');
const path = require('path');

const filePath = path.resolve('cypress', 'fixtures', 'data.json');
fsExtra.pathExists(filePath).then((exists) => {
  if (exists) {
    console.log('File exists');
  } else {
    console.log('File does not exist');
  }
});

// spec file
describe('My First Test', () => {
  it('Does not do much!', () => {
    cy.contains('type').click();
    cy.url().should('include', '/commands/actions');
  });
});


Cypress Tutorial

Cypress is a well-known, open-source, JavaScript-based end-to-end testing framework used primarily for testing web applications. It is a relatively new player in the test automation space that has been gaining traction, as evidenced by the project's forks (2.7K) and stars (42.1K) on GitHub. LambdaTest's Cypress Tutorial provides step-by-step guides that take you from the basics through to running automation tests on LambdaTest.

Chapters:

  1. What is Cypress? - An introduction to the Cypress framework and what it is used for.
  2. Why Cypress? - Learn why Cypress might be a good choice for testing your web applications.
  3. Features of Cypress Testing - Learn about features that make Cypress a powerful and flexible tool for testing web applications.
  4. Cypress Drawbacks - Although Cypress has many strengths, it has a few limitations that you should be aware of.
  5. Cypress Architecture - Learn how Cypress is designed to run directly in the browser, alongside your application, rather than driving it through an external server.
  6. Browsers Supported by Cypress - Cypress ships with Electron and also supports modern browsers such as Chrome, Edge, and Firefox. Learn which browsers Cypress supports.
  7. Selenium vs Cypress: A Detailed Comparison - Compare and explore some key differences in terms of their design and features.
  8. Cypress Learning: Best Practices - Take a deep dive into some of the best practices you should use to avoid anti-patterns in your automation tests.
  9. How To Run Cypress Tests on LambdaTest? - Set up a LambdaTest account, and now you are all set to learn how to run Cypress tests.
