How to use the fs.readJson method in Cypress

Best JavaScript code snippets using Cypress
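
fs.readJson is not a Cypress command; it comes from the fs-extra Node.js package, so it can only run in the Node process — for example inside a cy.task handler registered in the plugins file — while code running in the browser uses cy.readFile or cy.fixture instead. The snippets below show how real projects call fs.readJson (and its fs.readJSON alias). As a starting point, here is a minimal sketch of wiring fs.readJson into a Cypress task; the task name readJson and the fixture path are illustrative assumptions, not part of any of the quoted projects.

// cypress/plugins/index.js — runs in Node, so fs-extra is available here
const fs = require('fs-extra');

module.exports = (on, config) => {
  on('task', {
    // hypothetical task name; returns the parsed JSON back to the spec
    readJson(filePath) {
      return fs.readJson(filePath);
    },
  });
};

// In a spec file — runs in the browser, so it delegates to the Node task
describe('fs.readJson via cy.task', () => {
  it('reads a JSON file through the plugins process', () => {
    cy.task('readJson', 'cypress/fixtures/test.json').then((data) => {
      expect(data).to.be.an('object');
    });
  });
});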

version.js

Source: version.js (GitHub)

...
  });
});
test('run version with no arguments, --new-version flag where version is same as pkg.version', (): Promise<void> => {
  return runRun([], {newVersion, gitTagVersion}, 'no-args-same-version', async (config, reporter): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual(newVersion);
  });
});
test('run version with --non-interactive and --new-version should succeed', (): Promise<void> => {
  return runRun([], {nonInteractive: true, newVersion}, 'no-args', async (config, reporter): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual(newVersion);
  });
});
test('run version with --non-interactive and without --new-version should succeed', (): Promise<void> => {
  return runRun([], {nonInteractive: true}, 'no-args', async (config, reporter): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual(oldVersion);
  });
});
test('run version and make sure all lifecycle steps are executed', (): Promise<void> => {
  return runRun([], {newVersion, gitTagVersion}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    const preversionLifecycle = {
      stage: 'preversion',
      config,
      cmd: pkg.scripts.preversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    const versionLifecycle = {
      stage: 'version',
      config,
      cmd: pkg.scripts.version,
      cwd: config.cwd,
      isInteractive: true,
    };
    const postversionLifecycle = {
      stage: 'postversion',
      config,
      cmd: pkg.scripts.postversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    expect(execCommand.mock.calls.length).toBe(3);
    expect(execCommand.mock.calls[0]).toEqual([preversionLifecycle]);
    expect(execCommand.mock.calls[1]).toEqual([versionLifecycle]);
    expect(execCommand.mock.calls[2]).toEqual([postversionLifecycle]);
  });
});
test('run version and make sure only the defined lifecycle steps are executed', (): Promise<void> => {
  return runRun([], {newVersion, gitTagVersion}, 'pre-post', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    const preversionLifecycle = {
      stage: 'preversion',
      config,
      cmd: pkg.scripts.preversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    const postversionLifecycle = {
      stage: 'postversion',
      config,
      cmd: pkg.scripts.postversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    expect(execCommand.mock.calls.length).toBe(2);
    expect(execCommand.mock.calls[0]).toEqual([preversionLifecycle]);
    expect(execCommand.mock.calls[1]).toEqual([postversionLifecycle]);
  });
});
test('run version and make sure git commit hooks are enabled by default', async (): Promise<void> => {
  const fixture = 'no-args';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion}, fixture, (): ?Promise<void> => {
    const gitArgs = ['commit', '-m', 'v2.0.0'];
    expect(spawn.mock.calls.length).toBe(4);
    expect(spawn.mock.calls[2][0]).toEqual(gitArgs);
  });
});
test('run version with --no-commit-hooks and make sure git commit hooks are disabled', async (): Promise<void> => {
  const fixture = 'no-args';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion, commitHooks: false}, fixture, (): ?Promise<void> => {
    const gitArgs = ['commit', '-m', 'v2.0.0', '-n'];
    expect(spawn.mock.calls.length).toBe(4);
    expect(spawn.mock.calls[2][0]).toEqual(gitArgs);
  });
});
test('run version and make sure commit hooks are disabled by config', async (): Promise<void> => {
  const fixture = 'no-args-no-git-hooks';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion}, fixture, (): ?Promise<void> => {
    const gitArgs = ['commit', '-m', 'v2.0.0', '-n'];
    expect(spawn.mock.calls.length).toBe(4);
    expect(spawn.mock.calls[2][0]).toEqual(gitArgs);
  });
});
test('run version with --no-git-tag-version and make sure git tags are disabled', async (): Promise<void> => {
  const fixture = 'no-args';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion: false}, fixture, async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toBe(newVersion);
    expect(spawn.mock.calls.length).toBe(0);
  });
});
test('run version and make sure git tags are disabled by config', async (): Promise<void> => {
  const fixture = 'no-args-no-git-tags';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion}, fixture, async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toBe(newVersion);
    expect(spawn.mock.calls.length).toBe(0);
  });
});
test('run version with --no-git-tag-version, make sure all lifecycle steps runs', async (): Promise<void> => {
  const fixture = 'no-args';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion: false}, fixture, async (config): ?Promise<void> => {
    expect(spawn.mock.calls.length).toBe(0);
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    const preversionLifecycle = {
      stage: 'preversion',
      config,
      cmd: pkg.scripts.preversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    const versionLifecycle = {
      stage: 'version',
      config,
      cmd: pkg.scripts.version,
      cwd: config.cwd,
      isInteractive: true,
    };
    const postversionLifecycle = {
      stage: 'postversion',
      config,
      cmd: pkg.scripts.postversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    expect(execCommand.mock.calls.length).toBe(3);
    expect(execCommand.mock.calls[0]).toEqual([preversionLifecycle]);
    expect(execCommand.mock.calls[1]).toEqual([versionLifecycle]);
    expect(execCommand.mock.calls[2]).toEqual([postversionLifecycle]);
  });
});
test('run version with git tags disabled in config, make sure all lifecycle steps runs', async (): Promise<void> => {
  const fixture = 'no-args-no-git-tags';
  await fs.mkdirp(path.join(fixturesLoc, fixture, '.git'));
  return runRun([], {newVersion, gitTagVersion}, fixture, async (config): ?Promise<void> => {
    expect(spawn.mock.calls.length).toBe(0);
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    const preversionLifecycle = {
      stage: 'preversion',
      config,
      cmd: pkg.scripts.preversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    const versionLifecycle = {
      stage: 'version',
      config,
      cmd: pkg.scripts.version,
      cwd: config.cwd,
      isInteractive: true,
    };
    const postversionLifecycle = {
      stage: 'postversion',
      config,
      cmd: pkg.scripts.postversion,
      cwd: config.cwd,
      isInteractive: true,
    };
    expect(execCommand.mock.calls.length).toBe(3);
    expect(execCommand.mock.calls[0]).toEqual([preversionLifecycle]);
    expect(execCommand.mock.calls[1]).toEqual([versionLifecycle]);
    expect(execCommand.mock.calls[2]).toEqual([postversionLifecycle]);
  });
});
test('run version with --major flag and make sure major version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, major: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('2.0.0');
  });
});
test('run version with --minor flag and make sure minor version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, minor: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.1.0');
  });
});
test('run version with --patch flag and make sure patch version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, patch: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1');
  });
});
test('run version with --premajor flag and make sure premajor version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, premajor: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('2.0.0-0');
  });
});
test('run version with --premajor flag with preid and make sure premajor version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, premajor: true, preid: 'alpha'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('2.0.0-alpha.0');
  });
});
test('run version with --preminor flag and make sure preminor version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, preminor: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.1.0-0');
  });
});
test('run version with --preminor flag with preid and make sure preminor version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, preminor: true, preid: 'alpha'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.1.0-alpha.0');
  });
});
test('run version with --prepatch flag and make sure prepatch version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, prepatch: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-0');
  });
});
test('run version with --prepatch flag with preid and make sure prepatch version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, prepatch: true, preid: 'alpha'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-alpha.0');
  });
});
test('run version with --prerelease flag and make sure prerelease version is incremented', (): Promise<void> => {
  return runRun([], {gitTagVersion, prerelease: true}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-0');
  });
});
test('run version with --prerelease flag with preid and make sure prerelease version is incremented', (): Promise<
  void,
> => {
  return runRun([], {gitTagVersion, prerelease: true, preid: 'alpha'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-alpha.0');
  });
});
test('run version with --new-version prerelease flag and make sure prerelease version is incremented', (): Promise<
  void,
> => {
  return runRun([], {gitTagVersion, newVersion: 'prerelease'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-0');
  });
});
test('run version with --new-version and preid flags and make sure prerelease version is incremented', (): Promise<
  void,
> => {
  return runRun([], {gitTagVersion, newVersion: 'prerelease', preid: 'beta'}, 'no-args', async (config): ?Promise<
    void,
  > => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('1.0.1-beta.0');
  });
});
test('run version with --new-version and preid flags and make sure premajor version is incremented', (): Promise<
  void,
> => {
  return runRun([], {gitTagVersion, newVersion: 'premajor', preid: 'beta'}, 'no-args', async (config): ?Promise<
    void,
  > => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('2.0.0-beta.0');
  });
});
test('run version with main release and --new-version and preid flags and make sure identifier is ignored', (): Promise<
  void,
> => {
  return runRun([], {gitTagVersion, newVersion: 'major', preid: 'beta'}, 'no-args', async (config): ?Promise<void> => {
    const pkg = await fs.readJson(path.join(config.cwd, 'package.json'));
    expect(pkg.version).toEqual('2.0.0');
  });
...

api-generation.test.js

Source: api-generation.test.js (GitHub)

...
      files.set('apis/raml1.raml', 'RAML 1.0');
      await generator(files, opts);
      const exists = await fs.pathExists(modelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(modelFile);
      assert.typeOf(data, 'array');
    });
    it('Generates data model for compact model', async () => {
      files.set('apis/raml1.raml', 'RAML 1.0');
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
    it('generates model with options (Object)', async () => {
      files.set('apis/raml1.raml', {
        type: 'RAML 1.0',
        mime: 'application/raml',
        resolution: 'editing',
      });
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
    it('uses default values (Object)', async () => {
      files.set('apis/raml1.raml', {
        type: 'RAML 1.0',
      });
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
    it('generates model with options (Array)', async () => {
      files.set('apis/raml1.raml', ['RAML 1.0', 'application/raml', 'editing']);
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
    it('uses default values (Array)', async () => {
      files.set('apis/raml1.raml', ['RAML 1.0']);
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
  });
  describe('RAML 0.8 data model generation', () => {
    let files;
    let opts;
    const modelFile = path.join(dest, 'raml08.json');
    const compactModelFile = path.join(dest, 'raml08-compact.json');
    beforeEach(() => {
      files = new Map();
      files.set('apis/raml08.raml', 'RAML 0.8');
      opts = {
        src: srcDir,
        dest,
      };
    });
    afterEach(() => fs.remove(dest));
    it('Generates data model for regular model', () => generator(files, opts)
    .then(() => fs.pathExists(modelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(modelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
    it('Generates data model for compact model', () => generator(files, opts)
    .then(() => fs.pathExists(compactModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(compactModelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
  });
  describe('Api list config file', () => {
    let opts;
    const modelFile = path.join(dest, 'raml1.json');
    const compactModelFile = path.join(dest, 'raml1-compact.json');
    const configFile = path.join('test', 'apis.json');
    beforeEach(() => {
      opts = {
        src: srcDir,
        dest,
      };
    });
    afterEach(() => fs.remove(dest));
    it('Generates data model for regular model', () => generator(configFile, opts)
    .then(() => fs.pathExists(modelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(modelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
    it('Generates data model for compact model', () => generator(configFile, opts)
    .then(() => fs.pathExists(compactModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(compactModelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
  });
  describe('Api list config file with options', () => {
    const modelFile = path.join(dest, 'raml1.json');
    const compactModelFile = path.join(dest, 'raml1-compact.json');
    const flattenedModelFile = path.join(dest, 'flattenedApi.json');
    const compactFlattenedModelFile = path.join(dest, 'flattenedApi-compact.json');
    const configFile = path.join('test', 'apis-options.json');
    afterEach(() => fs.remove(dest));
    it('Generates data model for regular model', () => generator(configFile)
    .then(() => fs.pathExists(modelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(modelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
    it('Generates flattened data model for compact model', () => generator(configFile)
    .then(() => fs.pathExists(compactModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(compactModelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
    }));
    it('Generates flattened data model for regular model', () => generator(configFile)
    .then(() => fs.pathExists(flattenedModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(flattenedModelFile))
    .then((data) => {
      const graph = data['@graph'];
      assert.isDefined(graph);
      const ctx = data['@context'];
      assert.isUndefined(ctx);
    }));
    it('Generates flattened data model for compact model', () => generator(configFile)
    .then(() => fs.pathExists(compactFlattenedModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(compactFlattenedModelFile))
    .then((data) => {
      const graph = data['@graph'];
      assert.isDefined(graph);
      const ctx = data['@context'];
      assert.typeOf(ctx, 'object');
    }));
  });
  describe('Function call options overrides file options', () => {
    const alteredDest = path.join(dest, 'altered');
    const modelFile = path.join(alteredDest, 'raml1.json');
    const compactModelFile = path.join(alteredDest, 'raml1-compact.json');
    const configFile = path.join('test', 'apis-options.json');
    afterEach(() => fs.remove(dest));
    it('Generates data model for regular model', () => generator(configFile, {
      dest: alteredDest,
    })
    .then(() => fs.pathExists(modelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(modelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
      const ctx = data[0]['@context'];
      assert.isUndefined(ctx);
    }));
    it('Generates data model for compact model', () => generator(configFile, {
      dest: alteredDest,
    })
    .then(() => fs.pathExists(compactModelFile))
    .then((exists) => assert.isTrue(exists))
    .then(() => fs.readJson(compactModelFile))
    .then((data) => {
      assert.typeOf(data, 'array');
      const ctx = data[0]['@context'];
      assert.typeOf(ctx, 'object');
    }));
  });
  describe('AsyncAPI 2.0 data model generation', () => {
    let files;
    let opts;
    const modelFile = path.join(dest, 'asyncApi20.json');
    const compactModelFile = path.join(dest, 'asyncApi20-compact.json');
    beforeEach(() => {
      files = new Map();
      opts = {
        src: srcDir,
        dest,
      };
    });
    afterEach(() => fs.remove(dest));
    it('Generates data model for regular model', async () => {
      files.set('apis/asyncApi20.yaml', { 'type': 'ASYNC 2.0', 'mime': 'application/yaml' });
      await generator(files, opts);
      const exists = await fs.pathExists(modelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(modelFile);
      assert.typeOf(data, 'array');
    });
    it('Generates data model for compact model', async () => {
      files.set('apis/asyncApi20.yaml', { 'type': 'ASYNC 2.0', 'mime': 'application/yaml' });
      await generator(files, opts);
      const exists = await fs.pathExists(compactModelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(compactModelFile);
      assert.typeOf(data, 'array');
    });
  });
  describe('generator.generate()', () => {
    let opts;
    const modelFile = path.join(dest, 'raml1.json');
    beforeEach(() => {
      opts = {
        src: srcDir,
        dest,
      };
    });
    afterEach(() => fs.remove(dest));
    it('generates the model file', async () => {
      /** @type Map<string, ApiConfiguration> */
      const files = new Map();
      files.set('apis/raml1.raml', { type: 'RAML 1.0' });
      await generator.generate(files, opts);
      const exists = await fs.pathExists(modelFile);
      assert.isTrue(exists, 'model file exists');
      const data = await fs.readJson(modelFile);
      assert.typeOf(data, 'array');
    });
  });
...

HalaqaRepository.js

Source: HalaqaRepository.js (GitHub)

const fs = require('fs-extra');
// Test as you go ...
class HalaqaRepository {
    async getSurahs() {
        return await fs.readJson('./data/surah.json');
    }
    async addSurah(surah) {
        const surahs = await fs.readJson('./data/surah.json');
        surahs.push(surah);
        await fs.writeJson("./data/surah.json", surahs);
    }
    async getStudents() {
        const parents = await fs.readJson('./data/parent.json');
        //Using spread operator to flatten multidimensional arrayOfArrays
        const students = [];
        parents.forEach(p => students.push(...p.students));
        return students;
    }
    async getStudent(studentId) {
        const students = await this.getStudents();
        return students.find(s => s.studentId == studentId);
    }
    async getTeacherStudents(teacherId) {
        const students = await this.getStudents();
        return students.filter(s => s.teacherId == teacherId);
    }
    async getParentStudents(parentId) {
        const parents = await fs.readJson('./data/parent.json');
        const parent = parents.find(p => p.qatariId == parentId);
        if (parent)
            return parent.students;
    }
    async getTasks(studentId, taskStatus) {
        let tasks = await fs.readJson('./data/task.json');
        tasks = tasks.filter(t => t.studentId == studentId);
        if (taskStatus == "Completed") {
            tasks = tasks.filter(tasks => tasks.completedDate);
        } else if (taskStatus == "Pending") {
            tasks = tasks.filter(tasks => tasks.completedDate == undefined);
        }
        return tasks;
    }
    async getTask(taskId) {
        const tasks = await fs.readJson('./data/task.json');
        return tasks.find(t => t.taskId == taskId);
    }
    async deleteTask(taskId) {
        const tasks = await fs.readJson('./data/task.json');
        const taskIndex = tasks.findIndex(t => t.taskId == taskId);
        tasks.splice(taskIndex, 1);
        await fs.writeJson("./data/task.json", tasks);
    }
    async addTask(newTask) {
        const tasks = await fs.readJson('./data/task.json');
        newTask.taskId = Math.floor(Math.random() * 100);
        tasks.push(newTask);
        await fs.writeJson("./data/task.json", tasks);
        Task.create(newTask);
        //_id -> identifier of the task
    }
    async updateTask(updatedTask) {
        const tasks = await fs.readJson('./data/task.json');
        const taskIndex = tasks.findIndex(t => t.taskId == updatedTask.taskId);
        tasks[taskIndex] = updatedTask;
        await fs.writeJson("./data/task.json", tasks);
    }
    async completeTask(completedTask) {
        const tasks = await fs.readJson('./data/task.json');
        const taskIndex = tasks.findIndex(t => t.taskId == completedTask.taskId);
        tasks[taskIndex].completedDate = completedTask.completedDate;
        tasks[taskIndex].masteryLevel = completedTask.masteryLevel;
        tasks[taskIndex].comment = completedTask.comment;
        await fs.writeJson("./data/task.json", tasks);
    }
    async getMessages(studentId) {
        const messages = await fs.readJson('./data/message.json');
        return messages.filter(m => m.studentId == studentId);
    }
    async addMessage(message) {
        const messages = await fs.readJson('./data/message.json');
        message.id = Math.floor(Math.random() * 100);
        messages.push(message);
        await fs.writeJson('./data/message.json', messages);
    }
    async addParent(newParent) {
        const parents = await fs.readJson('./data/parent.json');
        parents.push(newParent);
        await fs.writeJson('./data/parent.json', parents);
    }
    /*Register new children with existing parent*/
    async addStudent(student, qatariId) {
        const parents = await fs.readJson('./data/parent.json');
        const index = parents.findIndex(p => p.qatariId == qatariId);
        student.studentId = Math.floor(Math.random() * 100);
        if (!parents[index].students) {
            parents[index].students = [];
        }
        parents[index].students.push(student);
        await fs.writeJson('./data/parent.json', parents);
    }
    async getParents() {
        const parents = await fs.readJson('./data/parent.json');
        parents.forEach(p => delete p.students);
        return parents;
    }
    async getParent(qatariId) {
        const parents = await fs.readJson('./data/parent.json');
        const parent = parents.find(p => p.qatariId == qatariId);
        //No need to return the students
        if (parent)
            delete parent.students;
        return parent;
    }
    async getTeachers() {
        return await fs.readJson('./data/teacher.json');
        //return teachers.filter(t=>t.isCoordinator != 1);
    }
    async getTeacher(teacherId) {
        const teachers = await fs.readJson('./data/teacher.json');
        return teachers.find(t => t.staffNo == teacherId);
    }
    async halaqaSummaryReport(fromDate, toDate) {
        //ToDo: Write an aggregate query to get Halaqa Summary Report data
        //Return the summary report
        return {fromDate, toDate};
    }
    async login(credentials) {
        let userInfo = await this.verifyStaffLogin(credentials);
        //If Staff login fails try parent login
        if (!userInfo)
            userInfo = await this.verifyParentLogin(credentials);
        return userInfo;
    }
    async verifyStaffLogin(credentials) {
        const teachers = await fs.readJson('./data/teacher.json');
        const teacher = teachers.find(s => s.email === credentials.email && s.password === credentials.password);
        if (teacher) {
            let userInfo = {
                id: teacher.staffNo,
                email: teacher.email,
                name: `${teacher.firstName} ${teacher.lastName}`
            };
            if (teacher.isCoordinator === 1) {
                userInfo.type = 'Coordinator';
                userInfo.redirectTo = '/index-coordinator.html';
            } else {
                userInfo.type = 'Teacher';
                userInfo.redirectTo = '/index-teacher.html';
            }
            return userInfo;
        }
    }
    async verifyParentLogin(credentials) {
        const parents = await fs.readJson('./data/parent.json');
        const parent = parents.find(s => s.email === credentials.email && s.password === credentials.password);
        if (parent) {
            let userInfo = {
                id: parent.qatariId,
                email: parent.email,
                name: `${parent.firstName} ${parent.lastName}`,
                type: 'Parent',
                redirectTo: '/index-parent.html'
            };
            return userInfo;
        }
    }
    //ToDo: use this method to initialise the DB by adding data from json files to MongoDB
    async initDB() {
...

init-command.test.js

Source: init-command.test.js (GitHub)

1"use strict";2const fs = require("fs-extra");3const path = require("path");4const tempy = require("tempy");5// helpers6const initFixture = require("@lerna-test/init-fixture")(__dirname);7// file under test8const lernaInit = require("@lerna-test/command-runner")(require("../command"));9describe("InitCommand", () => {10  const lernaVersion = "__TEST_VERSION__";11  describe("in an empty directory", () => {12    it("initializes git repo with lerna files", async () => {13      const testDir = tempy.directory();14      await lernaInit(testDir)();15      const [lernaJson, pkgJson, packagesDirExists, gitDirExists] = await Promise.all([16        fs.readJSON(path.join(testDir, "lerna.json")),17        fs.readJSON(path.join(testDir, "package.json")),18        fs.exists(path.join(testDir, "packages")),19        fs.exists(path.join(testDir, ".git")),20      ]);21      expect(lernaJson).toMatchObject({22        packages: ["packages/*"],23        version: "0.0.0",24      });25      expect(pkgJson).toMatchObject({26        devDependencies: {27          lerna: `^${lernaVersion}`,28        },29      });30      expect(packagesDirExists).toBe(true);31      expect(gitDirExists).toBe(true);32    });33    it("initializes git repo with lerna files in independent mode", async () => {34      const testDir = tempy.directory();35      await lernaInit(testDir)("--independent");36      expect(await fs.readJSON(path.join(testDir, "lerna.json"))).toHaveProperty("version", "independent");37    });38    describe("with --exact", () => {39      it("uses exact version when adding lerna dependency", async () => {40        const testDir = tempy.directory();41        await lernaInit(testDir)("--exact");42        expect(await fs.readJSON(path.join(testDir, "package.json"))).toMatchObject({43          devDependencies: {44            lerna: lernaVersion,45          },46        });47      });48      it("sets lerna.json command.init.exact to true", async () => {49        const testDir = tempy.directory();50        await lernaInit(testDir)("--exact");51        expect(await fs.readJSON(path.join(testDir, "lerna.json"))).toMatchObject({52          command: {53            init: {54              exact: true,55            },56          },57        });58      });59    });60  });61  describe("in a subdirectory of a git repo", () => {62    it("creates lerna files", async () => {63      const dir = await initFixture("empty");64      const testDir = path.join(dir, "subdir");65      await fs.ensureDir(testDir);66      await lernaInit(testDir)();67      const [lernaJson, pkgJson, packagesDirExists] = await Promise.all([68        fs.readJSON(path.join(testDir, "lerna.json")),69        fs.readJSON(path.join(testDir, "package.json")),70        fs.exists(path.join(testDir, "packages")),71      ]);72      expect(lernaJson).toMatchObject({73        packages: ["packages/*"],74        version: "0.0.0",75      });76      expect(pkgJson).toMatchObject({77        devDependencies: {78          lerna: `^${lernaVersion}`,79        },80      });81      expect(packagesDirExists).toBe(true);82    });83  });84  describe("when package.json exists", () => {85    it("adds lerna to sorted devDependencies", async () => {86      const testDir = await initFixture("has-package");87      const pkgJsonPath = path.join(testDir, "package.json");88      await fs.outputJSON(pkgJsonPath, {89        devDependencies: {90          alpha: "first",91          omega: "last",92        },93      });94      await lernaInit(testDir)();95      expect(await 
fs.readJSON(pkgJsonPath)).toMatchObject({96        devDependencies: {97          alpha: "first",98          lerna: `^${lernaVersion}`,99          omega: "last",100        },101      });102    });103    it("updates existing lerna in devDependencies", async () => {104      const testDir = await initFixture("has-package");105      const pkgJsonPath = path.join(testDir, "package.json");106      await fs.outputJSON(pkgJsonPath, {107        dependencies: {108          alpha: "first",109          omega: "last",110        },111        devDependencies: {112          lerna: "0.1.100",113        },114      });115      await lernaInit(testDir)();116      expect(await fs.readJSON(pkgJsonPath)).toMatchObject({117        dependencies: {118          alpha: "first",119          omega: "last",120        },121        devDependencies: {122          lerna: `^${lernaVersion}`,123        },124      });125    });126    it("updates existing lerna in sorted dependencies", async () => {127      const testDir = await initFixture("has-package");128      const pkgJsonPath = path.join(testDir, "package.json");129      await fs.outputJSON(pkgJsonPath, {130        dependencies: {131          alpha: "first",132          lerna: "0.1.100",133          omega: "last",134        },135      });136      await lernaInit(testDir)();137      expect(await fs.readJSON(pkgJsonPath)).toMatchObject({138        dependencies: {139          alpha: "first",140          lerna: `^${lernaVersion}`,141          omega: "last",142        },143      });144    });145  });146  describe("when lerna.json exists", () => {147    it("deletes lerna property if found", async () => {148      const testDir = await initFixture("has-lerna");149      const lernaJsonPath = path.join(testDir, "lerna.json");150      await fs.outputJSON(lernaJsonPath, {151        lerna: "0.1.100",152        version: "1.2.3",153      });154      await lernaInit(testDir)();155      expect(await fs.readJSON(lernaJsonPath)).toEqual({156        packages: ["packages/*"],157        version: "1.2.3",158      });159    });160    it("creates package directories when glob is configured", async () => {161      const testDir = await initFixture("has-lerna");162      const lernaJsonPath = path.join(testDir, "lerna.json");163      await fs.outputJSON(lernaJsonPath, {164        packages: ["modules/*"],165      });166      await lernaInit(testDir)();167      expect(await fs.exists(path.join(testDir, "modules"))).toBe(true);168    });169  });170  describe("when re-initializing with --exact", () => {171    it("sets lerna.json command.init.exact to true", async () => {172      const testDir = await initFixture("updates");173      const lernaJsonPath = path.join(testDir, "lerna.json");174      const pkgJsonPath = path.join(testDir, "package.json");175      await fs.outputJSON(lernaJsonPath, {176        lerna: "0.1.100",177        commands: {178          bootstrap: {179            hoist: true,180          },181        },182        version: "1.2.3",183      });184      await fs.outputJSON(pkgJsonPath, {185        devDependencies: {186          lerna: lernaVersion,187        },188      });189      await lernaInit(testDir)("--exact");190      expect(await fs.readJSON(lernaJsonPath)).toEqual({191        command: {192          bootstrap: {193            hoist: true,194          },195          init: {196            exact: true,197          },198        },199        packages: ["packages/*"],200        version: "1.2.3",201      });202    });203  });...

check.js

Source: check.js (GitHub)

...
  checkUntils
\*------------------------------------*/
async function checkUntils() {
  //
  const zones = await fs.readJson(SOURCES_NORMALIZED_ZONES_PATH, 'utf8');
  const rules = await fs.readJson(SOURCES_NORMALIZED_RULES_PATH, 'utf8');
  const links = await fs.readJson(SOURCES_NORMALIZED_LINKS_PATH, 'utf8');
  const untilCounts = {}
  for (const zone of zones) {
  //if (zone.until) continue;
  if (typeof untilCounts[zone.name] === 'undefined') {
    untilCounts[zone.name] = 0;
  }
  if (!zone.until) {
    untilCounts[zone.name] = untilCounts[zone.name] + 1;
  }
  //console.dir(zone.rules.padEnd(10, ' ') + zone.name);
  }
  for (const k of Object.keys(untilCounts)) {
    console.dir(String(untilCounts[k]).padEnd(10, ' ') + k);
  }
}
/*------------------------------------*\
  checkRuleWeekDays
\*------------------------------------*/
async function checkRuleWeekDays() {
  //
  const zones = await fs.readJson(SOURCES_NORMALIZED_ZONES_PATH, 'utf8');
  const rules = await fs.readJson(SOURCES_NORMALIZED_RULES_PATH, 'utf8');
  const links = await fs.readJson(SOURCES_NORMALIZED_LINKS_PATH, 'utf8');
  //
  const values = [];
  for (const rule of rules) {
    if (Number.isNaN(Number(rule.on))) {
      values.push(
        rule.on.replace('last', '').replace(/(last|[<|>]=\d+)/, '')
      )
    }
  }
  console.dir(_.uniq(values));
}
/*------------------------------------*\
  checkRuleMonthsAbbr
\*------------------------------------*/
async function checkRuleMonthsAbbr() {
  //
  const zones = await fs.readJson(SOURCES_NORMALIZED_ZONES_PATH, 'utf8');
  const rules = await fs.readJson(SOURCES_NORMALIZED_RULES_PATH, 'utf8');
  const links = await fs.readJson(SOURCES_NORMALIZED_LINKS_PATH, 'utf8');
  //
  const values = [];
  for (const rule of rules) {
    values.push(rule.in)
  }
  console.dir(_.uniq(values));
}
/*------------------------------------*\
  checkOngoing
\*------------------------------------*/
async function checkOngoing() {
  //
  const ongoingZones = await fs.readJson(SOURCES_NORMALIZED_ONGOING_PATH, 'utf8');
  for (const zone of ongoingZones) {
    var m = zone.rules.map(
      r => `name=${r.name} save=${r.save} from=${r.from} to_combined=${r.to_combined} letters=${r.letters}`
    );
    console.log(`${zone.name} (${zone.format})\n  ${m.length ? (m.join('\n  ') + '\n') : ''}`);
  }
}
/*------------------------------------*\
  checkAgainstWta
\*------------------------------------*/
async function checkAgainstWta() {
  return;
  const output = [];
  //...

modules.spec.js

Source: modules.spec.js (GitHub)

...
        expect(log.info).to.have.been.calledWith(
          'modules',
          'syncing module versions for 4 packages'
        )
        expect(fs.readJson('packages/b/package.json')).to.contain.nested.property(
          'dependencies.a',
          '~2.0.0'
        )
        expect(fs.readJson('packages/c/package.json')).to.contain.nested.property(
          'devDependencies.a',
          '~2.0.0'
        )
        expect(fs.readJson('packages/d/package.json')).to.contain.nested.property(
          'peerDependencies.a',
          '>=2.0.0'
        )
      })
    })
  })
  it('should respect provided packages', async () => {
    const log = loggerMock()
    const project = await aLernaProject()
    project
      .module('packages/a', module => module.packageJson({version: '2.0.0'}))
      .module('packages/b', module => module.packageJson({dependencies: {a: '~1.0.0'}}))
      .module('packages/c', module => module.packageJson({dependencies: {a: '~1.0.0'}}))
    return project.within(async () => {
      const lernaPackages = await loadPackages()
      const filteredPackages = lernaPackages.filter(p => p.name !== 'c')
      return sync({packages: filteredPackages})(log).then(() => {
        expect(log.info).to.have.been.calledWith(
          'modules',
          'syncing module versions for 2 packages'
        )
        expect(fs.readJson('packages/b/package.json')).to.contain.nested.property(
          'dependencies.a',
          '~2.0.0'
        )
        expect(fs.readJson('packages/c/package.json')).to.contain.nested.property(
          'dependencies.a',
          '~1.0.0'
        )
      })
    })
  })
  it('should accept custom transformFunctions', async () => {
    const log = loggerMock()
    const project = await aLernaProject()
    project
      .module('packages/a', module => module.packageJson({version: '2.0.0'}))
      .module('packages/b', module => module.packageJson({dependencies: {a: '~1.0.0'}}))
      .module('packages/c', module => module.packageJson({devDependencies: {a: '~1.0.0'}}))
      .module('packages/d', module => module.packageJson({peerDependencies: {a: '~1.0.0'}}))
    return project.within(() => {
      return sync({transformDependencies: v => `+${v}`, transformPeerDependencies: v => `-${v}`})(
        log
      ).then(() => {
        expect(fs.readJson('packages/b/package.json')).to.contain.nested.property(
          'dependencies.a',
          '+2.0.0'
        )
        expect(fs.readJson('packages/c/package.json')).to.contain.nested.property(
          'devDependencies.a',
          '+2.0.0'
        )
        expect(fs.readJson('packages/d/package.json')).to.contain.nested.property(
          'peerDependencies.a',
          '-2.0.0'
        )
      })
    })
  })
  it('should beauify json on update', async () => {
    const log = loggerMock()
    const project = await aLernaProject()
    project
      .module('packages/a', module => module.packageJson({version: '2.0.0'}))
      .module('packages/b', module => module.packageJson({dependencies: {a: '~1.0.0'}}))
    return project.within(() => {
      return sync()(log).then(() => {
...

api.js

Source: api.js (GitHub)

const fs = require('fs-extra');
const serve = require('koa-static');
const Koa = require('koa');
const router = require('koa-router')();
const cors = require('koa2-cors');
const app = new Koa();
app.use(serve('./assets/'));
// Get the latest set of entries
function getNewInfos(newTimeline, lastid){
    let newarr = [];
    if(lastid){
        for(let i = 0; newTimeline && i < newTimeline.length; i++){
            let info = newTimeline[i];
            if(info.id > lastid){
                newarr.push(info);
            }else{
                break;
            }
        }
    }else{
        newarr = newTimeline;
    }
    return newarr;
}
// Get all information
router.get('/data/all', async (ctx, next) => {
    console.log('/data/all');
    let data = await fs.readJSON('data/data.json');
    ctx.response.body = data;
});
// Get information for the specified province
router.get('/data/getAreaStat/:provice', async (ctx, next) =>{
    let provice = ctx.params.provice;
    console.log(ctx.params)
    let data = await fs.readJSON('data/data.json');
    let areaStat = data.getAreaStat;
    if(provice){
        let body = [];
        for(let i = 0; i<areaStat.length; i++){
            let area = areaStat[i];
            if(area.provinceName == provice || area.provinceShortName == provice){
                body.push(area);
                break;
            }
        }
        ctx.response.body = body;
    }else{
        ctx.response.body = areaStat;
    }
});
// Get the information timeline
router.get('/data/getTimelineService', async (ctx,next) => {
    console.log('/data/getTimelineService');
    let timeline = await fs.readJSON('data/timeline.json');
    ctx.response.body = timeline;
});
// Get the latest events
router.get('/data/getNewest/:lastid', async (ctx,next) => {
    let lastid = ctx.params.lastid;
    console.log(`/data/getNewest/:${lastid}`);
    // let data = await fs.readJSON('data/data.json');
    // let timeline = data.getTimelineService;
    let timeline = await fs.readJSON('data/timeline.json');
    let newest = lastid ? getNewInfos(timeline,lastid) : [timeline[0]];
    ctx.response.body = newest;
});
/**
 * [Additional endpoint: data/<Service>]
 * Service list:
 * getIndexRumorList : latest rumor rebuttals
 * getIndexRecommendList : latest prevention tips
 * getWikiList : latest knowledge-base entries
 * getEntries : diagnosis and treatment information
 * getListByCountryTypeService1 : nationwide province-level patient distribution data
 * getListByCountryTypeService2 : patient distribution data for other regions worldwide
 * getStatisticsService : overall statistics
 */
router.get('/data/:serviceName', async (ctx,next) => {
    try {
        let serviceName = ctx.params.serviceName;
        console.log(`service = ${serviceName}`);
        let data = await fs.readJSON('data/data.json');
        let content = data[serviceName];
        if(content){
            if(serviceName == 'getStatisticsService'){
                content['imgUrl'] = `http://49.232.173.220:3001/images/distribution-nationale.png?time=${new Date().getTime()}`;
            }
            ctx.response.body = content;
        }else{
            ctx.response.body = 'Not Found'
        }
    } catch (error) {
        console.log(error);
    }
});
router.get('/app/update', async (ctx,next) => {
    console.log('app/update');
    let update = (await fs.readJSON('data/app.json')).update;
    ctx.response.body = update;
});
// add router middleware:
app.use(cors());
app.use(router.routes());
...

product-repo.js

Source: product-repo.js (GitHub)

...
const url = new URL('../data/products.json', import.meta.url);
const filePath = fileURLToPath(url);
class ProductRepo {
    async getProducts() {
        return await fs.readJson(filePath)
    }
    async getProduct(pid) {
        const products = await fs.readJson(filePath)
        return products.find(pr => pr.pid == pid)
    }
    async addProduct(newProduct) {
        const products = await fs.readJson(filePath)
        products.push(newProduct)
        return await fs.writeJson(filePath, products)
    }
    async updateProduct(updatedProduct) {
        const products = await fs.readJson(filePath)
        const index = products.findIndex(product => product.pid == updatedProduct.pid)
        if (index >= 0) {
            console.log(index)
            products[index] = updatedProduct
            return await fs.writeJson(filePath, products)
        }
        return null
    }
    async deleteProduct(pid) {
        const products = await fs.readJson(filePath)
        const filteredProducts = products.filter(pr => pr.pid != pid)
        return await fs.writeJson(filePath, filteredProducts)
    }
    async deleteAllProducts() {
        return await fs.writeJson(filePath, [])
    }
    async getStatistics() {
        const products = await fs.readJson(filePath)
        const totalProduct = products.length
        const totalPrice = products.reduce((num, sum) => +num + +sum.price, 0)
        return {
            "totalNumberOfProducts": totalProduct,
            "totalPrice": totalPrice
        }
    }
    async getTopExpensiveProducts(limit) {
        const products = await fs.readJson(filePath)
        const topThree = products.sort((a, b) => b.price - a.price).splice(0, limit)
        return topThree
    }
}
...

Using AI Code Generation

describe('Read Json file', function() {
  it('Read Json file', function() {
    cy.readFile('cypress/fixtures/test.json').then((data) => {
      console.log(data);
    });
  });
});
{
}
describe('Read Json file from url', function() {
  it('Read Json file from url', function() {
      console.log(response);
    });
  });
});
describe('Read Json file from api', function() {
  it('Read Json file from api', function() {
      console.log(response);
    });
  });
});
describe('Read Json file from local', function() {
  it('Read Json file from local', function() {
    cy.readFile('cypress/fixtures/test.json').then((data) => {
      console.log(data);
    });
  });
});

Using AI Code Generation

const fs = require('fs')
const path = require('path')
describe('My First Test', () => {
  it('Does not do much!', () => {
    cy.contains('type').click()
    cy.url().should('include', '/commands/actions')
    cy.get('.action-email')
      .type('

Using AI Code Generation

const fs = require('fs-extra');
const path = require('path');
const { expect } = require('chai');
describe('Test', () => {
    it('test', () => {
        const file = fs.readJson(path.resolve('test.json'));
        expect(file).to.deep.equal({ test: 'test' });
    });
});
{
}
AssertionError: expected undefined to deeply equal { test: 'test' }
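
The assertion above fails because fs-extra's readJson is asynchronous: called without a callback it returns a Promise, not the parsed object. A minimal corrected sketch, assuming test.json actually contains { "test": "test" }, either awaits the Promise or uses the synchronous readJsonSync variant:

const fs = require('fs-extra');
const path = require('path');
const { expect } = require('chai');
describe('Test', () => {
    it('test (async)', async () => {
        // await the Promise returned by readJson
        const file = await fs.readJson(path.resolve('test.json'));
        expect(file).to.deep.equal({ test: 'test' });
    });
    it('test (sync)', () => {
        // or read and parse the file synchronously
        const file = fs.readJsonSync(path.resolve('test.json'));
        expect(file).to.deep.equal({ test: 'test' });
    });
});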

Using AI Code Generation

const path = require('path')
describe('My First Test', function() {
  it('Does not do much!', function() {
    cy.readFile(path.resolve(__dirname, 'example.json'))
      .its('name')
      .should('eq', 'example')
  })
})
{
}
describe('My First Test', function() {
  it('Does not do much!', function() {
    cy.readFile('example.json')
      .its('name')
      .should('eq', 'example')
  })
})
{
}

Using AI Code Generation

1const fs = require("fs-extra");2const json = fs.readJsonSync("./cypress/fixtures/data.json");3cy.request({4}).then((response) => {5expect(response.status).to.eq(200);6expect(response.body).to.have.property("userId", json.userId);7expect(response.body).to.have.property("id", json.id);8expect(response.body).to.have.property("title", json.title);9expect(response.body).to.have.property("completed", json.completed);10});11const fs = require("fs-extra");12const json = fs.readJsonSync("./cypress/fixtures/data.json");13cy.request({14}).then((response) => {15expect(response.status).to.eq(200);16expect(response.body).to.have.property("userId", json.userId);17expect(response.body).to.have.property("id", json.id);18expect(response.body).to.have.property("title", json.title);19expect(response.body).to.have.property("completed", json.completed);20});21The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (cypress/plugins/index.js)22The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (cypress/plugins/index.js)23The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (cypress/plugins/index.js)

Using AI Code Generation

const fs = require('fs')
let jsonObject = fs.readJson('sample.json')
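
Note that Node's built-in fs module has no readJson method; it is provided by the fs-extra package and resolves asynchronously. A minimal corrected sketch, assuming a sample.json file exists next to the script:

const fs = require('fs-extra');

(async () => {
  // readJson returns a Promise that resolves to the parsed object
  const jsonObject = await fs.readJson('sample.json');
  console.log(jsonObject);
})();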

Using AI Code Generation

describe('Read JSON files', function() {
  it('Read JSON files', function() {
    cy.readFile('cypress/fixtures/employee.json').then((data) => {
    })
  })
})
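
When the JSON lives in the fixtures folder, cy.fixture is a common alternative to cy.readFile. A minimal sketch, assuming the same employee.json fixture as above:

describe('Read JSON fixture', function() {
  it('Read JSON fixture', function() {
    // resolves employee.json relative to cypress/fixtures
    cy.fixture('employee').then((data) => {
      expect(data).to.not.be.undefined
    })
  })
})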

Cypress Tutorial

Cypress is a renowned JavaScript-based, open-source, easy-to-use end-to-end testing framework primarily used for testing web applications. Cypress is a relatively new player in the automation testing space and has been gaining a lot of traction lately, as evidenced by the project's fork (2.7K) and star (42.1K) counts. LambdaTest's Cypress Tutorial covers step-by-step guides that will help you learn everything from the basics to running automation tests on LambdaTest.

Chapters:

  1. What is Cypress? - An introduction to the framework and what it is used for.
  2. Why Cypress? - Learn why Cypress might be a good choice for testing your web applications.
  3. Features of Cypress Testing - Learn about features that make Cypress a powerful and flexible tool for testing web applications.
  4. Cypress Drawbacks - Although Cypress has many strengths, it has a few limitations that you should be aware of.
  5. Cypress Architecture - Learn more about Cypress architecture and how it is designed to be run directly in the browser, i.e., it does not have any additional servers.
  6. Browsers Supported by Cypress - Cypress is built on top of the Electron browser, supporting all modern web browsers. Learn browsers that support Cypress.
  7. Selenium vs Cypress: A Detailed Comparison - Compare and explore some key differences in terms of their design and features.
  8. Cypress Learning: Best Practices - Take a deep dive into some of the best practices you should use to avoid anti-patterns in your automation tests.
  9. How To Run Cypress Tests on LambdaTest? - Set up a LambdaTest account, and you are all set to learn how to run Cypress tests on the platform.

Certification

You can elevate your expertise with end-to-end testing using the Cypress automation framework and stay one step ahead in your career by earning a Cypress certification. Check out our Cypress 101 Certification.

YouTube

Watch this complete 3-hour tutorial to learn the basics of Cypress and various Cypress commands for Cypress testing at LambdaTest.
