How to use the github.pulls.list method

JavaScript code snippets using github.pulls.list, taken from the Jasmine test suite of Oppia's GitHub bot (a Probot app).
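Before the full specs, here is a minimal sketch of how github.pulls.list and github.pulls.listFiles are typically called inside a Probot event handler, assuming the Probot v9-style context.github client that these specs stub out. The handler and log message are illustrative only and are not part of the Oppia bot:

// Hypothetical handler, for illustration: shows the call shapes that the
// specs below replace with Jasmine spies.
module.exports = (app) => {
  app.on('pull_request.opened', async (context) => {
    // All open pull requests in the repository the event came from.
    const openPRs = await context.github.pulls.list(
      context.repo({ state: 'open', per_page: 60 })
    );

    // Files changed by the pull request that triggered the event.
    const changedFiles = await context.github.pulls.listFiles(
      context.repo({ pull_number: context.payload.pull_request.number })
    );

    app.log(
      `${openPRs.data.length} open PRs, ` +
      `${changedFiles.data.length} files changed in this PR`
    );
  });
};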

checkPullRequestJobSpec.js

Source: checkPullRequestJobSpec.js (GitHub)


1// Copyright 2020 The Oppia Authors. All Rights Reserved.2//3// Licensed under the Apache License, Version 2.0 (the "License");4// you may not use this file except in compliance with the License.5// You may obtain a copy of the License at6//7// http://www.apache.org/licenses/LICENSE-2.08//9// Unless required by applicable law or agreed to in writing, software10// distributed under the License is distributed on an "AS-IS" BASIS,11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.12// See the License for the specific language governing permissions and13// limitations under the License.14require('dotenv').config();15const { createProbot } = require('probot');16// The plugin refers to the actual app in index.js.17const oppiaBot = require('../index');18const checkPullRequestJobModule = require('../lib/checkPullRequestJob');19const apiForSheetsModule = require('../lib/apiForSheets');20const checkPullRequestLabelsModule = require('../lib/checkPullRequestLabels');21const checkPullRequestBranchModule = require('../lib/checkPullRequestBranch');22const checkCriticalPullRequestModule =23 require('../lib/checkCriticalPullRequest');24const checkPullRequestTemplateModule =25 require('../lib/checkPullRequestTemplate');26const newCodeOwnerModule = require('../lib/checkForNewCodeowner');27const scheduler = require('../lib/scheduler');28const checkCronJobModule = require('../lib/checkNewCronJobs');29const { JOBS_AND_FETURES_TESTING_WIKI_LINK } = require('../lib/utils');30let payloadData = JSON.parse(31 JSON.stringify(require('../fixtures/pullRequestPayload.json'))32);33describe('Pull Request Job Spec', () => {34 /**35 * @type {import('probot').Probot} robot36 */37 let robot;38 /**39 * @type {import('probot').Octokit} github40 */41 let github;42 /**43 * @type {import('probot').Application} app44 */45 let app;46 const firstNewJobFileObj = {47 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',48 filename: 'core/jobs/exp_jobs_one_off.py',49 status: 'added',50 additions: 1,51 deletions: 0,52 changes: 1,53 blob_url:54 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +55 'e110d7e9833c/core/jobs/exp_jobs_one_off.py',56 raw_url:57 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +58 'e110d7e9833c/core/jobs/exp_jobs_one_off.py',59 contents_url:60 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +61 'exp_jobs_one_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',62 patch: '@@ -0,0 +1 @@\n+class FirstTestJob(jobs.JobTestBase):\n' +63 '+ """One-off job for creating and populating ' +64 'UserContributionsModels for\n+ ' +65 'all registered users that have contributed.\n+ """\n+ ' +66 '@classmethod\n+ def entity_classes_to_map_over(cls)' +67 ':\n+ """Return a list of datastore class references ' +68 'to map over."""\n+ return [exp_models.' +69 'ExplorationSnapshotMetadataModel]\n+\n+ @staticmethod\n+ ' +70 'def map(item):\n+ """Implements the map function for this ' +71 'job."""\n+ yield (\n+ item.committer_id, ' +72 '{\n+ \'exploration_id\': item.' 
+73 'get_unversioned_instance_id(),\n+ ' +74 '\'version_string\': item.get_version_string(),\n+ ' +75 '})\n+\n+\n+ @staticmethod\n+ def reduce(key, ' +76 'version_and_exp_ids):\n+ """Implements the reduce ' +77 'function for this job."""\n+ created_exploration_ids = ' +78 'set()\n+ edited_exploration_ids = set()\n+\n+ ' +79 'edits = [ast.literal_eval(v) for v in version_and_exp_ids]' +80 '\n+\n+ for edit in edits:\n+ ' +81 'edited_exploration_ids.add(edit[\'exploration_id\'])' +82 '\n+ if edit[\'version_string\'] == \'1\'' +83 ':\n+ created_exploration_ids.add(edit' +84 '[\'exploration_id\'])\n+\n+ if user_services.' +85 'get_user_contributions(key, strict=False) is not None' +86 ':\n+ user_services.update_user_contributions' +87 '(\n+ key, list(created_exploration_ids), ' +88 'list(\n+ edited_exploration_ids))\n+ ' +89 'else:\n+ user_services.create_user_contributions' +90 '(\n+ key, list(created_exploration_ids), list' +91 '(\n+ edited_exploration_ids))\n',92 };93 const secondNewJobFileObj = {94 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',95 filename: 'core/jobs/exp_jobs_oppiabot_off.py',96 status: 'added',97 additions: 1,98 deletions: 0,99 changes: 1,100 blob_url:101 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +102 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',103 raw_url:104 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +105 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',106 contents_url:107 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +108 'exp_jobs_oppiabot_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',109 patch: '@@ -0,0 +1 @@\n+class SecondTestJob(jobs.JobTestBase):\n' +110 '+ """One-off job for creating and populating ' +111 'UserContributionsModels for\n' +112 '+ all registered users that have contributed.\n+ ' +113 '"""\n+ @classmethod\n+ def entity_classes_to_map_over' +114 '(cls):\n+ """Return a list of datastore class references ' +115 'to map over."""\n+ return [exp_models.' +116 'ExplorationSnapshotMetadataModel]\n+\n+ @staticmethod\n+ ' +117 'def map(item):\n+ """Implements the map function for this ' +118 'job."""\n+ yield (\n+ item.committer_id, ' +119 '{\n+ \'exploration_id\': item.' +120 'get_unversioned_instance_id(),\n+ \'version_string\'' +121 ': item.get_version_string(),\n+ })\n+\n+\n+ ' +122 '@staticmethod\n+ def reduce(key, version_and_exp_ids)' +123 ':\n+ """Implements the reduce function for this job.' +124 '"""\n+ created_exploration_ids = set()\n+ ' +125 'edited_exploration_ids = set()\n+\n+ edits = ' +126 '[ast.literal_eval(v) for v in version_and_exp_ids]\n+\n+ ' +127 'for edit in edits:\n+ edited_exploration_ids.add' +128 '(edit[\'exploration_id\'])\n+ if edit' +129 '[\'version_string\'] == \'1\':\n+ ' +130 'created_exploration_ids.add(edit[\'exploration_id\'])' +131 '\n+\n+ if user_services.get_user_contributions' +132 '(key, strict=False) is not None:\n+ user_services.' 
+133 'update_user_contributions(\n+ key, list' +134 '(created_exploration_ids), list(\n+ ' +135 'edited_exploration_ids))\n+ else:\n+ ' +136 'user_services.create_user_contributions(\n+ ' +137 'key, list(created_exploration_ids), list(\n+ ' +138 'edited_exploration_ids))\n',139 };140 const modifiedExistingJobFileObj = {141 sha: 'f06a0d3ea104733080c4dad4a4e5aa7fb76d8f5d',142 filename: 'core/jobs/user_jobs_one_off.py',143 status: 'modified',144 additions: 43,145 deletions: 0,146 changes: 43,147 blob_url:148 'https://github.com/oppia/oppia/blob/5b0e633fa1b9a00771a3b88302fa' +149 '3ff048d7240c/core/jobs/user_jobs_one_off.py',150 raw_url:151 'https://github.com/oppia/oppia/raw/5b0e633fa1b9a00771a3b88302fa' +152 '3ff048d7240c/core/jobs/user_jobs_one_off.py',153 contents_url:154 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +155 'user_jobs_one_off.py?ref=5b0e633fa1b9a00771a3b88302fa3ff048d7240c',156 patch:157 '@@ -80,6 +80,49 @@ def reduce(key, version_and_exp_ids)' +158 ':\n edited_exploration_ids))\n \n ' +159 '\n+class OppiabotContributionsJob(jobs.JobTestBase):\n' +160 '+ """One-off job for creating and populating ' +161 'UserContributionsModels for' +162 '\n+ all registered users that have contributed.\n+ ' +163 '"""\n+ @classmethod\n+ def entity_classes_to_map_over' +164 '(cls):\n+ """Return a list of datastore class ' +165 'references to map over."""\n+ return ' +166 '[exp_models.ExplorationSnapshotMetadataModel]\n+\n+ ' +167 '@staticmethod\n+ def map(item):\n+ """Implements ' +168 'the map function for this job."""\n+ yield ' +169 '(\n+ item.committer_id, {\n+ ' +170 '\'exploration_id\': item.get_unversioned_instance_id(),' +171 '\n+ \'version_string\': item.' +172 'get_version_string(),\n+ })\n+\n+\n+ ' +173 '@staticmethod\n+ def reduce(key, version_and_exp_ids)' +174 ':\n+ """Implements the reduce function for this ' +175 'job."""\n+ created_exploration_ids = set()' +176 '\n+ edited_exploration_ids = set()\n+\n+ ' +177 'edits = [ast.literal_eval(v) for v in version_and_exp_ids]' +178 '\n+\n+ for edit in edits:\n+ ' +179 'edited_exploration_ids.add(edit[\'exploration_id\'])' +180 '\n+ if edit[\'version_string\'] == \'1\'' +181 ':\n+ created_exploration_ids.add(edit' +182 '[\'exploration_id\'])\n+\n+ if user_services.' +183 'get_user_contributions(key, strict=False) is not None' +184 ':\n+ user_services.update_user_contributions' +185 '(\n+ key, list(created_exploration_ids), ' +186 'list(\n+ edited_exploration_ids))' +187 '\n+ else:\n+ user_services.' 
+188 'create_user_contributions(\n+ key, ' +189 'list(created_exploration_ids), list(' +190 '\n+ edited_exploration_ids))\n',191 };192 const modifiedExistingJobFileObjWithJobClassInPatch = {193 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',194 filename: 'core/jobs/exp_jobs_oppiabot_off.py',195 status: 'modified',196 additions: 1,197 deletions: 0,198 changes: 1,199 blob_url:200 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +201 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',202 raw_url:203 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +204 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',205 contents_url:206 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +207 'exp_jobs_oppiabot_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',208 patch: '@@ -0,0 +1 @@class SecondTestJob(jobs.JobTestBase) ' +209 'def reduce(key, version_and_exp_ids):\n' +210 ' edited_exploration_ids))\n ',211 };212 const jobFileObjWithJobClassInPatchAndNewJob = {213 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',214 filename: 'core/jobs/exp_jobs_oppiabot_off.py',215 status: 'modified',216 additions: 1,217 deletions: 0,218 changes: 1,219 blob_url:220 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +221 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',222 raw_url:223 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a89413' +224 '0e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',225 contents_url:226 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +227 'exp_jobs_oppiabot_off.py?ref=' +228 '67fb4a973b318882af3b5a894130e110d7e9833c',229 patch: '@@ -0,0 +1 @@class SecondTestJob(jobs.JobTestBase) ' +230 'def reduce(key, version_and_exp_ids):\n' +231 ' edited_exploration_ids))\n' +232 ' \n' +233 '+class AnotherTestJob(jobs.JobTestBase):\n' +234 '+ """\n' +235 '+ @classmethod\n' +236 '+ def entity_classes_to_map_over ',237 };238 const jobFromTestDir = {239 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',240 filename: 'core/tests/linter_tests/exp_jobs_oppiabot_off.py',241 status: 'added',242 additions: 1,243 deletions: 0,244 changes: 1,245 blob_url:246 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +247 'e110d7e9833c/core/tests/linter_tests/exp_jobs_oppiabot_off.py',248 raw_url:249 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +250 'e110d7e9833c/core/tests/linter_tests/exp_jobs_oppiabot_off.py',251 contents_url:252 'https://api.github.com/repos/oppia/oppia/contents/core/tests/' +253 'linter_tests/exp_jobs_oppiabot_off.py?ref=' +254 '67fb4a973b318882af3b5a894130e110d7e9833c',255 patch: '@@ -0,0 +1 @@\n+class LintTestJob(jobs.JobTestBase):\n' +256 '+ """One-off job for creating and populating' +257 ' UserContributionsModels for\n' +258 '+ all registered users that have contributed.\n+ ' +259 '"""\n+ @classmethod\n+ def entity_classes_to_map_over' +260 '(cls):\n+ """Return a list of datastore class ' +261 'references to map over."""\n+ return [exp_models.' +262 'ExplorationSnapshotMetadataModel]\n+\n+ @staticmethod' +263 '\n+ def map(item):\n+ """Implements the map function ' +264 'for this job."""\n+ yield (\n+ item.' 
+265 'committer_id, {\n+ \'exploration_id\': ' +266 'item.get_unversioned_instance_id(),\n+ ' +267 '\'version_string\': item.get_version_string(),\n+ ' +268 '})\n+\n+\n+ @staticmethod\n+ def reduce(key, ' +269 'version_and_exp_ids):\n+ """Implements the reduce ' +270 'function for this job."""\n+ created_exploration_ids ' +271 '= set()\n+ edited_exploration_ids = set()\n+\n+ ' +272 'edits = [ast.literal_eval(v) for v in version_and_exp_ids]' +273 '\n+\n+ for edit in edits:\n+ ' +274 'edited_exploration_ids.add(edit[\'exploration_id\'])' +275 '\n+ if edit[\'version_string\'] == \'1\'' +276 ':\n+ created_exploration_ids.add(edit' +277 '[\'exploration_id\'])\n+\n+ if user_services.' +278 'get_user_contributions(key, strict=False) is not None' +279 ':\n+ user_services.update_user_contributions' +280 '(\n+ key, list(created_exploration_ids), ' +281 'list(\n+ edited_exploration_ids))\n+ ' +282 'else:\n+ user_services.create_user_contributions' +283 '(\n+ key, list(created_exploration_ids), list' +284 '(\n+ edited_exploration_ids))\n',285 };286 const fileWithMultipleJobs = {287 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',288 filename: 'core/jobs/exp_jobs_oppiabot_off.py',289 status: 'added',290 additions: 1,291 deletions: 0,292 changes: 1,293 blob_url:294 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +295 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',296 raw_url:297 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +298 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',299 contents_url:300 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +301 'exp_jobs_oppiabot_off.py?ref=' +302 '67fb4a973b318882af3b5a894130e110d7e9833c',303 patch: '@@ -0,0 +1 @@\n+class TestJob(jobs.JobTestBase):\n' +304 '+ """One-off job for creating and ' +305 'populating UserContributionsModels for \n+class ' +306 'AnotherTestJob(jobs.JobTestBase):\n' +307 '+ """\n+ @classmethod\n+ def ' +308 'entity_classes_to_map_over(cls):\n+ """Return a list' +309 ' of datastore class references to map over."""\n+ ' +310 'return [exp_models.ExplorationSnapshotMetadataModel]\n+\n+ ' +311 '@staticmethod\n+ def map(item):\n+ """Implements ' +312 'the map function for this job."""\n+ yield ' +313 '(\n+ item.committer_id, {\n+ ' +314 '\'exploration_id\': item.get_unversioned_instance_id(),\n+' +315 ' \'version_string\': item.get_version_string(),' +316 '\n+ })\n+\n+\n+ @staticmethod\n+ ' +317 'def reduce(key, version_and_exp_ids):\n+ ' +318 '"""Implements the reduce function for this job."""\n+ ' +319 'created_exploration_ids = set()\n+ ' +320 'edited_exploration_ids = set()\n+\n+ edits = ' +321 '[ast.literal_eval(v) for v in version_and_exp_ids]' +322 '\n+\n+ for edit in edits:\n+ ' +323 'edited_exploration_ids.add(edit[\'exploration_id\'])' +324 '\n+ if edit[\'version_string\'] == \'1\':\n+' +325 ' created_exploration_ids.add(edit' +326 '[\'exploration_id\'])\n+\n+ if user_services.' +327 'get_user_contributions(key, strict=False) is not None' +328 ':\n+ user_services.update_user_contributions' +329 '(\n+ key, list(created_exploration_ids), ' +330 'list(\n+ edited_exploration_ids))' +331 '\n+ else:\n+ user_services.' 
+332 'create_user_contributions(\n+ key, list(' +333 'created_exploration_ids), list(\n+ ' +334 'edited_exploration_ids))\n',335 };336 const jobTestFile = {337 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',338 filename: 'core/jobs/exp_jobs_oppiabot_off_test.py',339 status: 'added',340 additions: 1,341 deletions: 0,342 changes: 1,343 blob_url:344 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +345 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',346 raw_url:347 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +348 'e110d7e9833c/core/jobs/exp_jobs_oppiabot_off.py',349 contents_url:350 'https://api.github.com/repos/oppia/oppia/contents/core/jobs/' +351 'exp_jobs_oppiabot_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',352 patch: '@@ -0,0 +1 @@\n+class TestJobTests(jobs.JobTestBase):\n' +353 '+ """One-off job for creating and populating ' +354 'UserContributionsModels for \n' +355 '+class AnotherTestJobTests(jobs.JobTestBase):\n' +356 '+ """\n+ ' +357 '@classmethod\n+ def entity_classes_to_map_over(cls):' +358 '\n+ """Return a list of datastore class references to ' +359 'map over."""\n+ return [exp_models.' +360 'ExplorationSnapshotMetadataModel]\n+\n+ @staticmethod\n+ ' +361 'def map(item):\n+ """Implements the map function for this ' +362 'job."""\n+ yield (\n+ item.committer_id, ' +363 '{\n+ \'exploration_id\': item.get_unversioned_' +364 'instance_id(),\n+ \'version_string\': item.' +365 'get_version_string(),\n+ })\n+\n+\n+ ' +366 '@staticmethod\n+ def reduce(key, version_and_exp_ids):\n+' +367 ' """Implements the reduce function for this job."""\n+' +368 ' created_exploration_ids = set()\n+ ' +369 'edited_exploration_ids = set()\n+\n+ edits ' +370 '= [ast.literal_eval(v) for v in version_and_exp_ids]\n+\n+ ' +371 'for edit in edits:\n+ edited_exploration_ids.add(edit' +372 '[\'exploration_id\'])\n+ if edit[\'version_string\'] == ' +373 '\'1\':\n+ created_exploration_ids.add(edit' +374 '[\'exploration_id\'])\n+\n+ if user_services.' 
+375 'get_user_contributions(key, strict=False) is not None:\n+ ' +376 'user_services.update_user_contributions(\n+ key, list' +377 '(created_exploration_ids), list(\n+ ' +378 'edited_exploration_ids))\n+ else:\n+ ' +379 'user_services.create_user_contributions(\n+ ' +380 'key, list(created_exploration_ids), list(\n+ ' +381 'edited_exploration_ids))\n',382 };383 const nonJobFile = {384 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',385 filename: 'core/domain/exp_fetchers.py',386 status: 'added',387 additions: 1,388 deletions: 0,389 changes: 1,390 blob_url:391 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +392 'e110d7e9833c/core/domain/exp_fetchers.py',393 raw_url:394 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130' +395 'e110d7e9833c/core/domain/exp_fetchers.py',396 contents_url:397 'https://api.github.com/repos/oppia/oppia/contents/core/domain/' +398 'exp_fetchers.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',399 patch: '@@ -0,0 +1 @@\n+# def _migrate_states_schema' +400 '(versioned_exploration_states, exploration_id):',401 };402 beforeEach(() => {403 spyOn(scheduler, 'createScheduler').and.callFake(() => { });404 github = {405 issues: {406 createComment: jasmine.createSpy('createComment').and.returnValue({}),407 addLabels: jasmine.createSpy('addLabels').and.returnValue({}),408 addAssignees: jasmine.createSpy('addAssignees').and.returnValue({})409 },410 };411 robot = createProbot({412 id: 1,413 cert: 'test',414 githubToken: 'test',415 });416 app = robot.load(oppiaBot);417 spyOn(app, 'auth').and.resolveTo(github);418 spyOn(checkPullRequestJobModule, 'checkForNewJob').and.callThrough();419 spyOn(apiForSheetsModule, 'checkClaStatus').and.callFake(() => { });420 spyOn(421 checkPullRequestLabelsModule, 'checkChangelogLabel'422 ).and.callFake(() => { });423 spyOn(424 checkCriticalPullRequestModule, 'checkIfPRAffectsDatastoreLayer'425 ).and.callFake(() => { });426 spyOn(checkPullRequestBranchModule, 'checkBranch').and.callFake(() => { });427 spyOn(checkCronJobModule, 'checkForNewCronJob').and.callFake(() => { });428 spyOn(429 checkPullRequestTemplateModule, 'checkTemplate'430 ).and.callFake(() => { });431 spyOn(newCodeOwnerModule, 'checkForNewCodeowner').and.callFake(() => { });432 });433 describe('When a new job file is created in a pull request', () => {434 beforeEach(async () => {435 github.pulls = {436 listFiles: jasmine.createSpy('listFiles').and.resolveTo({437 data: [nonJobFile, firstNewJobFileObj],438 }),439 };440 payloadData.payload.pull_request.changed_files = 2;441 await robot.receive(payloadData);442 });443 it('should check for jobs', () => {444 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();445 });446 it('should get modified files', () => {447 expect(github.pulls.listFiles).toHaveBeenCalled();448 });449 it('should ping server jobs admin', () => {450 const author = payloadData.payload.pull_request.user.login;451 const formText = (452 'server jobs form'.link(453 'https://goo.gl/forms/XIj00RJ2h5L55XzU2')454 );455 const newLineFeed = '<br>';456 const wikiLinkText = 'this guide'.link(457 JOBS_AND_FETURES_TESTING_WIKI_LINK);458 const jobNameLink = (459 'FirstTestJob'.link(firstNewJobFileObj.blob_url)460 );461 expect(github.issues.createComment).toHaveBeenCalledWith({462 issue_number: payloadData.payload.pull_request.number,463 body:464 'Hi @vojtechjelinek, PTAL at this PR, ' +465 'it adds a new job. The name of the job is ' + jobNameLink + '.' 
+466 newLineFeed +467 'Also @' + author + ', please make sure to fill in the ' + formText +468 ' for the new job to be tested on the backup server. ' +469 'This PR can be merged only after the test run is successful. ' +470 'Please refer to ' + wikiLinkText + ' for details.' +471 newLineFeed + 'Thanks!',472 repo: payloadData.payload.repository.name,473 owner: payloadData.payload.repository.owner.login,474 });475 });476 it('should assign server jobs admin', () => {477 expect(github.issues.addAssignees).toHaveBeenCalled();478 expect(github.issues.addAssignees).toHaveBeenCalledWith({479 issue_number: payloadData.payload.pull_request.number,480 repo: payloadData.payload.repository.name,481 owner: payloadData.payload.repository.owner.login,482 assignees: ['vojtechjelinek']483 });484 });485 it('should add datastore label', () => {486 expect(github.issues.addLabels).toHaveBeenCalled();487 expect(github.issues.addLabels).toHaveBeenCalledWith({488 issue_number: payloadData.payload.pull_request.number,489 repo: payloadData.payload.repository.name,490 owner: payloadData.payload.repository.owner.login,491 labels: ['PR: Affects datastore layer']492 });493 });494 });495 describe('When multiple job files are created in a pull request', () => {496 beforeEach(async () => {497 github.pulls = {498 listFiles: jasmine.createSpy('listFiles').and.resolveTo({499 data: [500 nonJobFile,501 firstNewJobFileObj,502 secondNewJobFileObj503 ],504 }),505 };506 payloadData.payload.pull_request.changed_files = 3;507 await robot.receive(payloadData);508 });509 it('should check for jobs', () => {510 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();511 });512 it('should get modified files', () => {513 expect(github.pulls.listFiles).toHaveBeenCalled();514 });515 it('should ping server jobs admin', () => {516 expect(github.issues.createComment).toHaveBeenCalled();517 const author = payloadData.payload.pull_request.user.login;518 const formText = (519 'server jobs form'.link('https://goo.gl/forms/XIj00RJ2h5L55XzU2')520 );521 const newLineFeed = '<br>';522 const wikiLinkText = 'this guide'.link(523 JOBS_AND_FETURES_TESTING_WIKI_LINK);524 const jobRegistryLink = (525 'job registry'.link(526 'https://github.com/oppia/oppia/blob/develop/core/jobs_registry.py')527 );528 const firstJobNameLink = (529 'FirstTestJob'.link(firstNewJobFileObj.blob_url)530 );531 const secondJobNameLink = (532 'SecondTestJob'.link(secondNewJobFileObj.blob_url)533 );534 expect(github.issues.createComment).toHaveBeenCalledWith({535 issue_number: payloadData.payload.pull_request.number,536 body:537 'Hi @vojtechjelinek, PTAL at this PR, ' +538 'it adds new jobs. The jobs are ' + firstJobNameLink +539 ', ' + secondJobNameLink + '.' + newLineFeed + 'Also @' +540 author + ', please make sure to fill in the ' +541 formText + ' for the new jobs to be tested on the backup server. ' +542 'This PR can be merged only after the test run is successful. ' +543 'Please refer to ' + wikiLinkText + ' for details.' 
+544 newLineFeed + 'Thanks!',545 repo: payloadData.payload.repository.name,546 owner: payloadData.payload.repository.owner.login,547 });548 });549 it('should assign server jobs admin', () => {550 expect(github.issues.addAssignees).toHaveBeenCalled();551 expect(github.issues.addAssignees).toHaveBeenCalledWith({552 issue_number: payloadData.payload.pull_request.number,553 repo: payloadData.payload.repository.name,554 owner: payloadData.payload.repository.owner.login,555 assignees: ['vojtechjelinek']556 });557 });558 it('should add datastore label', () => {559 expect(github.issues.addLabels).toHaveBeenCalled();560 expect(github.issues.addLabels).toHaveBeenCalledWith({561 issue_number: payloadData.payload.pull_request.number,562 repo: payloadData.payload.repository.name,563 owner: payloadData.payload.repository.owner.login,564 labels: ['PR: Affects datastore layer']565 });566 });567 });568 describe('When a new job file is created and registry is ' +569 'updated in a pull request',570 () => {571 beforeEach(async () => {572 github.pulls = {573 listFiles: jasmine.createSpy('listFiles').and.resolveTo({574 data: [575 nonJobFile,576 firstNewJobFileObj577 ],578 }),579 };580 payloadData.payload.pull_request.changed_files = 2;581 await robot.receive(payloadData);582 });583 it('should check for jobs', () => {584 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();585 });586 it('should get modified files', () => {587 expect(github.pulls.listFiles).toHaveBeenCalled();588 });589 it('should ping server jobs admin', () => {590 expect(github.issues.createComment).toHaveBeenCalled();591 const author = payloadData.payload.pull_request.user.login;592 const formText = (593 'server jobs form'.link('https://goo.gl/forms/XIj00RJ2h5L55XzU2')594 );595 const newLineFeed = '<br>';596 const wikiLinkText = 'this guide'.link(597 JOBS_AND_FETURES_TESTING_WIKI_LINK);598 const jobNameLink = (599 'FirstTestJob'.link(firstNewJobFileObj.blob_url)600 );601 expect(github.issues.createComment).toHaveBeenCalledWith({602 issue_number: payloadData.payload.pull_request.number,603 body:604 'Hi @vojtechjelinek, PTAL at this PR, ' +605 'it adds a new job. The name of the job is ' + jobNameLink + '.' +606 newLineFeed + 'Also @' + author +607 ', please make sure to fill in the ' + formText +608 ' for the new job to be tested on the backup server. ' +609 'This PR can be merged only after the test run is successful. ' +610 'Please refer to ' + wikiLinkText + ' for details.' 
+611 newLineFeed + 'Thanks!',612 repo: payloadData.payload.repository.name,613 owner: payloadData.payload.repository.owner.login,614 });615 });616 it('should assign server jobs admin', () => {617 expect(github.issues.addAssignees).toHaveBeenCalled();618 expect(github.issues.addAssignees).toHaveBeenCalledWith({619 issue_number: payloadData.payload.pull_request.number,620 repo: payloadData.payload.repository.name,621 owner: payloadData.payload.repository.owner.login,622 assignees: ['vojtechjelinek']623 });624 });625 it('should add datastore label', () => {626 expect(github.issues.addLabels).toHaveBeenCalled();627 expect(github.issues.addLabels).toHaveBeenCalledWith({628 issue_number: payloadData.payload.pull_request.number,629 repo: payloadData.payload.repository.name,630 owner: payloadData.payload.repository.owner.login,631 labels: ['PR: Affects datastore layer']632 });633 });634 }635 );636 describe('When a new job is added in an existing job file', () => {637 beforeEach(async () => {638 github.pulls = {639 listFiles: jasmine.createSpy('listFiles').and.resolveTo({640 data: [641 modifiedExistingJobFileObj,642 ],643 }),644 };645 payloadData.payload.pull_request.changed_files = 1;646 await robot.receive(payloadData);647 });648 it('should check for jobs', () => {649 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();650 });651 it('should get modified files', () => {652 expect(github.pulls.listFiles).toHaveBeenCalled();653 });654 it('should ping server jobs admin', () => {655 expect(github.issues.createComment).toHaveBeenCalled();656 const author = payloadData.payload.pull_request.user.login;657 const formText = (658 'server jobs form'.link('https://goo.gl/forms/XIj00RJ2h5L55XzU2')659 );660 const newLineFeed = '<br>';661 const wikiLinkText = 'this guide'.link(662 JOBS_AND_FETURES_TESTING_WIKI_LINK);663 const jobRegistryLink = (664 'job registry'.link(665 'https://github.com/oppia/oppia/blob/develop/core/jobs_registry.py')666 );667 const jobNameLink = (668 'OppiabotContributionsJob'669 .link(modifiedExistingJobFileObj.blob_url)670 );671 expect(github.issues.createComment).toHaveBeenCalledWith({672 issue_number: payloadData.payload.pull_request.number,673 body:674 'Hi @vojtechjelinek, PTAL at this PR, ' +675 'it adds a new job. The name of the job is ' + jobNameLink +676 '.' + newLineFeed + 'Also @' + author + ', please make sure to ' +677 'fill in the ' + formText + ' for the new job to be tested on the ' +678 'backup server. ' + 'This PR can be merged only after the test ' +679 'run is successful. ' + 'Please refer to ' + wikiLinkText +680 ' for details.' 
+ newLineFeed + 'Thanks!',681 repo: payloadData.payload.repository.name,682 owner: payloadData.payload.repository.owner.login,683 });684 });685 it('should assign server jobs admin', () => {686 expect(github.issues.addAssignees).toHaveBeenCalled();687 expect(github.issues.addAssignees).toHaveBeenCalledWith({688 issue_number: payloadData.payload.pull_request.number,689 repo: payloadData.payload.repository.name,690 owner: payloadData.payload.repository.owner.login,691 assignees: ['vojtechjelinek']692 });693 });694 it('should add datastore label', () => {695 expect(github.issues.addLabels).toHaveBeenCalled();696 expect(github.issues.addLabels).toHaveBeenCalledWith({697 issue_number: payloadData.payload.pull_request.number,698 repo: payloadData.payload.repository.name,699 owner: payloadData.payload.repository.owner.login,700 labels: ['PR: Affects datastore layer']701 });702 });703 });704 describe('When an existing job file is modified with no new job', () => {705 beforeEach(async () => {706 github.pulls = {707 listFiles: jasmine.createSpy('listFiles').and.resolveTo({708 data: [709 {710 ...firstNewJobFileObj,711 status: 'modified',712 patch: '\n+# No job files present in the changes',713 },714 ],715 }),716 };717 payloadData.payload.pull_request.changed_files = 1;718 await robot.receive(payloadData);719 });720 it('should check for jobs', () => {721 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();722 });723 it('should get modified files', () => {724 expect(github.pulls.listFiles).toHaveBeenCalled();725 });726 it('should not ping server jobs admin', () => {727 expect(github.issues.createComment).not.toHaveBeenCalled();728 });729 it('should not add datastore label', () => {730 expect(github.issues.addLabels).not.toHaveBeenCalled();731 });732 it('should not assign server jobs admin', () => {733 expect(github.issues.addAssignees).not.toHaveBeenCalled();734 });735 });736 describe('When no job file is modified in a pull request', () => {737 beforeEach(async () => {738 github.pulls = {739 listFiles: jasmine.createSpy('listFiles').and.resolveTo({740 data: [741 nonJobFile742 ],743 }),744 };745 payloadData.payload.pull_request.changed_files = 1;746 await robot.receive(payloadData);747 });748 it('should check for jobs', () => {749 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();750 });751 it('should not get modified files', () => {752 expect(github.pulls.listFiles).toHaveBeenCalled();753 });754 it('should not ping server job admin', () => {755 expect(github.issues.createComment).not.toHaveBeenCalled();756 });757 });758 describe('Does not comment on job from test dir', () => {759 beforeEach(async () => {760 github.pulls = {761 listFiles: jasmine.createSpy('listFiles').and.resolveTo({762 data: [763 jobFromTestDir764 ],765 }),766 };767 payloadData.payload.pull_request.changed_files = 1;768 await robot.receive(payloadData);769 });770 it('should check for jobs', () => {771 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();772 });773 it('should get modified files', () => {774 expect(github.pulls.listFiles).toHaveBeenCalled();775 });776 it('should not ping server job admin', () => {777 expect(github.issues.createComment).not.toHaveBeenCalled();778 });779 });780 describe('When job test file gets added', () => {781 beforeEach(async () => {782 github.pulls = {783 listFiles: jasmine.createSpy('listFiles').and.resolveTo({784 data: [785 jobTestFile786 ],787 }),788 };789 payloadData.payload.pull_request.changed_files = 1;790 await robot.receive(payloadData);791 
});792 it('should check for jobs', () => {793 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();794 });795 it('should get modified files', () => {796 expect(github.pulls.listFiles).toHaveBeenCalled();797 });798 it('should not ping server job admin', () => {799 expect(github.issues.createComment).not.toHaveBeenCalled();800 });801 });802 describe('When pull request has datastore label', () => {803 beforeEach(async () => {804 payloadData.payload.pull_request.labels = [{805 name: 'PR: Affects datastore layer'806 }];807 github.pulls = {808 listFiles: jasmine.createSpy('listFiles').and.resolveTo({809 data: [810 nonJobFile, firstNewJobFileObj811 ],812 }),813 };814 payloadData.payload.pull_request.changed_files = 2;815 await robot.receive(payloadData);816 });817 it('should check for jobs', () => {818 expect(checkPullRequestJobModule.checkForNewJob).toHaveBeenCalled();819 });820 it('should not get modified files', () => {821 expect(github.pulls.listFiles).not.toHaveBeenCalled();822 });823 it('should not ping server job admin', () => {824 expect(github.issues.createComment).not.toHaveBeenCalled();825 });826 });827 describe('Returns appropriate job name', () => {828 it('should return the correct job created in the file', () => {829 let jobs = checkPullRequestJobModule.getNewJobsFromFile(830 firstNewJobFileObj831 );832 expect(jobs.length).toBe(1);833 expect(jobs[0]).toBe('FirstTestJob');834 jobs = checkPullRequestJobModule.getNewJobsFromFile(secondNewJobFileObj);835 expect(jobs.length).toBe(1);836 expect(jobs[0]).toBe('SecondTestJob');837 jobs = checkPullRequestJobModule.getNewJobsFromFile(838 modifiedExistingJobFileObj839 );840 expect(jobs.length).toBe(1);841 expect(jobs[0]).toBe('OppiabotContributionsJob');842 jobs = checkPullRequestJobModule.getNewJobsFromFile(fileWithMultipleJobs);843 expect(jobs.length).toBe(2);844 expect(jobs[0]).toBe('TestJob');845 expect(jobs[1]).toBe('AnotherTestJob');846 jobs = checkPullRequestJobModule.getNewJobsFromFile(jobTestFile);847 expect(jobs.length).toBe(0);848 jobs = checkPullRequestJobModule.getNewJobsFromFile(849 modifiedExistingJobFileObjWithJobClassInPatch);850 expect(jobs.length).toBe(0);851 jobs = checkPullRequestJobModule.getNewJobsFromFile(852 jobFileObjWithJobClassInPatchAndNewJob);853 expect(jobs.length).toBe(1);854 expect(jobs[0]).toBe('AnotherTestJob');855 });856 });...
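The pattern repeated throughout this suite is to replace github.pulls.listFiles with a Jasmine spy that resolves to a fixture payload, then replay the pull-request webhook through the Probot instance. A condensed sketch of that setup, reusing the spec's own github, robot, payloadData and firstNewJobFileObj variables:

beforeEach(async () => {
  // Stub the Octokit endpoint so no real GitHub request is made.
  github.pulls = {
    listFiles: jasmine.createSpy('listFiles').and.resolveTo({
      data: [firstNewJobFileObj],
    }),
  };
  payloadData.payload.pull_request.changed_files = 1;
  // Replay the fixture webhook through the bot.
  await robot.receive(payloadData);
});

it('should request the changed files', () => {
  expect(github.pulls.listFiles).toHaveBeenCalled();
});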


utilsSpec.js

Source: utilsSpec.js (GitHub)


1// Copyright 2020 The Oppia Authors. All Rights Reserved.2//3// Licensed under the Apache License, Version 2.0 (the "License");4// you may not use this file except in compliance with the License.5// You may obtain a copy of the License at6//7// http://www.apache.org/licenses/LICENSE-2.08//9// Unless required by applicable law or agreed to in writing, software10// distributed under the License is distributed on an "AS-IS" BASIS,11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.12// See the License for the specific language governing permissions and13// limitations under the License.14/**15 * @fileoverview Tests for the helper module.16 */17const { 'default': Axios } = require('axios');18const utilityModule = require('../lib/utils');19const pullRequest = require('../fixtures/pullRequestPayload.json').payload20 .pull_request;21const CODE_OWNERS_FILE_URL =22 'https://raw.githubusercontent.com/oppia/oppia/develop/.github/CODEOWNERS';23describe('Utility module tests', () => {24 const firstModelFileObj = {25 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',26 filename: 'core/storage/skill/gae_models.py',27 status: 'added',28 additions: 1,29 deletions: 0,30 changes: 1,31 blob_url:32 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130' +33 'e110d7e9833c/core/storage/skill/gae_models.py',34 raw_url:35 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130e' +36 '110d7e9833c/core/storage/skill/gae_models.py',37 contents_url:38 'https://api.github.com/repos/oppia/oppia/contents/core/storage/s' +39 'kill/gae_models.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',40 patch:41 '@@ -353,6 +353,11 @@ def export_data(user_id):\r\n }\r\n ' +42 '\r\n \r\n+class OppiabotTestActivitiesModel(base_models.BaseModel):' +43 '\r\n+ "Does some things right"\r\n+ pass\r\n+\r\n+\r\n class ' +44 'IncompleteActivitiesModel(base_models.BaseModel):\r\n """Keeps ' +45 'track of all the activities currently being completed by the\r\n ' +46 'learner.\r\n',47 };48 const firstJobFileObj = {49 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',50 filename: 'core/domain/exp_jobs_one_off.py',51 status: 'added',52 additions: 1,53 deletions: 0,54 changes: 1,55 blob_url:56 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130e11' +57 '0d7e9833c/core/domain/exp_jobs_one_off.py',58 raw_url:59 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130e110d' +60 '7e9833c/core/domain/exp_jobs_one_off.py',61 contents_url:62 'https://api.github.com/repos/oppia/oppia/contents/core/domain/exp_job' +63 's_one_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',64 patch:65 '@@ -0,0 +1 @@\n+class FirstTestOneOffJob(jobs.BaseMapReduceOneOffJobM' +66 ') for v in version_and_exp_ids]\n+\n+ for edit in edits:\n+ ',67 };68 const secondModelFileObj = {69 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',70 filename: 'core/storage/skill/gae_models.py',71 status: 'modified',72 additions: 1,73 deletions: 0,74 changes: 1,75 blob_url:76 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130e110' +77 'd7e9833c/core/storage/skill/gae_models.py',78 raw_url:79 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130e110d' +80 '7e9833c/core/storage/skill/gae_models.py',81 contents_url:82 'https://api.github.com/repos/oppia/oppia/contents/core/storage/skill/' +83 'gae_models.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',84 patch:85 '@@ -39,6 +39,20 @@ class SkillSnapshotContentModel(base_models.BaseSn' +86 'apshotContentModel):\r\n pass\r\n \r\n \r\n+class OppiabotSnapsho' +87 
'tContentModel(base_models.BaseSnapshotContentModel):\r\n+ """Oppia' +88 'bot testing."""\r\n+\r\n+ pass\r\n+\r\n+\r\n+class OppiabotSnapsho' +89 'tTestingModel(base_models.BaseSnapshotContentModel):\r\n+ """Anoth' +90 'er Oppiabot model."""\r\n+\r\n+ pass\r\n+\r\n+\r\n+\r\n+\r\n class' +91 ' SkillModel(base_models.VersionedModel):\r\n """Model for storing' +92 ' Skills.\r\n',93 };94 const secondJobFileObj = {95 sha: 'd144f32b9812373d5f1bc9f94d9af795f09023ff',96 filename: 'core/domain/exp_jobs_oppiabot_off.py',97 status: 'added',98 additions: 1,99 deletions: 0,100 changes: 1,101 blob_url:102 'https://github.com/oppia/oppia/blob/67fb4a973b318882af3b5a894130e11' +103 '0d7e9833c/core/domain/exp_jobs_oppiabot_off.py',104 raw_url:105 'https://github.com/oppia/oppia/raw/67fb4a973b318882af3b5a894130e110' +106 'd7e9833c/core/domain/exp_jobs_oppiabot_off.py',107 contents_url:108 'https://api.github.com/repos/oppia/oppia/contents/core/domain/exp_jo' +109 'bs_oppiabot_off.py?ref=67fb4a973b318882af3b5a894130e110d7e9833c',110 patch:111 '@@ -0,0 +1 @@\n+class SecondTestOneOffJob(jobs.BaseMapReduceOneOffJob' +112 'Manager):\n+ """One-off job for creating and populating UserContri' +113 'butionsModels for\n+ all registered users that have contributed.\n' +114 '+ """\n+ @classmethod\n+ def entity_classes_to_map_over(cls)' +115 ':\n+ """Return a list of datastore class references to map ove' +116 'r."""\n+ return [exp_models.ExplorationSnapshotMetadataModel]\n+',117 };118 const jobRegex = new RegExp(119 [120 '(?<addition>\\+)(?<classDefinition>class\\s)',121 '(?<name>[a-zA-Z]{2,256})(?<suffix>OneOffJob)(?<funDef>\\()',122 ].join('')123 );124 const modelRegex = new RegExp(125 [126 '(?<addition>\\+)(?<classDefinition>class\\s)',127 '(?<name>[a-zA-Z]{2,256})(?<suffix>Model)(?<funDef>\\()',128 ].join('')129 );130 it('should check for datastore labels', () => {131 let result = utilityModule.hasDatastoreLabel(pullRequest);132 expect(result).toBe(false);133 result = utilityModule.hasDatastoreLabel({134 ...pullRequest,135 labels: [{ name: 'PR: Affects datastore layer' }],136 });137 expect(result).toBe(true);138 });139 it('should return appropriate name string', () => {140 let result = utilityModule.getNameString(141 [firstModelFileObj],142 {143 singular: 'model',144 plural: 'models',145 },146 modelRegex147 );148 let itemLink = 'OppiabotTestActivitiesModel'.link(149 firstModelFileObj.blob_url150 );151 expect(result).toBe(' The name of the model is ' + itemLink + '.');152 result = utilityModule.getNameString(153 [firstModelFileObj, secondModelFileObj],154 {155 singular: 'model',156 plural: 'models',157 },158 modelRegex159 );160 firstItemLink = 'OppiabotTestActivitiesModel'.link(161 firstModelFileObj.blob_url162 );163 let secondItemLink =164 'OppiabotSnapshotContentModel, OppiabotSnapshotTestingModel'165 .link(166 secondModelFileObj.blob_url167 );168 expect(result).toBe(169 ' The models are ' + itemLink + ', ' + secondItemLink + '.'170 );171 result = utilityModule.getNameString(172 [firstJobFileObj],173 {174 singular: 'job',175 plural: 'jobs',176 },177 jobRegex178 );179 itemLink = 'FirstTestOneOffJob'.link(firstJobFileObj.blob_url);180 expect(result).toBe(' The name of the job is ' + itemLink + '.');181 result = utilityModule.getNameString(182 [firstJobFileObj, secondJobFileObj],183 {184 singular: 'job',185 plural: 'jobs',186 },187 jobRegex188 );189 itemLink = 'FirstTestOneOffJob'.link(firstJobFileObj.blob_url);190 secondItemLink = 'SecondTestOneOffJob'.link(secondJobFileObj.blob_url);191 expect(result).toBe(192 ' The 
jobs are ' + itemLink + ', ' + secondItemLink + '.'193 );194 });195 it('should return appropriate items by regex', () => {196 let result = utilityModule.getNewItemsFromFileByRegex(197 modelRegex,198 firstModelFileObj199 );200 expect(result.length).toBe(1);201 expect(result[0]).toBe('OppiabotTestActivitiesModel');202 result = utilityModule.getNewItemsFromFileByRegex(203 modelRegex,204 secondModelFileObj205 );206 expect(result.length).toBe(2);207 expect(result[0]).toBe('OppiabotSnapshotContentModel');208 expect(result[1]).toBe('OppiabotSnapshotTestingModel');209 result = utilityModule.getNewItemsFromFileByRegex(210 jobRegex,211 firstJobFileObj212 );213 expect(result.length).toBe(1);214 expect(result[0]).toBe('FirstTestOneOffJob');215 result = utilityModule.getNewItemsFromFileByRegex(216 jobRegex,217 secondJobFileObj218 );219 expect(result.length).toBe(1);220 expect(result[0]).toBe('SecondTestOneOffJob');221 });222 it('should get all changed files', async () => {223 const context = {224 repo: (obj) => {225 return {226 ...obj,227 repo: 'oppia',228 owner: 'oppia',229 };230 },231 payload: {232 pull_request: { ...pullRequest, changed_files: 2 },233 },234 github: {235 pulls: {236 listFiles: () => {237 return {238 data: [firstModelFileObj, firstJobFileObj],239 };240 },241 },242 },243 };244 const result = await utilityModule.getAllChangedFiles(context);245 expect(result.length).toBe(2);246 expect(result).toEqual([firstModelFileObj, firstJobFileObj]);247 });248 it('should get main code owner file from develop', async () => {249 spyOn(Axios, 'get').and.resolveTo({250 data: 'Contents of code owner file.',251 });252 const response = await utilityModule.getMainCodeOwnerfile();253 expect(Axios.get).toHaveBeenCalled();254 expect(Axios.get).toHaveBeenCalledWith(CODE_OWNERS_FILE_URL);255 expect(response).toBe('Contents of code owner file.');256 });257 it('should check if a label is a changelog label', () => {258 let response = utilityModule.isChangelogLabel(259 'PR CHANGELOG: Angular Migration'260 );261 expect(response).toBe(true);262 response = utilityModule.isChangelogLabel('An invalid label');263 expect(response).toBe(false);264 });265 it('should get all open pull requests', async () => {266 const context = {267 github: {268 pulls: {269 list: jasmine.createSpy('list').and.resolveTo({270 data: [271 {272 number: 101,273 body: 'sample pull request body',274 },275 ],276 }),277 },278 },279 repo: jasmine.createSpy('repo').and.callFake((params) => {280 return {281 repo: 'oppia',282 owner: 'oppia',283 ...params,284 };285 }),286 };287 let openPRs = await utilityModule.getAllOpenPullRequests(context);288 expect(openPRs.length).toBe(1);289 expect(openPRs[0].number).toBe(101);290 expect(context.github.pulls.list).toHaveBeenCalled();291 expect(context.github.pulls.list).toHaveBeenCalledWith({292 repo: 'oppia',293 owner: 'oppia',294 per_page: 60,295 state: 'open',296 });297 context.github.pulls.list = jasmine.createSpy('list').and.resolveTo({298 data: [299 {300 number: 101,301 body: 'sample pull request body',302 },303 {304 number: 102,305 body: 'another sample pull request body',306 },307 ],308 });309 openPRs = await utilityModule.getAllOpenPullRequests(context);310 expect(openPRs.length).toBe(2);311 expect(openPRs[0].number).toBe(101);312 expect(openPRs[1].number).toBe(102);313 });314 it('should check that a pull request has been approved', async () => {315 const context = {316 payload: {317 repository: {318 full_name: 'oppia/oppia',319 },320 },321 github: {322 search: {323 issuesAndPullRequests: jasmine324 
.createSpy('issuesAndPullRequests')325 .and.resolveTo({326 status: 200,327 data: {328 items: [pullRequest],329 },330 }),331 },332 },333 repo: jasmine.createSpy('repo').and.callFake((params) => {334 return {335 repo: 'oppia',336 owner: 'oppia',337 ...params,338 };339 }),340 };341 let response = await utilityModule.hasPullRequestBeenApproved(context, 101);342 expect(response).toBe(true);343 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalled();344 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalledWith({345 repo: 'oppia',346 owner: 'oppia',347 q: 'repo:oppia/oppia review:approved 101',348 });349 context.github.search.issuesAndPullRequests = jasmine350 .createSpy('issuesAndPullRequests')351 .and.resolveTo({352 status: 200,353 data: {354 items: [],355 },356 });357 response = await utilityModule.hasPullRequestBeenApproved(context, 102);358 expect(response).toBe(false);359 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalled();360 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalledWith({361 repo: 'oppia',362 owner: 'oppia',363 q: 'repo:oppia/oppia review:approved 102',364 });365 });366 it('should check that a pull request has changes requested', async () => {367 const context = {368 payload: {369 repository: {370 full_name: 'oppia/oppia',371 },372 },373 github: {374 search: {375 issuesAndPullRequests: jasmine376 .createSpy('issuesAndPullRequests')377 .and.resolveTo({378 status: 200,379 data: {380 items: [pullRequest],381 },382 }),383 },384 },385 repo: jasmine.createSpy('repo').and.callFake((params) => {386 return {387 repo: 'oppia',388 owner: 'oppia',389 ...params,390 };391 }),392 };393 let response = await utilityModule.doesPullRequestHaveChangesRequested(394 context,395 101396 );397 expect(response).toBe(true);398 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalled();399 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalledWith({400 repo: 'oppia',401 owner: 'oppia',402 q: 'repo:oppia/oppia review:changes_requested 101',403 });404 context.github.search.issuesAndPullRequests = jasmine405 .createSpy('issuesAndPullRequests')406 .and.resolveTo({407 status: 200,408 data: {409 items: [],410 },411 });412 response = await utilityModule.doesPullRequestHaveChangesRequested(413 context,414 102415 );416 expect(response).toBe(false);417 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalled();418 expect(context.github.search.issuesAndPullRequests).toHaveBeenCalledWith({419 repo: 'oppia',420 owner: 'oppia',421 q: 'repo:oppia/oppia review:changes_requested 102',422 });423 });424 it('should check if a user is a member of the organisation', async () => {425 const context = {426 github: {427 orgs: {428 checkMembership: jasmine.createSpy('checkMembership').and.resolveTo({429 status: 204,430 }),431 },432 },433 };434 let response = await utilityModule.isUserAMemberOfTheOrganisation(435 context,436 'testuser'437 );438 expect(response).toBe(true);439 expect(context.github.orgs.checkMembership).toHaveBeenCalled();440 expect(context.github.orgs.checkMembership).toHaveBeenCalledWith({441 org: 'oppia',442 username: 'testuser',443 });444 context.github.orgs.checkMembership = jasmine445 .createSpy('checkMembership')446 .and.callFake(() => {447 throw new Error(448 'User does not exist or is not a public member of ' +449 'the organization.'450 );451 });452 response = await utilityModule.isUserAMemberOfTheOrganisation(453 context,454 'testuser'455 );456 expect(response).toBe(false);457 });458 it('should check if a 
user is a collaborator', async () => {459 const context = {460 repo() {461 return {462 owner: 'oppia',463 repo: 'oppia'464 };465 },466 github: {467 repos: {468 checkCollaborator: jasmine469 .createSpy('checkCollaborator')470 .and.resolveTo({471 status: 204,472 }),473 },474 },475 };476 let response = await utilityModule.isUserCollaborator(477 context,478 'testuser'479 );480 expect(response).toBe(true);481 expect(context.github.repos.checkCollaborator).toHaveBeenCalled();482 expect(context.github.repos.checkCollaborator).toHaveBeenCalledWith({483 owner: 'oppia',484 repo: 'oppia',485 username: 'testuser',486 });487 context.github.repos.checkCollaborator = jasmine488 .createSpy('checkCollaborator')489 .and.callFake(() => {490 throw new Error(491 'User is not a collaborator.'492 );493 });494 response = await utilityModule.isUserCollaborator(495 context,496 'testuser'497 );498 expect(response).toBe(false);499 });500 it('should get changelog label from a pull request', () => {501 const label = {502 name: 'PR CHANGELOG: Angular Migration',503 };504 pullRequest.labels.push(label);505 let response = utilityModule.getChangelogLabelFromPullRequest(pullRequest);506 expect(response).toBe('PR CHANGELOG: Angular Migration');507 pullRequest.labels = [];508 response = utilityModule.getChangelogLabelFromPullRequest(pullRequest);509 expect(response).toBe(undefined);510 });511 it('should get progect owner from a changelog label', () => {512 let response = utilityModule.getProjectOwnerFromLabel(513 'PR CHANGELOG: Miscellaneous -- @ankita240796'514 );515 expect(response).toBe('ankita240796');516 });517 it('should get github usernames from text', () => {518 let text = '@U8NWXD PTAL!';519 let usernames = utilityModule.getUsernamesFromText(text);520 expect(usernames.length).toBe(1);521 expect(usernames[0]).toBe('U8NWXD');522 text = '@aks681 @seanlip PTAL!';523 usernames = utilityModule.getUsernamesFromText(text);524 expect(usernames.length).toBe(2);525 expect(usernames[0]).toBe('aks681');526 expect(usernames[1]).toBe('seanlip');527 text = `@DubeySandeep, done I've created the issue(#10419 )528 and addressed your comments.529 @seanlip @aks681 PTAL`;530 usernames = utilityModule.getUsernamesFromText(text);531 expect(usernames.length).toBe(3);532 expect(usernames[0]).toBe('DubeySandeep');533 expect(usernames[1]).toBe('seanlip');534 expect(usernames[2]).toBe('aks681');535 text = 'Hi @U8NWXD, please take a look, thanks!';536 usernames = utilityModule.getUsernamesFromText(text);537 expect(usernames.length).toBe(1);538 expect(usernames[0]).toBe('U8NWXD');539 text = "@aks681 @seanlip I've addressed all comments, please take a look.!";540 usernames = utilityModule.getUsernamesFromText(text);541 expect(usernames.length).toBe(2);542 expect(usernames[0]).toBe('aks681');543 expect(usernames[1]).toBe('seanlip');544 text = `@DubeySandeep, done I've created the issue(#10419 )545 and addressed your comments.546 @seanlip @aks681 please take a look`;547 usernames = utilityModule.getUsernamesFromText(text);548 expect(usernames.length).toBe(3);549 expect(usernames[0]).toBe('DubeySandeep');550 expect(usernames[1]).toBe('seanlip');551 expect(usernames[2]).toBe('aks681');552 text = 'A random text containing no users';553 usernames = utilityModule.getUsernamesFromText(text);554 expect(usernames.length).toBe(0);555 });...
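For the helpers in lib/utils, the same spy technique drives github.pulls.list directly. A minimal sketch of the context object that getAllOpenPullRequests expects, with names taken from the spec and an illustrative fixture PR:

it('should fetch open pull requests via github.pulls.list', async () => {
  const context = {
    github: {
      pulls: {
        list: jasmine.createSpy('list').and.resolveTo({
          data: [{ number: 101, body: 'sample pull request body' }],
        }),
      },
    },
    // Probot's context.repo() merges owner/repo into the request params.
    repo: (params) => ({ owner: 'oppia', repo: 'oppia', ...params }),
  };

  const openPRs = await utilityModule.getAllOpenPullRequests(context);

  expect(openPRs.length).toBe(1);
  expect(context.github.pulls.list).toHaveBeenCalledWith({
    owner: 'oppia',
    repo: 'oppia',
    per_page: 60,
    state: 'open',
  });
});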


staleBuildChecksSpec.js

Source: staleBuildChecksSpec.js (GitHub)


require('dotenv').config();
const { createProbot } = require('probot');
const oppiaBot = require('../index');
const scheduler = require('../lib/scheduler');
const periodicCheckModule = require('../lib/periodicChecks');
const staleBuildModule = require('../lib/staleBuildChecks');
const periodicCheckPayload = require('../fixtures/periodicCheckPayload.json');
const utils = require('../lib/utils');
const mergeConflictCheckModule = require('../lib/checkMergeConflicts');
const jobCheckModule = require('../lib/checkPullRequestJob');
const criticalPullRequestModule = require('../lib/checkCriticalPullRequest');
const newCodeOwnerModule = require('../lib/checkForNewCodeowner');
const pullRequestPayload = require('../fixtures/pullRequestPayload.json');

describe('Stale build check', () => {
  /**
   * @type {import('probot').Probot} robot
   */
  let robot;
  /**
   * @type {import('probot').Octokit} github
   */
  let github;
  /**
   * @type {import('probot').Application} app
   */
  let app;

  let pullRequests = {
    prWithOldBuild: {
      number: 9,
      head: {
        sha: 'old-build-pr-sha',
      },
      labels: [],
      user: {
        login: 'testuser',
        id: 11153258,
        node_id: 'MDQ6VXNlcjExMTUzMjU4',
        avatar_url: 'https://avatars2.githubusercontent.com/u/11153258?v=4',
        gravatar_id: '',
        url: 'https://api.github.com/users/testuser',
        html_url: 'https://github.com/testuser',
        followers_url: 'https://api.github.com/users/testuser/followers',
        following_url:
          'https://api.github.com/users/testuser/following{/other_user}',
        gists_url: 'https://api.github.com/users/testuser/gists{/gist_id}',
        starred_url:
          'https://api.github.com/users/testuser/starred{/owner}{/repo}',
        subscriptions_url:
          'https://api.github.com/users/testuser/subscriptions',
        organizations_url: 'https://api.github.com/users/testuser/orgs',
        repos_url: 'https://api.github.com/users/testuser/repos',
        events_url:
          'https://api.github.com/users/testuser/events{/privacy}',
        received_events_url:
          'https://api.github.com/users/testuser/received_events',
        type: 'User',
        site_admin: false,
      },
    },
    prWithNewBuild: {
      number: 10,
      head: {
        sha: 'new-build-pr-sha',
      },
      labels: [],
      user: {
        login: 'testuser2',
        id: 11153258,
        node_id: 'MDQ6VXNlcjExMTUzMjU4',
        avatar_url: 'https://avatars2.githubusercontent.com/u/11153258?v=4',
        gravatar_id: '',
        url: 'https://api.github.com/users/testuser2',
        html_url: 'https://github.com/testuser2',
        followers_url: 'https://api.github.com/users/testuser2/followers',
        following_url:
          'https://api.github.com/users/testuser2/following{/other_user}',
        gists_url: 'https://api.github.com/users/testuser2/gists{/gist_id}',
        starred_url:
          'https://api.github.com/users/testuser2/starred{/owner}{/repo}',
        subscriptions_url:
          'https://api.github.com/users/testuser2/subscriptions',
        organizations_url: 'https://api.github.com/users/testuser2/orgs',
        repos_url: 'https://api.github.com/users/testuser2/repos',
        events_url:
          'https://api.github.com/users/testuser2/events{/privacy}',
        received_events_url:
          'https://api.github.com/users/testuser2/received_events',
        type: 'User',
        site_admin: false,
      },
    },
  };

  beforeEach(async () => {
    spyOn(scheduler, 'createScheduler').and.callFake(() => { });
    spyOn(mergeConflictCheckModule, 'checkMergeConflictsInPullRequest')
      .and
      .callFake(() => { });
    spyOn(jobCheckModule, 'checkForNewJob').and.callFake(() => { });
    spyOn(criticalPullRequestModule, 'checkIfPRAffectsDatastoreLayer')
      .and
      .callFake(() => { });
    spyOn(newCodeOwnerModule, 'checkForNewCodeowner').and.callFake(() => { });

    const oldBuildPRCommitData = {
      sha: 'old-build-pr-sha',
      node_id:
        'MDY6Q29tbWl0MTczMDA0MDIyOmViNjk3ZTU1YTNkYTMwODUzNjBkODQz' +
        'ZGZiMTUwZjAzM2FhMTdlNjE=',
      commit: {
        author: {
          name: 'James James',
          email: 'jamesjay4199@gmail.com',
          date: '2020-08-10T14:15:32Z',
        },
        committer: {
          name: 'James James',
          email: 'jamesjay4199@gmail.com',
          date: '2020-08-10T14:15:32Z',
        },
        message: 'changes',
        tree: {
          sha: 'f5f8be9b0e4ac9970f68d8945de3474581b20d03',
          url:
            'https://api.github.com/repos/jameesjohn/oppia/git/' +
            'trees/f5f8be9b0e4ac9970f68d8945de3474581b20d03',
        },
        url:
          'https://api.github.com/repos/jameesjohn/oppia/git/' +
          'commits/eb697e55a3da3085360d843dfb150f033aa17e61',
        comment_count: 0,
        verification: {},
      },
      url:
        'https://api.github.com/repos/oppia/oppia/commits/' +
        'eb697e55a3da3085360d843dfb150f033aa17e61',
      html_url:
        'https://github.com/oppia/oppia/commit/' +
        'eb697e55a3da3085360d843dfb150f033aa17e61',
      comments_url:
        'https://api.github.com/repos/oppia/oppia/commits/' +
        'eb697e55a3da3085360d843dfb150f033aa17e61/comments',
      author: {},
      committer: {},
      parents: [],
    };

    const newBuildPRCommitData = {
      sha: 'new-build-pr-sha',
      node_id:
        'MDY6Q29tbWl0MTczMDA0MDIyOjUyNWQ2MDU4YTYyNmI0NjE1NGVkMz' +
        'czMTE0MWE5NWU3MGViYjBhZWY=',
      commit: {
        author: {
          name: 'James James',
          email: 'jamesjay4199@gmail.com',
          date: '2020-08-13T13:43:24Z',
        },
        committer: {
          name: 'James James',
          email: 'jamesjay4199@gmail.com',
          date: '2020-08-13T13:43:24Z',
        },
        message: 'new additions',
        tree: {
          sha: 'b5bf5af6ec0592bf3776b23d4355ff200549f427',
          url:
            'https://api.github.com/repos/jameesjohn/oppia/git/' +
            'trees/b5bf5af6ec0592bf3776b23d4355ff200549f427',
        },
        url:
          'https://api.github.com/repos/jameesjohn/oppia/git/' +
          'commits/525d6058a626b46154ed3731141a95e70ebb0aef',
        comment_count: 0,
        verification: {
          verified: false,
          reason: 'unsigned',
          signature: null,
          payload: null,
        },
      },
      url:
        'https://api.github.com/repos/jameesjohn/oppia/commits/' +
        '525d6058a626b46154ed3731141a95e70ebb0aef',
      html_url:
        'https://github.com/jameesjohn/oppia/commit/' +
        '525d6058a626b46154ed3731141a95e70ebb0aef',
      comments_url:
        'https://api.github.com/repos/jameesjohn/oppia/' +
        'commits/525d6058a626b46154ed3731141a95e70ebb0aef/comments',
      author: [],
      committer: [],
      parents: [],
    };

    github = {
      issues: {
        createComment: jasmine
          .createSpy('createComment')
          .and.callFake(() => { }),
        addLabels: jasmine.createSpy('addLabels').and.callFake(() => { }),
        removeLabel: jasmine.createSpy('removeLabel').and.callFake(() => { }),
      },
      search: {
        issuesAndPullRequests: jasmine
          .createSpy('issuesAndPullRequests')
          .and.resolveTo({
            data: {
              items: [pullRequestPayload.payload.pull_request],
            },
          })
      },
      repos: {
        getCommit: jasmine.createSpy('getCommit').and.callFake((params) => {
          if (params.ref === pullRequests.prWithOldBuild.head.sha) {
            return {
              data: oldBuildPRCommitData,
            };
          }
          return {
            data: newBuildPRCommitData,
          };
        }),
      }
    };

    robot = createProbot({
      id: 1,
      cert: 'test',
      githubToken: 'test',
    });
    app = robot.load(oppiaBot);
    spyOn(app, 'auth').and.resolveTo(github);
  });

  describe('when pull request has an old build', () => {
    beforeEach(async () => {
      spyOn(
        staleBuildModule,
        'checkAndTagPRsWithOldBuilds'
      ).and.callThrough();
      spyOn(
        periodicCheckModule,
        'ensureAllPullRequestsAreAssigned'
      ).and.callFake(() => {});
      spyOn(
        periodicCheckModule,
        'ensureAllIssuesHaveProjects'
      ).and.callFake(() => {});
      github.pulls = {
        list: jasmine.createSpy('list').and.resolveTo({
          data: [pullRequests.prWithOldBuild, pullRequests.prWithNewBuild],
        })
      };
      // Mocking the minimum build date.
      utils.MIN_BUILD_DATE = new Date('2020-08-12T14:15:32Z');
      await robot.receive(periodicCheckPayload);
    });

    it('should call periodic check module', () => {
      expect(
        staleBuildModule.checkAndTagPRsWithOldBuilds
      ).toHaveBeenCalled();
    });

    it('should fetch all open pull requests', () => {
      expect(github.pulls.list).toHaveBeenCalled();
    });

    it('should ping author when build is old', () => {
      expect(github.issues.createComment).toHaveBeenCalled();
      expect(github.issues.createComment).toHaveBeenCalledWith(
        {
          owner: 'oppia',
          repo: 'oppia',
          issue_number: pullRequests.prWithOldBuild.number,
          body:
            'Hi @' + pullRequests.prWithOldBuild.user.login + ', the build ' +
            'of this PR is stale and this could result in tests failing in ' +
            'develop. Please update this pull request with the latest ' +
            'changes from develop. Thanks!',
        }
      );
    });

    it('should add old build label when build is old', () => {
      expect(github.issues.addLabels).toHaveBeenCalled();
      expect(github.issues.addLabels).toHaveBeenCalledWith(
        {
          owner: 'oppia',
          repo: 'oppia',
          issue_number: pullRequests.prWithOldBuild.number,
          labels: ["PR: don't merge - STALE BUILD"],
        }
      );
    });

    it('should not ping author when build is new', () => {
      expect(github.issues.createComment).not.toHaveBeenCalledWith(
        {
          owner: 'oppia',
          repo: 'oppia',
          issue_number: pullRequests.prWithNewBuild.number,
          body:
            'Hi @' + pullRequests.prWithNewBuild.user.login + ', the build ' +
            'of this PR is stale and this could result in tests failing in ' +
            'develop. Please update this pull request with the latest ' +
            'changes from develop. Thanks!',
        }
      );
    });

    describe('when pull request author has already been pinged', () => {
      let oldBuildPR;
      beforeAll(() => {
        // Add stale build label to PR.
        oldBuildPR = {...pullRequests.prWithOldBuild};
        oldBuildPR.labels.push({
          name: "PR: don't merge - STALE BUILD"
        });
      });

      beforeEach(async () => {
        github.pulls.list = jasmine.createSpy('list').and.resolveTo({
          data: [oldBuildPR, pullRequests.prWithNewBuild],
        });
        await robot.receive(periodicCheckPayload);
      });

      it('should not ping author', () => {
        expect(github.issues.createComment).not.toHaveBeenCalled();
        expect(github.issues.createComment).not.toHaveBeenCalledWith(
          {
            owner: 'oppia',
            repo: 'oppia',
            issue_number: oldBuildPR.number,
            body:
              'Hi @' + pullRequests.prWithNewBuild.user.login + ', the build ' +
              'of this PR is stale and this could result in tests failing in ' +
              'develop. Please update this pull request with the latest ' +
              'changes from develop. Thanks!',
          }
        );
      });

      it('should not add old build label', () => {
        expect(github.issues.addLabels).not.toHaveBeenCalled();
      });
    });
  });

  describe('when a pull request with an old build gets updated', () => {
    const originalPayloadLabels = (
      pullRequestPayload.payload.pull_request.labels
    );
    const originalSha = pullRequestPayload.payload.pull_request.head.sha;
    beforeAll(() => {
      // Add Old build label to PR.
      pullRequestPayload.payload.pull_request.labels = [
        {name: utils.OLD_BUILD_LABEL}
      ];
      pullRequestPayload.payload.pull_request.head.sha = 'new-build-pr-sha';
    });
    afterAll(() => {
      pullRequestPayload.payload.pull_request.labels = originalPayloadLabels;
      pullRequestPayload.payload.pull_request.head.sha = originalSha;
    });
    beforeEach(async () => {
      spyOn(staleBuildModule, 'removeOldBuildLabel').and.callThrough();
      // Set payload action to synchronize.
      pullRequestPayload.payload.action = 'synchronize';
      await robot.receive(pullRequestPayload);
    });

    it('should check if pull request contains old build label', () => {
      expect(staleBuildModule.removeOldBuildLabel).toHaveBeenCalled();
    });

    it('should remove old build label', () => {
      expect(github.issues.removeLabel).toHaveBeenCalled();
      expect(github.issues.removeLabel).toHaveBeenCalledWith({
        issue_number: pullRequestPayload.payload.pull_request.number,
        name: utils.OLD_BUILD_LABEL,
        owner: pullRequestPayload.payload.repository.owner.login,
        repo: pullRequestPayload.payload.repository.name
      });
    });
  });

  describe('when a pull request without an old build gets updated', () => {
    const originalSha = pullRequestPayload.payload.pull_request.head.sha;
    beforeAll(() => {
      pullRequestPayload.payload.pull_request.head.sha = 'new-build-pr-sha';
    });
    afterAll(() => {
      pullRequestPayload.payload.pull_request.head.sha = originalSha;
    });
    beforeEach(async () => {
      spyOn(staleBuildModule, 'removeOldBuildLabel').and.callThrough();
      // Set payload action to synchronize.
      pullRequestPayload.payload.action = 'synchronize';
      await robot.receive(pullRequestPayload);
    });

    it('should check if pull request contains old build label', () => {
      expect(staleBuildModule.removeOldBuildLabel).toHaveBeenCalled();
    });

    it('should not remove old build label', () => {
      expect(github.issues.removeLabel).not.toHaveBeenCalled();
    });
  });

  describe('when develop branch gets updated', () => {
    const originalPayloadLabels = (
      pullRequestPayload.payload.pull_request.labels
    );
    const originalSha = pullRequestPayload.payload.pull_request.head.sha;
    beforeAll(() => {
      // Add Old build label to PR.
      pullRequestPayload.payload.pull_request.labels = [
        {name: utils.OLD_BUILD_LABEL}
      ];
      pullRequestPayload.payload.pull_request.head.sha = 'old-build-pr-sha';
    });
    afterAll(() => {
      pullRequestPayload.payload.pull_request.labels = originalPayloadLabels;
      pullRequestPayload.payload.pull_request.head.sha = originalSha;
    });
    beforeEach(async () => {
      spyOn(staleBuildModule, 'removeOldBuildLabel').and.callThrough();
      // Set payload action to synchronize.
      pullRequestPayload.payload.action = 'synchronize';
      await robot.receive(pullRequestPayload);
    });

    it('should check if pull request contains old build label', () => {
      expect(staleBuildModule.removeOldBuildLabel).toHaveBeenCalled();
    });

    it('should not remove old build label', () => {
      expect(github.issues.removeLabel).not.toHaveBeenCalled();
    });
  });
...
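The spec above builds an Octokit double by hand and swaps a Jasmine spy in for github.pulls.list. Stripped of the oppiabot specifics, the same mocking pattern looks like the sketch below; the fetchOpenPulls helper is hypothetical and exists only to illustrate the pattern.

// A condensed sketch of the spy-based mocking pattern used above.
// fetchOpenPulls is a hypothetical helper, not part of oppiabot.
const fetchOpenPulls = async (github, owner, repo) => {
  const { data } = await github.pulls.list({ owner, repo, state: 'open' });
  return data.map((pr) => pr.number);
};

describe('fetchOpenPulls', () => {
  it('returns the numbers of open pull requests', async () => {
    const github = {
      pulls: {
        // Stand-in for the real Octokit client.
        list: jasmine.createSpy('list').and.resolveTo({
          data: [{ number: 9 }, { number: 10 }],
        }),
      },
    };

    const numbers = await fetchOpenPulls(github, 'oppia', 'oppia');

    expect(github.pulls.list).toHaveBeenCalledWith(
      { owner: 'oppia', repo: 'oppia', state: 'open' });
    expect(numbers).toEqual([9, 10]);
  });
});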


GithubReviews.js

Source:GithubReviews.js Github


const {
  repoAndOwner
} = require('../../util');
const {
  filterUser,
  filterPull
} = require('../../filters');

/**
 * This component updates the stored issues based on GitHub events.
 *
 * @constructor
 *
 * @param {import('../webhook-events/WebhookEvents')} webhookEvents
 * @param {import('../../events')} events
 * @param {import('../github-client/GithubClient')} githubClient
 * @param {import('../../store')} store
 */
module.exports = function GithubReviews(webhookEvents, events, githubClient, store) {

  // issues /////////////////////

  events.on('backgroundSync.sync', async (event) => {
    const {
      issue
    } = event;

    if (!issue.pull_request) {
      return;
    }

    const {
      id
    } = issue;

    const {
      repo,
      owner
    } = repoAndOwner(issue);

    const github = await githubClient.getOrgScoped(owner);

    const {
      data: reviews
    } = await github.pulls.listReviews({
      owner,
      repo,
      pull_number: issue.number
    });

    await store.queueUpdate({
      id,
      reviews: reviews.map(filterReview)
    });
  });

  webhookEvents.on([
    'pull_request_review'
  ], async ({ payload }) => {
    const {
      action,
      review: _review,
      pull_request: _pull_request,
      repository
    } = payload;

    const pull_request = filterPull(_pull_request, repository);
    const review = filterReview(_review);

    const {
      id
    } = pull_request;

    const issue = await store.getIssueById(id);

    let reviews = (issue && issue.reviews) || [];

    if (action === 'submitted') {
      reviews = [
        ...reviews,
        review
      ];
    }

    if (action === 'edited' || action === 'dismissed') {
      const index = reviews.findIndex(r => r.id === review.id);

      if (index !== -1) {
        reviews = [
          ...reviews.slice(0, index),
          review,
          ...reviews.slice(index + 1)
        ];
      } else {
        reviews = [
          ...reviews,
          review
        ];
      }
    }

    await store.updateIssue({
      ...pull_request,
      reviews
    });
  });
};

function filterReview(review) {
  const {
    id,
    node_id,
    body,
    commit_id,
    submitted_at,
    state,
    user,
    html_url
  } = review;

  return {
    id,
    node_id,
    body,
    commit_id,
    submitted_at,
    state: state.toLowerCase(),
    user: filterUser(user),
    html_url
  };
...


index.js

Source:index.js Github


const Octokit = require('@octokit/rest')
const { variables } = require('../../common')
const { owner, repo, pull, token } = variables;
const { red, blue } = require('chalk');

const isTokenDefined = !!token;
const REQUIRED = red('required');
const OWNER = blue('Owner');
const REPOSITORY = blue('Repository');
const PULL_NUMBER = blue('Pull Number');
const TOKEN = blue('Token');

if (!isTokenDefined) {
  console.warn(`${TOKEN} variable is required in order to contact GitHub!`);
}

const github = new Octokit({
  auth: token,
})

const getPullRequestHead = async () => {
  if (!isTokenDefined) {
    console.warn(`${OWNER}, ${REPOSITORY}, ${PULL_NUMBER} variables are ${REQUIRED}. Skipping HEAD fetch...`)
    return { sha: '' };
  }
  const pr = await github.pullRequests.get({
    owner,
    repo,
    pull_number: pull,
  })
  return pr.data.head
}

const getCommits = async () => {
  if (!isTokenDefined) {
    console.warn(`${OWNER}, ${REPOSITORY}, ${PULL_NUMBER} variables are ${REQUIRED}. Skipping commit fetch...`)
    return [];
  }
  console.log('📡 Looking up commits for PR #%s...', pull)
  const response = await github.pulls.listCommits({
    owner,
    repo,
    pull_number: pull,
    per_page: 100
  })
  return response.data
}

const statusReporterFactory =
  (context) => {
    return async (state, sha, description, target_url) => {
      if (!isTokenDefined) {
        console.warn(`${OWNER}, ${REPOSITORY}, ${PULL_NUMBER} variables are ${REQUIRED}. Skipping status report ${state} for ${context}...`)
        return;
      }
      await github.repos.createStatus({
        owner,
        repo,
        sha,
        state,
        description,
        context,
        target_url,
      })
    }
  }

module.exports = {
  getCommits,
  getPullRequestHead,
  statusReporterFactory
...
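A minimal consumer of the helpers exported above might look like the sketch below. It assumes the module is saved as ./github/index.js and that the variables in ../../common are populated; the 'ci/lint' context and the target URL are placeholders.

// usage sketch – illustrative only
const {
  getCommits,
  getPullRequestHead,
  statusReporterFactory
} = require('./github');

(async () => {
  const head = await getPullRequestHead();
  const commits = await getCommits();
  console.log('PR head: %s (%d commits)', head.sha, commits.length);

  // Report a status against the PR head commit.
  const reportLintStatus = statusReporterFactory('ci/lint');
  await reportLintStatus(
    'success', head.sha, 'Lint passed', 'https://example.com/builds/123');
})();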


allowedFileNamePatterns.js

Source:allowedFileNamePatterns.js Github


const {
  github,
  camelCase,
  getNormalizedRule
} = require('../utils');

/**
 * Asserts PR file names against supplied patterns.
 * Note that this check is critical, meaning if it is satisfied the PR will be closed.
 *
 * @param {object} repoConfig Repository configuration provided by repo-rules.json file
 * @param {object} data Github webhook payload data
 * @returns {object|boolean} false if all other rules need to be skipped or an object with information
 */
module.exports = async function allowedFileNamePatterns(repoConfig, data) {
  const rules = getNormalizedRule(repoConfig, 'allowedFileNamePatterns');
  if (rules && rules.length) {
    const githubConfig = {
      owner: data.repository.owner.login,
      repo: data.repository.name,
      pull_number: data.pull_request.number
    };
    const { data: filesArr } = await github.pulls.listFiles(githubConfig);
    let isValidFileName = true;
    if (filesArr && filesArr.length) {
      filesArr.every((file) => {
        if (file.filename) {
          isValidFileName =
            rules.some((val) => {
              const reg = new RegExp(val, 'i'),
                lastElem = file.filename.
                  split('/')[file.filename.split('/').length - 1];
              return (
                file.filename.match(reg) &&
                file.filename.match(reg)[0].length === lastElem.length &&
                camelCase(lastElem, '-')
              );
            });
          return isValidFileName;
        }
        return false;
      });
    }
    if (!isValidFileName) {
      return {
        message: 'Filenames should not contain any special characters or ' +
          'spaces. Use only `-` to separate words. ' +
          'Files should have the `.md` extension. ' +
          'Filenames should follow a Camel-Case format. Closing this PR.',
        ...rules,
      }
    }
  }
...


maxCommitCount.js

Source:maxCommitCount.js Github


const {
  github,
  getNormalizedRule
} = require('../utils');

/**
 * Makes sure that the PR contains no more than the specified number of commits.
 *
 * @param {object} repoConfig Repository configuration provided by repo-rules.json file
 * @param {object} data Github webhook payload data
 * @returns {object|boolean} false if all other rules need to be skipped or an object with information
 */
module.exports = async function maxCommitCount(repoConfig, data) {
  const rules = getNormalizedRule(repoConfig, 'maxCommitCount');
  if (rules.value) {
    const maxCommitCount = parseInt(rules.value, 10);
    if (isNaN(maxCommitCount)) {
      throw new Error(`${rules.value} can't be converted into an integer`);
    }
    const githubConfig = {
      owner: data.repository.owner.login,
      repo: data.repository.name,
      pull_number: data.pull_request.number
    };
    const { data: commits } = await github.pulls.listCommits(githubConfig);
    if (commits && commits.length) {
      for (let l = 0; l < commits.length; l++) {
        // show more debug info (commits of the current PR)
        console.log((l + 1) + ': ' + commits[l].commit.message);
      }
      if (commits.length > maxCommitCount) {
        return {
          message: 'You have pushed more than one commit. ' +
            'When you finish editing, [squash](https://forum.freecodecamp.com' +
            '/t/how-to-squash-multiple-commits-into-one-with-git/13231) your ' +
            'commits into one.',
          ...rules,
        }
      }
    }
  }
...


github.js

Source:github.js Github


const { Octokit } = require("@octokit/rest");

module.exports = { getOpenPRs, postComment };

async function getOpenPRs(token, owner, repo) {
  const github = githubAuth(token);
  const res = await github.pulls.list({
    state: "open",
    owner,
    repo,
  });
  return res.data.map((pr) => pr.number);
}

async function postComment(token, owner, repo, pullRequestNumber, surgeURI) {
  const github = githubAuth(token);
  await github.issues.createComment({
    owner,
    repo,
    // eslint-disable-next-line camelcase
    issue_number: pullRequestNumber,
    body: `Deployment for QA: [${surgeURI}](http://${surgeURI})`,
...
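A short usage sketch for the two helpers above; the owner, repo, and surge URI values are placeholders, and GITHUB_TOKEN is assumed to be set in the environment.

// usage sketch – illustrative only
const { getOpenPRs, postComment } = require('./github');

(async () => {
  const token = process.env.GITHUB_TOKEN;
  const prNumbers = await getOpenPRs(token, 'my-org', 'my-repo');
  console.log('Open PRs:', prNumbers);

  // Post a QA deployment link on the first open PR, if any.
  if (prNumbers.length > 0) {
    await postComment(
      token, 'my-org', 'my-repo', prNumbers[0], 'my-branch-preview.surge.sh');
  }
})();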


Using AI Code Generation


// github.pulls.list is exposed by the Octokit REST client.
const { Octokit } = require('@octokit/rest');

const github = new Octokit();

github.pulls.list({
  owner: 'owner-name', // placeholder: replace with the repository owner
  repo: 'repo-name',   // placeholder: replace with the repository name
}).then((res) => {
  console.log(res.data);
}).catch((err) => {
  console.error(err);
});
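Because Cypress specs run inside the browser, a common way to reach github.pulls.list from a test is through a Node-side cy.task. The wiring below is a minimal sketch, not the only approach: the task name listOpenPulls, the GITHUB_TOKEN environment variable, and the oppia/oppia repository are assumptions made purely for illustration, and the plugins file shown is the pre-Cypress-10 location (Cypress 10+ registers the same task in setupNodeEvents).

// cypress/plugins/index.js (sketch)
const { Octokit } = require('@octokit/rest');

module.exports = (on, config) => {
  on('task', {
    // Hypothetical task that returns the numbers of open pull requests.
    async listOpenPulls({ owner, repo }) {
      const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
      const { data } = await octokit.pulls.list({ owner, repo, state: 'open' });
      // Tasks must return a serializable value.
      return data.map((pr) => pr.number);
    },
  });
  return config;
};

// cypress/integration/open-pulls.spec.js (sketch)
describe('github.pulls.list via cy.task', () => {
  it('yields the numbers of open pull requests', () => {
    cy.task('listOpenPulls', { owner: 'oppia', repo: 'oppia' }).then((numbers) => {
      expect(numbers).to.be.an('array');
    });
  });
});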


Cypress Tutorial

Cypress is a well-known, open-source, easy-to-use JavaScript end-to-end testing framework used primarily for testing web applications. Although a relatively new player in the test automation space, it has gained considerable traction, as evidenced by the project's Forks (2.7K) and Stars (42.1K). LambdaTest’s Cypress Tutorial offers step-by-step guides that take you from the basics through running automation tests on LambdaTest.

Chapters:

  1. What is Cypress? - An introduction to the Cypress testing framework and what it is used for.
  2. Why Cypress? - Learn why Cypress might be a good choice for testing your web applications.
  3. Features of Cypress Testing - Learn about features that make Cypress a powerful and flexible tool for testing web applications.
  4. Cypress Drawbacks - Although Cypress has many strengths, it has a few limitations that you should be aware of.
  5. Cypress Architecture - Learn more about Cypress architecture and how it is designed to be run directly in the browser, i.e., it does not have any additional servers.
  6. Browsers Supported by Cypress - Cypress is built on top of the Electron browser and supports all modern web browsers. Learn which browsers Cypress supports.
  7. Selenium vs Cypress: A Detailed Comparison - Compare and explore some key differences in terms of their design and features.
  8. Cypress Learning: Best Practices - Take a deep dive into some of the best practices you should use to avoid anti-patterns in your automation tests.
  9. How To Run Cypress Tests on LambdaTest? - Set up a LambdaTest account and learn how to run Cypress tests on the platform.

Certification

You can elevate your expertise with end-to-end testing using the Cypress automation framework and stay one step ahead in your career by earning a Cypress certification. Check out our Cypress 101 Certification.

YouTube

Watch this complete 3-hour tutorial to learn the basics of Cypress and its various commands for Cypress testing at LambdaTest.

