How to use the prompt method in storybook-root

Best JavaScript code snippets using storybook-root
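
Before the full source listings, here is a minimal sketch of the pattern the snippets below revolve around: importing a prompt helper and calling it with a message (and, optionally, a default value). The storybook-root module name and the promise-based prompt(message, defaultText) signature are assumptions inferred from the examples further down this page, not a documented API; substitute your own project's prompt helper as needed.

// Hypothetical usage sketch. Assumes a project-local "storybook-root" module that
// exposes a promise-based prompt(message, defaultText) helper, as the snippets below suggest.
import { prompt } from 'storybook-root';

async function askForLabel() {
    // Ask the user for a value; fall back to the default if nothing is entered.
    const label = await prompt('Button label?', 'Hello Button');
    console.log('Rendering a button labelled "' + label + '"');
    return label;
}

askForLabel();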

collecting-form.js

Source: collecting-form.js (GitHub)

/**
 * Populate a prompt row with the provided data.
 *
 * @param {Object} promptData
 */
var populatePromptRow = function(promptData) {
    // Detect whether a row is currently being edited. If one is, populate that
    // one. If one isn't, create a new row using the row template, populate it,
    // and append it to the prompts table.
    var promptRows = $('#prompts');
    var promptRow = promptRows.children('.prompt-editing');
    if (!promptRow.length) {
        var index = promptRows.children('.prompt').length;
        var promptRowTemplate = $('#prompts').data('promptRowTemplate');
        promptRow = $(promptRowTemplate.replace(/__INDEX__/g, index));
        promptRow.find('.prompt-id').val(promptData['o:id']);
        promptRows.append(promptRow);
    }
    // Populate the visual elements.
    var typeText = $('#prompt-type option[value="' + promptData['o-module-collecting:type'] + '"]').text();
    if ('property' === promptData['o-module-collecting:type']) {
        var propertyText = $('#prompt-property')
            .find('option[value="' + promptData['o:property']['o:id'] + '"]')
            .data('term');
        typeText += ' [' + propertyText + ']';
    } else if ('media' === promptData['o-module-collecting:type']) {
        var mediaTypeText = $('#prompt-media-type')
            .find('option[value="' + promptData['o-module-collecting:media_type'] + '"]')
            .text();
        typeText += ' [' + mediaTypeText + ']';
    }
    promptRow.find('.prompt-type-span').html(typeText);
    promptRow.find('.prompt-text-span').text(promptData['o-module-collecting:text']);
    // Populate the hidden inputs.
    promptRow.find('.prompt-type').val(promptData['o-module-collecting:type']);
    promptRow.find('.prompt-text').val(promptData['o-module-collecting:text']);
    promptRow.find('.prompt-input-type').val(promptData['o-module-collecting:input_type']);
    promptRow.find('.prompt-select-options').val(promptData['o-module-collecting:select_options']);
    promptRow.find('.prompt-media-type').val(promptData['o-module-collecting:media_type']);
    promptRow.find('.prompt-required').val(promptData['o-module-collecting:required'] ? '1' : '0');
    if (promptData['o:property']) {
        promptRow.find('.prompt-property-id').val(promptData['o:property']['o:id']);
    }
}
/**
 * Reset the sidebar to its default state (i.e. no selected type).
 */
var resetSidebar = function() {
    $('#prompt-type').prop('selectedIndex', 0)
        .prop('disabled', false).css('background-color', '#ffffff');
    var promptText = $('#prompt-text');
    if (promptText.hasClass('html-editor')) {
        promptText.removeClass('html-editor').ckeditor().editor.destroy();
    }
    promptText.val('').closest('.sidebar-section').hide();
    $('#prompt-property').prop('selectedIndex', 0).closest('.sidebar-section').hide();
    $('#prompt-media-type').prop('selectedIndex', 0).closest('.sidebar-section').hide();
    $('#prompt-input-type').prop('selectedIndex', 0).closest('.sidebar-section').hide();
    $('#prompt-select-options').val('').closest('.sidebar-section').hide();
    $('#prompt-required').prop('checked', false).closest('.sidebar-section').hide();
    $('#prompt-save').hide();
    // The form may only have one "user_name" prompt.
    var hasUserName = $('#prompts .prompt-type[value="user_name"]').length;
    var userNameOption = $('#prompt-type option[value="user_name"]');
    hasUserName ? userNameOption.hide() : userNameOption.show();
    // The form may only have one "user_email" prompt.
    var hasUserEmail = $('#prompts .prompt-type[value="user_email"]').length;
    var userEmailOption = $('#prompt-type option[value="user_email"]');
    hasUserEmail ? userEmailOption.hide() : userEmailOption.show();
}
/**
 * Set the sidebar to the default state of the provided type and show it.
 *
 * @param {String} type
 */
var setSidebarForType = function(type) {
    resetSidebar();
    switch (type) {
        case 'property':
            $('#prompt-property').closest('.sidebar-section').show();
            $('#prompt-input-type').closest('.sidebar-section').show();
            $('#prompt-required').closest('.sidebar-section').show();
            break;
        case 'media':
            $('#prompt-media-type').closest('.sidebar-section').show();
            $('#prompt-required').closest('.sidebar-section').show();
            break;
        case 'user_name':
        case 'user_email':
            $('#prompt-required').closest('.sidebar-section').show();
            break;
        case 'input':
        case 'user_private':
        case 'user_public':
            $('#prompt-input-type').closest('.sidebar-section').show();
            $('#prompt-required').closest('.sidebar-section').show();
            break;
        case 'html':
            $('#prompt-text').addClass('html-editor').ckeditor();
            break;
        default:
            // invalid or no prompt type
            return;
    }
    $('#prompt-type').val(type);
    $('#prompt-text').closest('.sidebar-section').show();
    $('#prompt-save').show();
}
$(document).ready(function() {
    $('#prompts-table').hide();
    // Append existing prompts on load.
    var promptsData = $('#prompts').data('promptsData');
    if (!promptsData.length) {
        // Always add a "dcterms:title" property prompt to a form without
        // prompts. Though not required, we should strongly recommend a title
        // for every collected item.
        promptsData = [{
            'o-module-collecting:type': 'property',
            'o-module-collecting:text': null,
            'o-module-collecting:input_type': 'text',
            'o-module-collecting:select_options': null,
            'o-module-collecting:media_type': null,
            'o-module-collecting:required': true,
            'o:property': {'o:id': $('#prompt-property option[data-term="dcterms:title"]').val()},
        }];
    }
    $.each(promptsData, function() {
        $('#prompts-table').show();
        populatePromptRow(this);
    });
    // Reload the original state of the form to avoid "changes not saved" modal.
    $('#collectingform').trigger('o:form-loaded');
    // Enable prompt sorting.
    new Sortable(document.getElementById('prompts'), {
        handle: '.sortable-handle'
    });
    // Handle changing the prompt's type.
    $('#prompt-type').on('change', function() {
        setSidebarForType($(this).val());
    });
    // Handle changing the prompt's input type.
    $('#prompt-input-type').on('change', function() {
        var inputType = $(this).val();
        var selectOptionsSection = $('#prompt-select-options').closest('.sidebar-section');
        if ('select' === inputType) {
            selectOptionsSection.show();
        } else {
            selectOptionsSection.hide();
        }
    });
    // Handle the delete prompt icon.
    $('#prompts').on('click', '.prompt-delete', function(e) {
        e.preventDefault();
        var deleteIcon = $(this);
        var prompt = deleteIcon.closest('.prompt');
        prompt.find(':input').prop('disabled', true);
        prompt.addClass('delete');
        prompt.find('.prompt-undo-delete').show();
        prompt.find('.prompt-edit').hide();
        if (prompt.hasClass('prompt-editing')) {
            Omeka.closeSidebar($('#prompt-sidebar'));
        }
        deleteIcon.hide();
    });
    // Handle the undo delete prompt icon.
    $('#prompts').on('click', '.prompt-undo-delete', function(e) {
        e.preventDefault();
        var undoIcon = $(this);
        var prompt = undoIcon.closest('.prompt');
        prompt.find(':input').prop('disabled', false);
        prompt.removeClass('delete');
        prompt.find('.prompt-delete').show();
        prompt.find('.prompt-edit').show();
        undoIcon.hide();
    });
    // Handle the add prompt button.
    $('#prompt-add').on('click', function(e) {
        e.preventDefault();
        resetSidebar();
        $('#prompts > .prompt').removeClass('prompt-editing');
        Omeka.openSidebar($('#prompt-sidebar'));
    });
    // Handle the edit prompt icon.
    $('#prompts').on('click', '.prompt-edit', function(e) {
        e.preventDefault();
        var prompt = $(this).closest('.prompt');
        var type = prompt.find('.prompt-type').val();
        var text = prompt.find('.prompt-text').val();
        prompt.siblings().removeClass('prompt-editing');
        prompt.addClass('prompt-editing');
        setSidebarForType(type);
        switch (type) {
            case 'property':
                var inputType = prompt.find('.prompt-input-type').val();
                $('#prompt-text').val(text);
                $('#prompt-property').val(prompt.find('.prompt-property-id').val());
                $('#prompt-input-type').val(inputType);
                if ('select' === inputType) {
                    var selectOptions = prompt.find('.prompt-select-options').val();
                    $('#prompt-select-options').val(selectOptions).closest('.sidebar-section').show();
                }
                break;
            case 'media':
                var mediaType = prompt.find('.prompt-media-type').val();
                $('#prompt-text').val(text);
                $('#prompt-media-type').val(mediaType);
                break;
            case 'input':
            case 'user_private':
            case 'user_public':
                var inputType = prompt.find('.prompt-input-type').val();
                $('#prompt-text').val(text);
                $('#prompt-input-type').val(inputType);
                if ('select' === inputType) {
                    var selectOptions = prompt.find('.prompt-select-options').val();
                    $('#prompt-select-options').val(selectOptions).closest('.sidebar-section').show();
                }
                break;
            case 'user_name':
            case 'user_email':
            case 'html':
                $('#prompt-text').val(text);
                break;
            default:
                // invalid or no prompt type
                return;
        }
        // A prompt type cannot be changed once it's saved.
        $('#prompt-type').prop('disabled', true).css('background-color', '#dfdfdf');
        $('#prompt-required').prop('checked', '1' === prompt.find('.prompt-required').val() ? true : false);
        Omeka.openSidebar($('#prompt-sidebar'));
    });
    // Handle saving the prompt.
    $('#prompt-save').on('click', function(e) {
        e.preventDefault();
        var promptData = {
            'o-module-collecting:type': $('#prompt-type').val(),
            'o-module-collecting:text': $('#prompt-text').val(),
            'o-module-collecting:input_type': $('#prompt-input-type').val(),
            'o-module-collecting:select_options': $('#prompt-select-options').val(),
            'o-module-collecting:media_type': $('#prompt-media-type').val(),
            'o-module-collecting:required': $('#prompt-required').prop('checked'),
            'o:property': {'o:id': $('#prompt-property').val()},
        };
        // Validate the data before populating the row.
        switch (promptData['o-module-collecting:type']) {
            case 'property':
                if (!$.isNumeric(promptData['o:property']['o:id'])) {
                    alert('You must select a property.');
                    return;
                }
                if (!promptData['o-module-collecting:input_type']) {
                    alert('You must select an input type.');
                    return;
                }
                break;
            case 'media':
                if (!promptData['o-module-collecting:text']) {
                    alert('You must provide prompt text.');
                    return;
                }
                if (!promptData['o-module-collecting:media_type']) {
                    alert('You must select a media type.');
                    return;
                }
                break;
            case 'input':
            case 'user_private':
            case 'user_public':
                if (!promptData['o-module-collecting:text']) {
                    alert('You must provide prompt text.');
                    return;
                }
                if (!promptData['o-module-collecting:input_type']) {
                    alert('You must select an input type.');
                    return;
                }
                break;
            case 'user_name':
            case 'user_email':
            case 'html':
                if (!promptData['o-module-collecting:text']) {
                    alert('You must provide prompt text.');
                    return;
                }
                break;
            default:
                // invalid or no prompt type
                return;
        }
        populatePromptRow(promptData);
        $('#prompts-table').show();
        Omeka.closeSidebar($('#prompt-sidebar'));
    });
    ...
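
For orientation, the short sketch below shows the shape of the promptData object that populatePromptRow() above expects, using the same o-module-collecting:* keys. The concrete values (including the property ID) are illustrative assumptions; in the real form they come from the sidebar inputs or from the server-supplied promptsData.

// Hypothetical call, assuming the collecting form markup (#prompts, its row
// template, and the sidebar selects) is already present on the page.
populatePromptRow({
    'o-module-collecting:type': 'property',
    'o-module-collecting:text': 'Please give this item a title.',
    'o-module-collecting:input_type': 'text',
    'o-module-collecting:select_options': null,
    'o-module-collecting:media_type': null,
    'o-module-collecting:required': true,
    'o:property': {'o:id': 1} // assumed numeric ID of the dcterms:title property
});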

calculations.py

Source: calculations.py (GitHub)

import math
def calculate(prompt):
    """
    Checks if the input prompt contains certain keywords then
    performs mathematical functions on the input string.
    The string must follow certain rules for each equation listed below:
    addition: command - 'calculate number plus number' or
        'calculate number add number'
    subtraction: command - 'calculate number minus number' or
        'calculate number subtract number' or
        'calculate number subtracted by number'
    multiplication: command - 'calculate number times number' or
        'calculate number multiplied by number'
    division: command - 'calculate number divided by number' or
        'calculate number multiplied by number'
    exponential: command - 'calculate number to the power of number' or
        'calculate number to the power number' or
        'calculate number power number'
    square root: command - 'calculate the square root of number'
    factorial: command - 'calculate number factorial'
    logarithmic: command - 'calculate log base number of number'
    :param prompt:
    :return: a tuple of the result and the formula
    """
    if 'add' in prompt:
        """Splits the command at 'add' then adds the 2 numbers from the prompt: 'calculate number add number'"""
        calc_prompt = prompt.split('add')
        result = round(float(calc_prompt[0]) + float(calc_prompt[1]), 2)
        return (result, prompt)
    if '+' in prompt:
        """Splits the command at '+' then adds the 2 numbers from the prompt: 'calculate number plus number'"""
        calc_prompt = prompt.split('+')
        result = round(float(calc_prompt[0]) + float(calc_prompt[1]), 2)
        return (result, prompt)
    if '-' in prompt:
        """Splits the command at '-' then subtracts the 2nd number from the 1st from the prompt: 'calculate number minus number'"""
        calc_prompt = prompt.split('-')
        prompt = prompt.replace('-', 'minus')
        result = round(float(calc_prompt[0]) - float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'subtract' in prompt:
        """Splits the command at 'subtract' then subtracts the 2nd number from the 1st from the prompt: 'calculate number subtract number'"""
        calc_prompt = prompt.split('subtract')
        prompt = prompt.replace('subtract', 'minus')
        result = round(float(calc_prompt[0]) - float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'subtracted by' in prompt:
        """Splits the command at 'subtracted by' then subtracts the 2nd number from the 1st from the prompt: 'calculate number subtracted by number'"""
        calc_prompt = prompt.split('subtracted by')
        prompt = prompt.replace('subtracted by', 'minus')
        result = round(float(calc_prompt[0]) - float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'times' in prompt:
        """Splits the command at 'times' then multiplied the 2 numbers from the prompt: 'calculate number times number'"""
        calc_prompt = prompt.split('times')
        result = round(float(calc_prompt[0]) * float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'multiplied by' in prompt:
        """Splits the command at 'multiplied by' then multiplies the 2 numbers from the prompt: 'calculate number multiplied by number'"""
        calc_prompt = prompt.split('multiplied by')
        result = round(float(calc_prompt[0]) * float(calc_prompt[1]), 2)
        return (result, prompt)
    if '*' in prompt:
        """Splits the command at '*' then multiplies the 2 numbers from the prompt: 'calculate number times number'"""
        calc_prompt = prompt.split('*')
        prompt = prompt.replace('*', 'times')
        result = round(float(calc_prompt[0]) * float(calc_prompt[1]), 2)
        return (result, prompt)
    if '/' in prompt:
        """Splits the command at '/' then divides the 1st number by the 2nd number from the prompt: 'calculate number divided by number'"""
        calc_prompt = prompt.split('/')
        prompt = prompt.replace('/', 'divided by')
        result = round(float(calc_prompt[0]) / float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'divided by' in prompt:
        """Splits the command at 'divided by' then divides the 1st number by the 2nd number from the prompt: 'calculate number divided by number'"""
        calc_prompt = prompt.split('divided by')
        result = round(float(calc_prompt[0]) / float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'to the power of' in prompt:
        """Splits the command at 'to the power of' then takes the 1st number to the power of the 2nd number from the prompt: 'calculate number to the power of number'"""
        calc_prompt = prompt.split('to the power of')
        result = round(float(calc_prompt[0]) ** float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'to the power' in prompt:
        """Splits the command at 'to the power' then takes the 1st number to the power of the 2nd number from the prompt: 'calculate number to the power number'"""
        calc_prompt = prompt.split('to the power')
        prompt = prompt.replace('to the power', 'to the power of')
        result = round(float(calc_prompt[0]) ** float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'power' in prompt:
        """Splits the command at 'power' then takes the 1st number to the power of the 2nd number from the prompt: 'calculate number power number'"""
        calc_prompt = prompt.split('power')
        prompt = prompt.replace('power', 'to the power of')
        result = round(float(calc_prompt[0]) ** float(calc_prompt[1]), 2)
        return (result, prompt)
    if '^' in prompt:
        """Splits the command at '^' then takes the 1st number to the power of the 2nd number from the prompt: 'calculate number to the power of number'"""
        calc_prompt = prompt.split('^')
        prompt = prompt.replace('^', 'to the power of')
        result = round(float(calc_prompt[0]) ** float(calc_prompt[1]), 2)
        return (result, prompt)
    if 'the square root of' in prompt:
        """Remove 'the square root of' from the command then takes the square root of the number from the prompt: 'calculate the square root of number'"""
        calc_prompt = prompt.replace('the square root of', '')
        result = round(math.sqrt(float(calc_prompt)), 2)
        return (result, prompt)
    if 'factorial' in prompt:
        """Removes 'factorial' from the command then takes the factorial of the number from the prompt: 'calculate number factorial'"""
        calc_prompt = prompt.replace('factorial', '')
        result = round(math.factorial(float(calc_prompt)), 2)
        return (result, prompt)
    if 'log base' in prompt:
        """Removes 'log base' and 'of' from the command then takes the log base(1st number) of the 2nd number from the prompt: 'calculate number factorial'"""
        calc_prompt = prompt.replace(' log base ', '').replace(' of', '')
        calc_prompt = calc_prompt.split(' ')
        print('calc_prompt:', calc_prompt)
        result = round(math.log(float(calc_prompt[1]), float(calc_prompt[0])), 2)
        ...

test_sequences.py

Source: test_sequences.py (GitHub)

import unittest
import generatools.sequences as sequences
class TestPromptSeqsPairSequences(unittest.TestCase):
    """
    Test that the x_sequences class behave properly.
    Test type: functional (public API)
    """
    def test_run_if_proper_sequences_trimmed(self):
        sequences.PromptSeqsPair(
            prompt="bla", sequences=["aa", "bb"], sequences_trimmed=["a", "b"]
        )
    def test_except_if_improper_sequences_trimmed(self):
        self.assertRaises(
            ValueError,
            sequences.PromptSeqsPair,
            prompt="bla",
            sequences=["aa", "bb"],
            sequences_trimmed=["a"],
        )
    def test_run_if_proper_prompt_lvl_eval(self):
        z = sequences.PromptSeqsPair(
            prompt="bla",
            sequences=["aa", "bb"],
            prompt_lvl_eval={"metric1": 2, "metric2": 3},
        )
        z.prompt = "bla"
    def test_except_if_improper_prompt_lvl_eval(self):
        self.assertRaises(
            TypeError,
            sequences.PromptSeqsPair,
            prompt="bla",
            sequences=["aa", "bb"],
            prompt_lvl_eval=[2, 3],
        )
        self.assertRaises(
            TypeError,
            sequences.PromptSeqsPair,
            prompt="bla",
            sequences=["aa", "bb"],
            prompt_lvl_eval={"metric1": [2, 3]},
        )
    def test_run_if_proper_seq_lvl_eval(self):
        sequences.PromptSeqsPair(
            prompt="bla",
            sequences=["aa", "bb"],
            seq_lvl_eval={"metric1": [2, 3], "metric2": [3, 4]},
        )
    def test_except_if_improper_seq_lvl_eval(self):
        self.assertRaises(
            TypeError,
            sequences.PromptSeqsPair,
            prompt="bla",
            sequences=["aa", "bb"],
            seq_lvl_eval=[
                {"metric1": 2, "metric2": 3},
                {"metric1": 2, "metric2": 3},
            ],
        )
        self.assertRaises(
            ValueError,
            sequences.PromptSeqsPair,
            prompt="bla",
            sequences=["aa", "bb"],
            seq_lvl_eval={"metric1": [2, 3], "metric2": [3]},
        )
    def test_to_dict_generates_a_dict(self):
        obs_out = sequences.PromptSeqsPair(
            prompt="bla",
            sequences=["aa", "bb"],
        ).to_dict()
        self.assertIsInstance(obs_out, dict)
    def test_average_seq_lvl_eval_works(self):
        exp_out = {"m1": 2, "m2": 20}
        obs_out = sequences.PromptSeqsPair(
            prompt="bla",
            sequences=["aa", "bb"],
            seq_lvl_eval={"m1": [1, 3], "m2": [10, 30]},
        ).average_seq_lvl_eval()
        self.assertEqual(exp_out, obs_out)
class TestPromptSeqsPairsList(unittest.TestCase):
    def test_setting_retrieving_works(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 1, "m2": 10},
            ),
            sequences.PromptSeqsPair(
                prompt="b",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 3, "m2": 30},
            ),
        ]
        pair_list = sequences.PromptSeqsPairsList(ls=ls)
        self.assertEqual(pair_list[1].prompt, "b")
    def test_fail_if_wrong_ls_input(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 1, "m2": 10},
            ),
            "oups",
        ]
        self.assertRaises(ValueError, sequences.PromptSeqsPairsList, ls=ls)
    def test_raise_exception_when_different_metric_set(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"oups": 1, "m2": 10},
            ),
            sequences.PromptSeqsPair(
                prompt="b",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 3, "m2": 30},
            ),
        ]
        pair_list = sequences.PromptSeqsPairsList(ls=ls)
        self.assertRaises(KeyError, pair_list.average_prompt_lvl_metrics)
    def test_prompt_lvl_averaging_works(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 1, "m2": 10},
            ),
            sequences.PromptSeqsPair(
                prompt="b",
                sequences=["y1", "y2"],
                prompt_lvl_eval={"m1": 3, "m2": 30},
            ),
        ]
        pair_list = sequences.PromptSeqsPairsList(ls=ls)
        exp_out = {"m1": 2, "m2": 20}
        obs_out = pair_list.average_prompt_lvl_metrics()
        self.assertEqual(exp_out, obs_out)
    def test_seq_lvl_averaging_works(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                seq_lvl_eval={"m1": [1, 1], "m2": [10, 10]},
            ),
            sequences.PromptSeqsPair(
                prompt="b",
                sequences=["y1", "y2"],
                seq_lvl_eval={"m1": [3, 3], "m2": [30, 30]},
            ),
        ]
        pair_list = sequences.PromptSeqsPairsList(ls=ls)
        exp_out = {"m1": 2, "m2": 20}
        obs_out = pair_list.average_seq_lvl_metrics()
        self.assertEqual(exp_out, obs_out)
    def test_to_json_works(self):
        ls = [
            sequences.PromptSeqsPair(
                prompt="a",
                sequences=["y1", "y2"],
                seq_lvl_eval={"m1": [1, 1], "m2": [10, 10]},
            ),
            sequences.PromptSeqsPair(
                prompt="b",
                sequences=["y1", "y2"],
                seq_lvl_eval={"m1": [3, 1], "m2": [30, 10]},
            ),
        ]
        pair_list = sequences.PromptSeqsPairsList(ls=ls)
        obs_out = pair_list.to_json()
        exp_out = [ls[0].to_dict(), ls[1].to_dict()]
        ...

PromptHandler.js

Source: PromptHandler.js (GitHub)

/**
 * File Manager.
 */
Craft.PromptHandler = Garnish.Base.extend({
    $modalContainerDiv: null,
    $prompt: null,
    $promptApplyToRemainingContainer: null,
    $promptApplyToRemainingCheckbox: null,
    $promptApplyToRemainingLabel: null,
    $promptButtons: null,
    _prompts: [],
    _promptBatchCallback: $.noop,
    _promptBatchReturnData: [],
    _promptBatchNum: 0,
    init: function()
    {
    },
    resetPrompts: function()
    {
        this._prompts = [];
        this._promptBatchCallback = $.noop;
        this._promptBatchReturnData = [];
        this._promptBatchNum = 0;
    },
    addPrompt: function(prompt)
    {
        this._prompts.push(prompt);
    },
    getPromptCount: function()
    {
        return this._prompts.length;
    },
    showBatchPrompts: function(callback)
    {
        this._promptBatchCallback = callback;
        this._promptBatchReturnData = [];
        this._promptBatchNum = 0;
        this._showNextPromptInBatch();
    },
    _showNextPromptInBatch: function()
    {
        var prompt = this._prompts[this._promptBatchNum].prompt,
            remainingInBatch = this._prompts.length - (this._promptBatchNum + 1);
        this._showPrompt(prompt.message, prompt.choices, $.proxy(this, '_handleBatchPromptSelection'), remainingInBatch);
    },
    /**
     * Handles a prompt choice selection.
     *
     * @param choice
     * @param applyToRemaining
     * @private
     */
    _handleBatchPromptSelection: function(choice, applyToRemaining)
    {
        var prompt = this._prompts[this._promptBatchNum],
            remainingInBatch = this._prompts.length - (this._promptBatchNum + 1);
        // Record this choice
        var choiceData = $.extend(prompt, {choice: choice});
        this._promptBatchReturnData.push(choiceData);
        // Are there any remaining items in the batch?
        if (remainingInBatch)
        {
            // Get ready to deal with the next prompt
            this._promptBatchNum++;
            // Apply the same choice to the remaining items?
            if (applyToRemaining)
            {
                this._handleBatchPromptSelection(choice, true);
            }
            else
            {
                // Show the next prompt
                this._showNextPromptInBatch();
            }
        }
        else
        {
            // All done! Call the callback
            if (typeof this._promptBatchCallback == 'function')
            {
                this._promptBatchCallback(this._promptBatchReturnData);
            }
        }
    },
    /**
     * Show the user prompt with a given message and choices, plus an optional "Apply to remaining" checkbox.
     *
     * @param string message
     * @param array choices
     * @param function callback
     * @param int itemsToGo
     */
    _showPrompt: function(message, choices, callback, itemsToGo)
    {
        this._promptCallback = callback;
        if (this.modal == null) {
            this.modal = new Garnish.Modal({closeOtherModals: false});
        }
        if (this.$modalContainerDiv == null) {
            this.$modalContainerDiv = $('<div class="modal fitted prompt-modal"></div>').addClass().appendTo(Garnish.$bod);
        }
        this.$prompt = $('<div class="body"></div>').appendTo(this.$modalContainerDiv.empty());
        this.$promptMessage = $('<p class="prompt-msg"/>').appendTo(this.$prompt);
        $('<p>').html(Craft.t('What do you want to do?')).appendTo(this.$prompt);
        this.$promptApplyToRemainingContainer = $('<label class="assets-applytoremaining"/>').appendTo(this.$prompt).hide();
        this.$promptApplyToRemainingCheckbox = $('<input type="checkbox"/>').appendTo(this.$promptApplyToRemainingContainer);
        this.$promptApplyToRemainingLabel = $('<span/>').appendTo(this.$promptApplyToRemainingContainer);
        this.$promptButtons = $('<div class="buttons"/>').appendTo(this.$prompt);
        this.modal.setContainer(this.$modalContainerDiv);
        this.$promptMessage.html(message);
        for (var i = 0; i < choices.length; i++)
        {
            var $btn = $('<div class="btn" data-choice="'+choices[i].value+'">' + choices[i].title + '</div>');
            this.addListener($btn, 'activate', function(ev)
            {
                var choice = ev.currentTarget.getAttribute('data-choice'),
                    applyToRemaining = this.$promptApplyToRemainingCheckbox.prop('checked');
                this._selectPromptChoice(choice, applyToRemaining);
            });
            this.$promptButtons.append($btn);
        }
        if (itemsToGo)
        {
            this.$promptApplyToRemainingContainer.show();
            this.$promptApplyToRemainingLabel.html(' ' + Craft.t('Apply this to the {number} remaining conflicts?', {number: itemsToGo}));
        }
        this.modal.show();
        this.modal.removeListener(Garnish.Modal.$shade, 'click');
        this.addListener(Garnish.Modal.$shade, 'click', '_cancelPrompt');
    },
    /**
     * Handles when a user selects one of the prompt choices.
     *
     * @param choice
     * @param applyToRemaining
     * @private
     */
    _selectPromptChoice: function(choice, applyToRemaining)
    {
        this.$prompt.fadeOut('fast', $.proxy(function() {
            this.modal.hide();
            this._promptCallback(choice, applyToRemaining);
        }, this));
    },
    /**
     * Cancels the prompt.
     */
    _cancelPrompt: function()
    {
        this._selectPromptChoice('cancel', true);
    }
    ...
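
A short sketch of how the handler above is typically driven: queue prompts, then show them as a batch and collect the user's choices in the callback. The Craft.PromptHandler, addPrompt, and showBatchPrompts names come straight from the snippet; the message and choice values below are placeholders.

// Hypothetical usage, assuming Craft and Garnish are loaded on the page.
var promptHandler = new Craft.PromptHandler();

promptHandler.resetPrompts();
promptHandler.addPrompt({
    prompt: {
        message: 'A file named "photo.jpg" already exists.', // placeholder text
        choices: [
            {value: 'keepBoth', title: 'Keep both'},
            {value: 'replace', title: 'Replace it'}
        ]
    }
});

promptHandler.showBatchPrompts(function(returnData) {
    // returnData is the array of recorded choices built up in
    // _handleBatchPromptSelection(): one entry per queued prompt.
    console.log(returnData);
});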

index.ts

Source: index.ts (GitHub)

import { AMessage } from '../../../../interfaces/Client';
import { valueType } from '../../../../interfaces/SettingsGroup';
import { PromptManager } from '../../../../helpers/PromptManager';
import { GuildMember, Role, TextChannel, VoiceChannel, GuildChannel, User, Message } from 'discord.js';

export const sendSetting = async (message: AMessage, setting: string, valueType: valueType, prompt: PromptManager, array?: boolean) => {
    if (valueType === 'boolean') {
        const res = await prompt.boolean(array ? `What would you like to add to \`${setting}\`?` : `What would you like to change ${setting} to?`);

        return res;
    }

    const GUI = await prompt.sendMsg(
        array ? `What would you like to add to \`${setting}\`?` : `What would you like to change ${setting} to?`,
        `${valueType === 'color' ? '\n\n • [Adobe Color Picker](https://color.adobe.com/create)' : ''}\n\nReply with your answer, or \`cancel\` to cancel.`
    );

    const filter = (msg: AMessage) => {
        return msg.author.id === message.author.id;
    };

    const value = (await GUI.channel.awaitMessages(filter, { max: 1, time: 1000 * 60 * prompt.timeout })).first();

    if (!value) return prompt.error(`You ran out of time!`);

    value.delete({ timeout: 300 }).catch(() => null);

    if (value.content === 'cancel') return prompt.delete();

    return parseType(value as AMessage, valueType, value.content, prompt);
};

export async function parseType(message: Message | AMessage, type: 'number', str: string, prompt: PromptManager): Promise<number | void>;

export async function parseType(message: Message | AMessage, type: 'color', str: string, prompt: PromptManager): Promise<string | void>;

export async function parseType(message: Message | AMessage, type: 'image', str: string, prompt: PromptManager): Promise<string | void>;

export async function parseType(message: Message | AMessage, type: 'string', str: string, prompt: PromptManager): Promise<string | void>;

export async function parseType(message: Message | AMessage, type: 'url', str: string, prompt: PromptManager): Promise<string | void>;

export async function parseType(message: Message | AMessage, type: 'guildMember', str: string, prompt: PromptManager): Promise<GuildMember | void>;

export async function parseType(message: Message | AMessage, type: 'bannedUser', str: string, prompt: PromptManager): Promise<User | void>;

export async function parseType(message: Message | AMessage, type: 'role', str: string, prompt: PromptManager): Promise<Role | void>;

export async function parseType(message: Message | AMessage, type: 'textChannel', str: string, prompt: PromptManager): Promise<TextChannel | void>;

export async function parseType(message: Message | AMessage, type: 'voiceChannel', str: string, prompt: PromptManager): Promise<VoiceChannel | void>;

export async function parseType(message: Message | AMessage, type: 'guildChannel', str: string, prompt: PromptManager): Promise<GuildChannel | void>;

export async function parseType(message: Message | AMessage, type: 'boolean', str: string, prompt: PromptManager): Promise<boolean | void>;

export async function parseType(message: Message | AMessage, type: 'snowflake', str: string, prompt: PromptManager): Promise<string | void>;

export async function parseType(message: Message | AMessage, type: 'timeLength', str: string, prompt: PromptManager): Promise<number | void>;

export async function parseType(
    message: Message | AMessage,
    type: valueType,
    str: string,
    prompt: PromptManager
): Promise<number | string | GuildMember | Role | VoiceChannel | GuildChannel | boolean | User | void>;

export async function parseType(
    message: Message | AMessage,
    type: valueType,
    str: string,
    prompt: PromptManager
): Promise<number | string | GuildMember | Role | VoiceChannel | GuildChannel | boolean | User | void> {
    switch (type) {
        case 'number':
            return await prompt.parse.number(str);
        case 'color':
            return await prompt.parse.color(str);
        case 'image':
            return await prompt.parse.image(message, str);
        case 'string':
            return str;
        case 'url':
            return await prompt.parse.url(str);
        case 'guildMember':
            if (!message.guild) return;

            return await prompt.parse.member(message.guild, str);
        case 'bannedUser':
            if (!message.guild) return;

            return await prompt.parse.bannedUser(message.guild, str);
        case 'role':
            if (!message.guild) return;

            return await prompt.parse.role(message.guild, str);
        case 'textChannel':
            if (!message.guild) return;

            return await prompt.parse.textChannel(message.guild, str);
        case 'voiceChannel':
            if (!message.guild) return;

            return await prompt.parse.voiceChannel(message.guild, str);
        case 'guildChannel':
            if (!message.guild) return;

            return await prompt.parse.guildChannel(message.guild, str);
        case 'boolean':
            return await prompt.parse.boolean(str);
        case 'snowflake':
            return await prompt.parse.snowflake(str);
        case 'timeLength':
            return await prompt.parse.timeLength(str);
        default:
            return;
    }
    ...
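
As a rough usage sketch (written in plain JavaScript, since the types are erased at runtime), the helpers above are driven from a command handler: sendSetting asks the question, waits for the user's reply, and funnels the raw answer through parseType for validation. The message and promptManager objects, the 'welcomeChannel' setting name, and the sample inputs are all assumptions standing in for the bot's real command context.

// Hypothetical call sites inside an async command handler.
async function updateWelcomeChannel(message, promptManager) {
    // Interactive flow: ask, await the reply, parse it as a text channel.
    const channel = await sendSetting(message, 'welcomeChannel', 'textChannel', promptManager);
    if (!channel) return; // cancelled, timed out, or failed to parse

    // parseType can also be called directly when the raw answer is already in hand.
    const duration = await parseType(message, 'timeLength', '2h30m', promptManager);
    console.log('New welcome channel:', channel.id, 'example parsed duration:', duration);
}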

replwrap.py

Source: replwrap.py (GitHub)

...
        self.child.waitnoecho()
        if prompt_change is None:
            self.prompt = orig_prompt
        else:
            self.set_prompt(orig_prompt,
                            prompt_change.format(new_prompt, continuation_prompt))
            self.prompt = new_prompt
        self.continuation_prompt = continuation_prompt
        self._expect_prompt()
        if extra_init_cmd is not None:
            self.run_command(extra_init_cmd)
    def set_prompt(self, orig_prompt, prompt_change):
        self.child.expect(orig_prompt)
        self.child.sendline(prompt_change)
    def _expect_prompt(self, timeout=-1):
        return self.child.expect_exact([self.prompt, self.continuation_prompt],
                                       timeout=timeout)
    def run_command(self, command, timeout=-1):
        """Send a command to the REPL, wait for and return output.
        :param str command: The command to send. Trailing newlines are not needed.
            This should be a complete block of input that will trigger execution;
            if a continuation prompt is found after sending input, :exc:`ValueError`
            will be raised.
        :param int timeout: How long to wait for the next prompt. -1 means the
            default from the :class:`pexpect.spawn` object (default 30 seconds).
            None means to wait indefinitely.
        """
        # Split up multiline commands and feed them in bit-by-bit
        cmdlines = command.splitlines()
        # splitlines ignores trailing newlines - add it back in manually
        if command.endswith('\n'):
            cmdlines.append('')
        if not cmdlines:
            raise ValueError("No command was given")
        res = []
        self.child.sendline(cmdlines[0])
        for line in cmdlines[1:]:
            self._expect_prompt(timeout=timeout)
            res.append(self.child.before)
            self.child.sendline(line)
        # Command was fully submitted, now wait for the next prompt
        if self._expect_prompt(timeout=timeout) == 1:
            # We got the continuation prompt - command was incomplete
            self.child.kill(signal.SIGINT)
            self._expect_prompt(timeout=1)
            raise ValueError("Continuation prompt found - input was incomplete:\n"
                             + command)
        return u''.join(res + [self.child.before])
def python(command="python"):
    """Start a Python shell and return a :class:`REPLWrapper` object."""
    return REPLWrapper(command, u">>> ", u"import sys; sys.ps1={0!r}; sys.ps2={1!r}")
def bash(command="bash"):
    """Start a bash shell and return a :class:`REPLWrapper` object."""
    bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh')
    child = pexpect.spawn(command, ['--rcfile', bashrc], echo=False,
                          encoding='utf-8')
    # If the user runs 'env', the value of PS1 will be in the output. To avoid
    # replwrap seeing that as the next prompt, we'll embed the marker characters
    # for invisible characters in the prompt; these show up when inspecting the
    ...

Using AI Code Generation

import React from 'react';
import { storiesOf } from '@storybook/react';
import { action } from '@storybook/addon-actions';
import { withInfo } from '@storybook/addon-info';
import { withKnobs, text, boolean, number } from '@storybook/addon-knobs/react';
import { withNotes } from '@storybook/addon-notes';
import { withOptions } from '@storybook/addon-options';
import { withViewport } from '@storybook/addon-viewport';
import { withBackgrounds } from '@storybook/addon-backgrounds';
import { withPropsTable } from 'storybook-addon-react-docgen';
import { withA11y } from '@storybook/addon-a11y';
import { withConsole } from '@storybook/addon-console';
import { Button } from '@storybook/react/demo';

storiesOf('Button', module)
    .addDecorator((story, context) => withConsole()(story)(context))
    .add('with text', () => <Button onClick={action('clicked')}>Hello Button</Button>)
    .add('with some emoji', () => (
        <Button onClick={action('clicked')}>
            <span role="img" aria-label="so cool">
                😀 😎 👍 💯
            </span>
        </Button>
    ));

Using AI Code Generation

import { storiesOf } from '@storybook/react';
import { withKnobs, text, boolean, number } from '@storybook/addon-knobs/react';
import { action } from '@storybook/addon-actions';
import { withInfo } from '@storybook/addon-info';
import { withReadme } from 'storybook-readme';
import { withA11y } from '@storybook/addon-a11y';
import { withConsole } from '@storybook/addon-console';
import { withTests } from '@storybook/addon-jest';
import { withOptions } from '@storybook/addon-options';
import { withViewport } from '@storybook/addon-viewport';
import { withBackgrounds } from '@storybook/addon-backgrounds';
import { withLinks } from '@storybook/addon-links';
import { withNotes } from '@storybook/addon-notes';
import { withPropsTable } from 'storybook-addon-react-docgen';
import { withDocs } from 'storybook-readme';
import { withSmartKnobs } from 'storybook-addon-smart-knobs';
import { withScreenshot } from 'storyc

Using AI Code Generation

import { withKnobs, text } from '@storybook/addon-knobs';
export default {
};
export const test = () => {
    const label = text('Text', 'Hello Storybook');
    return `<div>${label}</div>`;
};
module.exports = {
};
"scripts": {
},

Using AI Code Generation

import { prompt } from 'storybook-root';
jest.mock('storybook-root', () => ({
    prompt: jest.fn(),
}));
test('test', () => {
    prompt('test');
    expect(prompt).toHaveBeenCalledWith('test');
});

I am trying to mock the prompt method of storybook-root in my test file (test.test.js). I have tried the following code, but it is not working: it throws the error "TypeError: (0 , _storybookRoot.prompt) is not a function". I am using Jest for testing and I am not sure what is wrong with the code. Can anyone help?

jest.mock('storybook-root', () => ({
    prompt: jest.fn(),
}));
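
For reference, a working setup usually needs the jest.mock factory to describe the module shape Jest should substitute; when storybook-root is not actually resolvable from the test (a common cause of the "is not a function" TypeError), the { virtual: true } option tells Jest not to look for it on disk. The sketch below is an assumption-based example, not a verified fix for the asker's exact project.

// Hypothetical test.test.js. Assumes "storybook-root" should expose a named
// `prompt` export; { virtual: true } lets Jest mock a module that is not installed.
import { prompt } from 'storybook-root';

// jest.mock calls are hoisted above imports by babel-jest, so this factory
// is applied before the import above is evaluated.
jest.mock(
    'storybook-root',
    () => ({
        __esModule: true, // keep ES-module named-export interop intact
        prompt: jest.fn(),
    }),
    { virtual: true }
);

test('prompt is called with the given text', () => {
    prompt('test');
    expect(prompt).toHaveBeenCalledWith('test');
});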

Using AI Code Generation

import { prompt } from 'storybook-root'
prompt('some text')
prompt('some text', 'some default text')
import { storiesOf } from '@storybook/react'
import { action } from '@storybook/addon-actions'
import { withKnobs, text } from '@storybook/addon-knobs'
import prompt from './src/prompt'
storiesOf('Prompt', module)
    .addDecorator(withKnobs)
    .add('default', () => {
        const promptText = text('text', 'some text')
        return (
            onClick={() => {
                prompt(promptText)
                    .then(action('prompt resolved'))
                    .catch(action('prompt rejected'))
            }}
    })
    .add('with default text', () => {
        const promptText = text('text', 'some text')
        const defaultText = text('default text', 'default text')
        return (
            onClick={() => {
                prompt(promptText, defaultText)
                    .then(action('prompt resolved'))
                    .catch(action('prompt rejected'))
            }}
    })
import { prompt } from 'storybook-root'
{
    "scripts": {
    },
    "dependencies": {
    },
    "devDependencies": {

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: right from setting up the prerequisites and running your first automation test, through following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run storybook-root automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now!

Get 100 automation testing minutes free!
