diff --git a/.gitignore b/.gitignore index f2cdba4..369b1e7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,24 @@ node_modules/ -.DS_Store +**/.DS_Store Thumbs.db /.settings/ /.project /.tern-project site/ + +/ANT/jsPsych/ +Questionnaires/BackgroundPapers/ +Questionnaires/ToAdd/ +ANT/jsPsych/ + + +BaddeleyGramReason/jspsych/ + +ANT/.idea/vcs.xml +ANT/.idea/ANT.iml +ANT/.idea/modules.xml +ANT/.idea/workspace.xml + +TrailMaking/jspysch/* +TestButtons/jspsych/* +Stroop/jspsych/* diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..b58b603 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,5 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ diff --git a/.idea/experiment-demos.iml b/.idea/experiment-demos.iml new file mode 100644 index 0000000..0c8867d --- /dev/null +++ b/.idea/experiment-demos.iml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..5c4bdec --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/ANT/ANT_Setup_EN.js b/ANT/ANT_Setup_EN.js new file mode 100644 index 0000000..0b43bb2 --- /dev/null +++ b/ANT/ANT_Setup_EN.js @@ -0,0 +1,125 @@ +// This variable affects the distance between the center of +// the screen and the high and low line + + +// How many times to cycle over all different trials for PRACTICE +var NumberOfCyclesForPractice = 1 +// How many times to cycle over all different trials for TESTING +var NumberOfCyclesForTesting = 2 + +var InstructionText = 'For this task you will see...' 
+ +var ScreenSpacing = 300 +// Set the fixation time between trials, in milliseconds +var FixationDuration = 300 +// how many flankers should be on either side of the central arrow +var NFlankers = 2; +// define the flankers as empty strings +var flankersL = ''; +var flankersR = ''; +var flankersN = ''; +// append to the empty strings the different number of flankers +for (i = 0; i < NFlankers; i++) + { + // the following are the codes for the specific arrows and star to use + // https://www.fileformat.info/info/unicode/char/2190/index.htm + flankersL += "\u2190"; + flankersR += "\u2192"; + flankersN += "\u2014"; + } +// font size of the stimuli +var FontSize = 72 + +// What are the keyboard keys to record responses from +var KeyboardChoices = ['arrowleft', 'arrowright']; +// The above key codes need to be mapped onto text descriptions used later for scoring +var ResponseMapping = ['left', 'right']; + +// format the stimuli on the screen. This uses the fontsize specified above +// The stimuli are presented with some at the top of the screen, some in the middle and some at the bottom. Build an +// html formatted table of the stimuli. +function PutIntoTable(top='top', middle='mid', bottom='bot', width=600, height=ScreenSpacing) { + return '
'+top+'
'+middle+'
'+bottom+'
'; + } + + // What type of trial is this? + // Factors: + // flanker: left, right, none [3] + // central arrow direction: left, right [2] + // star: top&bot, top, bot, none, middle [5] + // position: top, bottom [2] + + +// Create the array of objects dynamically +// https://stackoverflow.com/questions/7858385/how-to-add-values-to-an-array-of-objects-dynamically-in-javascript + +// The order of these is important and needs to map onto the for loops below which index these lists +var flankers = [flankersL, flankersR, flankersN]; +var centralArrow = ["\u2190", "\u2192"]; // left, right +var fixation = [PutIntoTable("\u2217","+","\u2217"), PutIntoTable("\u2217","+"," "), PutIntoTable(" ","+","\u2217"), PutIntoTable(" ","+"," "), PutIntoTable(" ","\u2217"," ")]; +var position = ["high","low"]; +var count = 0; +var ANT = []; + +// The test procedure in the HTML specifies how many times to repeat the stimuli presentation +// The following sets up the stimuli for all cells across the different factors +// This also adds text descriptions which will go into the resulatnt data file and will be used for scoring +for(var i=0; i<3; i++) { // flanker type: LEFT, RIGHT, NEUTRAL + for (var j = 0; j < 2; j++) { // cycle over central arrow directions + for (var k = 0; k < 1; k++) { // cycle over fixation positions + for (var m = 0; m < 2; m++) { // cycle over stimulus position + // Set up all the stimuli + ANT[count] = {}; + ANT[count].flanker = flankers[i]; // left/right/neutral + ANT[count].centralArrow = centralArrow[j]; // right/left + ANT[count].fixation = fixation[k]; //upper/lower/both/center/none + ANT[count].position = position[m]; // upper/lower + // THE FOLLOWING NEEDS TO BE CHECKED AND CONFIRMED + + // Define what is considered correct and incorrect for each trial + // The central arrow points to the LEFT + if (j==0) {ANT[count].correct = 'left'} + // The central arrow points to the RIGHT + if (j==1) {ANT[count].correct = 'right'} + + // map the stim to the type + // 
using the indices is a convenience. It would be better if this was more explicit + // This information will be used for scoring this experiment + + // Flankers are LEFT + // Central arrow is LEFT + if (i==0 & j==0) {ANT[count].flankerType = 'conFlanker';} + // Central arrow is RIGHT + if (i==0 & j==1) {ANT[count].flankerType = 'incFlanker';} + // Flankers are RIGHT + // Central arrow is RIGHT + if (i==1 & j==1) {ANT[count].flankerType = 'conFlanker';} + // Central arrow is LEFT + if (i==1 & j==0) {ANT[count].flankerType = 'incFlanker';} + // Flankers are NEURAL + if (i==2) {ANT[count].flankerType = 'neuFlanker';} + // Position of the CUE + + if (k==0) {ANT[count].cuePos = 'both';} + if (k==1) {ANT[count].cuePos = 'upper';} + if (k==2) {ANT[count].cuePos = 'lower';} + if (k==3) {ANT[count].cuePos = 'none';} + if (k==4) {ANT[count].cuePos = 'center';} + // Code whether the stim is congruent with the CUE + // Stimulus is HIGH (m = 0) + if (k==0 & m==0) {ANT[count].cueType = 'bothCue';} + if (k==1 & m==0) {ANT[count].cueType = 'conCue';} + if (k==2 & m==0) {ANT[count].cueType = 'incCue';} + if (k==3 & m==0) {ANT[count].cueType = 'noCue';} + if (k==4 & m==0) {ANT[count].cueType = 'centerCue';} + // Stimulus is LOW (m = 1) + if (k==2 & m==1) {ANT[count].cueType = 'conCue';} + if (k==1 & m==1) {ANT[count].cueType = 'incCue';} + if (k==0 & m==1) {ANT[count].cueType = 'bothCue';} + if (k==3 & m==1) {ANT[count].cueType = 'noCue';} + if (k==4 & m==1) {ANT[count].cueType = 'centerCue';} + count += 1; + } + } + } +} diff --git a/ANT/README.md b/ANT/README.md new file mode 100644 index 0000000..9c15793 --- /dev/null +++ b/ANT/README.md @@ -0,0 +1,49 @@ +# Attention Network Task-Flanker Task +[Description](http://lcni-3.uoregon.edu/phenowiki/index.php/Attention_Network_Task-Flanker_Task_(ANT)) + +[In depth description](https://scienceofbehaviorchange.org/measures/attentional-network-task/) + +This task tests the three attentional networks of alerting, orienting, and executive 
control. It is a combination of a cued reaction time task and a flanker task. + +The flanker task stimuli are presented at the top or bottom of the screen. An alerting star(s) is also presented in the following locations: +- upper +- lower +- both +- center +- none + +## Conditions +- cue type (5) + - upper + - lower + - both + - center + - none + +- target location (2) + - upper + - lower + +- target direction (2) + - left + - right + +- flanker condition (3) + - congruent + - flanking arrows the same direction as the target direction + - incongruent + - flanking arrows the opposite direction as the target direction + - neutral + - the target is flanked by lines without arrowheads + +There are a total of 60 unique condition combinations. +The resultant measures need to be accuracy and response time for the following conditions: +- cue type (5) +- target location (2) +- flanker condition (3) +This is 30 different measures!!! + +# To do +- Add instructions +- Add practice trials +- Should there be trials with feedback? 
diff --git a/ANT/ant.html b/ANT/ant.html new file mode 100644 index 0000000..4ff22c7 --- /dev/null +++ b/ANT/ant.html @@ -0,0 +1,146 @@ + + + + + + + + + ANT + + + + + + + + + + + + \ No newline at end of file diff --git a/BaddeleyGramReason/BGR.css b/BaddeleyGramReason/BGR.css new file mode 100644 index 0000000..70229b2 --- /dev/null +++ b/BaddeleyGramReason/BGR.css @@ -0,0 +1 @@ +body {background-color: rgb(150, 150, 150)} \ No newline at end of file diff --git a/BaddeleyGramReason/BGR.html b/BaddeleyGramReason/BGR.html new file mode 100644 index 0000000..5c110f4 --- /dev/null +++ b/BaddeleyGramReason/BGR.html @@ -0,0 +1,234 @@ + + + + + + + + + Baddeley's Grammatical Reasoning Task + + + + + + + + + + + + + + \ No newline at end of file diff --git a/BaddeleyGramReason/BGR_Setup_EN.js b/BaddeleyGramReason/BGR_Setup_EN.js new file mode 100644 index 0000000..ec0f8bb --- /dev/null +++ b/BaddeleyGramReason/BGR_Setup_EN.js @@ -0,0 +1,442 @@ + +var FullScreenMode = false; +var TextFontSize = "30px"; +var StimulusFontSize = "45px"; + +var ResponseChoiceTrue = 37 // Left arrow +var ResponseChoiceFalse = 39 // right arrow +var ResponseChoices = [ResponseChoiceTrue, ResponseChoiceFalse, 27] +var FeedbackLength = 400; // This is in milliseconds +var PracticeRepeats = 1 +var TestRepeats = 1 + +function PutIntoTable(top='top', middle='mid', width=600, height=200) { + return '
'+top+'
'+middle+'
'; + } + +var InstrText = ['

Welcome to the Grammatical Reasoning Test.

', + '

You will see a series of statements. Below each statement there are two letters.

', + '

Your task is to decide whether the statement about the letters is true or false. Respond by pressing the left arrow for True or right for False.

', + //'

', + '

Before doing the actual experiment you will complete some practice trials. These will give you feedback about your accuracy.

Remember to respond as accurately and quickly as possible.

']; + +var PoorPerformanceText = ['

There will be another run of practice trials with feedback.

Remember to respond as accurately and quickly as possible.

']; + +var TestInstrText = ['

Now you will do the task.
It will be exactly like the practice except you will not get feedback.

']; + +var ThankYouText = 'Thank you' + +var BaddGramReasonPracticeList = [ + { + "item": 1, + "Sentence": "A is followed by B", + "Response": "AB", + "correct": "true" + }, + { + "item": 2, + "Sentence": "B is not followed by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 3, + "Sentence": "B is preceded by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 4, + "Sentence": "A does not precede B", + "Response": "BA", + "correct": "true" + } +] + + +var BaddGramReasonList = [ + { + "item": 1, + "Sentence": "A is preceded by B", + "Response": "BA", + "correct": "true" + }, + { + "item": 2, + "Sentence": "B does not precede A", + "Response": "AB", + "correct": "true" + }, + { + "item": 3, + "Sentence": "A is not followed by B", + "Response": "BA", + "correct": "true" + }, + { + "item": 4, + "Sentence": "B is preceded by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 5, + "Sentence": "A is followed by B", + "Response": "AB", + "correct": "true" + }, + { + "item": 6, + "Sentence": "A does not follow B", + "Response": "AB", + "correct": "true" + }, + { + "item": 7, + "Sentence": "B is not preceded by A", + "Response": "AB", + "correct": "false" + }, + { + "item": 8, + "Sentence": "B follows A", + "Response": "AB", + "correct": "true" + }, + { + "item": 9, + "Sentence": "A precedes B", + "Response": "BA", + "correct": "false" + }, + { + "item": 10, + "Sentence": "B does not follow A", + "Response": "BA", + "correct": "true" + }, + { + "item": 11, + "Sentence": "B precedes A", + "Response": "AB", + "correct": "false" + }, + { + "item": 12, + "Sentence": "B is followed by A", + "Response": "AB", + "correct": "false" + }, + { + "item": 13, + "Sentence": "B is not followed by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 14, + "Sentence": "B is preceded by A", + "Response": "AB", + "correct": "true" + }, + { + "item": 15, + "Sentence": "B is followed by A", + "Response": "BA", + "correct": "true" + }, + { + 
"item": 16, + "Sentence": "B precedes A", + "Response": "BA", + "correct": "true" + }, + { + "item": 17, + "Sentence": "A is not followed by B", + "Response": "AB", + "correct": "false" + }, + { + "item": 18, + "Sentence": "A is followed by B", + "Response": "BA", + "correct": "false" + }, + { + "item": 19, + "Sentence": "B is not preceded by A", + "Response": "BA", + "correct": "true" + }, + { + "item": 20, + "Sentence": "B is followed by A", + "Response": "AB", + "correct": "false" + }, + { + "item": 21, + "Sentence": "A does not follow B", + "Response": "BA", + "correct": "false" + }, + { + "item": 22, + "Sentence": "A is preceded by B", + "Response": "AB", + "correct": "false" + }, + { + "item": 23, + "Sentence": "B does not follow A", + "Response": "AB", + "correct": "false" + }, + { + "item": 24, + "Sentence": "A is not preceded by B", + "Response": "BA", + "correct": "false" + }, + { + "item": 25, + "Sentence": "A follows B", + "Response": "BA", + "correct": "true" + }, + { + "item": 26, + "Sentence": "A is not preceded by B", + "Response": "AB", + "correct": "true" + }, + { + "item": 27, + "Sentence": "A follows B", + "Response": "AB", + "correct": "false" + }, + { + "item": 28, + "Sentence": "A does not precede B", + "Response": "AB", + "correct": "false" + }, + { + "item": 29, + "Sentence": "A precedes B", + "Response": "AB", + "correct": "true" + }, + { + "item": 30, + "Sentence": "B follows A", + "Response": "BA", + "correct": "false" + }, + { + "item": 31, + "Sentence": "B does not precede A", + "Response": "BA", + "correct": "false" + }, + { + "item": 32, + "Sentence": "A does not precede B", + "Response": "BA", + "correct": "true" + }, + { + "item": 33, + "Sentence": "A does not follow B", + "Response": "AB", + "correct": "true" + }, + { + "item": 34, + "Sentence": "A is not followed by B", + "Response": "BA", + "correct": "true" + }, + { + "item": 35, + "Sentence": "B is not preceded by A", + "Response": "BA", + "correct": "true" + }, + { + "item": 
36, + "Sentence": "B is preceded by A", + "Response": "AB", + "correct": "true" + }, + { + "item": 37, + "Sentence": "A follows B", + "Response": "BA", + "correct": "true" + }, + { + "item": 38, + "Sentence": "B precedes A", + "Response": "BA", + "correct": "true" + }, + { + "item": 39, + "Sentence": "B is followed by A", + "Response": "BA", + "correct": "true" + }, + { + "item": 40, + "Sentence": "A precedes B", + "Response": "AB", + "correct": "true" + }, + { + "item": 41, + "Sentence": "A follows B", + "Response": "AB", + "correct": "false" + }, + { + "item": 42, + "Sentence": "B does not precede A", + "Response": "BA", + "correct": "false" + }, + { + "item": 43, + "Sentence": "A does not precede B", + "Response": "BA", + "correct": "true" + }, + { + "item": 44, + "Sentence": "A is preceded by B", + "Response": "BA", + "correct": "true" + }, + { + "item": 45, + "Sentence": "B is not followed by A", + "Response": "AB", + "correct": "true" + }, + { + "item": 46, + "Sentence": "B does not follow A", + "Response": "BA", + "correct": "true" + }, + { + "item": 47, + "Sentence": "B does not precede A", + "Response": "AB", + "correct": "true" + }, + { + "item": 48, + "Sentence": "A is followed by B", + "Response": "AB", + "correct": "true" + }, + { + "item": 49, + "Sentence": "B is not preceded by A", + "Response": "AB", + "correct": "false" + }, + { + "item": 50, + "Sentence": "B follows A", + "Response": "BA", + "correct": "false" + }, + { + "item": 51, + "Sentence": "A does not precede B", + "Response": "AB", + "correct": "false" + }, + { + "item": 52, + "Sentence": "A is not followed by B", + "Response": "AB", + "correct": "false" + }, + { + "item": 53, + "Sentence": "B is preceded by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 54, + "Sentence": "A is not preceded by B", + "Response": "AB", + "correct": "true" + }, + { + "item": 55, + "Sentence": "A does not follow B", + "Response": "BA", + "correct": "false" + }, + { + "item": 56, + "Sentence": 
"A is followed by B", + "Response": "BA", + "correct": "false" + }, + { + "item": 57, + "Sentence": "A is not preceded by B", + "Response": "BA", + "correct": "false" + }, + { + "item": 58, + "Sentence": "B is followed by A", + "Response": "AB", + "correct": "false" + }, + { + "item": 59, + "Sentence": "B is not followed by A", + "Response": "BA", + "correct": "false" + }, + { + "item": 60, + "Sentence": "A precedes B", + "Response": "BA", + "correct": "false" + }, + { + "item": 61, + "Sentence": "B does not follow A", + "Response": "AB", + "correct": "false" + }, + { + "item": 62, + "Sentence": "A is preceded by B", + "Response": "AB", + "correct": "false" + }, + { + "item": 63, + "Sentence": "B precedes A", + "Response": "AB", + "correct": "false" + }, + { + "item": 64, + "Sentence": "B follows A", + "Response": "AB", + "correct": "true" + } +] \ No newline at end of file diff --git a/BaddeleyGramReason/Baddeley1968_Article_A3MinReasoningTestBasedOnGramm.pdf b/BaddeleyGramReason/Baddeley1968_Article_A3MinReasoningTestBasedOnGramm.pdf new file mode 100644 index 0000000..a72460e Binary files /dev/null and b/BaddeleyGramReason/Baddeley1968_Article_A3MinReasoningTestBasedOnGramm.pdf differ diff --git a/BaddeleyGramReason/assets/TrueFalseArrows.png b/BaddeleyGramReason/assets/TrueFalseArrows.png new file mode 100644 index 0000000..4dbd87b Binary files /dev/null and b/BaddeleyGramReason/assets/TrueFalseArrows.png differ diff --git a/GoalsAndNotes.md b/GoalsAndNotes.md new file mode 100644 index 0000000..3291ee3 --- /dev/null +++ b/GoalsAndNotes.md @@ -0,0 +1,85 @@ +Jason's goals and notes + + +Can I get a site where a user clicks items from a list and presses "administer." + +These tests are then run and the user takes the tests. + + + +MAKE SURE TO STATE WHAT THE REAL LIFE IMPLICATIONS +are of each of the tasks. What is the purpose of each task? 
 + + +QUESTIONNAIRES +CPT + + +TASKS + +To Do: + + +Activated long term memory +Arrow flanker << In the examples folder +Arrow flanker ABCD + +Attentional Network Task << First draft done +LetterDMS + + +IMTDMT -- Immediate & Delayed Memory Task (IMT/DMT) + * Participants view a steady stream of 5-digit (default) numbers and are asked to click the Mouse button if the current target number is identical to the one before it. +Letter Memory +Listening Span Task + +Picture-Story Task ** + +Picture-Story Task + +Spatial Relations +Spatial Process +Spatial Reconstructing +Visual Scanning +Word Fluency +WCST +Symbol Search +Matrix Reasoning + + +Done and need to test: +Baddeley Grammatical Reasoning Task + + +AES - Apathy Evaluation Scale +CFS - Cognitive flexibility scale +CFI - Cognitive flexibility index +STAI +PANAS +CUDITR +Beck Depression Index +DMS - block +DMS - adaptive + + + + + + + + +Done, tested, released +Stroop Color +Stroop Word +Stroop Color/Word + + +JATOS + +Can I make a jsPsych task that simply presents buttons on the screen. Depending on which button is pressed will determine which task to present via JATOS. + + + + +GENERAL NOTES +If each questionnaire gets setup in a JS file, there also needs to be an accompanying HTML file to load and run the JS file. There only needs to be one edit to change an HTML file from one questionnaire to another. 
\ No newline at end of file diff --git a/LetterDMS/DMS_Adaptive.html b/LetterDMS/DMS_Adaptive.html new file mode 100644 index 0000000..bd74327 --- /dev/null +++ b/LetterDMS/DMS_Adaptive.html @@ -0,0 +1,212 @@ + + + + + + + + + DMS Adaptive + + + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/DMS_BlockBased.html b/LetterDMS/DMS_BlockBased.html new file mode 100644 index 0000000..0685106 --- /dev/null +++ b/LetterDMS/DMS_BlockBased.html @@ -0,0 +1,196 @@ + + + + + + + + + DMS Block based + + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/DMS_functions.js b/LetterDMS/DMS_functions.js new file mode 100644 index 0000000..ddbd77c --- /dev/null +++ b/LetterDMS/DMS_functions.js @@ -0,0 +1,302 @@ +class AdaptiveStimulusList { + // This class is made to keep track of the stimuli and probes used in the previous trials + constructor(CurrentStim='', PreviousStim=[], CurrentProbe='', PreviousProbe=[], CurrentCorrect = false) + { + // keep track of the current trial and make an array of all + this.CurrentStim = CurrentStim; + this.PreviousStim = []; + this.CurrentProbe = CurrentProbe; + this.PreviousProbe = []; + this.CurrentCorrect = CurrentCorrect; + + } + // return the current stimulus + getCurrentStim() {return this.CurrentStim} + // return the stimulus from teh previous trial + getLastStim() {return this.PreviousStim[this.PreviousStim.length - 1]} + + getLastProbe() {return this.PreviousProbe[this.PreviousProbe.length - 1]} + + getCurrentCorrect() {return this.CurrentCorrect} + // update the current stim and add it to the array of stimuli + addStim(newStim) { + this.CurrentStim = newStim; + this.PreviousStim.push(this.CurrentStim); + } +// update the current probe and add it to the array of probes + addProbe(newProbe) { + this.CurrentProbe = newProbe; + this.PreviousProbe.push(this.CurrentProbe); + } + addCorrect(newCorrect) { + this.CurrentCorrect = newCorrect + } +} + + +function PutLettersInGrid(LetterList,NRows,NCols, 
width=600, height=300, FontSize=40) + { + var count = 0; + var Table = ''; + // create the html table and assign the class which is defined in the HTML file + Table += '' + for (var i=0; i' + for (var j=0; j'+LetterList[count]+''; + count += 1; + } + } + return Table + } + +function CountdownTimer(MillisecondsPerNumber = 1000) + { + var Three = { + type: 'html-keyboard-response', + stimulus: '

3

', + choices: jsPsych.NO_KEYS, + trial_duration: MillisecondsPerNumber, + } + var Two = { + type: 'html-keyboard-response', + stimulus: '

2

', + choices: jsPsych.NO_KEYS, + trial_duration: MillisecondsPerNumber, + } + var One = { + type: 'html-keyboard-response', + stimulus: '

1

', + choices: jsPsych.NO_KEYS, + trial_duration: MillisecondsPerNumber, + } + + var Countdown_procedure = { + // Make sure this order is correct: fixation cue and then the stimulus + // Otherwise the scoring will not make any sense + timeline: [Three, Two, One], + randomize_order: false + } + return Countdown_procedure + } + +function RemoveOldLetters(AllowableLetters, LastTrialStimulus, LastTrialProbe) + { // remove the letters from the last trial from teh list of allowable letters + // For the first trial there are no previous stimuli, so check for that + if (typeof LastTrialStimulus !== 'undefined') + { + for (var i=0; i < LastTrialStimulus.length; i++) { + AllowableLetters = AllowableLetters.replace(LastTrialStimulus[i],''); + } + // remove the probe letter also, after converting it to uppercase + AllowableLetters = AllowableLetters.replace(LastTrialProbe.toUpperCase(),'') + } + else { + AllowabelLetters = AllowableLetters + } + return AllowableLetters + } + +function MakeStimulus(LettersToUse, Load) + { + // Make a letter list for use as stimuli + // Shuffle the letters + // Make sure a load 1 letter is not L + var IsEll = true + while (IsEll) { + var ShuffledLetters = shuffle(LettersToUse) + //console.log(LettersToUse) + // Onky take the required number of letters based on the load + var LetterString = ShuffledLetters.substring(0,Load) + console.log(LetterString) + if (Load == 1 & LetterString == "L") { + IsEll = true + } + else {IsEll = false} + } + return LetterString + } + +function CreateProbeLetter(CurrentStim, AllowableLetters) + { + ProbeType = MakeProbeType() + + if (ProbeType == 1) + { + //LookingForProbe = true + //while (LookingForProbe) + //{ + // select a random letter from the current stim + // ADD CHCK TO MAKE SURE ELL IS NOT THE PROBE + var IsEll = true + while (IsEll) { + ShuffledStim = shuffle(CurrentStim) + CurrentProbe = ShuffledStim[0].toLowerCase() + console.log('Trying: '+CurrentProbe) + if (CurrentProbe != 'l') { + IsEll = false + } + 
} + var correct = true + // if (CurrentProbe != "L") + // {LookingForProbe = false} + //} + } + else + { // Remove the current stim letters from the available letter set + CurrentAllowableList = RemoveOldLetters(AllowableLetters, CurrentStim, '') + var IsEll = true + while (IsEll) { + ShuffledStim = shuffle(CurrentAllowableList) + CurrentProbe = ShuffledStim[0].toLowerCase() + // Check to make sure the probe letter is NOT ell + if (CurrentProbe != 'l') { + IsEll = false + } + } + var correct = false + } + return [CurrentProbe, correct] + } +function MakeProbeType() + { + // Decide if this is a posiitve (1) or negative (0) trial + return Math.round(Math.random()) + } +function getRandomInt(n) + { //https://www.codespeedy.com/shuffle-characters-of-a-string-in-javascript/ + return Math.floor(Math.random() * n); + } + +function shuffle(s) { + var arr = s.split(''); // Convert String to array + var n = arr.length; // Length of the array + + for(var i=0 ; i 9){ + Limit = 9 + } + else if (Limit < 5){ + Limit = 5 + } + switch (Limit) { + case 5: + Loads = [1,3,4,5,6]; + break; + case 6: + Loads = [1,3,5,6,7]; + break; + case 7: + Loads = [1,3,6,7,8]; + break; + case 8: + Loads = [1,3,6,8,9]; + break; + case 9: + Loads = [1,3,6,8,9]; + } + this.Loads = Loads + } + + CreateDMSItemList() + { + // Make a function that runs at the beginning of the DMS block based version. It takes as inpout just the CognCap. + // It then identifies the loads required. Then it extracts the corresponding rows from the list. It therefore + // shortens the selected list accordingly. 
+ + // This variable describes how many trials there are in the setup file + var NTrialsPerLoadInList = 6 + var TrialList = [] + for (var i = 0; i < this.Loads.length; i++) + { + // What is the current load + var temp = this.Loads[i] + for (var j = 0; j < 54; j++) + { + if (temp == this.InputStimulusList[j]['Load']) + { + console.log(this.InputStimulusList[j]['Load']) + TrialList.push(this.InputStimulusList[j]) + } + } + } + this.TrialList = TrialList + } +} \ No newline at end of file diff --git a/LetterDMS/DMS_setup_eng.js b/LetterDMS/DMS_setup_eng.js new file mode 100644 index 0000000..3a538f8 --- /dev/null +++ b/LetterDMS/DMS_setup_eng.js @@ -0,0 +1,297 @@ +// General parameters used by all versions of the DFMS task +// The following letter list does not include vowels nor w. W is removed because it is the only two syllable +// letter in the English language. During subvocal rehearsal W therefore takes two syllables to rehearse. +// The letter ELL is in included as a stimulus letter but NOT a probe letter. This is done so that proactive +// interference can be minimized between consecutive trials with a load of 9 letters. Without ELL as a stimulus +// there are simply not enough available letters. ELL is not allowed to be a probe leter because it is difficult +// to differentiate it from ONE and EYE. + +var AllowableLetters = "BCDFGHJKLMNPQRSTVXYZ"; + +var FullScreenMode = true; + +var FontSize = 72; + +var KeyboardChoices = ['arrowleft', 'arrowright']; +// the following is used for scoring and allows the keyboard choices to be whatever you would like +// This response mapping correspopnds to the order of the above KeyboardChoices +var ResponseMapping = [true, false]; + +// Timings of the different trial phases in MILLISECONDS +var StimOnTime = 2500 +var RetOnTime = 3500 +var ProbeOnTime = 5000 +// This is the intertrial interval. +var ITITime = 1000 + +// This is the time between blocks. 
Note that between each block of trials there +// is also the 3-2-1 countdown. Therefore, the full interblock interval is this value PLUS +// the countdown time, which is 3 seconds. +var InterBlockTime = 3000 +// TO DO ++++++ +// This is a delay component for use after instructions and before the first Block +var WaitOnTime = 5000 + +var ProbeColor = 'blue' + +// =================================================== +// BLOCK BASED PARAMETERS +// How many trials per block +var NTrialsPerBlock = 6 +// The number of blocks to run +var NumberOfBlocks = 5 +// =================================================== +// ADAPTIVE VERSION PARAMETERS +var MaxTrials = 50 +var MaxReversals = 15 +var StartValue = 1 +var MinValue = 1 +var MaxValue = 9 +var StepSize = 1 +var NUp = 3 +var NDown = 1 +var FastStart = true +// =================================================== + + +MaxTrials = 5 +StimOnTime = 250 +RetOnTime = 250 + +var instructions = ['This is the Verbal Delayed Match to Sample Task', + '

Press [LEFT] if the letter WAS in the set.
Press [RIGHT] if the letter WAS NOT in the set.
', + '

You will NOT receive feedback after each trial.

', + 'Remember that the letters to study will be in white and CAPITALIZED. The test letter will be in blue and will be lowercase.', + 'Try to respond as quickly and as accurately as possible. Press next to begin.'] + + +// The following are stimulus lists that are carefully selected to be counterbalanced for +// probe type and to have six trials per block and minimal proactive interference from +// previous trials + +var DMSLetterBehaviorList001 = [ + {"StimulusLetters": "****G****", "ProbeLetter": "g", "Correct": true, "Load": 1}, + {"StimulusLetters": "****H****", "ProbeLetter": "h", "Correct": true, "Load": 1}, + {"StimulusLetters": "****X****", "ProbeLetter": "d", "Correct": false, "Load": 1}, + {"StimulusLetters": "****R****", "ProbeLetter": "r", "Correct": true, "Load": 1}, + {"StimulusLetters": "****M****", "ProbeLetter": "h", "Correct": false, "Load": 1}, + {"StimulusLetters": "****Y****", "ProbeLetter": "z", "Correct": false, "Load": 1}, + {"StimulusLetters": "***M*K***", "ProbeLetter": "v", "Correct": false, "Load": 2}, + {"StimulusLetters": "***H*R***", "ProbeLetter": "p", "Correct": false, "Load": 2}, + {"StimulusLetters": "***D*T***", "ProbeLetter": "d", "Correct": true, "Load": 2}, + {"StimulusLetters": "***L*K***", "ProbeLetter": "k", "Correct": true, "Load": 2}, + {"StimulusLetters": "***M*X***", "ProbeLetter": "x", "Correct": true, "Load": 2}, + {"StimulusLetters": "***P*T***", "ProbeLetter": "g", "Correct": false, "Load": 2}, + {"StimulusLetters": "***VPM***", "ProbeLetter": "p", "Correct": true, "Load": 3}, + {"StimulusLetters": "***SRY***", "ProbeLetter": "j", "Correct": false, "Load": 3}, + {"StimulusLetters": "***VDB***", "ProbeLetter": "h", "Correct": false, "Load": 3}, + {"StimulusLetters": "***YRL***", "ProbeLetter": "y", "Correct": true, "Load": 3}, + {"StimulusLetters": "***PJN***", "ProbeLetter": "c", "Correct": false, "Load": 3}, + {"StimulusLetters": "***XST***", "ProbeLetter": "s", "Correct": true, "Load": 3}, + {"StimulusLetters": 
"G*C***Q*B", "ProbeLetter": "q", "Correct": true, "Load": 4}, + {"StimulusLetters": "K*P***H*J", "ProbeLetter": "r", "Correct": false, "Load": 4}, + {"StimulusLetters": "M*B***X*N", "ProbeLetter": "s", "Correct": false, "Load": 4}, + {"StimulusLetters": "P*V***H*F", "ProbeLetter": "v", "Correct": true, "Load": 4}, + {"StimulusLetters": "X*L***B*S", "ProbeLetter": "s", "Correct": true, "Load": 4}, + {"StimulusLetters": "C*T***Q*M", "ProbeLetter": "k", "Correct": false, "Load": 4}, + {"StimulusLetters": "Z*N*R*K*Y", "ProbeLetter": "y", "Correct": true, "Load": 5}, + {"StimulusLetters": "Q*G*V*B*M", "ProbeLetter": "g", "Correct": true, "Load": 5}, + {"StimulusLetters": "J*H*N*C*T", "ProbeLetter": "s", "Correct": false, "Load": 5}, + {"StimulusLetters": "K*B*P*G*M", "ProbeLetter": "r", "Correct": false, "Load": 5}, + {"StimulusLetters": "X*Z*N*L*J", "ProbeLetter": "n", "Correct": true, "Load": 5}, + {"StimulusLetters": "P*M*G*F*K", "ProbeLetter": "h", "Correct": false, "Load": 5}, + {"StimulusLetters": "BZM***NSJ", "ProbeLetter": "c", "Correct": false, "Load": 6}, + {"StimulusLetters": "DFH***RPQ", "ProbeLetter": "h", "Correct": true, "Load": 6}, + {"StimulusLetters": "KLV***MBT", "ProbeLetter": "g", "Correct": false, "Load": 6}, + {"StimulusLetters": "DXJ***FPZ", "ProbeLetter": "x", "Correct": true, "Load": 6}, + {"StimulusLetters": "RGT***CBQ", "ProbeLetter": "c", "Correct": true, "Load": 6}, + {"StimulusLetters": "KYD***XVH", "ProbeLetter": "n", "Correct": false, "Load": 6}, + {"StimulusLetters": "LBG*C*QKF", "ProbeLetter": "k", "Correct": true, "Load": 7}, + {"StimulusLetters": "ZYD*J*TXS", "ProbeLetter": "d", "Correct": true, "Load": 7}, + {"StimulusLetters": "LFP*R*GVN", "ProbeLetter": "g", "Correct": true, "Load": 7}, + {"StimulusLetters": "TKS*C*XYB", "ProbeLetter": "z", "Correct": false, "Load": 7}, + {"StimulusLetters": "PGQ*J*VFH", "ProbeLetter": "d", "Correct": false, "Load": 7}, + {"StimulusLetters": "BYL*R*ZSM", "ProbeLetter": "c", "Correct": false, 
"Load": 7}, + {"StimulusLetters": "ZYXR*JDSN", "ProbeLetter": "c", "Correct": false, "Load": 8}, + {"StimulusLetters": "LGFV*PCQK", "ProbeLetter": "q", "Correct": true, "Load": 8}, + {"StimulusLetters": "SJXT*RBHV", "ProbeLetter": "s", "Correct": true, "Load": 8}, + {"StimulusLetters": "FKTQ*YZNM", "ProbeLetter": "n", "Correct": true, "Load": 8}, + {"StimulusLetters": "BPJG*KHXD", "ProbeLetter": "v", "Correct": false, "Load": 8}, + {"StimulusLetters": "ZYCF*SLKM", "ProbeLetter": "n", "Correct": false, "Load": 8}, + {"StimulusLetters": "PMSYLCXKH", "ProbeLetter": "b", "Correct": false, "Load": 9}, + {"StimulusLetters": "JCNQVKZGT", "ProbeLetter": "c", "Correct": true, "Load": 9}, + {"StimulusLetters": "HRMYBVPQL", "ProbeLetter": "m", "Correct": true, "Load": 9}, + {"StimulusLetters": "DYNBXKFZT", "ProbeLetter": "t", "Correct": true, "Load": 9}, + {"StimulusLetters": "JRGHMNSYV", "ProbeLetter": "p", "Correct": false, "Load": 9}, + {"StimulusLetters": "GCJXLTKBD", "ProbeLetter": "q", "Correct": false, "Load": 9} +] +var DMSLetterBehaviorList002 = [ + {"StimulusLetters": "****R****", "ProbeLetter": "d", "Correct": false, "Load": 1}, + {"StimulusLetters": "****K****", "ProbeLetter": "n", "Correct": false, "Load": 1}, + {"StimulusLetters": "****J****", "ProbeLetter": "f", "Correct": false, "Load": 1}, + {"StimulusLetters": "****B****", "ProbeLetter": "b", "Correct": true, "Load": 1}, + {"StimulusLetters": "****X****", "ProbeLetter": "x", "Correct": true, "Load": 1}, + {"StimulusLetters": "****C****", "ProbeLetter": "c", "Correct": true, "Load": 1}, + {"StimulusLetters": "***V*F***", "ProbeLetter": "d", "Correct": false, "Load": 2}, + {"StimulusLetters": "***G*B***", "ProbeLetter": "k", "Correct": false, "Load": 2}, + {"StimulusLetters": "***D*P***", "ProbeLetter": "p", "Correct": true, "Load": 2}, + {"StimulusLetters": "***V*T***", "ProbeLetter": "z", "Correct": false, "Load": 2}, + {"StimulusLetters": "***S*F***", "ProbeLetter": "f", "Correct": true, "Load": 2}, + 
{"StimulusLetters": "***T*P***", "ProbeLetter": "t", "Correct": true, "Load": 2}, + {"StimulusLetters": "***BGF***", "ProbeLetter": "g", "Correct": true, "Load": 3}, + {"StimulusLetters": "***KDR***", "ProbeLetter": "x", "Correct": false, "Load": 3}, + {"StimulusLetters": "***GLY***", "ProbeLetter": "s", "Correct": false, "Load": 3}, + {"StimulusLetters": "***HKF***", "ProbeLetter": "h", "Correct": true, "Load": 3}, + {"StimulusLetters": "***NVJ***", "ProbeLetter": "v", "Correct": true, "Load": 3}, + {"StimulusLetters": "***QFK***", "ProbeLetter": "p", "Correct": false, "Load": 3}, + {"StimulusLetters": "P*Q***H*T", "ProbeLetter": "q", "Correct": true, "Load": 4}, + {"StimulusLetters": "L*S***V*N", "ProbeLetter": "n", "Correct": true, "Load": 4}, + {"StimulusLetters": "Q*J***R*B", "ProbeLetter": "r", "Correct": true, "Load": 4}, + {"StimulusLetters": "F*M***V*H", "ProbeLetter": "g", "Correct": false, "Load": 4}, + {"StimulusLetters": "L*T***P*C", "ProbeLetter": "y", "Correct": false, "Load": 4}, + {"StimulusLetters": "N*B***Q*Z", "ProbeLetter": "k", "Correct": false, "Load": 4}, + {"StimulusLetters": "B*T*K*P*N", "ProbeLetter": "x", "Correct": false, "Load": 5}, + {"StimulusLetters": "D*R*C*V*Z", "ProbeLetter": "s", "Correct": false, "Load": 5}, + {"StimulusLetters": "H*G*T*P*F", "ProbeLetter": "p", "Correct": true, "Load": 5}, + {"StimulusLetters": "S*J*B*Y*X", "ProbeLetter": "j", "Correct": true, "Load": 5}, + {"StimulusLetters": "T*N*Q*K*H", "ProbeLetter": "n", "Correct": true, "Load": 5}, + {"StimulusLetters": "R*M*Y*F*L", "ProbeLetter": "v", "Correct": false, "Load": 5}, + {"StimulusLetters": "DTF***KRC", "ProbeLetter": "j", "Correct": false, "Load": 6}, + {"StimulusLetters": "BGQ***YPS", "ProbeLetter": "q", "Correct": true, "Load": 6}, + {"StimulusLetters": "HRT***ZKL", "ProbeLetter": "z", "Correct": true, "Load": 6}, + {"StimulusLetters": "BCF***JGV", "ProbeLetter": "j", "Correct": true, "Load": 6}, + {"StimulusLetters": "ZDP***SQX", "ProbeLetter": "k", 
"Correct": false, "Load": 6}, + {"StimulusLetters": "BJC***GYF", "ProbeLetter": "m", "Correct": false, "Load": 6}, + {"StimulusLetters": "SJQ*D*GVP", "ProbeLetter": "d", "Correct": true, "Load": 7}, + {"StimulusLetters": "MHX*L*TKY", "ProbeLetter": "h", "Correct": true, "Load": 7}, + {"StimulusLetters": "GDJ*R*BNP", "ProbeLetter": "p", "Correct": true, "Load": 7}, + {"StimulusLetters": "FQS*K*YLM", "ProbeLetter": "x", "Correct": false, "Load": 7}, + {"StimulusLetters": "JCV*D*NPB", "ProbeLetter": "r", "Correct": false, "Load": 7}, + {"StimulusLetters": "FQS*K*YLM", "ProbeLetter": "x", "Correct": false, "Load": 7}, + {"StimulusLetters": "MFGX*HBLS", "ProbeLetter": "s", "Correct": true, "Load": 8}, + {"StimulusLetters": "LVPT*KZRC", "ProbeLetter": "k", "Correct": true, "Load": 8}, + {"StimulusLetters": "SBFX*GDQY", "ProbeLetter": "f", "Correct": true, "Load": 8}, + {"StimulusLetters": "HYZL*MKVC", "ProbeLetter": "r", "Correct": false, "Load": 8}, + {"StimulusLetters": "PRXD*GNQB", "ProbeLetter": "t", "Correct": false, "Load": 8}, + {"StimulusLetters": "YMFH*KZJB", "ProbeLetter": "l", "Correct": false, "Load": 8}, + {"StimulusLetters": "HYMJSRCBD", "ProbeLetter": "v", "Correct": false, "Load": 9}, + {"StimulusLetters": "XTNPLFKSY", "ProbeLetter": "t", "Correct": true, "Load": 9}, + {"StimulusLetters": "VMDGYXRHC", "ProbeLetter": "z", "Correct": false, "Load": 9}, + {"StimulusLetters": "SPKQVLNYB", "ProbeLetter": "p", "Correct": true, "Load": 9}, + {"StimulusLetters": "MZHGRKTFS", "ProbeLetter": "h", "Correct": true, "Load": 9}, + {"StimulusLetters": "ZNRVXJQDY", "ProbeLetter": "b", "Correct": false, "Load": 9} +] + +var DMSLetterMRIList001 = [ + {"StimulusLetters": "****D****", "ProbeLetter": "z", "Correct": true, "Load": 1}, + {"StimulusLetters": "****Q****", "ProbeLetter": "v", "Correct": true, "Load": 1}, + {"StimulusLetters": "****N****", "ProbeLetter": "c", "Correct": true, "Load": 1}, + {"StimulusLetters": "****M****", "ProbeLetter": "m", "Correct": true, 
"Load": 1}, + {"StimulusLetters": "****Z****", "ProbeLetter": "z", "Correct": true, "Load": 1}, + {"StimulusLetters": "****J****", "ProbeLetter": "j", "Correct": true, "Load": 1}, + {"StimulusLetters": "***F*X***", "ProbeLetter": "c", "Correct": true, "Load": 2}, + {"StimulusLetters": "***S*N***", "ProbeLetter": "s", "Correct": true, "Load": 2}, + {"StimulusLetters": "***Y*L***", "ProbeLetter": "y", "Correct": true, "Load": 2}, + {"StimulusLetters": "***C*M***", "ProbeLetter": "c", "Correct": true, "Load": 2}, + {"StimulusLetters": "***Q*B***", "ProbeLetter": "n", "Correct": true, "Load": 2}, + {"StimulusLetters": "***F*T***", "ProbeLetter": "c", "Correct": true, "Load": 2}, + {"StimulusLetters": "***CZH***", "ProbeLetter": "y", "Correct": true, "Load": 3}, + {"StimulusLetters": "***XBK***", "ProbeLetter": "x", "Correct": true, "Load": 3}, + {"StimulusLetters": "***RHY***", "ProbeLetter": "r", "Correct": true, "Load": 3}, + {"StimulusLetters": "***CLT***", "ProbeLetter": "c", "Correct": true, "Load": 3}, + {"StimulusLetters": "***GXY***", "ProbeLetter": "f", "Correct": true, "Load": 3}, + {"StimulusLetters": "***MNC***", "ProbeLetter": "b", "Correct": true, "Load": 3}, + {"StimulusLetters": "Z*B***C*P", "ProbeLetter": "z", "Correct": true, "Load": 4}, + {"StimulusLetters": "V*G***Y*N", "ProbeLetter": "y", "Correct": true, "Load": 4}, + {"StimulusLetters": "M*X***J*B", "ProbeLetter": "h", "Correct": true, "Load": 4}, + {"StimulusLetters": "V*L***F*Y", "ProbeLetter": "y", "Correct": true, "Load": 4}, + {"StimulusLetters": "P*N***H*X", "ProbeLetter": "g", "Correct": true, "Load": 4}, + {"StimulusLetters": "R*D***Z*T", "ProbeLetter": "m", "Correct": true, "Load": 4}, + {"StimulusLetters": "V*Q*L*B*H", "ProbeLetter": "x", "Correct": true, "Load": 5}, + {"StimulusLetters": "Z*P*C*F*K", "ProbeLetter": "p", "Correct": true, "Load": 5}, + {"StimulusLetters": "J*G*N*D*Y", "ProbeLetter": "h", "Correct": true, "Load": 5}, + {"StimulusLetters": "X*R*M*V*L", "ProbeLetter": "v", 
"Correct": true, "Load": 5}, + {"StimulusLetters": "Q*G*Y*J*Z", "ProbeLetter": "t", "Correct": true, "Load": 5}, + {"StimulusLetters": "X*C*S*K*M", "ProbeLetter": "x", "Correct": true, "Load": 5}, + {"StimulusLetters": "PCZ***GQJ", "ProbeLetter": "f", "Correct": true, "Load": 6}, + {"StimulusLetters": "KHX***RMV", "ProbeLetter": "r", "Correct": true, "Load": 6}, + {"StimulusLetters": "JLZ***GCD", "ProbeLetter": "f", "Correct": true, "Load": 6}, + {"StimulusLetters": "BQT***KPY", "ProbeLetter": "h", "Correct": true, "Load": 6}, + {"StimulusLetters": "SXR***NLF", "ProbeLetter": "r", "Correct": true, "Load": 6}, + {"StimulusLetters": "MZJ***VBD", "ProbeLetter": "v", "Correct": true, "Load": 6}, + {"StimulusLetters": "JDF*K*ZNX", "ProbeLetter": "g", "Correct": true, "Load": 7}, + {"StimulusLetters": "SCQ*T*LMB", "ProbeLetter": "s", "Correct": true, "Load": 7}, + {"StimulusLetters": "DYF*R*KGN", "ProbeLetter": "x", "Correct": true, "Load": 7}, + {"StimulusLetters": "TQJ*L*HSM", "ProbeLetter": "j", "Correct": true, "Load": 7}, + {"StimulusLetters": "RGN*B*DVC", "ProbeLetter": "c", "Correct": true, "Load": 7}, + {"StimulusLetters": "FQS*K*YLM", "ProbeLetter": "x", "Correct": true, "Load": 7}, + {"StimulusLetters": "DNSQ*HZKF", "ProbeLetter": "s", "Correct": true, "Load": 8}, + {"StimulusLetters": "BQLX*MYVG", "ProbeLetter": "g", "Correct": true, "Load": 8}, + {"StimulusLetters": "RQJK*TCFD", "ProbeLetter": "d", "Correct": true, "Load": 8}, + {"StimulusLetters": "MGXZ*HSVL", "ProbeLetter": "p", "Correct": true, "Load": 8}, + {"StimulusLetters": "KMFN*QTDR", "ProbeLetter": "j", "Correct": true, "Load": 8}, + {"StimulusLetters": "NYXH*GPCL", "ProbeLetter": "z", "Correct": true, "Load": 8}, + {"StimulusLetters": "SRCZPJQDF", "ProbeLetter": "s", "Correct": true, "Load": 9}, + {"StimulusLetters": "ZXMFGVTBL", "ProbeLetter": "k", "Correct": true, "Load": 9}, + {"StimulusLetters": "VDCBRHQJS", "ProbeLetter": "y", "Correct": true, "Load": 9}, + {"StimulusLetters": "LKFSGNMPY", 
"ProbeLetter": "n", "Correct": true, "Load": 9}, + {"StimulusLetters": "TCJXKRGBV", "ProbeLetter": "t", "Correct": true, "Load": 9}, + {"StimulusLetters": "QZYSRKDLN", "ProbeLetter": "f", "Correct": true, "Load": 9} +] + +var DMSLetterMRIList002 = [ + {"StimulusLetters": "****F****", "ProbeLetter": "f", "Correct": true, "Load": 1}, + {"StimulusLetters": "****X****", "ProbeLetter": "x", "Correct": true, "Load": 1}, + {"StimulusLetters": "****M****", "ProbeLetter": "s", "Correct": true, "Load": 1}, + {"StimulusLetters": "****N****", "ProbeLetter": "c", "Correct": true, "Load": 1}, + {"StimulusLetters": "****D****", "ProbeLetter": "s", "Correct": true, "Load": 1}, + {"StimulusLetters": "****J****", "ProbeLetter": "j", "Correct": true, "Load": 1}, + {"StimulusLetters": "***V*D***", "ProbeLetter": "s", "Correct": true, "Load": 2}, + {"StimulusLetters": "***L*T***", "ProbeLetter": "c", "Correct": true, "Load": 2}, + {"StimulusLetters": "***B*X***", "ProbeLetter": "r", "Correct": true, "Load": 2}, + {"StimulusLetters": "***Q*H***", "ProbeLetter": "h", "Correct": true, "Load": 2}, + {"StimulusLetters": "***Z*K***", "ProbeLetter": "k", "Correct": true, "Load": 2}, + {"StimulusLetters": "***S*N***", "ProbeLetter": "n", "Correct": true, "Load": 2}, + {"StimulusLetters": "***JKG***", "ProbeLetter": "g", "Correct": true, "Load": 3}, + {"StimulusLetters": "***LVC***", "ProbeLetter": "c", "Correct": true, "Load": 3}, + {"StimulusLetters": "***BDM***", "ProbeLetter": "m", "Correct": true, "Load": 3}, + {"StimulusLetters": "***FCH***", "ProbeLetter": "s", "Correct": true, "Load": 3}, + {"StimulusLetters": "***JPM***", "ProbeLetter": "t", "Correct": true, "Load": 3}, + {"StimulusLetters": "***RHN***", "ProbeLetter": "s", "Correct": true, "Load": 3}, + {"StimulusLetters": "M*T***P*Y", "ProbeLetter": "x", "Correct": true, "Load": 4}, + {"StimulusLetters": "Q*R***J*S", "ProbeLetter": "q", "Correct": true, "Load": 4}, + {"StimulusLetters": "K*F***N*V", "ProbeLetter": "x", "Correct": 
true, "Load": 4}, + {"StimulusLetters": "M*H***D*L", "ProbeLetter": "d", "Correct": true, "Load": 4}, + {"StimulusLetters": "Y*X***R*V", "ProbeLetter": "s", "Correct": true, "Load": 4}, + {"StimulusLetters": "M*G***Z*B", "ProbeLetter": "g", "Correct": true, "Load": 4}, + {"StimulusLetters": "M*L*Y*Z*J", "ProbeLetter": "j", "Correct": true, "Load": 5}, + {"StimulusLetters": "K*Q*C*T*H", "ProbeLetter": "h", "Correct": true, "Load": 5}, + {"StimulusLetters": "S*R*M*P*N", "ProbeLetter": "z", "Correct": true, "Load": 5}, + {"StimulusLetters": "L*C*Q*F*H", "ProbeLetter": "j", "Correct": true, "Load": 5}, + {"StimulusLetters": "P*K*R*N*V", "ProbeLetter": "v", "Correct": true, "Load": 5}, + {"StimulusLetters": "J*Y*Q*X*L", "ProbeLetter": "b", "Correct": true, "Load": 5}, + {"StimulusLetters": "VSR***KPY", "ProbeLetter": "f", "Correct": true, "Load": 6}, + {"StimulusLetters": "QHD***GNM", "ProbeLetter": "q", "Correct": true, "Load": 6}, + {"StimulusLetters": "CXS***LKP", "ProbeLetter": "x", "Correct": true, "Load": 6}, + {"StimulusLetters": "DBY***MJN", "ProbeLetter": "g", "Correct": true, "Load": 6}, + {"StimulusLetters": "HRT***ZKL", "ProbeLetter": "z", "Correct": true, "Load": 6}, + {"StimulusLetters": "BQC***MGY", "ProbeLetter": "s", "Correct": true, "Load": 6}, + {"StimulusLetters": "ZVX*C*YMH", "ProbeLetter": "d", "Correct": true, "Load": 7}, + {"StimulusLetters": "QNB*K*LJF", "ProbeLetter": "k", "Correct": true, "Load": 7}, + {"StimulusLetters": "SMC*R*HXY", "ProbeLetter": "y", "Correct": true, "Load": 7}, + {"StimulusLetters": "DLJ*P*NBG", "ProbeLetter": "d", "Correct": true, "Load": 7}, + {"StimulusLetters": "XSR*Z*KHF", "ProbeLetter": "t", "Correct": true, "Load": 7}, + {"StimulusLetters": "CGD*P*JQY", "ProbeLetter": "b", "Correct": true, "Load": 7}, + {"StimulusLetters": "VHJQ*CPYM", "ProbeLetter": "n", "Correct": true, "Load": 8}, + {"StimulusLetters": "VGRK*NZXB", "ProbeLetter": "r", "Correct": true, "Load": 8}, + {"StimulusLetters": "CJLQ*MTGF", "ProbeLetter": 
"y", "Correct": true, "Load": 8}, + {"StimulusLetters": "XBVD*NZKL", "ProbeLetter": "d", "Correct": true, "Load": 8}, + {"StimulusLetters": "CHFL*YJMG", "ProbeLetter": "r", "Correct": true, "Load": 8}, + {"StimulusLetters": "XKDB*TMZN", "ProbeLetter": "k", "Correct": true, "Load": 8}, + {"StimulusLetters": "CTMSQVLFX", "ProbeLetter": "z", "Correct": true, "Load": 9}, + {"StimulusLetters": "GBYJRVZPH", "ProbeLetter": "y", "Correct": true, "Load": 9}, + {"StimulusLetters": "NHTBXFMQS", "ProbeLetter": "s", "Correct": true, "Load": 9}, + {"StimulusLetters": "DVQLPJRZT", "ProbeLetter": "z", "Correct": true, "Load": 9}, + {"StimulusLetters": "BCQKFXHRN", "ProbeLetter": "m", "Correct": true, "Load": 9}, + {"StimulusLetters": "RLSZQYGVP", "ProbeLetter": "t", "Correct": true, "Load": 9} +] \ No newline at end of file diff --git a/LetterDMS/StairClass.js b/LetterDMS/StairClass.js new file mode 100644 index 0000000..2a2bb2c --- /dev/null +++ b/LetterDMS/StairClass.js @@ -0,0 +1,187 @@ + class Stair { + constructor(Current=1, MinValue=1, MaxValue=9, MaxReversals=5, + MaxTrials=40, StepSize=1, NUp=3, NDown=1, FastStart=true, MaxTime=420) + { + this.Current = Current; // what is the current value + this.TrialCount = 0; // How many trials have been completed + this.MinValue = MinValue; // what is the minimum value this staircase can reach + this.MaxValue = MaxValue; // what is the maximum value this staircase can reach + this.MaxReversals = MaxReversals; // What is the maximum number of reversal. This is a break condition + this.MaxTrials = MaxTrials; // What is the maximum number of trials. 
This is a break condition + this.StepSize = StepSize; // What is the step size + this.NUp = NUp;// How many correct responses in a row are required before the current value is INcreased + this.NDown = NDown;// How many INcorrect responses in a two are required before the current value is DEcreased + this.FastStart = FastStart; // At the beginning should sequential correct responses result in increasing current? + this.CurrentRun = 1; // How many correct in a row + this.ValueList = []; // List of all the values across all trials + // add the inital value to the list + this.ValueList.push(this.Current) + this.ReversalList = []; // List of values where a reversal took place + this.findDirection() + this.CurrentDirection = 1; // What direction are we heading? 1 increasing, -1 decreasing + this.Finished = false; // If any of the stopping conditions are met this is set to true + this.ReversalCount = 0; // the number of reversals + this.MaxTime= MaxTime; // This is the maximal amount of time that the staircase can last (IN SECONDS) + + } + findDirection() { + if (this.StepSize > 0) { + this.CurrentDirection = 1 + } + else { + this.CurrentDirection = -1 + } + } + // increase difficulty + stepUp() { + // allow for reverse staircases + if (this.StepSize > 0) { + // only increase current if the max value has NOT been reached + if (this.Current < this.MaxValue) { + // Is this a reversal? + if (this.CurrentDirection == -1) { + // if so add the current value to the list + this.ReversalList.push(this.Current) + // and update the reversal count list + this.updateReversalCount() + } + this.Current += this.StepSize + + // update the current run list + this.CurrentRun = 1 + } + // Make sure the direction is correct + this.CurrentDirection = 1 + + } + else { + if (this.Current > this.MinValue) { + // Is this a reversal? 
+ if (this.CurrentDirection == 1) { + // if so add the current value to the list + this.ReversalList.push(this.Current) + // and update the reversal count list + this.updateReversalCount() + // update the current run list + this.CurrentRun = 1 + } + this.Current += this.StepSize + } + // Make sure the direction is correct + this.CurrentDirection = -1 + } + } + // Check to see if any stopping conditions are met + checkFinished() { + if (this.ReversalCount == this.MaxReversals) { + this.Finished = true + } + if (this.TrialCount == this.MaxTrials) { + this.Finished = true + } + } + + // decrease difficulty + stepDown() { + // allow for reverse staircases + if (this.StepSize > 0) { + + if (this.CurrentDirection == 1) { + // if so add the current value to the list + this.ReversalList.push(this.Current) + // and update the reversal count list + this.updateReversalCount() + } + // Check to make sure the value is not at the limit + if (this.Current > this.MinValue) + { + this.Current -= this.StepSize + } + // Make sure the direction is correct + this.CurrentDirection = -1 + } + else { + // Check to see if this is a reversal + if (this.CurrentDirection == -1) { + // if so add the current value to the list + this.ReversalList.push(this.Current) + // and update the reversal count list + this.updateReversalCount() + } + + if (this.Current < this.MaxValue) + { + this.Current -= this.StepSize + } + // Make sure the direction is correct + this.CurrentDirection = 1 + } + } + + // keep track of reversal count + updateReversalCount() { + this.ReversalCount += 1 + } + + // stop the fast start + StopFastStart() { + this.FastStart = false + } + + // Make a decision based on the current response + Decide(ResponseCorrect) { + // the response is correct + if (ResponseCorrect) { + // are we still at the rapid increase phase? 
+ if (this.FastStart) { + this.BeginningDecision() + } + else { + this.LaterDecision() + } + } + else { + this.stepDown() + // reset the current run counter + this.CurrentRun = 1 + // If we are still in the fast start increase phase, then end it + if (this.FastStart) { + this.FastStart = false + } + } + // At the end of every decision check the ending conditions + this.ValueList.push(this.Current) + this.TrialCount += 1 + this.checkFinished() + } + + // We are at the rapid increase phase at the beginning + BeginningDecision() { + this.stepUp() + // Note that the CurrentRun counter is not being used yet + } + + // We are past the rapid increase phase at the beginning + LaterDecision() { + // if this is the completeion of a run of correct responses, then increase + if (this.CurrentRun == this.NUp) { + this.stepUp() + // reset the current run counter since we made a change + this.CurrentRun = 1 + } + else { + this.CurrentRun += 1 + } + } + + CalculateAverage() { + // calculate the average of the reversal values + var totalSum = 0; + for(var i in this.ReversalList) { + totalSum += this.ReversalList[i]; + } + var numsCnt = this.ReversalList.length; + var average = totalSum / numsCnt; + return average + } + } diff --git a/LetterDMS/StyleSheet.css b/LetterDMS/StyleSheet.css new file mode 100644 index 0000000..70229b2 --- /dev/null +++ b/LetterDMS/StyleSheet.css @@ -0,0 +1 @@ +body {background-color: rgb(150, 150, 150)} \ No newline at end of file diff --git a/LetterDMS/jspsych/VERSION_6.3.1 b/LetterDMS/jspsych/VERSION_6.3.1 new file mode 100644 index 0000000..c33117e --- /dev/null +++ b/LetterDMS/jspsych/VERSION_6.3.1 @@ -0,0 +1 @@ +VERSION_6.3.1 diff --git a/LetterDMS/jspsych/css/jspsych.css b/LetterDMS/jspsych/css/jspsych.css new file mode 100644 index 0000000..3b6d177 --- /dev/null +++ b/LetterDMS/jspsych/css/jspsych.css @@ -0,0 +1,206 @@ +/* + * CSS for jsPsych experiments. 
+ * + * This stylesheet provides minimal styling to make jsPsych + * experiments look polished without any additional styles. + */ + + @import url(https://fonts.googleapis.com/css?family=Open+Sans:400italic,700italic,400,700); + +/* Container holding jsPsych content */ + + .jspsych-display-element { + display: flex; + flex-direction: column; + overflow-y: auto; + } + + .jspsych-display-element:focus { + outline: none; + } + + .jspsych-content-wrapper { + display: flex; + margin: auto; + flex: 1 1 100%; + width: 100%; + } + + .jspsych-content { + max-width: 95%; /* this is mainly an IE 10-11 fix */ + text-align: center; + margin: auto; /* this is for overflowing content */ + } + + .jspsych-top { + align-items: flex-start; + } + + .jspsych-middle { + align-items: center; + } + +/* fonts and type */ + +.jspsych-display-element { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 18px; + line-height: 1.6em; +} + +/* Form elements like input fields and buttons */ + +.jspsych-display-element input[type="text"] { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 14px; +} + +/* borrowing Bootstrap style for btn elements, but combining styles a bit */ +.jspsych-btn { + display: inline-block; + padding: 6px 12px; + margin: 0px; + font-size: 14px; + font-weight: 400; + font-family: 'Open Sans', 'Arial', sans-serif; + cursor: pointer; + line-height: 1.4; + text-align: center; + white-space: nowrap; + vertical-align: middle; + background-image: none; + border: 1px solid transparent; + border-radius: 4px; + color: #333; + background-color: #fff; + border-color: #ccc; +} + +/* only apply the hover style on devices with a mouse/pointer that can hover - issue #977 */ +@media (hover: hover) { + .jspsych-btn:hover { + background-color: #ddd; + border-color: #aaa; + } +} + +.jspsych-btn:active { + background-color: #ddd; + border-color:#000000; +} + +.jspsych-btn:disabled { + background-color: #eee; + color: #aaa; + border-color: #ccc; + cursor: not-allowed; +} 
+ +/* custom style for input[type="range] (slider) to improve alignment between positions and labels */ + +.jspsych-slider { + appearance: none; + -webkit-appearance: none; + -moz-appearance: none; + width: 100%; + background: transparent; +} +.jspsych-slider:focus { + outline: none; +} +/* track */ +.jspsych-slider::-webkit-slider-runnable-track { + appearance: none; + -webkit-appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +.jspsych-slider::-moz-range-track { + appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +.jspsych-slider::-ms-track { + appearance: none; + width: 99%; + height: 14px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +/* thumb */ +.jspsych-slider::-webkit-slider-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; + -webkit-appearance: none; + margin-top: -9px; +} +.jspsych-slider::-moz-range-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; +} +.jspsych-slider::-ms-thumb { + border: 1px solid #666; + height: 20px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; + margin-top: -2px; +} + +/* jsPsych progress bar */ + +#jspsych-progressbar-container { + color: #555; + border-bottom: 1px solid #dedede; + background-color: #f9f9f9; + margin-bottom: 1em; + text-align: center; + padding: 8px 0px; + width: 100%; + line-height: 1em; +} +#jspsych-progressbar-container span { + font-size: 14px; + padding-right: 14px; +} +#jspsych-progressbar-outer { + background-color: #eee; + width: 50%; + 
margin: auto; + height: 14px; + display: inline-block; + vertical-align: middle; + box-shadow: inset 0 1px 2px rgba(0,0,0,0.1); +} +#jspsych-progressbar-inner { + background-color: #aaa; + width: 0%; + height: 100%; +} + +/* Control appearance of jsPsych.data.displayData() */ +#jspsych-data-display { + text-align: left; +} diff --git a/LetterDMS/jspsych/examples/add-to-end-of-timeline.html b/LetterDMS/jspsych/examples/add-to-end-of-timeline.html new file mode 100644 index 0000000..05dfa2a --- /dev/null +++ b/LetterDMS/jspsych/examples/add-to-end-of-timeline.html @@ -0,0 +1,38 @@ + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/case-sensitive-responses.html b/LetterDMS/jspsych/examples/case-sensitive-responses.html new file mode 100644 index 0000000..6407121 --- /dev/null +++ b/LetterDMS/jspsych/examples/case-sensitive-responses.html @@ -0,0 +1,45 @@ + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/conditional-and-loop-functions.html b/LetterDMS/jspsych/examples/conditional-and-loop-functions.html new file mode 100644 index 0000000..68c7a74 --- /dev/null +++ b/LetterDMS/jspsych/examples/conditional-and-loop-functions.html @@ -0,0 +1,64 @@ + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/css-classes-parameter.html b/LetterDMS/jspsych/examples/css-classes-parameter.html new file mode 100644 index 0000000..a98b321 --- /dev/null +++ b/LetterDMS/jspsych/examples/css-classes-parameter.html @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/css/jquery-ui.css b/LetterDMS/jspsych/examples/css/jquery-ui.css new file mode 100644 index 0000000..a320639 --- /dev/null +++ b/LetterDMS/jspsych/examples/css/jquery-ui.css @@ -0,0 +1,1225 @@ +/*! 
jQuery UI - v1.11.3 - 2015-02-12 +* http://jqueryui.com +* Includes: core.css, accordion.css, autocomplete.css, button.css, datepicker.css, dialog.css, draggable.css, menu.css, progressbar.css, resizable.css, selectable.css, selectmenu.css, slider.css, sortable.css, spinner.css, tabs.css, tooltip.css, theme.css +* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Verdana%2CArial%2Csans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=cccccc&bgTextureHeader=highlight_soft&bgImgOpacityHeader=75&borderColorHeader=aaaaaa&fcHeader=222222&iconColorHeader=222222&bgColorContent=ffffff&bgTextureContent=flat&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=222222&iconColorContent=222222&bgColorDefault=e6e6e6&bgTextureDefault=glass&bgImgOpacityDefault=75&borderColorDefault=d3d3d3&fcDefault=555555&iconColorDefault=888888&bgColorHover=dadada&bgTextureHover=glass&bgImgOpacityHover=75&borderColorHover=999999&fcHover=212121&iconColorHover=454545&bgColorActive=ffffff&bgTextureActive=glass&bgImgOpacityActive=65&borderColorActive=aaaaaa&fcActive=212121&iconColorActive=454545&bgColorHighlight=fbf9ee&bgTextureHighlight=glass&bgImgOpacityHighlight=55&borderColorHighlight=fcefa1&fcHighlight=363636&iconColorHighlight=2e83ff&bgColorError=fef1ec&bgTextureError=glass&bgImgOpacityError=95&borderColorError=cd0a0a&fcError=cd0a0a&iconColorError=cd0a0a&bgColorOverlay=aaaaaa&bgTextureOverlay=flat&bgImgOpacityOverlay=0&opacityOverlay=30&bgColorShadow=aaaaaa&bgTextureShadow=flat&bgImgOpacityShadow=0&opacityShadow=30&thicknessShadow=8px&offsetTopShadow=-8px&offsetLeftShadow=-8px&cornerRadiusShadow=8px +* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */ + +/* Layout helpers +----------------------------------*/ +.ui-helper-hidden { + display: none; +} +.ui-helper-hidden-accessible { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; 
+} +.ui-helper-reset { + margin: 0; + padding: 0; + border: 0; + outline: 0; + line-height: 1.3; + text-decoration: none; + font-size: 100%; + list-style: none; +} +.ui-helper-clearfix:before, +.ui-helper-clearfix:after { + content: ""; + display: table; + border-collapse: collapse; +} +.ui-helper-clearfix:after { + clear: both; +} +.ui-helper-clearfix { + min-height: 0; /* support: IE7 */ +} +.ui-helper-zfix { + width: 100%; + height: 100%; + top: 0; + left: 0; + position: absolute; + opacity: 0; + filter:Alpha(Opacity=0); /* support: IE8 */ +} + +.ui-front { + z-index: 100; +} + + +/* Interaction Cues +----------------------------------*/ +.ui-state-disabled { + cursor: default !important; +} + + +/* Icons +----------------------------------*/ + +/* states and images */ +.ui-icon { + display: block; + text-indent: -99999px; + overflow: hidden; + background-repeat: no-repeat; +} + + +/* Misc visuals +----------------------------------*/ + +/* Overlays */ +.ui-widget-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; +} +.ui-accordion .ui-accordion-header { + display: block; + cursor: pointer; + position: relative; + margin: 2px 0 0 0; + padding: .5em .5em .5em .7em; + min-height: 0; /* support: IE7 */ + font-size: 100%; +} +.ui-accordion .ui-accordion-icons { + padding-left: 2.2em; +} +.ui-accordion .ui-accordion-icons .ui-accordion-icons { + padding-left: 2.2em; +} +.ui-accordion .ui-accordion-header .ui-accordion-header-icon { + position: absolute; + left: .5em; + top: 50%; + margin-top: -8px; +} +.ui-accordion .ui-accordion-content { + padding: 1em 2.2em; + border-top: 0; + overflow: auto; +} +.ui-autocomplete { + position: absolute; + top: 0; + left: 0; + cursor: default; +} +.ui-button { + display: inline-block; + position: relative; + padding: 0; + line-height: normal; + margin-right: .1em; + cursor: pointer; + vertical-align: middle; + text-align: center; + overflow: visible; /* removes extra width in IE */ +} +.ui-button, 
+.ui-button:link, +.ui-button:visited, +.ui-button:hover, +.ui-button:active { + text-decoration: none; +} +/* to make room for the icon, a width needs to be set here */ +.ui-button-icon-only { + width: 2.2em; +} +/* button elements seem to need a little more width */ +button.ui-button-icon-only { + width: 2.4em; +} +.ui-button-icons-only { + width: 3.4em; +} +button.ui-button-icons-only { + width: 3.7em; +} + +/* button text element */ +.ui-button .ui-button-text { + display: block; + line-height: normal; +} +.ui-button-text-only .ui-button-text { + padding: .4em 1em; +} +.ui-button-icon-only .ui-button-text, +.ui-button-icons-only .ui-button-text { + padding: .4em; + text-indent: -9999999px; +} +.ui-button-text-icon-primary .ui-button-text, +.ui-button-text-icons .ui-button-text { + padding: .4em 1em .4em 2.1em; +} +.ui-button-text-icon-secondary .ui-button-text, +.ui-button-text-icons .ui-button-text { + padding: .4em 2.1em .4em 1em; +} +.ui-button-text-icons .ui-button-text { + padding-left: 2.1em; + padding-right: 2.1em; +} +/* no icon support for input elements, provide padding by default */ +input.ui-button { + padding: .4em 1em; +} + +/* button icon element(s) */ +.ui-button-icon-only .ui-icon, +.ui-button-text-icon-primary .ui-icon, +.ui-button-text-icon-secondary .ui-icon, +.ui-button-text-icons .ui-icon, +.ui-button-icons-only .ui-icon { + position: absolute; + top: 50%; + margin-top: -8px; +} +.ui-button-icon-only .ui-icon { + left: 50%; + margin-left: -8px; +} +.ui-button-text-icon-primary .ui-button-icon-primary, +.ui-button-text-icons .ui-button-icon-primary, +.ui-button-icons-only .ui-button-icon-primary { + left: .5em; +} +.ui-button-text-icon-secondary .ui-button-icon-secondary, +.ui-button-text-icons .ui-button-icon-secondary, +.ui-button-icons-only .ui-button-icon-secondary { + right: .5em; +} + +/* button sets */ +.ui-buttonset { + margin-right: 7px; +} +.ui-buttonset .ui-button { + margin-left: 0; + margin-right: -.3em; +} + +/* workarounds */ 
+/* reset extra padding in Firefox, see h5bp.com/l */ +input.ui-button::-moz-focus-inner, +button.ui-button::-moz-focus-inner { + border: 0; + padding: 0; +} +.ui-datepicker { + width: 17em; + padding: .2em .2em 0; + display: none; +} +.ui-datepicker .ui-datepicker-header { + position: relative; + padding: .2em 0; +} +.ui-datepicker .ui-datepicker-prev, +.ui-datepicker .ui-datepicker-next { + position: absolute; + top: 2px; + width: 1.8em; + height: 1.8em; +} +.ui-datepicker .ui-datepicker-prev-hover, +.ui-datepicker .ui-datepicker-next-hover { + top: 1px; +} +.ui-datepicker .ui-datepicker-prev { + left: 2px; +} +.ui-datepicker .ui-datepicker-next { + right: 2px; +} +.ui-datepicker .ui-datepicker-prev-hover { + left: 1px; +} +.ui-datepicker .ui-datepicker-next-hover { + right: 1px; +} +.ui-datepicker .ui-datepicker-prev span, +.ui-datepicker .ui-datepicker-next span { + display: block; + position: absolute; + left: 50%; + margin-left: -8px; + top: 50%; + margin-top: -8px; +} +.ui-datepicker .ui-datepicker-title { + margin: 0 2.3em; + line-height: 1.8em; + text-align: center; +} +.ui-datepicker .ui-datepicker-title select { + font-size: 1em; + margin: 1px 0; +} +.ui-datepicker select.ui-datepicker-month, +.ui-datepicker select.ui-datepicker-year { + width: 45%; +} +.ui-datepicker table { + width: 100%; + font-size: .9em; + border-collapse: collapse; + margin: 0 0 .4em; +} +.ui-datepicker th { + padding: .7em .3em; + text-align: center; + font-weight: bold; + border: 0; +} +.ui-datepicker td { + border: 0; + padding: 1px; +} +.ui-datepicker td span, +.ui-datepicker td a { + display: block; + padding: .2em; + text-align: right; + text-decoration: none; +} +.ui-datepicker .ui-datepicker-buttonpane { + background-image: none; + margin: .7em 0 0 0; + padding: 0 .2em; + border-left: 0; + border-right: 0; + border-bottom: 0; +} +.ui-datepicker .ui-datepicker-buttonpane button { + float: right; + margin: .5em .2em .4em; + cursor: pointer; + padding: .2em .6em .3em .6em; + 
width: auto; + overflow: visible; +} +.ui-datepicker .ui-datepicker-buttonpane button.ui-datepicker-current { + float: left; +} + +/* with multiple calendars */ +.ui-datepicker.ui-datepicker-multi { + width: auto; +} +.ui-datepicker-multi .ui-datepicker-group { + float: left; +} +.ui-datepicker-multi .ui-datepicker-group table { + width: 95%; + margin: 0 auto .4em; +} +.ui-datepicker-multi-2 .ui-datepicker-group { + width: 50%; +} +.ui-datepicker-multi-3 .ui-datepicker-group { + width: 33.3%; +} +.ui-datepicker-multi-4 .ui-datepicker-group { + width: 25%; +} +.ui-datepicker-multi .ui-datepicker-group-last .ui-datepicker-header, +.ui-datepicker-multi .ui-datepicker-group-middle .ui-datepicker-header { + border-left-width: 0; +} +.ui-datepicker-multi .ui-datepicker-buttonpane { + clear: left; +} +.ui-datepicker-row-break { + clear: both; + width: 100%; + font-size: 0; +} + +/* RTL support */ +.ui-datepicker-rtl { + direction: rtl; +} +.ui-datepicker-rtl .ui-datepicker-prev { + right: 2px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next { + left: 2px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-prev:hover { + right: 1px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next:hover { + left: 1px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane { + clear: right; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button { + float: left; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button.ui-datepicker-current, +.ui-datepicker-rtl .ui-datepicker-group { + float: right; +} +.ui-datepicker-rtl .ui-datepicker-group-last .ui-datepicker-header, +.ui-datepicker-rtl .ui-datepicker-group-middle .ui-datepicker-header { + border-right-width: 0; + border-left-width: 1px; +} +.ui-dialog { + overflow: hidden; + position: absolute; + top: 0; + left: 0; + padding: .2em; + outline: 0; +} +.ui-dialog .ui-dialog-titlebar { + padding: .4em 1em; + position: relative; +} +.ui-dialog .ui-dialog-title { + float: left; + margin: .1em 0; + white-space: 
nowrap; + width: 90%; + overflow: hidden; + text-overflow: ellipsis; +} +.ui-dialog .ui-dialog-titlebar-close { + position: absolute; + right: .3em; + top: 50%; + width: 20px; + margin: -10px 0 0 0; + padding: 1px; + height: 20px; +} +.ui-dialog .ui-dialog-content { + position: relative; + border: 0; + padding: .5em 1em; + background: none; + overflow: auto; +} +.ui-dialog .ui-dialog-buttonpane { + text-align: left; + border-width: 1px 0 0 0; + background-image: none; + margin-top: .5em; + padding: .3em 1em .5em .4em; +} +.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset { + float: right; +} +.ui-dialog .ui-dialog-buttonpane button { + margin: .5em .4em .5em 0; + cursor: pointer; +} +.ui-dialog .ui-resizable-se { + width: 12px; + height: 12px; + right: -5px; + bottom: -5px; + background-position: 16px 16px; +} +.ui-draggable .ui-dialog-titlebar { + cursor: move; +} +.ui-draggable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-menu { + list-style: none; + padding: 0; + margin: 0; + display: block; + outline: none; +} +.ui-menu .ui-menu { + position: absolute; +} +.ui-menu .ui-menu-item { + position: relative; + margin: 0; + padding: 3px 1em 3px .4em; + cursor: pointer; + min-height: 0; /* support: IE7 */ + /* support: IE10, see #8844 */ + list-style-image: url("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"); +} +.ui-menu .ui-menu-divider { + margin: 5px 0; + height: 0; + font-size: 0; + line-height: 0; + border-width: 1px 0 0 0; +} +.ui-menu .ui-state-focus, +.ui-menu .ui-state-active { + margin: -1px; +} + +/* icon support */ +.ui-menu-icons { + position: relative; +} +.ui-menu-icons .ui-menu-item { + padding-left: 2em; +} + +/* left-aligned */ +.ui-menu .ui-icon { + position: absolute; + top: 0; + bottom: 0; + left: .2em; + margin: auto 0; +} + +/* right-aligned */ +.ui-menu .ui-menu-icon { + left: auto; + right: 0; +} +.ui-progressbar { + height: 2em; + text-align: left; + overflow: hidden; +} 
+.ui-progressbar .ui-progressbar-value { + margin: -1px; + height: 100%; +} +.ui-progressbar .ui-progressbar-overlay { + background: url("data:image/gif;base64,R0lGODlhKAAoAIABAAAAAP///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQJAQABACwAAAAAKAAoAAACkYwNqXrdC52DS06a7MFZI+4FHBCKoDeWKXqymPqGqxvJrXZbMx7Ttc+w9XgU2FB3lOyQRWET2IFGiU9m1frDVpxZZc6bfHwv4c1YXP6k1Vdy292Fb6UkuvFtXpvWSzA+HycXJHUXiGYIiMg2R6W459gnWGfHNdjIqDWVqemH2ekpObkpOlppWUqZiqr6edqqWQAAIfkECQEAAQAsAAAAACgAKAAAApSMgZnGfaqcg1E2uuzDmmHUBR8Qil95hiPKqWn3aqtLsS18y7G1SzNeowWBENtQd+T1JktP05nzPTdJZlR6vUxNWWjV+vUWhWNkWFwxl9VpZRedYcflIOLafaa28XdsH/ynlcc1uPVDZxQIR0K25+cICCmoqCe5mGhZOfeYSUh5yJcJyrkZWWpaR8doJ2o4NYq62lAAACH5BAkBAAEALAAAAAAoACgAAAKVDI4Yy22ZnINRNqosw0Bv7i1gyHUkFj7oSaWlu3ovC8GxNso5fluz3qLVhBVeT/Lz7ZTHyxL5dDalQWPVOsQWtRnuwXaFTj9jVVh8pma9JjZ4zYSj5ZOyma7uuolffh+IR5aW97cHuBUXKGKXlKjn+DiHWMcYJah4N0lYCMlJOXipGRr5qdgoSTrqWSq6WFl2ypoaUAAAIfkECQEAAQAsAAAAACgAKAAAApaEb6HLgd/iO7FNWtcFWe+ufODGjRfoiJ2akShbueb0wtI50zm02pbvwfWEMWBQ1zKGlLIhskiEPm9R6vRXxV4ZzWT2yHOGpWMyorblKlNp8HmHEb/lCXjcW7bmtXP8Xt229OVWR1fod2eWqNfHuMjXCPkIGNileOiImVmCOEmoSfn3yXlJWmoHGhqp6ilYuWYpmTqKUgAAIfkECQEAAQAsAAAAACgAKAAAApiEH6kb58biQ3FNWtMFWW3eNVcojuFGfqnZqSebuS06w5V80/X02pKe8zFwP6EFWOT1lDFk8rGERh1TTNOocQ61Hm4Xm2VexUHpzjymViHrFbiELsefVrn6XKfnt2Q9G/+Xdie499XHd2g4h7ioOGhXGJboGAnXSBnoBwKYyfioubZJ2Hn0RuRZaflZOil56Zp6iioKSXpUAAAh+QQJAQABACwAAAAAKAAoAAACkoQRqRvnxuI7kU1a1UU5bd5tnSeOZXhmn5lWK3qNTWvRdQxP8qvaC+/yaYQzXO7BMvaUEmJRd3TsiMAgswmNYrSgZdYrTX6tSHGZO73ezuAw2uxuQ+BbeZfMxsexY35+/Qe4J1inV0g4x3WHuMhIl2jXOKT2Q+VU5fgoSUI52VfZyfkJGkha6jmY+aaYdirq+lQAACH5BAkBAAEALAAAAAAoACgAAAKWBIKpYe0L3YNKToqswUlvznigd4wiR4KhZrKt9Upqip61i9E3vMvxRdHlbEFiEXfk9YARYxOZZD6VQ2pUunBmtRXo1Lf8hMVVcNl8JafV38aM2/Fu5V16Bn63r6xt97j09+MXSFi4BniGFae3hzbH9+hYBzkpuUh5aZmHuanZOZgIuvbGiNeomCnaxxap2upaCZsq+1kAACH5BAkBAAEALAAAAAAoACgAAAKXjI8By5zf4kOxTVrXNVlv1X0d8IGZGKLnNpYtm8Lr9cqVeuOSvfOW79D9aDHizNhDJidFZhNydEahOaDH6nomtJjp1tutKoNWkvA6JqfRVLHU/QUfau9l2x7G54d1fl995xcIGAdXqMfBNadoYrhH+Mg2KBlpVpbluCiXmMnZ2Sh4GBqJ+ckI
OqqJ6LmKSllZmsoq6wpQAAAh+QQJAQABACwAAAAAKAAoAAAClYx/oLvoxuJDkU1a1YUZbJ59nSd2ZXhWqbRa2/gF8Gu2DY3iqs7yrq+xBYEkYvFSM8aSSObE+ZgRl1BHFZNr7pRCavZ5BW2142hY3AN/zWtsmf12p9XxxFl2lpLn1rseztfXZjdIWIf2s5dItwjYKBgo9yg5pHgzJXTEeGlZuenpyPmpGQoKOWkYmSpaSnqKileI2FAAACH5BAkBAAEALAAAAAAoACgAAAKVjB+gu+jG4kORTVrVhRlsnn2dJ3ZleFaptFrb+CXmO9OozeL5VfP99HvAWhpiUdcwkpBH3825AwYdU8xTqlLGhtCosArKMpvfa1mMRae9VvWZfeB2XfPkeLmm18lUcBj+p5dnN8jXZ3YIGEhYuOUn45aoCDkp16hl5IjYJvjWKcnoGQpqyPlpOhr3aElaqrq56Bq7VAAAOw=="); + height: 100%; + filter: alpha(opacity=25); /* support: IE8 */ + opacity: 0.25; +} +.ui-progressbar-indeterminate .ui-progressbar-value { + background-image: none; +} +.ui-resizable { + position: relative; +} +.ui-resizable-handle { + position: absolute; + font-size: 0.1px; + display: block; + -ms-touch-action: none; + touch-action: none; +} +.ui-resizable-disabled .ui-resizable-handle, +.ui-resizable-autohide .ui-resizable-handle { + display: none; +} +.ui-resizable-n { + cursor: n-resize; + height: 7px; + width: 100%; + top: -5px; + left: 0; +} +.ui-resizable-s { + cursor: s-resize; + height: 7px; + width: 100%; + bottom: -5px; + left: 0; +} +.ui-resizable-e { + cursor: e-resize; + width: 7px; + right: -5px; + top: 0; + height: 100%; +} +.ui-resizable-w { + cursor: w-resize; + width: 7px; + left: -5px; + top: 0; + height: 100%; +} +.ui-resizable-se { + cursor: se-resize; + width: 12px; + height: 12px; + right: 1px; + bottom: 1px; +} +.ui-resizable-sw { + cursor: sw-resize; + width: 9px; + height: 9px; + left: -5px; + bottom: -5px; +} +.ui-resizable-nw { + cursor: nw-resize; + width: 9px; + height: 9px; + left: -5px; + top: -5px; +} +.ui-resizable-ne { + cursor: ne-resize; + width: 9px; + height: 9px; + right: -5px; + top: -5px; +} +.ui-selectable { + -ms-touch-action: none; + touch-action: none; +} +.ui-selectable-helper { + position: absolute; + z-index: 100; + border: 1px dotted black; +} +.ui-selectmenu-menu { + padding: 0; + margin: 0; + position: absolute; + top: 0; + left: 0; + 
display: none; +} +.ui-selectmenu-menu .ui-menu { + overflow: auto; + /* Support: IE7 */ + overflow-x: hidden; + padding-bottom: 1px; +} +.ui-selectmenu-menu .ui-menu .ui-selectmenu-optgroup { + font-size: 1em; + font-weight: bold; + line-height: 1.5; + padding: 2px 0.4em; + margin: 0.5em 0 0 0; + height: auto; + border: 0; +} +.ui-selectmenu-open { + display: block; +} +.ui-selectmenu-button { + display: inline-block; + overflow: hidden; + position: relative; + text-decoration: none; + cursor: pointer; +} +.ui-selectmenu-button span.ui-icon { + right: 0.5em; + left: auto; + margin-top: -8px; + position: absolute; + top: 50%; +} +.ui-selectmenu-button span.ui-selectmenu-text { + text-align: left; + padding: 0.4em 2.1em 0.4em 1em; + display: block; + line-height: 1.4; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} +.ui-slider { + position: relative; + text-align: left; +} +.ui-slider .ui-slider-handle { + position: absolute; + z-index: 2; + width: 1.2em; + height: 1.2em; + cursor: default; + -ms-touch-action: none; + touch-action: none; +} +.ui-slider .ui-slider-range { + position: absolute; + z-index: 1; + font-size: .7em; + display: block; + border: 0; + background-position: 0 0; +} + +/* support: IE8 - See #6727 */ +.ui-slider.ui-state-disabled .ui-slider-handle, +.ui-slider.ui-state-disabled .ui-slider-range { + filter: inherit; +} + +.ui-slider-horizontal { + height: .8em; +} +.ui-slider-horizontal .ui-slider-handle { + top: -.3em; + margin-left: -.6em; +} +.ui-slider-horizontal .ui-slider-range { + top: 0; + height: 100%; +} +.ui-slider-horizontal .ui-slider-range-min { + left: 0; +} +.ui-slider-horizontal .ui-slider-range-max { + right: 0; +} + +.ui-slider-vertical { + width: .8em; + height: 100px; +} +.ui-slider-vertical .ui-slider-handle { + left: -.3em; + margin-left: 0; + margin-bottom: -.6em; +} +.ui-slider-vertical .ui-slider-range { + left: 0; + width: 100%; +} +.ui-slider-vertical .ui-slider-range-min { + bottom: 0; +} 
+.ui-slider-vertical .ui-slider-range-max { + top: 0; +} +.ui-sortable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-spinner { + position: relative; + display: inline-block; + overflow: hidden; + padding: 0; + vertical-align: middle; +} +.ui-spinner-input { + border: none; + background: none; + color: inherit; + padding: 0; + margin: .2em 0; + vertical-align: middle; + margin-left: .4em; + margin-right: 22px; +} +.ui-spinner-button { + width: 16px; + height: 50%; + font-size: .5em; + padding: 0; + margin: 0; + text-align: center; + position: absolute; + cursor: default; + display: block; + overflow: hidden; + right: 0; +} +/* more specificity required here to override default borders */ +.ui-spinner a.ui-spinner-button { + border-top: none; + border-bottom: none; + border-right: none; +} +/* vertically center icon */ +.ui-spinner .ui-icon { + position: absolute; + margin-top: -8px; + top: 50%; + left: 0; +} +.ui-spinner-up { + top: 0; +} +.ui-spinner-down { + bottom: 0; +} + +/* TR overrides */ +.ui-spinner .ui-icon-triangle-1-s { + /* need to fix icons sprite */ + background-position: -65px -16px; +} +.ui-tabs { + position: relative;/* position: relative prevents IE scroll bug (element with position: relative inside container with overflow: auto appear as "fixed") */ + padding: .2em; +} +.ui-tabs .ui-tabs-nav { + margin: 0; + padding: .2em .2em 0; +} +.ui-tabs .ui-tabs-nav li { + list-style: none; + float: left; + position: relative; + top: 0; + margin: 1px .2em 0 0; + border-bottom-width: 0; + padding: 0; + white-space: nowrap; +} +.ui-tabs .ui-tabs-nav .ui-tabs-anchor { + float: left; + padding: .5em 1em; + text-decoration: none; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active { + margin-bottom: -1px; + padding-bottom: 1px; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-state-disabled .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-tabs-loading .ui-tabs-anchor { + cursor: text; +} +.ui-tabs-collapsible 
.ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor { + cursor: pointer; +} +.ui-tabs .ui-tabs-panel { + display: block; + border-width: 0; + padding: 1em 1.4em; + background: none; +} +.ui-tooltip { + padding: 8px; + position: absolute; + z-index: 9999; + max-width: 300px; + -webkit-box-shadow: 0 0 5px #aaa; + box-shadow: 0 0 5px #aaa; +} +body .ui-tooltip { + border-width: 2px; +} + +/* Component containers +----------------------------------*/ +.ui-widget { + font-family: Verdana,Arial,sans-serif; + font-size: 1.1em; +} +.ui-widget .ui-widget { + font-size: 1em; +} +.ui-widget input, +.ui-widget select, +.ui-widget textarea, +.ui-widget button { + font-family: Verdana,Arial,sans-serif; + font-size: 1em; +} +.ui-widget-content { + border: 1px solid #aaaaaa; + background: #ffffff url("images/ui-bg_flat_75_ffffff_40x100.png") 50% 50% repeat-x; + color: #222222; +} +.ui-widget-content a { + color: #222222; +} +.ui-widget-header { + border: 1px solid #aaaaaa; + background: #cccccc url("images/ui-bg_highlight-soft_75_cccccc_1x100.png") 50% 50% repeat-x; + color: #222222; + font-weight: bold; +} +.ui-widget-header a { + color: #222222; +} + +/* Interaction states +----------------------------------*/ +.ui-state-default, +.ui-widget-content .ui-state-default, +.ui-widget-header .ui-state-default { + border: 1px solid #d3d3d3; + background: #e6e6e6 url("images/ui-bg_glass_75_e6e6e6_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #555555; +} +.ui-state-default a, +.ui-state-default a:link, +.ui-state-default a:visited { + color: #555555; + text-decoration: none; +} +.ui-state-hover, +.ui-widget-content .ui-state-hover, +.ui-widget-header .ui-state-hover, +.ui-state-focus, +.ui-widget-content .ui-state-focus, +.ui-widget-header .ui-state-focus { + border: 1px solid #999999; + background: #dadada url("images/ui-bg_glass_75_dadada_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #212121; +} +.ui-state-hover a, +.ui-state-hover a:hover, +.ui-state-hover 
a:link, +.ui-state-hover a:visited, +.ui-state-focus a, +.ui-state-focus a:hover, +.ui-state-focus a:link, +.ui-state-focus a:visited { + color: #212121; + text-decoration: none; +} +.ui-state-active, +.ui-widget-content .ui-state-active, +.ui-widget-header .ui-state-active { + border: 1px solid #aaaaaa; + background: #ffffff url("images/ui-bg_glass_65_ffffff_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #212121; +} +.ui-state-active a, +.ui-state-active a:link, +.ui-state-active a:visited { + color: #212121; + text-decoration: none; +} + +/* Interaction Cues +----------------------------------*/ +.ui-state-highlight, +.ui-widget-content .ui-state-highlight, +.ui-widget-header .ui-state-highlight { + border: 1px solid #fcefa1; + background: #fbf9ee url("images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; + color: #363636; +} +.ui-state-highlight a, +.ui-widget-content .ui-state-highlight a, +.ui-widget-header .ui-state-highlight a { + color: #363636; +} +.ui-state-error, +.ui-widget-content .ui-state-error, +.ui-widget-header .ui-state-error { + border: 1px solid #cd0a0a; + background: #fef1ec url("images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; + color: #cd0a0a; +} +.ui-state-error a, +.ui-widget-content .ui-state-error a, +.ui-widget-header .ui-state-error a { + color: #cd0a0a; +} +.ui-state-error-text, +.ui-widget-content .ui-state-error-text, +.ui-widget-header .ui-state-error-text { + color: #cd0a0a; +} +.ui-priority-primary, +.ui-widget-content .ui-priority-primary, +.ui-widget-header .ui-priority-primary { + font-weight: bold; +} +.ui-priority-secondary, +.ui-widget-content .ui-priority-secondary, +.ui-widget-header .ui-priority-secondary { + opacity: .7; + filter:Alpha(Opacity=70); /* support: IE8 */ + font-weight: normal; +} +.ui-state-disabled, +.ui-widget-content .ui-state-disabled, +.ui-widget-header .ui-state-disabled { + opacity: .35; + filter:Alpha(Opacity=35); /* support: IE8 */ + background-image: none; +} 
+.ui-state-disabled .ui-icon { + filter:Alpha(Opacity=35); /* support: IE8 - See #6059 */ +} + +/* Icons +----------------------------------*/ + +/* states and images */ +.ui-icon { + width: 16px; + height: 16px; +} +.ui-icon, +.ui-widget-content .ui-icon { + background-image: url("images/ui-icons_222222_256x240.png"); +} +.ui-widget-header .ui-icon { + background-image: url("images/ui-icons_222222_256x240.png"); +} +.ui-state-default .ui-icon { + background-image: url("images/ui-icons_888888_256x240.png"); +} +.ui-state-hover .ui-icon, +.ui-state-focus .ui-icon { + background-image: url("images/ui-icons_454545_256x240.png"); +} +.ui-state-active .ui-icon { + background-image: url("images/ui-icons_454545_256x240.png"); +} +.ui-state-highlight .ui-icon { + background-image: url("images/ui-icons_2e83ff_256x240.png"); +} +.ui-state-error .ui-icon, +.ui-state-error-text .ui-icon { + background-image: url("images/ui-icons_cd0a0a_256x240.png"); +} + +/* positioning */ +.ui-icon-blank { background-position: 16px 16px; } +.ui-icon-carat-1-n { background-position: 0 0; } +.ui-icon-carat-1-ne { background-position: -16px 0; } +.ui-icon-carat-1-e { background-position: -32px 0; } +.ui-icon-carat-1-se { background-position: -48px 0; } +.ui-icon-carat-1-s { background-position: -64px 0; } +.ui-icon-carat-1-sw { background-position: -80px 0; } +.ui-icon-carat-1-w { background-position: -96px 0; } +.ui-icon-carat-1-nw { background-position: -112px 0; } +.ui-icon-carat-2-n-s { background-position: -128px 0; } +.ui-icon-carat-2-e-w { background-position: -144px 0; } +.ui-icon-triangle-1-n { background-position: 0 -16px; } +.ui-icon-triangle-1-ne { background-position: -16px -16px; } +.ui-icon-triangle-1-e { background-position: -32px -16px; } +.ui-icon-triangle-1-se { background-position: -48px -16px; } +.ui-icon-triangle-1-s { background-position: -64px -16px; } +.ui-icon-triangle-1-sw { background-position: -80px -16px; } +.ui-icon-triangle-1-w { background-position: -96px -16px; 
} +.ui-icon-triangle-1-nw { background-position: -112px -16px; } +.ui-icon-triangle-2-n-s { background-position: -128px -16px; } +.ui-icon-triangle-2-e-w { background-position: -144px -16px; } +.ui-icon-arrow-1-n { background-position: 0 -32px; } +.ui-icon-arrow-1-ne { background-position: -16px -32px; } +.ui-icon-arrow-1-e { background-position: -32px -32px; } +.ui-icon-arrow-1-se { background-position: -48px -32px; } +.ui-icon-arrow-1-s { background-position: -64px -32px; } +.ui-icon-arrow-1-sw { background-position: -80px -32px; } +.ui-icon-arrow-1-w { background-position: -96px -32px; } +.ui-icon-arrow-1-nw { background-position: -112px -32px; } +.ui-icon-arrow-2-n-s { background-position: -128px -32px; } +.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; } +.ui-icon-arrow-2-e-w { background-position: -160px -32px; } +.ui-icon-arrow-2-se-nw { background-position: -176px -32px; } +.ui-icon-arrowstop-1-n { background-position: -192px -32px; } +.ui-icon-arrowstop-1-e { background-position: -208px -32px; } +.ui-icon-arrowstop-1-s { background-position: -224px -32px; } +.ui-icon-arrowstop-1-w { background-position: -240px -32px; } +.ui-icon-arrowthick-1-n { background-position: 0 -48px; } +.ui-icon-arrowthick-1-ne { background-position: -16px -48px; } +.ui-icon-arrowthick-1-e { background-position: -32px -48px; } +.ui-icon-arrowthick-1-se { background-position: -48px -48px; } +.ui-icon-arrowthick-1-s { background-position: -64px -48px; } +.ui-icon-arrowthick-1-sw { background-position: -80px -48px; } +.ui-icon-arrowthick-1-w { background-position: -96px -48px; } +.ui-icon-arrowthick-1-nw { background-position: -112px -48px; } +.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; } +.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; } +.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; } +.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; } +.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; } 
+.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; } +.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; } +.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; } +.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; } +.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; } +.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; } +.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; } +.ui-icon-arrowreturn-1-w { background-position: -64px -64px; } +.ui-icon-arrowreturn-1-n { background-position: -80px -64px; } +.ui-icon-arrowreturn-1-e { background-position: -96px -64px; } +.ui-icon-arrowreturn-1-s { background-position: -112px -64px; } +.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; } +.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; } +.ui-icon-arrowrefresh-1-e { background-position: -160px -64px; } +.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; } +.ui-icon-arrow-4 { background-position: 0 -80px; } +.ui-icon-arrow-4-diag { background-position: -16px -80px; } +.ui-icon-extlink { background-position: -32px -80px; } +.ui-icon-newwin { background-position: -48px -80px; } +.ui-icon-refresh { background-position: -64px -80px; } +.ui-icon-shuffle { background-position: -80px -80px; } +.ui-icon-transfer-e-w { background-position: -96px -80px; } +.ui-icon-transferthick-e-w { background-position: -112px -80px; } +.ui-icon-folder-collapsed { background-position: 0 -96px; } +.ui-icon-folder-open { background-position: -16px -96px; } +.ui-icon-document { background-position: -32px -96px; } +.ui-icon-document-b { background-position: -48px -96px; } +.ui-icon-note { background-position: -64px -96px; } +.ui-icon-mail-closed { background-position: -80px -96px; } +.ui-icon-mail-open { background-position: -96px -96px; } +.ui-icon-suitcase { background-position: -112px -96px; } +.ui-icon-comment { background-position: -128px -96px; } 
+.ui-icon-person { background-position: -144px -96px; } +.ui-icon-print { background-position: -160px -96px; } +.ui-icon-trash { background-position: -176px -96px; } +.ui-icon-locked { background-position: -192px -96px; } +.ui-icon-unlocked { background-position: -208px -96px; } +.ui-icon-bookmark { background-position: -224px -96px; } +.ui-icon-tag { background-position: -240px -96px; } +.ui-icon-home { background-position: 0 -112px; } +.ui-icon-flag { background-position: -16px -112px; } +.ui-icon-calendar { background-position: -32px -112px; } +.ui-icon-cart { background-position: -48px -112px; } +.ui-icon-pencil { background-position: -64px -112px; } +.ui-icon-clock { background-position: -80px -112px; } +.ui-icon-disk { background-position: -96px -112px; } +.ui-icon-calculator { background-position: -112px -112px; } +.ui-icon-zoomin { background-position: -128px -112px; } +.ui-icon-zoomout { background-position: -144px -112px; } +.ui-icon-search { background-position: -160px -112px; } +.ui-icon-wrench { background-position: -176px -112px; } +.ui-icon-gear { background-position: -192px -112px; } +.ui-icon-heart { background-position: -208px -112px; } +.ui-icon-star { background-position: -224px -112px; } +.ui-icon-link { background-position: -240px -112px; } +.ui-icon-cancel { background-position: 0 -128px; } +.ui-icon-plus { background-position: -16px -128px; } +.ui-icon-plusthick { background-position: -32px -128px; } +.ui-icon-minus { background-position: -48px -128px; } +.ui-icon-minusthick { background-position: -64px -128px; } +.ui-icon-close { background-position: -80px -128px; } +.ui-icon-closethick { background-position: -96px -128px; } +.ui-icon-key { background-position: -112px -128px; } +.ui-icon-lightbulb { background-position: -128px -128px; } +.ui-icon-scissors { background-position: -144px -128px; } +.ui-icon-clipboard { background-position: -160px -128px; } +.ui-icon-copy { background-position: -176px -128px; } +.ui-icon-contact { 
background-position: -192px -128px; } +.ui-icon-image { background-position: -208px -128px; } +.ui-icon-video { background-position: -224px -128px; } +.ui-icon-script { background-position: -240px -128px; } +.ui-icon-alert { background-position: 0 -144px; } +.ui-icon-info { background-position: -16px -144px; } +.ui-icon-notice { background-position: -32px -144px; } +.ui-icon-help { background-position: -48px -144px; } +.ui-icon-check { background-position: -64px -144px; } +.ui-icon-bullet { background-position: -80px -144px; } +.ui-icon-radio-on { background-position: -96px -144px; } +.ui-icon-radio-off { background-position: -112px -144px; } +.ui-icon-pin-w { background-position: -128px -144px; } +.ui-icon-pin-s { background-position: -144px -144px; } +.ui-icon-play { background-position: 0 -160px; } +.ui-icon-pause { background-position: -16px -160px; } +.ui-icon-seek-next { background-position: -32px -160px; } +.ui-icon-seek-prev { background-position: -48px -160px; } +.ui-icon-seek-end { background-position: -64px -160px; } +.ui-icon-seek-start { background-position: -80px -160px; } +/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */ +.ui-icon-seek-first { background-position: -80px -160px; } +.ui-icon-stop { background-position: -96px -160px; } +.ui-icon-eject { background-position: -112px -160px; } +.ui-icon-volume-off { background-position: -128px -160px; } +.ui-icon-volume-on { background-position: -144px -160px; } +.ui-icon-power { background-position: 0 -176px; } +.ui-icon-signal-diag { background-position: -16px -176px; } +.ui-icon-signal { background-position: -32px -176px; } +.ui-icon-battery-0 { background-position: -48px -176px; } +.ui-icon-battery-1 { background-position: -64px -176px; } +.ui-icon-battery-2 { background-position: -80px -176px; } +.ui-icon-battery-3 { background-position: -96px -176px; } +.ui-icon-circle-plus { background-position: 0 -192px; } +.ui-icon-circle-minus { background-position: -16px -192px; } 
+.ui-icon-circle-close { background-position: -32px -192px; } +.ui-icon-circle-triangle-e { background-position: -48px -192px; } +.ui-icon-circle-triangle-s { background-position: -64px -192px; } +.ui-icon-circle-triangle-w { background-position: -80px -192px; } +.ui-icon-circle-triangle-n { background-position: -96px -192px; } +.ui-icon-circle-arrow-e { background-position: -112px -192px; } +.ui-icon-circle-arrow-s { background-position: -128px -192px; } +.ui-icon-circle-arrow-w { background-position: -144px -192px; } +.ui-icon-circle-arrow-n { background-position: -160px -192px; } +.ui-icon-circle-zoomin { background-position: -176px -192px; } +.ui-icon-circle-zoomout { background-position: -192px -192px; } +.ui-icon-circle-check { background-position: -208px -192px; } +.ui-icon-circlesmall-plus { background-position: 0 -208px; } +.ui-icon-circlesmall-minus { background-position: -16px -208px; } +.ui-icon-circlesmall-close { background-position: -32px -208px; } +.ui-icon-squaresmall-plus { background-position: -48px -208px; } +.ui-icon-squaresmall-minus { background-position: -64px -208px; } +.ui-icon-squaresmall-close { background-position: -80px -208px; } +.ui-icon-grip-dotted-vertical { background-position: 0 -224px; } +.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; } +.ui-icon-grip-solid-vertical { background-position: -32px -224px; } +.ui-icon-grip-solid-horizontal { background-position: -48px -224px; } +.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; } +.ui-icon-grip-diagonal-se { background-position: -80px -224px; } + + +/* Misc visuals +----------------------------------*/ + +/* Corner radius */ +.ui-corner-all, +.ui-corner-top, +.ui-corner-left, +.ui-corner-tl { + border-top-left-radius: 4px; +} +.ui-corner-all, +.ui-corner-top, +.ui-corner-right, +.ui-corner-tr { + border-top-right-radius: 4px; +} +.ui-corner-all, +.ui-corner-bottom, +.ui-corner-left, +.ui-corner-bl { + border-bottom-left-radius: 4px; +} 
+.ui-corner-all, +.ui-corner-bottom, +.ui-corner-right, +.ui-corner-br { + border-bottom-right-radius: 4px; +} + +/* Overlays */ +.ui-widget-overlay { + background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + opacity: .3; + filter: Alpha(Opacity=30); /* support: IE8 */ +} +.ui-widget-shadow { + margin: -8px 0 0 -8px; + padding: 8px; + background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + opacity: .3; + filter: Alpha(Opacity=30); /* support: IE8 */ + border-radius: 8px; +} diff --git a/LetterDMS/jspsych/examples/data-add-properties.html b/LetterDMS/jspsych/examples/data-add-properties.html new file mode 100644 index 0000000..d58998d --- /dev/null +++ b/LetterDMS/jspsych/examples/data-add-properties.html @@ -0,0 +1,44 @@ + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/data-as-function.html b/LetterDMS/jspsych/examples/data-as-function.html new file mode 100644 index 0000000..a03399c --- /dev/null +++ b/LetterDMS/jspsych/examples/data-as-function.html @@ -0,0 +1,39 @@ + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/data-from-timeline.html b/LetterDMS/jspsych/examples/data-from-timeline.html new file mode 100644 index 0000000..0d9291e --- /dev/null +++ b/LetterDMS/jspsych/examples/data-from-timeline.html @@ -0,0 +1,52 @@ + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/data-from-url.html b/LetterDMS/jspsych/examples/data-from-url.html new file mode 100644 index 0000000..975527e --- /dev/null +++ b/LetterDMS/jspsych/examples/data-from-url.html @@ -0,0 +1,21 @@ + + + + + + + +

The URL variable should be logged to the console

+ + + diff --git a/LetterDMS/jspsych/examples/demo-flanker.html b/LetterDMS/jspsych/examples/demo-flanker.html new file mode 100644 index 0000000..f36f85d --- /dev/null +++ b/LetterDMS/jspsych/examples/demo-flanker.html @@ -0,0 +1,117 @@ + + + + Flanker Task + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/demo-simple-rt-task.html b/LetterDMS/jspsych/examples/demo-simple-rt-task.html new file mode 100644 index 0000000..e1356d2 --- /dev/null +++ b/LetterDMS/jspsych/examples/demo-simple-rt-task.html @@ -0,0 +1,120 @@ + + + + + My experiment + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/jspsych/examples/demos/demo_1.html b/LetterDMS/jspsych/examples/demos/demo_1.html new file mode 100644 index 0000000..c450396 --- /dev/null +++ b/LetterDMS/jspsych/examples/demos/demo_1.html @@ -0,0 +1,35 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/jspsych/examples/demos/demo_2.html b/LetterDMS/jspsych/examples/demos/demo_2.html new file mode 100644 index 0000000..271a746 --- /dev/null +++ b/LetterDMS/jspsych/examples/demos/demo_2.html @@ -0,0 +1,50 @@ + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/demos/demo_3.html b/LetterDMS/jspsych/examples/demos/demo_3.html new file mode 100644 index 0000000..8f2806f --- /dev/null +++ b/LetterDMS/jspsych/examples/demos/demo_3.html @@ -0,0 +1,63 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/jspsych/examples/display-element-to-embed-experiment.html b/LetterDMS/jspsych/examples/display-element-to-embed-experiment.html new file mode 100644 index 0000000..5cc5a0f --- /dev/null +++ b/LetterDMS/jspsych/examples/display-element-to-embed-experiment.html @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + +
+

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer nec odio. Praesent libero. Sed cursus ante dapibus diam. Sed nisi. Nulla quis sem at nibh elementum imperdiet. Duis sagittis ipsum. Praesent mauris. Fusce nec tellus sed augue semper porta. Mauris massa. Vestibulum lacinia arcu eget nulla.

+ +

Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Curabitur sodales ligula in libero. Sed dignissim lacinia nunc. Curabitur tortor. Pellentesque nibh. Aenean quam. In scelerisque sem at dolor. Maecenas mattis. Sed convallis tristique sem. Proin ut ligula vel nunc egestas porttitor. Morbi lectus risus, iaculis vel, suscipit quis, luctus non, massa.

+ +

Fusce ac turpis quis ligula lacinia aliquet. Mauris ipsum. Nulla metus metus, ullamcorper vel, tincidunt sed, euismod in, nibh. Quisque volutpat condimentum velit. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nam nec ante. Sed lacinia, urna non tincidunt mattis, tortor neque adipiscing diam, a cursus ipsum ante quis turpis. Nulla facilisi. Ut fringilla. Suspendisse potenti. Nunc feugiat mi a tellus consequat imperdiet. Vestibulum sapien. Proin quam. Etiam ultrices.

+ +

Suspendisse in justo eu magna luctus suscipit. Sed lectus. Integer euismod lacus luctus magna. Quisque cursus, metus vitae pharetra auctor, sem massa mattis sem, at interdum magna augue eget diam. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Morbi lacinia molestie dui. Praesent blandit dolor. Sed non quam. In vel mi sit amet augue congue elementum. Morbi in ipsum sit amet pede facilisis laoreet. Donec lacus nunc, viverra nec, blandit vel, egestas et, augue. Vestibulum tincidunt malesuada tellus.

+ +

Ut ultrices ultrices enim. Curabitur sit amet mauris. Morbi in dui quis est pulvinar ullamcorper. Nulla facilisi. Integer lacinia sollicitudin massa. Cras metus. Sed aliquet risus a tortor. Integer id quam. Morbi mi. Quisque nisl felis, venenatis tristique, dignissim in, ultrices sit amet, augue. Proin sodales libero eget ante. Nulla quam.

+ + + + + + diff --git a/LetterDMS/jspsych/examples/end-active-node.html b/LetterDMS/jspsych/examples/end-active-node.html new file mode 100644 index 0000000..7f335cd --- /dev/null +++ b/LetterDMS/jspsych/examples/end-active-node.html @@ -0,0 +1,52 @@ + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/end-experiment.html b/LetterDMS/jspsych/examples/end-experiment.html new file mode 100644 index 0000000..c620aa0 --- /dev/null +++ b/LetterDMS/jspsych/examples/end-experiment.html @@ -0,0 +1,45 @@ + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/exclusions.html b/LetterDMS/jspsych/examples/exclusions.html new file mode 100644 index 0000000..7e5d31b --- /dev/null +++ b/LetterDMS/jspsych/examples/exclusions.html @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + diff --git a/LetterDMS/jspsych/examples/external_html/simple_consent.html b/LetterDMS/jspsych/examples/external_html/simple_consent.html new file mode 100644 index 0000000..212f27d --- /dev/null +++ b/LetterDMS/jspsych/examples/external_html/simple_consent.html @@ -0,0 +1,4 @@ +

This is a demo consent form. Click the checkbox below to indicate the you + would like to participate in the experiment

+

I agree to take part in this study.

+ diff --git a/LetterDMS/jspsych/examples/img/1.gif b/LetterDMS/jspsych/examples/img/1.gif new file mode 100644 index 0000000..f4511d9 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/1.gif differ diff --git a/LetterDMS/jspsych/examples/img/10.gif b/LetterDMS/jspsych/examples/img/10.gif new file mode 100644 index 0000000..1f25f1a Binary files /dev/null and b/LetterDMS/jspsych/examples/img/10.gif differ diff --git a/LetterDMS/jspsych/examples/img/11.gif b/LetterDMS/jspsych/examples/img/11.gif new file mode 100644 index 0000000..f54b3d6 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/11.gif differ diff --git a/LetterDMS/jspsych/examples/img/12.gif b/LetterDMS/jspsych/examples/img/12.gif new file mode 100644 index 0000000..053ecf6 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/12.gif differ diff --git a/LetterDMS/jspsych/examples/img/2.gif b/LetterDMS/jspsych/examples/img/2.gif new file mode 100644 index 0000000..8900090 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/2.gif differ diff --git a/LetterDMS/jspsych/examples/img/3.gif b/LetterDMS/jspsych/examples/img/3.gif new file mode 100644 index 0000000..b6205d1 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/3.gif differ diff --git a/LetterDMS/jspsych/examples/img/4.gif b/LetterDMS/jspsych/examples/img/4.gif new file mode 100644 index 0000000..1d2de35 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/4.gif differ diff --git a/LetterDMS/jspsych/examples/img/5.gif b/LetterDMS/jspsych/examples/img/5.gif new file mode 100644 index 0000000..0c8ce98 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/5.gif differ diff --git a/LetterDMS/jspsych/examples/img/6.gif b/LetterDMS/jspsych/examples/img/6.gif new file mode 100644 index 0000000..59149da Binary files /dev/null and b/LetterDMS/jspsych/examples/img/6.gif differ diff --git a/LetterDMS/jspsych/examples/img/7.gif b/LetterDMS/jspsych/examples/img/7.gif new file mode 100644 index 0000000..6b3ea1b 
Binary files /dev/null and b/LetterDMS/jspsych/examples/img/7.gif differ diff --git a/LetterDMS/jspsych/examples/img/8.gif b/LetterDMS/jspsych/examples/img/8.gif new file mode 100644 index 0000000..723ab75 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/8.gif differ diff --git a/LetterDMS/jspsych/examples/img/9.gif b/LetterDMS/jspsych/examples/img/9.gif new file mode 100644 index 0000000..dabe68d Binary files /dev/null and b/LetterDMS/jspsych/examples/img/9.gif differ diff --git a/LetterDMS/jspsych/examples/img/age/of1.jpg b/LetterDMS/jspsych/examples/img/age/of1.jpg new file mode 100644 index 0000000..13a406d Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/of1.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/of2.jpg b/LetterDMS/jspsych/examples/img/age/of2.jpg new file mode 100644 index 0000000..0c10e81 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/of2.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/of3.jpg b/LetterDMS/jspsych/examples/img/age/of3.jpg new file mode 100644 index 0000000..e20f977 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/of3.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/om1.jpg b/LetterDMS/jspsych/examples/img/age/om1.jpg new file mode 100644 index 0000000..0b4335b Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/om1.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/om2.jpg b/LetterDMS/jspsych/examples/img/age/om2.jpg new file mode 100644 index 0000000..3930e06 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/om2.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/om3.jpg b/LetterDMS/jspsych/examples/img/age/om3.jpg new file mode 100644 index 0000000..f74917e Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/om3.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/yf1.jpg b/LetterDMS/jspsych/examples/img/age/yf1.jpg new file mode 100644 index 0000000..c13b936 Binary files /dev/null 
and b/LetterDMS/jspsych/examples/img/age/yf1.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/yf4.jpg b/LetterDMS/jspsych/examples/img/age/yf4.jpg new file mode 100644 index 0000000..d4d61da Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/yf4.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/yf5.jpg b/LetterDMS/jspsych/examples/img/age/yf5.jpg new file mode 100644 index 0000000..9471606 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/yf5.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/ym2.jpg b/LetterDMS/jspsych/examples/img/age/ym2.jpg new file mode 100644 index 0000000..a56e70e Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/ym2.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/ym3.jpg b/LetterDMS/jspsych/examples/img/age/ym3.jpg new file mode 100644 index 0000000..33847a4 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/ym3.jpg differ diff --git a/LetterDMS/jspsych/examples/img/age/ym5.jpg b/LetterDMS/jspsych/examples/img/age/ym5.jpg new file mode 100644 index 0000000..4e25343 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/age/ym5.jpg differ diff --git a/LetterDMS/jspsych/examples/img/backwardN.gif b/LetterDMS/jspsych/examples/img/backwardN.gif new file mode 100644 index 0000000..38a2164 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/backwardN.gif differ diff --git a/LetterDMS/jspsych/examples/img/blue.png b/LetterDMS/jspsych/examples/img/blue.png new file mode 100644 index 0000000..820bdce Binary files /dev/null and b/LetterDMS/jspsych/examples/img/blue.png differ diff --git a/LetterDMS/jspsych/examples/img/card.png b/LetterDMS/jspsych/examples/img/card.png new file mode 100644 index 0000000..ca8de4f Binary files /dev/null and b/LetterDMS/jspsych/examples/img/card.png differ diff --git a/LetterDMS/jspsych/examples/img/con1.png b/LetterDMS/jspsych/examples/img/con1.png new file mode 100644 index 0000000..9b311d2 Binary files /dev/null 
and b/LetterDMS/jspsych/examples/img/con1.png differ diff --git a/LetterDMS/jspsych/examples/img/con2.png b/LetterDMS/jspsych/examples/img/con2.png new file mode 100644 index 0000000..884bcbb Binary files /dev/null and b/LetterDMS/jspsych/examples/img/con2.png differ diff --git a/LetterDMS/jspsych/examples/img/fixation.gif b/LetterDMS/jspsych/examples/img/fixation.gif new file mode 100644 index 0000000..ff76c92 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/fixation.gif differ diff --git a/LetterDMS/jspsych/examples/img/happy_face_1.jpg b/LetterDMS/jspsych/examples/img/happy_face_1.jpg new file mode 100644 index 0000000..00891ce Binary files /dev/null and b/LetterDMS/jspsych/examples/img/happy_face_1.jpg differ diff --git a/LetterDMS/jspsych/examples/img/happy_face_2.jpg b/LetterDMS/jspsych/examples/img/happy_face_2.jpg new file mode 100644 index 0000000..c895ca2 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/happy_face_2.jpg differ diff --git a/LetterDMS/jspsych/examples/img/happy_face_3.jpg b/LetterDMS/jspsych/examples/img/happy_face_3.jpg new file mode 100644 index 0000000..d00fe2f Binary files /dev/null and b/LetterDMS/jspsych/examples/img/happy_face_3.jpg differ diff --git a/LetterDMS/jspsych/examples/img/happy_face_4.jpg b/LetterDMS/jspsych/examples/img/happy_face_4.jpg new file mode 100644 index 0000000..843cb19 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/happy_face_4.jpg differ diff --git a/LetterDMS/jspsych/examples/img/inc1.png b/LetterDMS/jspsych/examples/img/inc1.png new file mode 100644 index 0000000..4710286 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/inc1.png differ diff --git a/LetterDMS/jspsych/examples/img/inc2.png b/LetterDMS/jspsych/examples/img/inc2.png new file mode 100644 index 0000000..cfb43bb Binary files /dev/null and b/LetterDMS/jspsych/examples/img/inc2.png differ diff --git a/LetterDMS/jspsych/examples/img/normalN.gif b/LetterDMS/jspsych/examples/img/normalN.gif new file mode 
100644 index 0000000..dc5593d Binary files /dev/null and b/LetterDMS/jspsych/examples/img/normalN.gif differ diff --git a/LetterDMS/jspsych/examples/img/orange.png b/LetterDMS/jspsych/examples/img/orange.png new file mode 100644 index 0000000..108e6e5 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/orange.png differ diff --git a/LetterDMS/jspsych/examples/img/redX.png b/LetterDMS/jspsych/examples/img/redX.png new file mode 100644 index 0000000..55eeb03 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/redX.png differ diff --git a/LetterDMS/jspsych/examples/img/ribbon.jpg b/LetterDMS/jspsych/examples/img/ribbon.jpg new file mode 100644 index 0000000..cf20cc6 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/ribbon.jpg differ diff --git a/LetterDMS/jspsych/examples/img/sad_face_1.jpg b/LetterDMS/jspsych/examples/img/sad_face_1.jpg new file mode 100644 index 0000000..2a12ac2 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/sad_face_1.jpg differ diff --git a/LetterDMS/jspsych/examples/img/sad_face_2.jpg b/LetterDMS/jspsych/examples/img/sad_face_2.jpg new file mode 100644 index 0000000..17ca674 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/sad_face_2.jpg differ diff --git a/LetterDMS/jspsych/examples/img/sad_face_3.jpg b/LetterDMS/jspsych/examples/img/sad_face_3.jpg new file mode 100644 index 0000000..3edd256 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/sad_face_3.jpg differ diff --git a/LetterDMS/jspsych/examples/img/sad_face_4.jpg b/LetterDMS/jspsych/examples/img/sad_face_4.jpg new file mode 100644 index 0000000..c8b7a75 Binary files /dev/null and b/LetterDMS/jspsych/examples/img/sad_face_4.jpg differ diff --git a/LetterDMS/jspsych/examples/js/snap.svg-min.js b/LetterDMS/jspsych/examples/js/snap.svg-min.js new file mode 100644 index 0000000..23a7343 --- /dev/null +++ b/LetterDMS/jspsych/examples/js/snap.svg-min.js @@ -0,0 +1,21 @@ +// Snap.svg 0.5.1 +// +// Copyright (c) 2013 – 2017 Adobe Systems 
Incorporated. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// build: 2017-02-07 + +!function(a){var b,c,d="0.5.0",e="hasOwnProperty",f=/[\.\/]/,g=/\s*,\s*/,h="*",i=function(a,b){return a-b},j={n:{}},k=function(){for(var a=0,b=this.length;b>a;a++)if("undefined"!=typeof this[a])return this[a]},l=function(){for(var a=this.length;--a;)if("undefined"!=typeof this[a])return this[a]},m=Object.prototype.toString,n=String,o=Array.isArray||function(a){return a instanceof Array||"[object Array]"==m.call(a)};eve=function(a,d){var e,f=c,g=Array.prototype.slice.call(arguments,2),h=eve.listeners(a),j=0,m=[],n={},o=[],p=b;o.firstDefined=k,o.lastDefined=l,b=a,c=0;for(var q=0,r=h.length;r>q;q++)"zIndex"in h[q]&&(m.push(h[q].zIndex),h[q].zIndex<0&&(n[h[q].zIndex]=h[q]));for(m.sort(i);m[j]<0;)if(e=n[m[j++]],o.push(e.apply(d,g)),c)return c=f,o;for(q=0;r>q;q++)if(e=h[q],"zIndex"in e)if(e.zIndex==m[j]){if(o.push(e.apply(d,g)),c)break;do if(j++,e=n[m[j]],e&&o.push(e.apply(d,g)),c)break;while(e)}else n[e.zIndex]=e;else if(o.push(e.apply(d,g)),c)break;return c=f,b=p,o},eve._events=j,eve.listeners=function(a){var b,c,d,e,g,i,k,l,m=o(a)?a:a.split(f),n=j,p=[n],q=[];for(e=0,g=m.length;g>e;e++){for(l=[],i=0,k=p.length;k>i;i++)for(n=p[i].n,c=[n[m[e]],n[h]],d=2;d--;)b=c[d],b&&(l.push(b),q=q.concat(b.f||[]));p=l}return q},eve.separator=function(a){a?(a=n(a).replace(/(?=[\.\^\]\[\-])/g,"\\"),a="["+a+"]",f=new 
RegExp(a)):f=/[\.\/]/},eve.on=function(a,b){if("function"!=typeof b)return function(){};for(var c=o(a)?o(a[0])?a:[a]:n(a).split(g),d=0,e=c.length;e>d;d++)!function(a){for(var c,d=o(a)?a:n(a).split(f),e=j,g=0,h=d.length;h>g;g++)e=e.n,e=e.hasOwnProperty(d[g])&&e[d[g]]||(e[d[g]]={n:{}});for(e.f=e.f||[],g=0,h=e.f.length;h>g;g++)if(e.f[g]==b){c=!0;break}!c&&e.f.push(b)}(c[d]);return function(a){+a==+a&&(b.zIndex=+a)}},eve.f=function(a){var b=[].slice.call(arguments,1);return function(){eve.apply(null,[a,null].concat(b).concat([].slice.call(arguments,0)))}},eve.stop=function(){c=1},eve.nt=function(a){var c=o(b)?b.join("."):b;return a?new RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)").test(c):c},eve.nts=function(){return o(b)?b:b.split(f)},eve.off=eve.unbind=function(a,b){if(!a)return void(eve._events=j={n:{}});var c=o(a)?o(a[0])?a:[a]:n(a).split(g);if(c.length>1)for(var d=0,i=c.length;i>d;d++)eve.off(c[d],b);else{c=o(a)?a:n(a).split(f);var k,l,m,d,i,p,q,r=[j],s=[];for(d=0,i=c.length;i>d;d++)for(p=0;pd;d++)for(k=r[d];k.n;){if(b){if(k.f){for(p=0,q=k.f.length;q>p;p++)if(k.f[p]==b){k.f.splice(p,1);break}!k.f.length&&delete k.f}for(l in k.n)if(k.n[e](l)&&k.n[l].f){var t=k.n[l].f;for(p=0,q=t.length;q>p;p++)if(t[p]==b){t.splice(p,1);break}!t.length&&delete k.n[l].f}}else{delete k.f;for(l in k.n)k.n[e](l)&&k.n[l].f&&delete k.n[l].f}k=k.n}a:for(d=0,i=s.length;i>d;d++){k=s[d];for(l in k.n[k.name].f)continue a;for(l in k.n[k.name].n)continue a;delete k.n[k.name]}}},eve.once=function(a,b){var c=function(){return eve.off(a,c),b.apply(this,arguments)};return eve.on(a,c)},eve.version=d,eve.toString=function(){return"You are running Eve "+d},"undefined"!=typeof module&&module.exports?module.exports=eve:"function"==typeof define&&define.amd?define("eve",[],function(){return eve}):a.eve=eve}(this),function(a,b){if("function"==typeof define&&define.amd)define(["eve"],function(c){return b(a,c)});else if("undefined"!=typeof exports){var c=require("eve");module.exports=b(a,c)}else 
b(a,a.eve)}(window||this,function(a,b){var c=function(b){var c,d={},e=a.requestAnimationFrame||a.webkitRequestAnimationFrame||a.mozRequestAnimationFrame||a.oRequestAnimationFrame||a.msRequestAnimationFrame||function(a){return setTimeout(a,16,(new Date).getTime()),!0},f=Array.isArray||function(a){return a instanceof Array||"[object Array]"==Object.prototype.toString.call(a)},g=0,h="M"+(+new Date).toString(36),i=function(){return h+(g++).toString(36)},j=Date.now||function(){return+new Date},k=function(a){var b=this;if(null==a)return b.s;var c=b.s-a;b.b+=b.dur*c,b.B+=b.dur*c,b.s=a},l=function(a){var b=this;return null==a?b.spd:void(b.spd=a)},m=function(a){var b=this;return null==a?b.dur:(b.s=b.s*a/b.dur,void(b.dur=a))},n=function(){var a=this;delete d[a.id],a.update(),b("mina.stop."+a.id,a)},o=function(){var a=this;a.pdif||(delete d[a.id],a.update(),a.pdif=a.get()-a.b)},p=function(){var a=this;a.pdif&&(a.b=a.get()-a.pdif,delete a.pdif,d[a.id]=a,r())},q=function(){var a,b=this;if(f(b.start)){a=[];for(var c=0,d=b.start.length;d>c;c++)a[c]=+b.start[c]+(b.end[c]-b.start[c])*b.easing(b.s)}else a=+b.start+(b.end-b.start)*b.easing(b.s);b.set(a)},r=function(a){if(!a)return void(c||(c=e(r)));var f=0;for(var g in d)if(d.hasOwnProperty(g)){var h=d[g],i=h.get();f++,h.s=(i-h.b)/(h.dur/h.spd),h.s>=1&&(delete d[g],h.s=1,f--,function(a){setTimeout(function(){b("mina.finish."+a.id,a)})}(h)),h.update()}c=f?e(r):!1},s=function(a,b,c,e,f,g,h){var j={id:i(),start:a,end:b,b:c,s:0,dur:e-c,spd:1,get:f,set:g,easing:h||s.linear,status:k,speed:l,duration:m,stop:n,pause:o,resume:p,update:q};d[j.id]=j;var t,u=0;for(t in d)if(d.hasOwnProperty(t)&&(u++,2==u))break;return 1==u&&r(),j};return s.time=j,s.getById=function(a){return d[a]||null},s.linear=function(a){return a},s.easeout=function(a){return Math.pow(a,1.7)},s.easein=function(a){return Math.pow(a,.48)},s.easeinout=function(a){if(1==a)return 1;if(0==a)return 0;var 
b=.48-a/1.04,c=Math.sqrt(.1734+b*b),d=c-b,e=Math.pow(Math.abs(d),1/3)*(0>d?-1:1),f=-c-b,g=Math.pow(Math.abs(f),1/3)*(0>f?-1:1),h=e+g+.5;return 3*(1-h)*h*h+h*h*h},s.backin=function(a){if(1==a)return 1;var b=1.70158;return a*a*((b+1)*a-b)},s.backout=function(a){if(0==a)return 0;a-=1;var b=1.70158;return a*a*((b+1)*a+b)+1},s.elastic=function(a){return a==!!a?a:Math.pow(2,-10*a)*Math.sin((a-.075)*(2*Math.PI)/.3)+1},s.bounce=function(a){var b,c=7.5625,d=2.75;return 1/d>a?b=c*a*a:2/d>a?(a-=1.5/d,b=c*a*a+.75):2.5/d>a?(a-=2.25/d,b=c*a*a+.9375):(a-=2.625/d,b=c*a*a+.984375),b},a.mina=s,s}("undefined"==typeof b?function(){}:b),d=function(a){function c(a,b){if(a){if(a.nodeType)return w(a);if(e(a,"array")&&c.set)return c.set.apply(c,a);if(a instanceof s)return a;if(null==b)try{return a=y.doc.querySelector(String(a)),w(a)}catch(d){return null}}return a=null==a?"100%":a,b=null==b?"100%":b,new v(a,b)}function d(a,b){if(b){if("#text"==a&&(a=y.doc.createTextNode(b.text||b["#text"]||"")),"#comment"==a&&(a=y.doc.createComment(b.text||b["#text"]||"")),"string"==typeof a&&(a=d(a)),"string"==typeof b)return 1==a.nodeType?"xlink:"==b.substring(0,6)?a.getAttributeNS(T,b.substring(6)):"xml:"==b.substring(0,4)?a.getAttributeNS(U,b.substring(4)):a.getAttribute(b):"text"==b?a.nodeValue:null;if(1==a.nodeType){for(var c in b)if(b[z](c)){var e=A(b[c]);e?"xlink:"==c.substring(0,6)?a.setAttributeNS(T,c.substring(6),e):"xml:"==c.substring(0,4)?a.setAttributeNS(U,c.substring(4),e):a.setAttribute(c,e):a.removeAttribute(c)}}else"text"in b&&(a.nodeValue=b.text)}else a=y.doc.createElementNS(U,a);return a}function e(a,b){return b=A.prototype.toLowerCase.call(b),"finite"==b?isFinite(a):"array"==b&&(a instanceof Array||Array.isArray&&Array.isArray(a))?!0:"null"==b&&null===a||b==typeof a&&null!==a||"object"==b&&a===Object(a)||J.call(a).slice(8,-1).toLowerCase()==b}function f(a){if("function"==typeof a||Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[z](c)&&(b[c]=f(a[c]));return b}function 
h(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function i(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),g=d.cache=d.cache||{},i=d.count=d.count||[];return g[z](f)?(h(i,f),c?c(g[f]):g[f]):(i.length>=1e3&&delete g[i.shift()],i.push(f),g[f]=a.apply(b,e),c?c(g[f]):g[f])}return d}function j(a,b,c,d,e,f){if(null==e){var g=a-c,h=b-d;return g||h?(180+180*D.atan2(-h,-g)/H+360)%360:0}return j(a,b,e,f)-j(c,d,e,f)}function k(a){return a%360*H/180}function l(a){return 180*a/H%360}function m(a){var b=[];return a=a.replace(/(?:^|\s)(\w+)\(([^)]+)\)/g,function(a,c,d){return d=d.split(/\s*,\s*|\s+/),"rotate"==c&&1==d.length&&d.push(0,0),"scale"==c&&(d.length>2?d=d.slice(0,2):2==d.length&&d.push(0,0),1==d.length&&d.push(d[0],0,0)),"skewX"==c?b.push(["m",1,0,D.tan(k(d[0])),1,0,0]):"skewY"==c?b.push(["m",1,D.tan(k(d[0])),0,1,0,0]):b.push([c.charAt(0)].concat(d)),a}),b}function n(a,b){var d=aa(a),e=new c.Matrix;if(d)for(var f=0,g=d.length;g>f;f++){var h,i,j,k,l,m=d[f],n=m.length,o=A(m[0]).toLowerCase(),p=m[0]!=o,q=p?e.invert():0;"t"==o&&2==n?e.translate(m[1],0):"t"==o&&3==n?p?(h=q.x(0,0),i=q.y(0,0),j=q.x(m[1],m[2]),k=q.y(m[1],m[2]),e.translate(j-h,k-i)):e.translate(m[1],m[2]):"r"==o?2==n?(l=l||b,e.rotate(m[1],l.x+l.width/2,l.y+l.height/2)):4==n&&(p?(j=q.x(m[2],m[3]),k=q.y(m[2],m[3]),e.rotate(m[1],j,k)):e.rotate(m[1],m[2],m[3])):"s"==o?2==n||3==n?(l=l||b,e.scale(m[1],m[n-1],l.x+l.width/2,l.y+l.height/2)):4==n?p?(j=q.x(m[2],m[3]),k=q.y(m[2],m[3]),e.scale(m[1],m[1],j,k)):e.scale(m[1],m[1],m[2],m[3]):5==n&&(p?(j=q.x(m[3],m[4]),k=q.y(m[3],m[4]),e.scale(m[1],m[2],j,k)):e.scale(m[1],m[2],m[3],m[4])):"m"==o&&7==n&&e.add(m[1],m[2],m[3],m[4],m[5],m[6])}return e}function o(a){var b=a.node.ownerSVGElement&&w(a.node.ownerSVGElement)||a.node.parentNode&&w(a.node.parentNode)||c.select("svg")||c(0,0),d=b.select("defs"),e=null==d?!1:d.node;return e||(e=u("defs",b.node).node),e}function p(a){return 
a.node.ownerSVGElement&&w(a.node.ownerSVGElement)||c.select("svg")}function q(a,b,c){function e(a){if(null==a)return I;if(a==+a)return a;d(j,{width:a});try{return j.getBBox().width}catch(b){return 0}}function f(a){if(null==a)return I;if(a==+a)return a;d(j,{height:a});try{return j.getBBox().height}catch(b){return 0}}function g(d,e){null==b?i[d]=e(a.attr(d)||0):d==b&&(i=e(null==c?a.attr(d)||0:c))}var h=p(a).node,i={},j=h.querySelector(".svg---mgr");switch(j||(j=d("rect"),d(j,{x:-9e9,y:-9e9,width:10,height:10,"class":"svg---mgr",fill:"none"}),h.appendChild(j)),a.type){case"rect":g("rx",e),g("ry",f);case"image":g("width",e),g("height",f);case"text":g("x",e),g("y",f);break;case"circle":g("cx",e),g("cy",f),g("r",e);break;case"ellipse":g("cx",e),g("cy",f),g("rx",e),g("ry",f);break;case"line":g("x1",e),g("x2",e),g("y1",f),g("y2",f);break;case"marker":g("refX",e),g("markerWidth",e),g("refY",f),g("markerHeight",f);break;case"radialGradient":g("fx",e),g("fy",f);break;case"tspan":g("dx",e),g("dy",f);break;default:g(b,e)}return h.removeChild(j),i}function r(a){e(a,"array")||(a=Array.prototype.slice.call(arguments,0));for(var b=0,c=0,d=this.node;this[b];)delete this[b++];for(b=0;bc;c++){var e={type:a[c].type,attr:a[c].attr()},f=a[c].children();b.push(e),f.length&&x(f,e.childNodes=[])}}c.version="0.5.1",c.toString=function(){return"Snap v"+this.version},c._={};var y={win:a.window,doc:a.window.document};c._.glob=y;var 
z="hasOwnProperty",A=String,B=parseFloat,C=parseInt,D=Math,E=D.max,F=D.min,G=D.abs,H=(D.pow,D.PI),I=(D.round,""),J=Object.prototype.toString,K=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?%?)\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?%?)\s*\))\s*$/i,L=(c._.separator=/[,\s]+/,/[\s]*,[\s]*/),M={hs:1,rg:1},N=/([a-z])[\s,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\s]*,?[\s]*)+)/gi,O=/([rstm])[\s,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\s]*,?[\s]*)+)/gi,P=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\s]*,?[\s]*/gi,Q=0,R="S"+(+new Date).toString(36),S=function(a){return(a&&a.type?a.type:I)+R+(Q++).toString(36)},T="http://www.w3.org/1999/xlink",U="http://www.w3.org/2000/svg",V={};c.url=function(a){return"url('#"+a+"')"};c._.$=d,c._.id=S,c.format=function(){var a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;return c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),"function"==typeof e&&f&&(e=e()))}),e=(null==e||e==d?a:e)+""};return function(b,d){return A(b).replace(a,function(a,b){return c(a,b,d)})}}(),c._.clone=f,c._.cacher=i,c.rad=k,c.deg=l,c.sin=function(a){return D.sin(c.rad(a))},c.tan=function(a){return D.tan(c.rad(a))},c.cos=function(a){return D.cos(c.rad(a))},c.asin=function(a){return c.deg(D.asin(a))},c.acos=function(a){return c.deg(D.acos(a))},c.atan=function(a){return c.deg(D.atan(a))},c.atan2=function(a){return c.deg(D.atan2(a))},c.angle=j,c.len=function(a,b,d,e){return Math.sqrt(c.len2(a,b,d,e))},c.len2=function(a,b,c,d){return(a-c)*(a-c)+(b-d)*(b-d)},c.closestPoint=function(a,b,c){function d(a){var d=a.x-b,e=a.y-c;return d*d+e*e}for(var e,f,g,h,i=a.node,j=i.getTotalLength(),k=j/i.pathSegList.numberOfItems*.125,l=1/0,m=0;j>=m;m+=k)(h=d(g=i.getPointAtLength(m))).5;){var n,o,p,q,r,s;(p=f-k)>=0&&(r=d(n=i.getPointAtLength(p)))f)return 
b-f;if(f>a-c)return b-f+a}return b},c.getRGB=i(function(a){if(!a||(a=A(a)).indexOf("-")+1)return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z};if("none"==a)return{r:-1,g:-1,b:-1,hex:"none",toString:Z};if(!(M[z](a.toLowerCase().substring(0,2))||"#"==a.charAt())&&(a=W(a)),!a)return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z};var b,d,f,g,h,i,j=a.match(K);return j?(j[2]&&(f=C(j[2].substring(5),16),d=C(j[2].substring(3,5),16),b=C(j[2].substring(1,3),16)),j[3]&&(f=C((h=j[3].charAt(3))+h,16),d=C((h=j[3].charAt(2))+h,16),b=C((h=j[3].charAt(1))+h,16)),j[4]&&(i=j[4].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b*=2.55),d=B(i[1]),"%"==i[1].slice(-1)&&(d*=2.55),f=B(i[2]),"%"==i[2].slice(-1)&&(f*=2.55),"rgba"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100)),j[5]?(i=j[5].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b/=100),d=B(i[1]),"%"==i[1].slice(-1)&&(d/=100),f=B(i[2]),"%"==i[2].slice(-1)&&(f/=100),("deg"==i[0].slice(-3)||"°"==i[0].slice(-1))&&(b/=360),"hsba"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100),c.hsb2rgb(b,d,f,g)):j[6]?(i=j[6].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b/=100),d=B(i[1]),"%"==i[1].slice(-1)&&(d/=100),f=B(i[2]),"%"==i[2].slice(-1)&&(f/=100),("deg"==i[0].slice(-3)||"°"==i[0].slice(-1))&&(b/=360),"hsla"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100),c.hsl2rgb(b,d,f,g)):(b=F(D.round(b),255),d=F(D.round(d),255),f=F(D.round(f),255),g=F(E(g,0),1),j={r:b,g:d,b:f,toString:Z},j.hex="#"+(16777216|f|d<<8|b<<16).toString(16).slice(1),j.opacity=e(g,"finite")?g:1,j)):{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z}},c),c.hsb=i(function(a,b,d){return c.hsb2rgb(a,b,d).hex}),c.hsl=i(function(a,b,d){return c.hsl2rgb(a,b,d).hex}),c.rgb=i(function(a,b,c,d){if(e(d,"finite")){var f=D.round;return"rgba("+[f(a),f(b),f(c),+d.toFixed(2)]+")"}return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)});var W=function(a){var 
b=y.doc.getElementsByTagName("head")[0]||y.doc.getElementsByTagName("svg")[0],c="rgb(255, 0, 0)";return(W=i(function(a){if("red"==a.toLowerCase())return c;b.style.color=c,b.style.color=a;var d=y.doc.defaultView.getComputedStyle(b,I).getPropertyValue("color");return d==c?null:d}))(a)},X=function(){return"hsb("+[this.h,this.s,this.b]+")"},Y=function(){return"hsl("+[this.h,this.s,this.l]+")"},Z=function(){return 1==this.opacity||null==this.opacity?this.hex:"rgba("+[this.r,this.g,this.b,this.opacity]+")"},$=function(a,b,d){if(null==b&&e(a,"object")&&"r"in a&&"g"in a&&"b"in a&&(d=a.b,b=a.g,a=a.r),null==b&&e(a,string)){var f=c.getRGB(a);a=f.r,b=f.g,d=f.b}return(a>1||b>1||d>1)&&(a/=255,b/=255,d/=255),[a,b,d]},_=function(a,b,d,f){a=D.round(255*a),b=D.round(255*b),d=D.round(255*d);var g={r:a,g:b,b:d,opacity:e(f,"finite")?f:1,hex:c.rgb(a,b,d),toString:Z};return e(f,"finite")&&(g.opacity=f),g};c.color=function(a){var b;return e(a,"object")&&"h"in a&&"s"in a&&"b"in a?(b=c.hsb2rgb(a),a.r=b.r,a.g=b.g,a.b=b.b,a.opacity=1,a.hex=b.hex):e(a,"object")&&"h"in a&&"s"in a&&"l"in a?(b=c.hsl2rgb(a),a.r=b.r,a.g=b.g,a.b=b.b,a.opacity=1,a.hex=b.hex):(e(a,"string")&&(a=c.getRGB(a)),e(a,"object")&&"r"in a&&"g"in a&&"b"in a&&!("error"in a)?(b=c.rgb2hsl(a),a.h=b.h,a.s=b.s,a.l=b.l,b=c.rgb2hsb(a),a.v=b.b):(a={hex:"none"},a.r=a.g=a.b=a.h=a.s=a.v=a.l=-1,a.error=1)),a.toString=Z,a},c.hsb2rgb=function(a,b,c,d){e(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,d=a.o,a=a.h),a*=360;var f,g,h,i,j;return a=a%360/60,j=c*b,i=j*(1-G(a%2-1)),f=g=h=c-j,a=~~a,f+=[j,i,0,0,i,j][a],g+=[i,j,j,i,0,0][a],h+=[0,0,i,j,j,i][a],_(f,g,h,d)},c.hsl2rgb=function(a,b,c,d){e(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h),(a>1||b>1||c>1)&&(a/=360,b/=100,c/=100),a*=360;var f,g,h,i,j;return a=a%360/60,j=2*b*(.5>c?c:1-c),i=j*(1-G(a%2-1)),f=g=h=c-j/2,a=~~a,f+=[j,i,0,0,i,j][a],g+=[i,j,j,i,0,0][a],h+=[0,0,i,j,j,i][a],_(f,g,h,d)},c.rgb2hsb=function(a,b,c){c=$(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;return 
f=E(a,b,c),g=f-F(a,b,c),d=0==g?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=0==g?0:g/f,{h:d,s:e,b:f,toString:X}},c.rgb2hsl=function(a,b,c){c=$(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;return g=E(a,b,c),h=F(a,b,c),i=g-h,d=0==i?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=0==i?0:.5>f?i/(2*f):i/(2-2*f),{h:d,s:e,l:f,toString:Y}},c.parsePathString=function(a){if(!a)return null;var b=c.path(a);if(b.arr)return c.path.clone(b.arr);var d={a:7,c:6,o:2,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,u:3,z:0},f=[];return e(a,"array")&&e(a[0],"array")&&(f=c.path.clone(a)),f.length||A(a).replace(N,function(a,b,c){var e=[],g=b.toLowerCase();if(c.replace(P,function(a,b){b&&e.push(+b)}),"m"==g&&e.length>2&&(f.push([b].concat(e.splice(0,2))),g="l",b="m"==b?"l":"L"),"o"==g&&1==e.length&&f.push([b,e[0]]),"r"==g)f.push([b].concat(e));else for(;e.length>=d[g]&&(f.push([b].concat(e.splice(0,d[g]))),d[g]););}),f.toString=c.path.toString,b.arr=c.path.clone(f),f};var aa=c.parseTransformString=function(a){if(!a)return null;var b=[];return e(a,"array")&&e(a[0],"array")&&(b=c.path.clone(a)),b.length||A(a).replace(O,function(a,c,d){var e=[];c.toLowerCase();d.replace(P,function(a,b){b&&e.push(+b)}),b.push([c].concat(e))}),b.toString=c.path.toString,b};c._.svgTransform2string=m,c._.rgTransform=/^[a-z][\s]*-?\.?\d/i,c._.transform2matrix=n,c._unit2px=q;y.doc.contains||y.doc.compareDocumentPosition?function(a,b){var c=9==a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a==d||!(!d||1!=d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)for(;b;)if(b=b.parentNode,b==a)return!0;return!1};c._.getSomeDefs=o,c._.getSomeSVG=p,c.select=function(a){return a=A(a).replace(/([^\\]):/g,"$1\\:"),w(y.doc.querySelector(a))},c.selectAll=function(a){for(var b=y.doc.querySelectorAll(a),d=(c.set||Array)(),e=0;ei;i++)h[g[i].nodeName]=g[i].nodeValue;return h}if(e(a,"string")){if(!(arguments.length>1))return 
b("snap.util.getattr."+a,d).firstDefined();var k={};k[a]=c,a=k}for(var l in a)a[z](l)&&b("snap.util.attr."+l,d,a[l]);return d},c.parse=function(a){var b=y.doc.createDocumentFragment(),c=!0,d=y.doc.createElement("div");if(a=A(a),a.match(/^\s*<\s*svg(?:\s|>)/)||(a=""+a+"",c=!1),d.innerHTML=a,a=d.getElementsByTagName("svg")[0])if(c)b=a;else for(;a.firstChild;)b.appendChild(a.firstChild);return new t(b)},c.fragment=function(){for(var a=Array.prototype.slice.call(arguments,0),b=y.doc.createDocumentFragment(),d=0,e=a.length;e>d;d++){var f=a[d];f.node&&f.node.nodeType&&b.appendChild(f.node),f.nodeType&&b.appendChild(f),"string"==typeof f&&b.appendChild(c.parse(f).node)}return new t(b)},c._.make=u,c._.wrap=w,v.prototype.el=function(a,b){var c=u(a,this.node);return b&&c.attr(b),c},s.prototype.children=function(){for(var a=[],b=this.node.childNodes,d=0,e=b.length;e>d;d++)a[d]=c(b[d]);return a},s.prototype.toJSON=function(){var a=[];return x([this],a),a[0]},b.on("snap.util.getattr",function(){var a=b.nt();a=a.substring(a.lastIndexOf(".")+1);var c=a.replace(/[A-Z]/g,function(a){return"-"+a.toLowerCase()});return ba[z](c)?this.node.ownerDocument.defaultView.getComputedStyle(this.node,null).getPropertyValue(c):d(this.node,a)});var 
ba={"alignment-baseline":0,"baseline-shift":0,clip:0,"clip-path":0,"clip-rule":0,color:0,"color-interpolation":0,"color-interpolation-filters":0,"color-profile":0,"color-rendering":0,cursor:0,direction:0,display:0,"dominant-baseline":0,"enable-background":0,fill:0,"fill-opacity":0,"fill-rule":0,filter:0,"flood-color":0,"flood-opacity":0,font:0,"font-family":0,"font-size":0,"font-size-adjust":0,"font-stretch":0,"font-style":0,"font-variant":0,"font-weight":0,"glyph-orientation-horizontal":0,"glyph-orientation-vertical":0,"image-rendering":0,kerning:0,"letter-spacing":0,"lighting-color":0,marker:0,"marker-end":0,"marker-mid":0,"marker-start":0,mask:0,opacity:0,overflow:0,"pointer-events":0,"shape-rendering":0,"stop-color":0,"stop-opacity":0,stroke:0,"stroke-dasharray":0,"stroke-dashoffset":0,"stroke-linecap":0,"stroke-linejoin":0,"stroke-miterlimit":0,"stroke-opacity":0,"stroke-width":0,"text-anchor":0,"text-decoration":0,"text-rendering":0,"unicode-bidi":0,visibility:0,"word-spacing":0,"writing-mode":0};b.on("snap.util.attr",function(a){var c=b.nt(),e={};c=c.substring(c.lastIndexOf(".")+1),e[c]=a;var f=c.replace(/-(\w)/gi,function(a,b){return b.toUpperCase()}),g=c.replace(/[A-Z]/g,function(a){return"-"+a.toLowerCase()});ba[z](g)?this.node.style[f]=null==a?I:a:d(this.node,e)}),function(a){}(v.prototype),c.ajax=function(a,c,d,f){var g=new XMLHttpRequest,h=S();if(g){if(e(c,"function"))f=d,d=c,c=null;else if(e(c,"object")){var i=[];for(var j in c)c.hasOwnProperty(j)&&i.push(encodeURIComponent(j)+"="+encodeURIComponent(c[j]));c=i.join("&")}return g.open(c?"POST":"GET",a,!0),c&&(g.setRequestHeader("X-Requested-With","XMLHttpRequest"),g.setRequestHeader("Content-type","application/x-www-form-urlencoded")),d&&(b.once("snap.ajax."+h+".0",d),b.once("snap.ajax."+h+".200",d),b.once("snap.ajax."+h+".304",d)),g.onreadystatechange=function(){4==g.readyState&&b("snap.ajax."+h+"."+g.status,f,g)},4==g.readyState?g:(g.send(c),g)}},c.load=function(a,b,d){c.ajax(a,function(a){var 
e=c.parse(a.responseText);d?b.call(d,e):b(e)})};var ca=function(a){var b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,h=e.clientLeft||d.clientLeft||0,i=b.top+(g.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(g.win.pageXOffset||e.scrollLeft||d.scrollLeft)-h;return{y:i,x:j}};return c.getElementByPoint=function(a,b){var c=this,d=(c.canvas,y.doc.elementFromPoint(a,b));if(y.win.opera&&"svg"==d.tagName){var e=ca(d),f=d.createSVGRect();f.x=a-e.x,f.y=b-e.y,f.width=f.height=1;var g=d.getIntersectionList(f,null);g.length&&(d=g[g.length-1])}return d?w(d):null},c.plugin=function(a){a(c,s,v,y,t)},y.win.Snap=c,c}(a||this);return d.plugin(function(c,d,e,f,g){function h(a,b){if(null==b){var d=!0;if(b="linearGradient"==a.type||"radialGradient"==a.type?a.node.getAttribute("gradientTransform"):"pattern"==a.type?a.node.getAttribute("patternTransform"):a.node.getAttribute("transform"),!b)return new c.Matrix;b=c._.svgTransform2string(b)}else b=c._.rgTransform.test(b)?m(b).replace(/\.{3}|\u2026/g,a._.transform||""):c._.svgTransform2string(b),l(b,"array")&&(b=c.path?c.path.toString.call(b):m(b)),a._.transform=b;var e=c._.transform2matrix(b,a.getBBox(1));return d?e:void(a.matrix=e)}function i(a){function b(a,b){var d=o(a.node,b);d=d&&d.match(g),d=d&&d[2],d&&"#"==d.charAt()&&(d=d.substring(1),d&&(i[d]=(i[d]||[]).concat(function(d){var e={};e[b]=c.url(d),o(a.node,e)})))}function d(a){var b=o(a.node,"xlink:href");b&&"#"==b.charAt()&&(b=b.substring(1),b&&(i[b]=(i[b]||[]).concat(function(b){a.attr("xlink:href","#"+b)})))}for(var e,f=a.selectAll("*"),g=/^\s*url\(("|'|)(.*)\1\)\s*$/,h=[],i={},j=0,k=f.length;k>j;j++){e=f[j],b(e,"fill"),b(e,"stroke"),b(e,"filter"),b(e,"mask"),b(e,"clip-path"),d(e);var l=o(e.node,"id");l&&(o(e.node,{id:e.id}),h.push({old:l,id:e.id}))}for(j=0,k=h.length;k>j;j++){var m=i[h[j].old];if(m)for(var n=0,p=m.length;p>n;n++)m[n](h[j].id)}}function j(a){return function(){var 
b=a?"<"+this.type:"",c=this.node.attributes,d=this.node.childNodes;if(a)for(var e=0,f=c.length;f>e;e++)b+=" "+c[e].name+'="'+c[e].value.replace(/"/g,'\\"')+'"';if(d.length){for(a&&(b+=">"),e=0,f=d.length;f>e;e++)3==d[e].nodeType?b+=d[e].nodeValue:1==d[e].nodeType&&(b+=s(d[e]).toString());a&&(b+="")}else a&&(b+="/>");return b}}var k=d.prototype,l=c.is,m=String,n=c._unit2px,o=c._.$,p=c._.make,q=c._.getSomeDefs,r="hasOwnProperty",s=c._.wrap;k.getBBox=function(a){if("tspan"==this.type)return c._.box(this.node.getClientRects().item(0));if(!c.Matrix||!c.path)return this.node.getBBox();var b=this,d=new c.Matrix;if(b.removed)return c._.box();for(;"use"==b.type;)if(a||(d=d.add(b.transform().localMatrix.translate(b.attr("x")||0,b.attr("y")||0))),b.original)b=b.original;else{var e=b.attr("xlink:href");b=b.original=b.node.ownerDocument.getElementById(e.substring(e.indexOf("#")+1))}var f=b._,g=c.path.get[b.type]||c.path.get.deflt;try{return a?(f.bboxwt=g?c.path.getBBox(b.realPath=g(b)):c._.box(b.node.getBBox()),c._.box(f.bboxwt)):(b.realPath=g(b),b.matrix=b.transform().localMatrix,f.bbox=c.path.getBBox(c.path.map(b.realPath,d.add(b.matrix))),c._.box(f.bbox))}catch(h){return c._.box()}};var t=function(){return this.string};k.transform=function(a){var b=this._;if(null==a){for(var d,e=this,f=new c.Matrix(this.node.getCTM()),g=h(this),i=[g],j=new c.Matrix,k=g.toTransformString(),l=m(g)==m(this.matrix)?m(b.transform):k;"svg"!=e.type&&(e=e.parent());)i.push(h(e));for(d=i.length;d--;)j.add(i[d]);return{string:l,globalMatrix:f,totalMatrix:j,localMatrix:g,diffMatrix:f.clone().add(g.invert()),global:f.toTransformString(),total:j.toTransformString(),local:k,toString:t}}return a instanceof 
c.Matrix?(this.matrix=a,this._.transform=a.toTransformString()):h(this,a),this.node&&("linearGradient"==this.type||"radialGradient"==this.type?o(this.node,{gradientTransform:this.matrix}):"pattern"==this.type?o(this.node,{patternTransform:this.matrix}):o(this.node,{transform:this.matrix})),this},k.parent=function(){return s(this.node.parentNode)},k.append=k.add=function(a){if(a){if("set"==a.type){var b=this;return a.forEach(function(a){b.add(a)}),this}a=s(a),this.node.appendChild(a.node),a.paper=this.paper}return this},k.appendTo=function(a){return a&&(a=s(a),a.append(this)),this},k.prepend=function(a){if(a){if("set"==a.type){var b,c=this;return a.forEach(function(a){b?b.after(a):c.prepend(a),b=a}),this}a=s(a);var d=a.parent();this.node.insertBefore(a.node,this.node.firstChild),this.add&&this.add(),a.paper=this.paper,this.parent()&&this.parent().add(),d&&d.add()}return this},k.prependTo=function(a){return a=s(a),a.prepend(this),this},k.before=function(a){if("set"==a.type){var b=this;return a.forEach(function(a){var c=a.parent();b.node.parentNode.insertBefore(a.node,b.node),c&&c.add()}),this.parent().add(),this}a=s(a);var c=a.parent();return this.node.parentNode.insertBefore(a.node,this.node),this.parent()&&this.parent().add(),c&&c.add(),a.paper=this.paper,this},k.after=function(a){a=s(a);var b=a.parent();return this.node.nextSibling?this.node.parentNode.insertBefore(a.node,this.node.nextSibling):this.node.parentNode.appendChild(a.node),this.parent()&&this.parent().add(),b&&b.add(),a.paper=this.paper,this},k.insertBefore=function(a){a=s(a);var b=this.parent();return a.node.parentNode.insertBefore(this.node,a.node),this.paper=a.paper,b&&b.add(),a.parent()&&a.parent().add(),this},k.insertAfter=function(a){a=s(a);var b=this.parent();return a.node.parentNode.insertBefore(this.node,a.node.nextSibling),this.paper=a.paper,b&&b.add(),a.parent()&&a.parent().add(),this},k.remove=function(){var a=this.parent();return 
this.node.parentNode&&this.node.parentNode.removeChild(this.node),delete this.paper,this.removed=!0,a&&a.add(),this},k.select=function(a){return s(this.node.querySelector(a))},k.selectAll=function(a){for(var b=this.node.querySelectorAll(a),d=(c.set||Array)(),e=0;e{contents}',{x:+b.x.toFixed(3),y:+b.y.toFixed(3),width:+b.width.toFixed(3),height:+b.height.toFixed(3), +contents:this.outerSVG()});return"data:image/svg+xml;base64,"+btoa(unescape(encodeURIComponent(d)))}},g.prototype.select=k.select,g.prototype.selectAll=k.selectAll}),d.plugin(function(a,d,e,f,g){function h(a,b,c){return function(d){var e=d.slice(a,b);return 1==e.length&&(e=e[0]),c?c(e):e}}var i=d.prototype,j=a.is,k=String,l="hasOwnProperty",m=function(a,b,d,e){"function"!=typeof d||d.length||(e=d,d=c.linear),this.attr=a,this.dur=b,d&&(this.easing=d),e&&(this.callback=e)};a._.Animation=m,a.animation=function(a,b,c,d){return new m(a,b,c,d)},i.inAnim=function(){var a=this,b=[];for(var c in a.anims)a.anims[l](c)&&!function(a){b.push({anim:new m(a._attrs,a.dur,a.easing,a._callback),mina:a,curStatus:a.status(),status:function(b){return a.status(b)},stop:function(){a.stop()}})}(a.anims[c]);return b},a.animate=function(a,d,e,f,g,h){"function"!=typeof g||g.length||(h=g,g=c.linear);var i=c.time(),j=c(a,d,i,i+f,c.time,e,g);return h&&b.once("mina.finish."+j.id,h),j},i.stop=function(){for(var a=this.inAnim(),b=0,c=a.length;c>b;b++)a[b].stop();return this},i.animate=function(a,d,e,f){"function"!=typeof e||e.length||(f=e,e=c.linear),a instanceof m&&(f=a.callback,e=a.easing,d=a.dur,a=a.attr);var g,i,n,o,p=[],q=[],r={},s=this;for(var t in a)if(a[l](t)){s.equal?(o=s.equal(t,k(a[t])),g=o.from,i=o.to,n=o.f):(g=+s.attr(t),i=+a[t]);var u=j(g,"array")?g.length:1;r[t]=h(p.length,p.length+u,n),p=p.concat(g),q=q.concat(i)}var v=c.time(),w=c(p,q,v,v+d,c.time,function(a){var b={};for(var c in r)r[l](c)&&(b[c]=r[c](a));s.attr(b)},e);return 
s.anims[w.id]=w,w._attrs=a,w._callback=f,b("snap.animcreated."+s.id,w),b.once("mina.finish."+w.id,function(){b.off("mina.*."+w.id),delete s.anims[w.id],f&&f.call(s)}),b.once("mina.stop."+w.id,function(){b.off("mina.*."+w.id),delete s.anims[w.id]}),s}}),d.plugin(function(a,b,c,d,e){function f(a,b,c,d,e,f){return null==b&&"[object SVGMatrix]"==g.call(a)?(this.a=a.a,this.b=a.b,this.c=a.c,this.d=a.d,this.e=a.e,void(this.f=a.f)):void(null!=a?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0))}var g=Object.prototype.toString,h=String,i=Math,j="";!function(b){function c(a){return a[0]*a[0]+a[1]*a[1]}function d(a){var b=i.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}b.add=function(a,b,c,d,e,g){if(a&&a instanceof f)return this.add(a.a,a.b,a.c,a.d,a.e,a.f);var h=a*this.a+b*this.c,i=a*this.b+b*this.d;return this.e+=e*this.a+g*this.c,this.f+=e*this.b+g*this.d,this.c=c*this.a+d*this.c,this.d=c*this.b+d*this.d,this.a=h,this.b=i,this},f.prototype.multLeft=function(a,b,c,d,e,g){if(a&&a instanceof f)return this.multLeft(a.a,a.b,a.c,a.d,a.e,a.f);var h=a*this.a+c*this.b,i=a*this.c+c*this.d,j=a*this.e+c*this.f+e;return this.b=b*this.a+d*this.b,this.d=b*this.c+d*this.d,this.f=b*this.e+d*this.f+g,this.a=h,this.c=i,this.e=j,this},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new f(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new f(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){return this.e+=a*this.a+b*this.c,this.f+=a*this.b+b*this.d,this},b.scale=function(a,b,c,d){return null==b&&(b=a),(c||d)&&this.translate(c,d),this.a*=a,this.b*=a,this.c*=b,this.d*=b,(c||d)&&this.translate(-c,-d),this},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+i.cos(b).toFixed(9),f=+i.sin(b).toFixed(9);return this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.skewX=function(a){return this.skew(a,0)},b.skewY=function(a){return 
this.skew(0,a)},b.skew=function(b,c){b=b||0,c=c||0,b=a.rad(b),c=a.rad(c);var d=i.tan(b).toFixed(9),e=i.tan(c).toFixed(9);return this.add(1,e,d,1,0,0)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[h.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.determinant=function(){return this.a*this.d-this.b*this.c},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.b],[this.c,this.d]];b.scalex=i.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=i.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley,this.determinant()<0&&(b.scalex=-b.scalex);var f=e[0][1],g=e[1][1];return 0>g?(b.rotate=a.deg(i.acos(g)),0>f&&(b.rotate=360-b.rotate)):b.rotate=a.deg(i.asin(f)),b.isSimple=!(+b.shear.toFixed(9)||b.scalex.toFixed(9)!=b.scaley.toFixed(9)&&b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate,b},b.toTransformString=function(a){var b=a||this.split();return+b.shear.toFixed(9)?"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]:(b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4),(b.dx||b.dy?"t"+[+b.dx.toFixed(4),+b.dy.toFixed(4)]:j)+(b.rotate?"r"+[+b.rotate.toFixed(4),0,0]:j)+(1!=b.scalex||1!=b.scaley?"s"+[b.scalex,b.scaley,0,0]:j))}}(f.prototype),a.Matrix=f,a.matrix=function(a,b,c,d,e,g){return new f(a,b,c,d,e,g)}}),d.plugin(function(a,c,d,e,f){function g(d){return function(e){if(b.stop(),e instanceof 
f&&1==e.node.childNodes.length&&("radialGradient"==e.node.firstChild.tagName||"linearGradient"==e.node.firstChild.tagName||"pattern"==e.node.firstChild.tagName)&&(e=e.node.firstChild,n(this).appendChild(e),e=l(e)),e instanceof c)if("radialGradient"==e.type||"linearGradient"==e.type||"pattern"==e.type){e.node.id||p(e.node,{id:e.id});var g=q(e.node.id)}else g=e.attr(d);else if(g=a.color(e),g.error){var h=a(n(this).ownerSVGElement).gradient(e);h?(h.node.id||p(h.node,{id:h.id}),g=q(h.node.id)):g=e}else g=r(g);var i={};i[d]=g,p(this.node,i),this.node.style[d]=t}}function h(a){b.stop(),a==+a&&(a+="px"),this.node.style.fontSize=a}function i(a){for(var b=[],c=a.childNodes,d=0,e=c.length;e>d;d++){var f=c[d];3==f.nodeType&&b.push(f.nodeValue),"tspan"==f.tagName&&(1==f.childNodes.length&&3==f.firstChild.nodeType?b.push(f.firstChild.nodeValue):b.push(i(f)))}return b}function j(){return b.stop(),this.node.style.fontSize}var k=a._.make,l=a._.wrap,m=a.is,n=a._.getSomeDefs,o=/^url\((['"]?)([^)]+)\1\)$/,p=a._.$,q=a.url,r=String,s=a._.separator,t="";a.deurl=function(a){var b=String(a).match(o);return b?b[2]:a},b.on("snap.util.attr.mask",function(a){if(a instanceof c||a instanceof f){if(b.stop(),a instanceof f&&1==a.node.childNodes.length&&(a=a.node.firstChild,n(this).appendChild(a),a=l(a)),"mask"==a.type)var d=a;else d=k("mask",n(this)),d.node.appendChild(a.node);!d.node.id&&p(d.node,{id:d.id}),p(this.node,{mask:q(d.id)})}}),function(a){b.on("snap.util.attr.clip",a),b.on("snap.util.attr.clip-path",a),b.on("snap.util.attr.clipPath",a)}(function(a){if(a instanceof c||a instanceof f){b.stop();for(var d,e=a.node;e;){if("clipPath"===e.nodeName){d=new c(e);break}if("svg"===e.nodeName){d=void 0;break}e=e.parentNode}d||(d=k("clipPath",n(this)),d.node.appendChild(a.node),!d.node.id&&p(d.node,{id:d.id})),p(this.node,{"clip-path":q(d.node.id||d.id)})}}),b.on("snap.util.attr.fill",g("fill")),b.on("snap.util.attr.stroke",g("stroke"));var 
u=/^([lr])(?:\(([^)]*)\))?(.*)$/i;b.on("snap.util.grad.parse",function(a){function b(a,b){for(var c=(b-h)/(a-i),d=i;a>d;d++)f[d].offset=+(+h+c*(d-i)).toFixed(2);i=a,h=b}a=r(a);var c=a.match(u);if(!c)return null;var d=c[1],e=c[2],f=c[3];e=e.split(/\s*,\s*/).map(function(a){return+a==a?+a:a}),1==e.length&&0==e[0]&&(e=[]),f=f.split("-"),f=f.map(function(a){a=a.split(":");var b={color:a[0]};return a[1]&&(b.offset=parseFloat(a[1])),b});var g=f.length,h=0,i=0;g--;for(var j=0;g>j;j++)"offset"in f[j]&&b(j,f[j].offset);return f[g].offset=f[g].offset||100,b(g,f[g].offset),{type:d,params:e,stops:f}}),b.on("snap.util.attr.d",function(c){b.stop(),m(c,"array")&&m(c[0],"array")&&(c=a.path.toString.call(c)),c=r(c),c.match(/[ruo]/i)&&(c=a.path.toAbsolute(c)),p(this.node,{d:c})})(-1),b.on("snap.util.attr.#text",function(a){b.stop(),a=r(a);for(var c=e.doc.createTextNode(a);this.node.firstChild;)this.node.removeChild(this.node.firstChild);this.node.appendChild(c)})(-1),b.on("snap.util.attr.path",function(a){b.stop(),this.attr({d:a})})(-1),b.on("snap.util.attr.class",function(a){b.stop(),this.node.className.baseVal=a})(-1),b.on("snap.util.attr.viewBox",function(a){var c;c=m(a,"object")&&"x"in a?[a.x,a.y,a.width,a.height].join(" "):m(a,"array")?a.join(" "):a,p(this.node,{viewBox:c}),b.stop()})(-1),b.on("snap.util.attr.transform",function(a){this.transform(a),b.stop()})(-1),b.on("snap.util.attr.r",function(a){"rect"==this.type&&(b.stop(),p(this.node,{rx:a,ry:a}))})(-1),b.on("snap.util.attr.textpath",function(a){if(b.stop(),"text"==this.type){var d,e,f;if(!a&&this.textPath){for(e=this.textPath;e.node.firstChild;)this.node.appendChild(e.node.firstChild);return e.remove(),void delete this.textPath}if(m(a,"string")){var g=n(this),h=l(g.parentNode).path(a);g.appendChild(h.node),d=h.id,h.attr({id:d})}else a=l(a),a instanceof 
c&&(d=a.attr("id"),d||(d=a.id,a.attr({id:d})));if(d)if(e=this.textPath,f=this.node,e)e.attr({"xlink:href":"#"+d});else{for(e=p("textPath",{"xlink:href":"#"+d});f.firstChild;)e.appendChild(f.firstChild);f.appendChild(e),this.textPath=l(e)}}})(-1),b.on("snap.util.attr.text",function(a){if("text"==this.type){for(var c=this.node,d=function(a){var b=p("tspan");if(m(a,"array"))for(var c=0;c1&&(a=Array.prototype.slice.call(arguments,0));var b={};return i(a,"object")&&!i(a,"array")?b=a:null!=a&&(b={points:a}),this.el("polyline",b)},h.polygon=function(a){arguments.length>1&&(a=Array.prototype.slice.call(arguments,0));var b={};return i(a,"object")&&!i(a,"array")?b=a:null!=a&&(b={points:a}),this.el("polygon",b)},function(){function d(){return this.selectAll("stop")}function e(a,b){var d=l("stop"),e={offset:+b+"%"};a=c.color(a),e["stop-color"]=a.hex,a.opacity<1&&(e["stop-opacity"]=a.opacity),l(d,e);for(var f,g=this.stops(),h=0;hb){this.node.insertBefore(d,g[h].node),f=!0;break}}return f||this.node.appendChild(d),this}function f(){if("linearGradient"==this.type){var a=l(this.node,"x1")||0,b=l(this.node,"x2")||1,d=l(this.node,"y1")||0,e=l(this.node,"y2")||0;return c._.box(a,d,math.abs(b-a),math.abs(e-d))}var f=this.node.cx||.5,g=this.node.cy||.5,h=this.node.r||0;return c._.box(f-h,g-h,2*h,2*h)}function g(a){var d=a,e=this.stops();if("string"==typeof a&&(d=b("snap.util.grad.parse",null,"l(0,0,0,1)"+a).firstDefined().stops),c.is(d,"array")){for(var f=0;fh;h++){var i=f[h];d.addStop(i.color,i.offset)}return d}function j(a,b,h,i,j){var k=c._.make("linearGradient",a);return k.stops=d,k.addStop=e,k.getBBox=f,k.setStops=g,null!=b&&l(k.node,{x1:b,y1:h,x2:i,y2:j}),k}function k(a,b,g,h,i,j){var k=c._.make("radialGradient",a);return k.stops=d,k.addStop=e,k.getBBox=f,null!=b&&l(k.node,{cx:b,cy:g,r:h}),null!=i&&null!=j&&l(k.node,{fx:i,fy:j}),k}var l=c._.$;h.gradient=function(a){return i(this.defs,a)},h.gradientLinear=function(a,b,c,d){return 
j(this.defs,a,b,c,d)},h.gradientRadial=function(a,b,c,d,e){return k(this.defs,a,b,c,d,e)},h.toString=function(){var a,b=this.node.ownerDocument,d=b.createDocumentFragment(),e=b.createElement("div"),f=this.node.cloneNode(!0);return d.appendChild(e),e.appendChild(f),c._.$(f,{xmlns:"http://www.w3.org/2000/svg"}),a=e.innerHTML,d.removeChild(d.firstChild),a},h.toDataURL=function(){return a&&a.btoa?"data:image/svg+xml;base64,"+btoa(unescape(encodeURIComponent(this))):void 0},h.clear=function(){for(var a,b=this.node.firstChild;b;)a=b.nextSibling,"defs"!=b.tagName?b.parentNode.removeChild(b):h.clear.call({node:b}),b=a}}()}),d.plugin(function(a,b,c,d){function e(a){var b=e.ps=e.ps||{};return b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[M](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])}),b[a]}function f(a,b,c,d){return null==a&&(a=b=c=d=0),null==b&&(b=a.y,c=a.width,d=a.height,a=a.x),{x:a,y:b,width:c,w:c,height:d,h:d,x2:a+c,y2:b+d,cx:a+c/2,cy:b+d/2,r1:P.min(c,d)/2,r2:P.max(c,d)/2,r0:P.sqrt(c*c+d*d)/2,path:y(a,b,c,d),vb:[a,b,c,d].join(" ")}}function g(){return this.join(",").replace(N,"$1")}function h(a){var b=L(a);return b.toString=g,b}function i(a,b,c,d,e,f,g,h,i){return null==i?p(a,b,c,d,e,f,g,h):k(a,b,c,d,e,f,g,h,q(a,b,c,d,e,f,g,h,i))}function j(c,d){function e(a){return+(+a).toFixed(3)}return a._.cacher(function(a,f,g){a instanceof b&&(a=a.attr("d")),a=G(a);for(var h,j,l,m,n,o="",p={},q=0,r=0,s=a.length;s>r;r++){if(l=a[r],"M"==l[0])h=+l[1],j=+l[2];else{if(m=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6]),q+m>f){if(d&&!p.start){if(n=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6],f-q),o+=["C"+e(n.start.x),e(n.start.y),e(n.m.x),e(n.m.y),e(n.x),e(n.y)],g)return o;p.start=o,o=["M"+e(n.x),e(n.y)+"C"+e(n.n.x),e(n.n.y),e(n.end.x),e(n.end.y),e(l[5]),e(l[6])].join(),q+=m,h=+l[5],j=+l[6];continue}if(!c&&!d)return n=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6],f-q)}q+=m,h=+l[5],j=+l[6]}o+=l.shift()+l}return 
p.end=o,n=c?q:d?p:k(h,j,l[0],l[1],l[2],l[3],l[4],l[5],1)},null,a._.clone)}function k(a,b,c,d,e,f,g,h,i){var j=1-i,k=T(j,3),l=T(j,2),m=i*i,n=m*i,o=k*a+3*l*i*c+3*j*i*i*e+n*g,p=k*b+3*l*i*d+3*j*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,w=j*e+i*g,x=j*f+i*h,y=90-180*P.atan2(q-s,r-t)/Q;return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:w,y:x},alpha:y}}function l(b,c,d,e,g,h,i,j){a.is(b,"array")||(b=[b,c,d,e,g,h,i,j]);var k=F.apply(null,b);return f(k.min.x,k.min.y,k.max.x-k.min.x,k.max.y-k.min.y)}function m(a,b,c){return b>=a.x&&b<=a.x+a.width&&c>=a.y&&c<=a.y+a.height}function n(a,b){return a=f(a),b=f(b),m(b,a.x,a.y)||m(b,a.x2,a.y)||m(b,a.x,a.y2)||m(b,a.x2,a.y2)||m(a,b.x,b.y)||m(a,b.x2,b.y)||m(a,b.x,b.y2)||m(a,b.x2,b.y2)||(a.xb.x||b.xa.x)&&(a.yb.y||b.ya.y)}function o(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function p(a,b,c,d,e,f,g,h,i){null==i&&(i=1),i=i>1?1:0>i?0:i;for(var j=i/2,k=12,l=[-.1252,.1252,-.3678,.3678,-.5873,.5873,-.7699,.7699,-.9041,.9041,-.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0,p=0;k>p;p++){var q=j*l[p]+j,r=o(q,a,c,e,g),s=o(q,b,d,f,h),t=r*r+s*s;n+=m[p]*P.sqrt(t)}return j*n}function q(a,b,c,d,e,f,g,h,i){if(!(0>i||p(a,b,c,d,e,f,g,h)n;)l/=2,m+=(i>j?1:-1)*l,j=p(a,b,c,d,e,f,g,h,m);return m}}function r(a,b,c,d,e,f,g,h){if(!(S(a,c)S(e,g)||S(b,d)S(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(k){var l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(!(n<+R(a,c).toFixed(2)||n>+S(a,c).toFixed(2)||n<+R(e,g).toFixed(2)||n>+S(e,g).toFixed(2)||o<+R(b,d).toFixed(2)||o>+S(b,d).toFixed(2)||o<+R(f,h).toFixed(2)||o>+S(f,h).toFixed(2)))return{x:l,y:m}}}}function s(a,b,c){var d=l(a),e=l(b);if(!n(d,e))return c?0:[];for(var f=p.apply(0,a),g=p.apply(0,b),h=~~(f/8),i=~~(g/8),j=[],m=[],o={},q=c?0:[],s=0;h+1>s;s++){var 
t=k.apply(0,a.concat(s/h));j.push({x:t.x,y:t.y,t:s/h})}for(s=0;i+1>s;s++)t=k.apply(0,b.concat(s/i)),m.push({x:t.x,y:t.y,t:s/i});for(s=0;h>s;s++)for(var u=0;i>u;u++){var v=j[s],w=j[s+1],x=m[u],y=m[u+1],z=U(w.x-v.x)<.001?"y":"x",A=U(y.x-x.x)<.001?"y":"x",B=r(v.x,v.y,w.x,w.y,x.x,x.y,y.x,y.y);if(B){if(o[B.x.toFixed(4)]==B.y.toFixed(4))continue;o[B.x.toFixed(4)]=B.y.toFixed(4);var C=v.t+U((B[z]-v[z])/(w[z]-v[z]))*(w.t-v.t),D=x.t+U((B[A]-x[A])/(y[A]-x[A]))*(y.t-x.t);C>=0&&1>=C&&D>=0&&1>=D&&(c?q++:q.push({x:B.x,y:B.y,t1:C,t2:D}))}}return q}function t(a,b){return v(a,b)}function u(a,b){return v(a,b,1)}function v(a,b,c){a=G(a),b=G(b);for(var d,e,f,g,h,i,j,k,l,m,n=c?0:[],o=0,p=a.length;p>o;o++){var q=a[o];if("M"==q[0])d=h=q[1],e=i=q[2];else{"C"==q[0]?(l=[d,e].concat(q.slice(1)),d=l[6],e=l[7]):(l=[d,e,d,e,h,i,h,i],d=h,e=i);for(var r=0,t=b.length;t>r;r++){var u=b[r];if("M"==u[0])f=j=u[1],g=k=u[2];else{"C"==u[0]?(m=[f,g].concat(u.slice(1)),f=m[6],g=m[7]):(m=[f,g,f,g,j,k,j,k],f=j,g=k);var v=s(l,m,c);if(c)n+=v;else{for(var w=0,x=v.length;x>w;w++)v[w].segment1=o,v[w].segment2=r,v[w].bez1=l,v[w].bez2=m;n=n.concat(v)}}}}}return n}function w(a,b,c){var d=x(a);return m(d,b,c)&&v(a,[["M",b,c],["H",d.x2+10]],1)%2==1}function x(a){var b=e(a);if(b.bbox)return L(b.bbox);if(!a)return f();a=G(a);for(var c,d=0,g=0,h=[],i=[],j=0,k=a.length;k>j;j++)if(c=a[j],"M"==c[0])d=c[1],g=c[2],h.push(d),i.push(g);else{var l=F(d,g,c[1],c[2],c[3],c[4],c[5],c[6]);h=h.concat(l.min.x,l.max.x),i=i.concat(l.min.y,l.max.y),d=c[5],g=c[6]}var m=R.apply(0,h),n=R.apply(0,i),o=S.apply(0,h),p=S.apply(0,i),q=f(m,n,o-m,p-n);return b.bbox=L(q),q}function y(a,b,c,d,e){if(e)return[["M",+a+ +e,b],["l",c-2*e,0],["a",e,e,0,0,1,e,e],["l",0,d-2*e],["a",e,e,0,0,1,-e,e],["l",2*e-c,0],["a",e,e,0,0,1,-e,-e],["l",0,2*e-d],["a",e,e,0,0,1,e,-e],["z"]];var f=[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]];return f.toString=g,f}function z(a,b,c,d,e){if(null==e&&null==d&&(d=c),a=+a,b=+b,c=+c,d=+d,null!=e)var 
f=Math.PI/180,h=a+c*Math.cos(-d*f),i=a+c*Math.cos(-e*f),j=b+c*Math.sin(-d*f),k=b+c*Math.sin(-e*f),l=[["M",h,j],["A",c,c,0,+(e-d>180),0,i,k]];else l=[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]];return l.toString=g,l}function A(b){var c=e(b),d=String.prototype.toLowerCase;if(c.rel)return h(c.rel);a.is(b,"array")&&a.is(b&&b[0],"array")||(b=a.parsePathString(b));var f=[],i=0,j=0,k=0,l=0,m=0;"M"==b[0][0]&&(i=b[0][1],j=b[0][2],k=i,l=j,m++,f.push(["M",i,j]));for(var n=m,o=b.length;o>n;n++){var p=f[n]=[],q=b[n];if(q[0]!=d.call(q[0]))switch(p[0]=d.call(q[0]),p[0]){case"a":p[1]=q[1],p[2]=q[2],p[3]=q[3],p[4]=q[4],p[5]=q[5],p[6]=+(q[6]-i).toFixed(3),p[7]=+(q[7]-j).toFixed(3);break;case"v":p[1]=+(q[1]-j).toFixed(3);break;case"m":k=q[1],l=q[2];default:for(var r=1,s=q.length;s>r;r++)p[r]=+(q[r]-(r%2?i:j)).toFixed(3)}else{p=f[n]=[],"m"==q[0]&&(k=q[1]+i,l=q[2]+j);for(var t=0,u=q.length;u>t;t++)f[n][t]=q[t]}var v=f[n].length;switch(f[n][0]){case"z":i=k,j=l;break;case"h":i+=+f[n][v-1];break;case"v":j+=+f[n][v-1];break;default:i+=+f[n][v-2],j+=+f[n][v-1]}}return f.toString=g,c.rel=h(f),f}function B(b){var c=e(b);if(c.abs)return h(c.abs);if(K(b,"array")&&K(b&&b[0],"array")||(b=a.parsePathString(b)),!b||!b.length)return[["M",0,0]];var d,f=[],i=0,j=0,k=0,l=0,m=0;"M"==b[0][0]&&(i=+b[0][1],j=+b[0][2],k=i,l=j,m++,f[0]=["M",i,j]);for(var n,o,p=3==b.length&&"M"==b[0][0]&&"R"==b[1][0].toUpperCase()&&"Z"==b[2][0].toUpperCase(),q=m,r=b.length;r>q;q++){if(f.push(n=[]),o=b[q],d=o[0],d!=d.toUpperCase())switch(n[0]=d.toUpperCase(),n[0]){case"A":n[1]=o[1],n[2]=o[2],n[3]=o[3],n[4]=o[4],n[5]=o[5],n[6]=+o[6]+i,n[7]=+o[7]+j;break;case"V":n[1]=+o[1]+j;break;case"H":n[1]=+o[1]+i;break;case"R":for(var 
s=[i,j].concat(o.slice(1)),t=2,u=s.length;u>t;t++)s[t]=+s[t]+i,s[++t]=+s[t]+j;f.pop(),f=f.concat(I(s,p));break;case"O":f.pop(),s=z(i,j,o[1],o[2]),s.push(s[0]),f=f.concat(s);break;case"U":f.pop(),f=f.concat(z(i,j,o[1],o[2],o[3])),n=["U"].concat(f[f.length-1].slice(-2));break;case"M":k=+o[1]+i,l=+o[2]+j;default:for(t=1,u=o.length;u>t;t++)n[t]=+o[t]+(t%2?i:j)}else if("R"==d)s=[i,j].concat(o.slice(1)),f.pop(),f=f.concat(I(s,p)),n=["R"].concat(o.slice(-2));else if("O"==d)f.pop(),s=z(i,j,o[1],o[2]),s.push(s[0]),f=f.concat(s);else if("U"==d)f.pop(),f=f.concat(z(i,j,o[1],o[2],o[3])),n=["U"].concat(f[f.length-1].slice(-2));else for(var v=0,w=o.length;w>v;v++)n[v]=o[v];if(d=d.toUpperCase(),"O"!=d)switch(n[0]){case"Z":i=+k,j=+l;break;case"H":i=n[1];break;case"V":j=n[1];break;case"M":k=n[n.length-2],l=n[n.length-1];default:i=n[n.length-2],j=n[n.length-1]}}return f.toString=g,c.abs=h(f),f}function C(a,b,c,d){return[a,b,c,d,c,d]}function D(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]}function E(b,c,d,e,f,g,h,i,j,k){var l,m=120*Q/180,n=Q/180*(+f||0),o=[],p=a._.cacher(function(a,b,c){var d=a*P.cos(c)-b*P.sin(c),e=a*P.sin(c)+b*P.cos(c);return{x:d,y:e}});if(!d||!e)return[b,c,i,j,i,j];if(k)y=k[0],z=k[1],w=k[2],x=k[3];else{l=p(b,c,-n),b=l.x,c=l.y,l=p(i,j,-n),i=l.x,j=l.y;var q=(P.cos(Q/180*f),P.sin(Q/180*f),(b-i)/2),r=(c-j)/2,s=q*q/(d*d)+r*r/(e*e);s>1&&(s=P.sqrt(s),d=s*d,e=s*e);var t=d*d,u=e*e,v=(g==h?-1:1)*P.sqrt(U((t*u-t*r*r-u*q*q)/(t*r*r+u*q*q))),w=v*d*r/e+(b+i)/2,x=v*-e*q/d+(c+j)/2,y=P.asin(((c-x)/e).toFixed(9)),z=P.asin(((j-x)/e).toFixed(9));y=w>b?Q-y:y,z=w>i?Q-z:z,0>y&&(y=2*Q+y),0>z&&(z=2*Q+z),h&&y>z&&(y-=2*Q),!h&&z>y&&(z-=2*Q)}var A=z-y;if(U(A)>m){var B=z,C=i,D=j;z=y+m*(h&&z>y?1:-1),i=w+d*P.cos(z),j=x+e*P.sin(z),o=E(i,j,d,e,f,0,h,C,D,[z,B,w,x])}A=z-y;var 
F=P.cos(y),G=P.sin(y),H=P.cos(z),I=P.sin(z),J=P.tan(A/4),K=4/3*d*J,L=4/3*e*J,M=[b,c],N=[b+K*G,c-L*F],O=[i+K*I,j-L*H],R=[i,j];if(N[0]=2*M[0]-N[0],N[1]=2*M[1]-N[1],k)return[N,O,R].concat(o);o=[N,O,R].concat(o).join().split(",");for(var S=[],T=0,V=o.length;V>T;T++)S[T]=T%2?p(o[T-1],o[T],n).y:p(o[T],o[T+1],n).x;return S}function F(a,b,c,d,e,f,g,h){for(var i,j,k,l,m,n,o,p,q=[],r=[[],[]],s=0;2>s;++s)if(0==s?(j=6*a-12*c+6*e,i=-3*a+9*c-9*e+3*g,k=3*c-3*a):(j=6*b-12*d+6*f,i=-3*b+9*d-9*f+3*h,k=3*d-3*b),U(i)<1e-12){if(U(j)<1e-12)continue;l=-k/j,l>0&&1>l&&q.push(l)}else o=j*j-4*k*i,p=P.sqrt(o),0>o||(m=(-j+p)/(2*i),m>0&&1>m&&q.push(m),n=(-j-p)/(2*i),n>0&&1>n&&q.push(n));for(var t,u=q.length,v=u;u--;)l=q[u],t=1-l,r[0][u]=t*t*t*a+3*t*t*l*c+3*t*l*l*e+l*l*l*g,r[1][u]=t*t*t*b+3*t*t*l*d+3*t*l*l*f+l*l*l*h;return r[0][v]=a,r[1][v]=b,r[0][v+1]=g,r[1][v+1]=h,r[0].length=r[1].length=v+2,{min:{x:R.apply(0,r[0]),y:R.apply(0,r[1])},max:{x:S.apply(0,r[0]),y:S.apply(0,r[1])}}}function G(a,b){var c=!b&&e(a);if(!b&&c.curve)return h(c.curve);for(var d=B(a),f=b&&B(b),g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},i={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},j=(function(a,b,c){var d,e;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];switch(!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null),a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"].concat(E.apply(0,[b.x,b.y].concat(a.slice(1))));break;case"S":"C"==c||"S"==c?(d=2*b.x-b.bx,e=2*b.y-b.by):(d=b.x,e=b.y),a=["C",d,e].concat(a.slice(1));break;case"T":"Q"==c||"T"==c?(b.qx=2*b.x-b.qx,b.qy=2*b.y-b.qy):(b.qx=b.x,b.qy=b.y),a=["C"].concat(D(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"].concat(D(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"].concat(C(b.x,b.y,a[1],a[2]));break;case"H":a=["C"].concat(C(b.x,b.y,a[1],b.y));break;case"V":a=["C"].concat(C(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"].concat(C(b.x,b.y,b.X,b.Y))}return a}),k=function(a,b){if(a[b].length>7){a[b].shift();for(var 
c=a[b];c.length;)m[b]="A",f&&(n[b]="A"),a.splice(b++,0,["C"].concat(c.splice(0,6)));a.splice(b,1),r=S(d.length,f&&f.length||0)}},l=function(a,b,c,e,g){a&&b&&"M"==a[g][0]&&"M"!=b[g][0]&&(b.splice(g,0,["M",e.x,e.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],r=S(d.length,f&&f.length||0))},m=[],n=[],o="",p="",q=0,r=S(d.length,f&&f.length||0);r>q;q++){d[q]&&(o=d[q][0]),"C"!=o&&(m[q]=o,q&&(p=m[q-1])),d[q]=j(d[q],g,p),"A"!=m[q]&&"C"==o&&(m[q]="C"),k(d,q),f&&(f[q]&&(o=f[q][0]),"C"!=o&&(n[q]=o,q&&(p=n[q-1])),f[q]=j(f[q],i,p),"A"!=n[q]&&"C"==o&&(n[q]="C"),k(f,q)),l(d,f,g,i,q),l(f,d,i,g,q);var s=d[q],t=f&&f[q],u=s.length,v=f&&t.length;g.x=s[u-2],g.y=s[u-1],g.bx=O(s[u-4])||g.x,g.by=O(s[u-3])||g.y,i.bx=f&&(O(t[v-4])||i.x),i.by=f&&(O(t[v-3])||i.y),i.x=f&&t[v-2],i.y=f&&t[v-1]}return f||(c.curve=h(d)),f?[d,f]:d}function H(a,b){if(!b)return a;var c,d,e,f,g,h,i;for(a=G(a),e=0,g=a.length;g>e;e++)for(i=a[e],f=1,h=i.length;h>f;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d;return a}function I(a,b){for(var c=[],d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}var J=b.prototype,K=a.is,L=a._.clone,M="hasOwnProperty",N=/,?([a-z]),?/gi,O=parseFloat,P=Math,Q=P.PI,R=P.min,S=P.max,T=P.pow,U=P.abs,V=j(1),W=j(),X=j(0,1),Y=a._unit2px,Z={path:function(a){return a.attr("path")},circle:function(a){var b=Y(a);return z(b.cx,b.cy,b.r)},ellipse:function(a){var b=Y(a); +return z(b.cx||0,b.cy||0,b.rx,b.ry)},rect:function(a){var b=Y(a);return y(b.x||0,b.y||0,b.width,b.height,b.rx,b.ry)},image:function(a){var b=Y(a);return 
y(b.x||0,b.y||0,b.width,b.height)},line:function(a){return"M"+[a.attr("x1")||0,a.attr("y1")||0,a.attr("x2"),a.attr("y2")]},polyline:function(a){return"M"+a.attr("points")},polygon:function(a){return"M"+a.attr("points")+"z"},deflt:function(a){var b=a.node.getBBox();return y(b.x,b.y,b.width,b.height)}};a.path=e,a.path.getTotalLength=V,a.path.getPointAtLength=W,a.path.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return X(a,b).end;var d=X(a,c,1);return b?X(d,b).end:d},J.getTotalLength=function(){return this.node.getTotalLength?this.node.getTotalLength():void 0},J.getPointAtLength=function(a){return W(this.attr("d"),a)},J.getSubpath=function(b,c){return a.path.getSubpath(this.attr("d"),b,c)},a._.box=f,a.path.findDotsAtSegment=k,a.path.bezierBBox=l,a.path.isPointInsideBBox=m,a.closest=function(b,c,d,e){for(var g=100,h=f(b-g/2,c-g/2,g,g),i=[],j=d[0].hasOwnProperty("x")?function(a){return{x:d[a].x,y:d[a].y}}:function(a){return{x:d[a],y:e[a]}},k=0;1e6>=g&&!k;){for(var l=0,n=d.length;n>l;l++){var o=j(l);if(m(h,o.x,o.y)){k++,i.push(o);break}}k||(g*=2,h=f(b-g/2,c-g/2,g,g))}if(1e6!=g){var p,q=1/0;for(l=0,n=i.length;n>l;l++){var r=a.len(b,c,i[l].x,i[l].y);q>r&&(q=r,i[l].len=r,p=i[l])}return p}},a.path.isBBoxIntersect=n,a.path.intersection=t,a.path.intersectionNumber=u,a.path.isPointInside=w,a.path.getBBox=x,a.path.get=Z,a.path.toRelative=A,a.path.toAbsolute=B,a.path.toCubic=G,a.path.map=H,a.path.toString=g,a.path.clone=h}),d.plugin(function(a,d,e,f){var g=Math.max,h=Math.min,i=function(a){if(this.items=[],this.bindings={},this.length=0,this.type="set",a)for(var b=0,c=a.length;c>b;b++)a[b]&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},j=i.prototype;j.push=function(){for(var a,b,c=0,d=arguments.length;d>c;c++)a=arguments[c],a&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},j.pop=function(){return this.length&&delete this[this.length--],this.items.pop()},j.forEach=function(a,b){for(var 
c=0,d=this.items.length;d>c;c++)if(a.call(b,this.items[c],c)===!1)return this;return this},j.animate=function(d,e,f,g){"function"!=typeof f||f.length||(g=f,f=c.linear),d instanceof a._.Animation&&(g=d.callback,f=d.easing,e=f.dur,d=d.attr);var h=arguments;if(a.is(d,"array")&&a.is(h[h.length-1],"array"))var i=!0;var j,k=function(){j?this.b=j:j=this.b},l=0,m=this,n=g&&function(){++l==m.length&&g.call(this)};return this.forEach(function(a,c){b.once("snap.animcreated."+a.id,k),i?h[c]&&a.animate.apply(a,h[c]):a.animate(d,e,f,n)})},j.remove=function(){for(;this.length;)this.pop().remove();return this},j.bind=function(a,b,c){var d={};if("function"==typeof b)this.bindings[a]=b;else{var e=c||a;this.bindings[a]=function(a){d[e]=a,b.attr(d)}}return this},j.attr=function(a){var b={};for(var c in a)this.bindings[c]?this.bindings[c](a[c]):b[c]=a[c];for(var d=0,e=this.items.length;e>d;d++)this.items[d].attr(b);return this},j.clear=function(){for(;this.length;)this.pop()},j.splice=function(a,b,c){a=0>a?g(this.length+a,0):a,b=g(0,h(this.length-a,b));var d,e=[],f=[],j=[];for(d=2;dd;d++)f.push(this[a+d]);for(;dd?j[d]:e[d-k];for(d=this.items.length=this.length-=b-k;this[d];)delete this[d++];return new i(f)},j.exclude=function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]==a)return this.splice(b,1),!0;return!1},j.insertAfter=function(a){for(var b=this.items.length;b--;)this.items[b].insertAfter(a);return this},j.getBBox=function(){for(var a=[],b=[],c=[],d=[],e=this.items.length;e--;)if(!this.items[e].removed){var f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}return a=h.apply(0,a),b=h.apply(0,b),c=g.apply(0,c),d=g.apply(0,d),{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b,cx:a+(c-a)/2,cy:b+(d-b)/2}},j.clone=function(a){a=new i;for(var b=0,c=this.items.length;c>b;b++)a.push(this.items[b].clone());return a},j.toString=function(){return"Snap‘s set"},j.type="set",a.Set=i,a.set=function(){var a=new i;return 
arguments.length&&a.push.apply(a,Array.prototype.slice.call(arguments,0)),a}}),d.plugin(function(a,c,d,e){function f(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return 4==a.length?[b,0,a[2],a[3]]:[b,0];case"s":return 5==a.length?[b,1,1,a[3],a[4]]:3==a.length?[b,1,1]:[b,1]}}function g(b,c,d){b=b||new a.Matrix,c=c||new a.Matrix,b=a.parseTransformString(b.toTransformString())||[],c=a.parseTransformString(c.toTransformString())||[];for(var e,g,h,i,j=Math.max(b.length,c.length),k=[],n=[],o=0;j>o;o++){if(h=b[o]||f(c[o]),i=c[o]||f(h),h[0]!=i[0]||"r"==h[0].toLowerCase()&&(h[2]!=i[2]||h[3]!=i[3])||"s"==h[0].toLowerCase()&&(h[3]!=i[3]||h[4]!=i[4])){b=a._.transform2matrix(b,d()),c=a._.transform2matrix(c,d()),k=[["m",b.a,b.b,b.c,b.d,b.e,b.f]],n=[["m",c.a,c.b,c.c,c.d,c.e,c.f]];break}for(k[o]=[],n[o]=[],e=0,g=Math.max(h.length,i.length);g>e;e++)e in h&&(k[o][e]=h[e]),e in i&&(n[o][e]=i[e])}return{from:m(k),to:m(n),f:l(k)}}function h(a){return a}function i(a){return function(b){return+b.toFixed(3)+a}}function j(a){return a.join(" ")}function k(b){return a.rgb(b[0],b[1],b[2],b[3])}function l(a){var b,c,d,e,f,g,h=0,i=[];for(b=0,c=a.length;c>b;b++){for(f="[",g=['"'+a[b][0]+'"'],d=1,e=a[b].length;e>d;d++)g[d]="val["+h++ +"]";f+=g+"]",i[b]=f}return Function("val","return Snap.path.toString.call(["+i+"])")}function m(a){for(var b=[],c=0,d=a.length;d>c;c++)for(var e=1,f=a[c].length;f>e;e++)b.push(a[c][e]);return b}function n(a){return isFinite(a)}function o(b,c){return a.is(b,"array")&&a.is(c,"array")?b.toString()==c.toString():!1}var p={},q=/[%a-z]+$/i,r=String;p.stroke=p.fill="colour",c.prototype.equal=function(a,c){return b("snap.util.equal",this,a,c).firstDefined()},b.on("snap.util.equal",function(b,c){var d,e,f=r(this.attr(b)||""),s=this;if("colour"==p[b])return d=a.color(f),e=a.color(c),{from:[d.r,d.g,d.b,d.opacity],to:[e.r,e.g,e.b,e.opacity],f:k};if("viewBox"==b)return d=this.attr(b).vb.split(" ").map(Number),e=c.split(" 
").map(Number),{from:d,to:e,f:j};if("transform"==b||"gradientTransform"==b||"patternTransform"==b)return"string"==typeof c&&(c=r(c).replace(/\.{3}|\u2026/g,f)),f=this.matrix,c=a._.rgTransform.test(c)?a._.transform2matrix(c,this.getBBox()):a._.transform2matrix(a._.svgTransform2string(c),this.getBBox()),g(f,c,function(){return s.getBBox(1)});if("d"==b||"path"==b)return d=a.path.toCubic(f,c),{from:m(d[0]),to:m(d[1]),f:l(d[0])};if("points"==b)return d=r(f).split(a._.separator),e=r(c).split(a._.separator),{from:d,to:e,f:function(a){return a}};if(n(f)&&n(c))return{from:parseFloat(f),to:parseFloat(c),f:h};var t=f.match(q),u=r(c).match(q);return t&&o(t,u)?{from:parseFloat(f),to:parseFloat(c),f:i(t)}:{from:this.asPX(b),to:this.asPX(b,c),f:h}})}),d.plugin(function(a,c,d,e){for(var f=c.prototype,g="hasOwnProperty",h=("createTouch"in e.doc),i=["click","dblclick","mousedown","mousemove","mouseout","mouseover","mouseup","touchstart","touchmove","touchend","touchcancel"],j={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},k=(function(a,b){var c="y"==a?"scrollTop":"scrollLeft",d=b&&b.node?b.node.ownerDocument:e.doc;return d[c in d.documentElement?"documentElement":"body"][c]}),l=function(){return this.originalEvent.preventDefault()},m=function(){return this.originalEvent.stopPropagation()},n=function(a,b,c,d){var e=h&&j[b]?j[b]:b,f=function(e){var f=k("y",d),i=k("x",d);if(h&&j[g](b))for(var n=0,o=e.targetTouches&&e.targetTouches.length;o>n;n++)if(e.targetTouches[n].target==a||a.contains(e.targetTouches[n].target)){var p=e;e=e.targetTouches[n],e.originalEvent=p,e.preventDefault=l,e.stopPropagation=m;break}var q=e.clientX+i,r=e.clientY+f;return c.call(d,e,q,r)};return b!==e&&a.addEventListener(b,f,!1),a.addEventListener(e,f,!1),function(){return b!==e&&a.removeEventListener(b,f,!1),a.removeEventListener(e,f,!1),!0}},o=[],p=function(a){for(var c,d=a.clientX,e=a.clientY,f=k("y"),g=k("x"),i=o.length;i--;){if(c=o[i],h){for(var 
j,l=a.touches&&a.touches.length;l--;)if(j=a.touches[l],j.identifier==c.el._drag.id||c.el.node.contains(j.target)){d=j.clientX,e=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}else a.preventDefault();var m=c.el.node;m.nextSibling,m.parentNode,m.style.display;d+=g,e+=f,b("snap.drag.move."+c.el.id,c.move_scope||c.el,d-c.el._drag.x,e-c.el._drag.y,d,e,a)}},q=function(c){a.unmousemove(p).unmouseup(q);for(var d,e=o.length;e--;)d=o[e],d.el._drag={},b("snap.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,c),b.off("snap.drag.*."+d.el.id);o=[]},r=i.length;r--;)!function(b){a[b]=f[b]=function(c,d){if(a.is(c,"function"))this.events=this.events||[],this.events.push({name:b,f:c,unbind:n(this.node||document,b,c,d||this)});else for(var e=0,f=this.events.length;f>e;e++)if(this.events[e].name==b)try{this.events[e].f.call(this)}catch(g){}return this},a["un"+b]=f["un"+b]=function(a){for(var c=this.events||[],d=c.length;d--;)if(c[d].name==b&&(c[d].f==a||!a))return c[d].unbind(),c.splice(d,1),!c.length&&delete this.events,this;return this}}(i[r]);f.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},f.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var s=[];f.drag=function(c,d,e,f,g,h){function i(i,j,l){(i.originalEvent||i).preventDefault(),k._drag.x=j,k._drag.y=l,k._drag.id=i.identifier,!o.length&&a.mousemove(p).mouseup(q),o.push({el:k,move_scope:f,start_scope:g,end_scope:h}),d&&b.on("snap.drag.start."+k.id,d),c&&b.on("snap.drag.move."+k.id,c),e&&b.on("snap.drag.end."+k.id,e),b("snap.drag.start."+k.id,g||f||k,j,l,i)}function j(a,c,d){b("snap.draginit."+k.id,k,a,c,d)}var k=this;if(!arguments.length){var l;return k.drag(function(a,b){this.attr({transform:l+(l?"T":"t")+[a,b]})},function(){l=this.transform().local})}return b.on("snap.draginit."+k.id,i),k._drag={},s.push({el:k,start:i,init:j}),k.mousedown(j),k},f.undrag=function(){for(var 
c=s.length;c--;)s[c].el==this&&(this.unmousedown(s[c].init),s.splice(c,1),b.unbind("snap.drag.*."+this.id),b.unbind("snap.draginit."+this.id));return!s.length&&a.unmousemove(p).unmouseup(q),this}}),d.plugin(function(a,c,d,e){var f=(c.prototype,d.prototype),g=/^\s*url\((.+)\)/,h=String,i=a._.$;a.filter={},f.filter=function(b){var d=this;"svg"!=d.type&&(d=d.paper);var e=a.parse(h(b)),f=a._.id(),g=(d.node.offsetWidth,d.node.offsetHeight,i("filter"));return i(g,{id:f,filterUnits:"userSpaceOnUse"}),g.appendChild(e.node),d.defs.appendChild(g),new c(g)},b.on("snap.util.getattr.filter",function(){b.stop();var c=i(this.node,"filter");if(c){var d=h(c).match(g);return d&&a.select(d[1])}}),b.on("snap.util.attr.filter",function(d){if(d instanceof c&&"filter"==d.type){b.stop();var e=d.node.id;e||(i(d.node,{id:d.id}),e=d.id),i(this.node,{filter:a.url(e)})}d&&"none"!=d||(b.stop(),this.node.removeAttribute("filter"))}),a.filter.blur=function(b,c){null==b&&(b=2);var d=null==c?b:[b,c];return a.format('',{def:d})},a.filter.blur.toString=function(){return this()},a.filter.shadow=function(b,c,d,e,f){return null==f&&(null==e?(f=d,d=4,e="#000"):(f=e,e=d,d=4)),null==d&&(d=4),null==f&&(f=1),null==b&&(b=0,c=2),null==c&&(c=b),e=a.color(e),a.format('',{color:e,dx:b,dy:c,blur:d,opacity:f})},a.filter.shadow.toString=function(){return this()},a.filter.grayscale=function(b){return null==b&&(b=1),a.format('',{a:.2126+.7874*(1-b),b:.7152-.7152*(1-b),c:.0722-.0722*(1-b),d:.2126-.2126*(1-b),e:.7152+.2848*(1-b),f:.0722-.0722*(1-b),g:.2126-.2126*(1-b),h:.0722+.9278*(1-b)})},a.filter.grayscale.toString=function(){return this()},a.filter.sepia=function(b){return null==b&&(b=1),a.format('',{a:.393+.607*(1-b),b:.769-.769*(1-b),c:.189-.189*(1-b),d:.349-.349*(1-b),e:.686+.314*(1-b),f:.168-.168*(1-b),g:.272-.272*(1-b),h:.534-.534*(1-b),i:.131+.869*(1-b)})},a.filter.sepia.toString=function(){return this()},a.filter.saturate=function(b){return 
null==b&&(b=1),a.format('',{amount:1-b})},a.filter.saturate.toString=function(){return this()},a.filter.hueRotate=function(b){return b=b||0,a.format('',{angle:b})},a.filter.hueRotate.toString=function(){return this()},a.filter.invert=function(b){return null==b&&(b=1),a.format('',{amount:b,amount2:1-b})},a.filter.invert.toString=function(){return this()},a.filter.brightness=function(b){return null==b&&(b=1),a.format('',{amount:b})},a.filter.brightness.toString=function(){return this()},a.filter.contrast=function(b){return null==b&&(b=1),a.format('',{amount:b,amount2:.5-b/2})},a.filter.contrast.toString=function(){return this()}}),d.plugin(function(a,b,c,d,e){var f=a._.box,g=a.is,h=/^[^a-z]*([tbmlrc])/i,i=function(){return"T"+this.dx+","+this.dy};b.prototype.getAlign=function(a,b){null==b&&g(a,"string")&&(b=a,a=null),a=a||this.paper;var c=a.getBBox?a.getBBox():f(a),d=this.getBBox(),e={};switch(b=b&&b.match(h),b=b?b[1].toLowerCase():"c"){case"t":e.dx=0,e.dy=c.y-d.y;break;case"b":e.dx=0,e.dy=c.y2-d.y2;break;case"m":e.dx=0,e.dy=c.cy-d.cy;break;case"l":e.dx=c.x-d.x,e.dy=0;break;case"r":e.dx=c.x2-d.x2,e.dy=0;break;default:e.dx=c.cx-d.cx,e.dy=0}return e.toString=i,e},b.prototype.align=function(a,b){return this.transform("..."+this.getAlign(a,b))}}),d.plugin(function(b,c,d,e){function f(a){a=a.split(/(?=#)/);var b=new String(a[5]);return b[50]=a[0],b[100]=a[1],b[200]=a[2],b[300]=a[3],b[400]=a[4],b[500]=a[5],b[600]=a[6],b[700]=a[7],b[800]=a[8],b[900]=a[9],a[10]&&(b.A100=a[10],b.A200=a[11],b.A400=a[12],b.A700=a[13]),b}var 
g="#ffebee#ffcdd2#ef9a9a#e57373#ef5350#f44336#e53935#d32f2f#c62828#b71c1c#ff8a80#ff5252#ff1744#d50000",h="#FCE4EC#F8BBD0#F48FB1#F06292#EC407A#E91E63#D81B60#C2185B#AD1457#880E4F#FF80AB#FF4081#F50057#C51162",i="#F3E5F5#E1BEE7#CE93D8#BA68C8#AB47BC#9C27B0#8E24AA#7B1FA2#6A1B9A#4A148C#EA80FC#E040FB#D500F9#AA00FF",j="#EDE7F6#D1C4E9#B39DDB#9575CD#7E57C2#673AB7#5E35B1#512DA8#4527A0#311B92#B388FF#7C4DFF#651FFF#6200EA",k="#E8EAF6#C5CAE9#9FA8DA#7986CB#5C6BC0#3F51B5#3949AB#303F9F#283593#1A237E#8C9EFF#536DFE#3D5AFE#304FFE",l="#E3F2FD#BBDEFB#90CAF9#64B5F6#64B5F6#2196F3#1E88E5#1976D2#1565C0#0D47A1#82B1FF#448AFF#2979FF#2962FF",m="#E1F5FE#B3E5FC#81D4FA#4FC3F7#29B6F6#03A9F4#039BE5#0288D1#0277BD#01579B#80D8FF#40C4FF#00B0FF#0091EA",n="#E0F7FA#B2EBF2#80DEEA#4DD0E1#26C6DA#00BCD4#00ACC1#0097A7#00838F#006064#84FFFF#18FFFF#00E5FF#00B8D4",o="#E0F2F1#B2DFDB#80CBC4#4DB6AC#26A69A#009688#00897B#00796B#00695C#004D40#A7FFEB#64FFDA#1DE9B6#00BFA5",p="#E8F5E9#C8E6C9#A5D6A7#81C784#66BB6A#4CAF50#43A047#388E3C#2E7D32#1B5E20#B9F6CA#69F0AE#00E676#00C853",q="#F1F8E9#DCEDC8#C5E1A5#AED581#9CCC65#8BC34A#7CB342#689F38#558B2F#33691E#CCFF90#B2FF59#76FF03#64DD17",r="#F9FBE7#F0F4C3#E6EE9C#DCE775#D4E157#CDDC39#C0CA33#AFB42B#9E9D24#827717#F4FF81#EEFF41#C6FF00#AEEA00",s="#FFFDE7#FFF9C4#FFF59D#FFF176#FFEE58#FFEB3B#FDD835#FBC02D#F9A825#F57F17#FFFF8D#FFFF00#FFEA00#FFD600",t="#FFF8E1#FFECB3#FFE082#FFD54F#FFCA28#FFC107#FFB300#FFA000#FF8F00#FF6F00#FFE57F#FFD740#FFC400#FFAB00",u="#FFF3E0#FFE0B2#FFCC80#FFB74D#FFA726#FF9800#FB8C00#F57C00#EF6C00#E65100#FFD180#FFAB40#FF9100#FF6D00",v="#FBE9E7#FFCCBC#FFAB91#FF8A65#FF7043#FF5722#F4511E#E64A19#D84315#BF360C#FF9E80#FF6E40#FF3D00#DD2C00",w="#EFEBE9#D7CCC8#BCAAA4#A1887F#8D6E63#795548#6D4C41#5D4037#4E342E#3E2723",x="#FAFAFA#F5F5F5#EEEEEE#E0E0E0#BDBDBD#9E9E9E#757575#616161#424242#212121",y="#ECEFF1#CFD8DC#B0BEC5#90A4AE#78909C#607D8B#546E7A#455A64#37474F#263238";b.mui={},b.flat={},b.mui.red=f(g),b.mui.pink=f(h),b.mui.purple=f(i),b.mui.deeppurple=f(j),b.mui.indigo=f(k),b.mui.blue=f(l),b.m
ui.lightblue=f(m),b.mui.cyan=f(n),b.mui.teal=f(o),b.mui.green=f(p),b.mui.lightgreen=f(q),b.mui.lime=f(r),b.mui.yellow=f(s),b.mui.amber=f(t),b.mui.orange=f(u),b.mui.deeporange=f(v),b.mui.brown=f(w),b.mui.grey=f(x),b.mui.bluegrey=f(y),b.flat.turquoise="#1abc9c",b.flat.greensea="#16a085",b.flat.sunflower="#f1c40f",b.flat.orange="#f39c12",b.flat.emerland="#2ecc71",b.flat.nephritis="#27ae60",b.flat.carrot="#e67e22",b.flat.pumpkin="#d35400",b.flat.peterriver="#3498db",b.flat.belizehole="#2980b9",b.flat.alizarin="#e74c3c",b.flat.pomegranate="#c0392b",b.flat.amethyst="#9b59b6",b.flat.wisteria="#8e44ad",b.flat.clouds="#ecf0f1",b.flat.silver="#bdc3c7",b.flat.wetasphalt="#34495e",b.flat.midnightblue="#2c3e50",b.flat.concrete="#95a5a6",b.flat.asbestos="#7f8c8d",b.importMUIColors=function(){for(var c in b.mui)b.mui.hasOwnProperty(c)&&(a[c]=b.mui[c])}}),d}); diff --git a/LetterDMS/jspsych/examples/js/webgazer/ridgeWorker.mjs b/LetterDMS/jspsych/examples/js/webgazer/ridgeWorker.mjs new file mode 100644 index 0000000..effea18 --- /dev/null +++ b/LetterDMS/jspsych/examples/js/webgazer/ridgeWorker.mjs @@ -0,0 +1,135 @@ +'use strict'; + +console.log('thread starting'); + +// Add src/util.mjs and src/mat.mjs to the same directory as your html file +importScripts('./worker_scripts/util.js', './worker_scripts/mat.js'); // [20200708] Figure out how to make all of this wrap up neatly +var ridgeParameter = Math.pow(10,-5); +var resizeWidth = 10; +var resizeHeight = 6; +var dataWindow = 700; +var trailDataWindow = 10; +var trainInterval = 500; + +var screenXClicksArray = new self.webgazer.util.DataWindow(dataWindow); +var screenYClicksArray = new self.webgazer.util.DataWindow(dataWindow); +var eyeFeaturesClicks = new self.webgazer.util.DataWindow(dataWindow); +var dataClicks = new self.webgazer.util.DataWindow(dataWindow); + +var screenXTrailArray = new self.webgazer.util.DataWindow(trailDataWindow); +var screenYTrailArray = new self.webgazer.util.DataWindow(trailDataWindow); +var 
eyeFeaturesTrail = new self.webgazer.util.DataWindow(trailDataWindow); +var dataTrail = new self.webgazer.util.DataWindow(trailDataWindow); + +/** + * Performs ridge regression, according to the Weka code. + * @param {Array} y - corresponds to screen coordinates (either x or y) for each of n click events + * @param {Array.>} X - corresponds to gray pixel features (120 pixels for both eyes) for each of n clicks + * @param {Array} k - ridge parameter + * @return{Array} regression coefficients + */ +function ridge(y, X, k){ + var nc = X[0].length; + var m_Coefficients = new Array(nc); + var xt = self.webgazer.mat.transpose(X); + var solution = new Array(); + var success = true; + do{ + var ss = self.webgazer.mat.mult(xt,X); + // Set ridge regression adjustment + for (var i = 0; i < nc; i++) { + ss[i][i] = ss[i][i] + k; + } + + // Carry out the regression + var bb = self.webgazer.mat.mult(xt,y); + for(var i = 0; i < nc; i++) { + m_Coefficients[i] = bb[i][0]; + } + try{ + var n = (m_Coefficients.length !== 0 ? m_Coefficients.length/m_Coefficients.length: 0); + if (m_Coefficients.length*n !== m_Coefficients.length){ + console.log('Array length must be a multiple of m') + } + solution = (ss.length === ss[0].length ? (self.webgazer.mat.LUDecomposition(ss,bb)) : (self.webgazer.mat.QRDecomposition(ss,bb))); + + for (var i = 0; i < nc; i++){ + m_Coefficients[i] = solution[i][0]; + } + success = true; + } + catch (ex){ + k *= 10; + console.log(ex); + success = false; + } + } while (!success); + return m_Coefficients; +} + +//TODO: still usefull ??? 
+/** + * + * @returns {Number} + */ +function getCurrentFixationIndex() { + var index = 0; + var recentX = this.screenXTrailArray.get(0); + var recentY = this.screenYTrailArray.get(0); + for (var i = this.screenXTrailArray.length - 1; i >= 0; i--) { + var currX = this.screenXTrailArray.get(i); + var currY = this.screenYTrailArray.get(i); + var euclideanDistance = Math.sqrt(Math.pow((currX-recentX),2)+Math.pow((currY-recentY),2)); + if (euclideanDistance > 72){ + return i+1; + } + } + return i; +} + +/** + * Event handler, it store screen position to allow training + * @param {Event} event - the receive event + */ +self.onmessage = function(event) { + var data = event.data; + var screenPos = data['screenPos']; + var eyes = data['eyes']; + var type = data['type']; + if (type === 'click') { + self.screenXClicksArray.push([screenPos[0]]); + self.screenYClicksArray.push([screenPos[1]]); + + self.eyeFeaturesClicks.push(eyes); + } else if (type === 'move') { + self.screenXTrailArray.push([screenPos[0]]); + self.screenYTrailArray.push([screenPos[1]]); + + self.eyeFeaturesTrail.push(eyes); + self.dataTrail.push({'eyes':eyes, 'screenPos':screenPos, 'type':type}); + } + self.needsTraining = true; +}; + +/** + * Compute coefficient from training data + */ +function retrain() { + if (self.screenXClicksArray.length === 0) { + return; + } + if (!self.needsTraining) { + return; + } + var screenXArray = self.screenXClicksArray.data.concat(self.screenXTrailArray.data); + var screenYArray = self.screenYClicksArray.data.concat(self.screenYTrailArray.data); + var eyeFeatures = self.eyeFeaturesClicks.data.concat(self.eyeFeaturesTrail.data); + + var coefficientsX = ridge(screenXArray, eyeFeatures, ridgeParameter); + var coefficientsY = ridge(screenYArray, eyeFeatures, ridgeParameter); + self.postMessage({'X':coefficientsX, 'Y': coefficientsY}); + self.needsTraining = false; +} + +setInterval(retrain, trainInterval); + diff --git a/LetterDMS/jspsych/examples/js/webgazer/webgazer.js 
b/LetterDMS/jspsych/examples/js/webgazer/webgazer.js new file mode 100644 index 0000000..6368f8a --- /dev/null +++ b/LetterDMS/jspsych/examples/js/webgazer/webgazer.js @@ -0,0 +1,88909 @@ +/*! + * + * WebGazer.js: Scalable Webcam EyeTracking Using User Interactions + * Copyright (c) 2016-2020, Brown HCI Group + * Licensed under GPLv3. Companies with a valuation of less than $1M can use WebGazer.js under LGPLv3. + * + */ +var webgazer = +/******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter }); +/******/ } +/******/ }; +/******/ +/******/ // define __esModule on exports +/******/ __webpack_require__.r = function(exports) { +/******/ if(typeof Symbol !== 'undefined' && 
Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ +/******/ // create a fake namespace object +/******/ // mode & 1: value is a module id, require it +/******/ // mode & 2: merge all properties of value into the ns +/******/ // mode & 4: return value when already ns object +/******/ // mode & 8|1: behave like require +/******/ __webpack_require__.t = function(value, mode) { +/******/ if(mode & 1) value = __webpack_require__(value); +/******/ if(mode & 8) return value; +/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value; +/******/ var ns = Object.create(null); +/******/ __webpack_require__.r(ns); +/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value }); +/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key)); +/******/ return ns; +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? 
+/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 90); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +// ESM COMPAT FLAG +__webpack_require__.r(__webpack_exports__); + +// EXPORTS +__webpack_require__.d(__webpack_exports__, "AdadeltaOptimizer", function() { return /* reexport */ adadelta_optimizer_AdadeltaOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdagradOptimizer", function() { return /* reexport */ adagrad_optimizer_AdagradOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdamOptimizer", function() { return /* reexport */ adam_optimizer_AdamOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdamaxOptimizer", function() { return /* reexport */ adamax_optimizer_AdamaxOptimizer; }); +__webpack_require__.d(__webpack_exports__, "MomentumOptimizer", function() { return /* reexport */ momentum_optimizer_MomentumOptimizer; }); +__webpack_require__.d(__webpack_exports__, "Optimizer", function() { return /* reexport */ optimizer_Optimizer; }); +__webpack_require__.d(__webpack_exports__, "RMSPropOptimizer", function() { return /* reexport */ rmsprop_optimizer_RMSPropOptimizer; }); +__webpack_require__.d(__webpack_exports__, "SGDOptimizer", function() { return /* reexport */ sgd_optimizer_SGDOptimizer; }); 
+__webpack_require__.d(__webpack_exports__, "Tensor", function() { return /* reexport */ dist_tensor["a" /* Tensor */]; }); +__webpack_require__.d(__webpack_exports__, "TensorBuffer", function() { return /* reexport */ dist_tensor["b" /* TensorBuffer */]; }); +__webpack_require__.d(__webpack_exports__, "Variable", function() { return /* reexport */ dist_tensor["c" /* Variable */]; }); +__webpack_require__.d(__webpack_exports__, "Rank", function() { return /* reexport */ dist_types["a" /* Rank */]; }); +__webpack_require__.d(__webpack_exports__, "sumOutType", function() { return /* reexport */ dist_types["b" /* sumOutType */]; }); +__webpack_require__.d(__webpack_exports__, "upcastType", function() { return /* reexport */ dist_types["c" /* upcastType */]; }); +__webpack_require__.d(__webpack_exports__, "add", function() { return /* reexport */ add; }); +__webpack_require__.d(__webpack_exports__, "addN", function() { return /* reexport */ addN; }); +__webpack_require__.d(__webpack_exports__, "atan2", function() { return /* reexport */ atan2; }); +__webpack_require__.d(__webpack_exports__, "avgPool", function() { return /* reexport */ avgPool; }); +__webpack_require__.d(__webpack_exports__, "avgPool3d", function() { return /* reexport */ avgPool3d; }); +__webpack_require__.d(__webpack_exports__, "batchToSpaceND", function() { return /* reexport */ batchToSpaceND; }); +__webpack_require__.d(__webpack_exports__, "batchNorm", function() { return /* reexport */ batchNorm; }); +__webpack_require__.d(__webpack_exports__, "batchNorm2d", function() { return /* reexport */ batchNorm2d; }); +__webpack_require__.d(__webpack_exports__, "batchNorm3d", function() { return /* reexport */ batchNorm3d; }); +__webpack_require__.d(__webpack_exports__, "batchNorm4d", function() { return /* reexport */ batchNorm4d; }); +__webpack_require__.d(__webpack_exports__, "broadcastTo", function() { return /* reexport */ broadcastTo; }); +__webpack_require__.d(__webpack_exports__, "clone", 
function() { return /* reexport */ clone; }); +__webpack_require__.d(__webpack_exports__, "complex", function() { return /* reexport */ complex["a" /* complex */]; }); +__webpack_require__.d(__webpack_exports__, "concat", function() { return /* reexport */ concat; }); +__webpack_require__.d(__webpack_exports__, "concat1d", function() { return /* reexport */ concat1d; }); +__webpack_require__.d(__webpack_exports__, "concat2d", function() { return /* reexport */ concat2d; }); +__webpack_require__.d(__webpack_exports__, "concat3d", function() { return /* reexport */ concat3d; }); +__webpack_require__.d(__webpack_exports__, "concat4d", function() { return /* reexport */ concat4d; }); +__webpack_require__.d(__webpack_exports__, "conv1d", function() { return /* reexport */ conv1d; }); +__webpack_require__.d(__webpack_exports__, "conv2d", function() { return /* reexport */ conv2d; }); +__webpack_require__.d(__webpack_exports__, "conv2dTranspose", function() { return /* reexport */ conv2dTranspose; }); +__webpack_require__.d(__webpack_exports__, "conv3d", function() { return /* reexport */ conv3d; }); +__webpack_require__.d(__webpack_exports__, "conv3dTranspose", function() { return /* reexport */ conv3dTranspose; }); +__webpack_require__.d(__webpack_exports__, "cumsum", function() { return /* reexport */ cumsum; }); +__webpack_require__.d(__webpack_exports__, "depthToSpace", function() { return /* reexport */ depthToSpace; }); +__webpack_require__.d(__webpack_exports__, "depthwiseConv2d", function() { return /* reexport */ depthwiseConv2d; }); +__webpack_require__.d(__webpack_exports__, "diag", function() { return /* reexport */ diag; }); +__webpack_require__.d(__webpack_exports__, "div", function() { return /* reexport */ div; }); +__webpack_require__.d(__webpack_exports__, "divNoNan", function() { return /* reexport */ divNoNan; }); +__webpack_require__.d(__webpack_exports__, "dot", function() { return /* reexport */ dot; }); +__webpack_require__.d(__webpack_exports__, 
"elu", function() { return /* reexport */ elu; }); +__webpack_require__.d(__webpack_exports__, "equal", function() { return /* reexport */ equal; }); +__webpack_require__.d(__webpack_exports__, "eye", function() { return /* reexport */ eye; }); +__webpack_require__.d(__webpack_exports__, "fill", function() { return /* reexport */ fill; }); +__webpack_require__.d(__webpack_exports__, "floorDiv", function() { return /* reexport */ floorDiv; }); +__webpack_require__.d(__webpack_exports__, "greater", function() { return /* reexport */ greater; }); +__webpack_require__.d(__webpack_exports__, "greaterEqual", function() { return /* reexport */ greaterEqual; }); +__webpack_require__.d(__webpack_exports__, "imag", function() { return /* reexport */ imag["a" /* imag */]; }); +__webpack_require__.d(__webpack_exports__, "leakyRelu", function() { return /* reexport */ leakyRelu; }); +__webpack_require__.d(__webpack_exports__, "less", function() { return /* reexport */ less; }); +__webpack_require__.d(__webpack_exports__, "lessEqual", function() { return /* reexport */ lessEqual; }); +__webpack_require__.d(__webpack_exports__, "localResponseNormalization", function() { return /* reexport */ localResponseNormalization; }); +__webpack_require__.d(__webpack_exports__, "matMul", function() { return /* reexport */ matMul; }); +__webpack_require__.d(__webpack_exports__, "max", function() { return /* reexport */ max_max; }); +__webpack_require__.d(__webpack_exports__, "maxPool", function() { return /* reexport */ maxPool; }); +__webpack_require__.d(__webpack_exports__, "maxPool3d", function() { return /* reexport */ maxPool3d; }); +__webpack_require__.d(__webpack_exports__, "maxPoolWithArgmax", function() { return /* reexport */ maxPoolWithArgmax; }); +__webpack_require__.d(__webpack_exports__, "maximum", function() { return /* reexport */ maximum; }); +__webpack_require__.d(__webpack_exports__, "minimum", function() { return /* reexport */ minimum; }); 
+__webpack_require__.d(__webpack_exports__, "mod", function() { return /* reexport */ mod; }); +__webpack_require__.d(__webpack_exports__, "mul", function() { return /* reexport */ mul; }); +__webpack_require__.d(__webpack_exports__, "multinomial", function() { return /* reexport */ multinomial; }); +__webpack_require__.d(__webpack_exports__, "notEqual", function() { return /* reexport */ notEqual; }); +__webpack_require__.d(__webpack_exports__, "oneHot", function() { return /* reexport */ oneHot; }); +__webpack_require__.d(__webpack_exports__, "outerProduct", function() { return /* reexport */ outerProduct; }); +__webpack_require__.d(__webpack_exports__, "pad", function() { return /* reexport */ pad_pad; }); +__webpack_require__.d(__webpack_exports__, "pad1d", function() { return /* reexport */ pad1d; }); +__webpack_require__.d(__webpack_exports__, "pad2d", function() { return /* reexport */ pad2d; }); +__webpack_require__.d(__webpack_exports__, "pad3d", function() { return /* reexport */ pad3d; }); +__webpack_require__.d(__webpack_exports__, "pad4d", function() { return /* reexport */ pad4d; }); +__webpack_require__.d(__webpack_exports__, "pool", function() { return /* reexport */ pool; }); +__webpack_require__.d(__webpack_exports__, "pow", function() { return /* reexport */ pow; }); +__webpack_require__.d(__webpack_exports__, "prelu", function() { return /* reexport */ prelu; }); +__webpack_require__.d(__webpack_exports__, "rand", function() { return /* reexport */ rand; }); +__webpack_require__.d(__webpack_exports__, "randomGamma", function() { return /* reexport */ randomGamma; }); +__webpack_require__.d(__webpack_exports__, "randomNormal", function() { return /* reexport */ randomNormal; }); +__webpack_require__.d(__webpack_exports__, "randomUniform", function() { return /* reexport */ randomUniform; }); +__webpack_require__.d(__webpack_exports__, "real", function() { return /* reexport */ real["a" /* real */]; }); +__webpack_require__.d(__webpack_exports__, 
"relu", function() { return /* reexport */ relu; }); +__webpack_require__.d(__webpack_exports__, "relu6", function() { return /* reexport */ relu6; }); +__webpack_require__.d(__webpack_exports__, "selu", function() { return /* reexport */ selu; }); +__webpack_require__.d(__webpack_exports__, "separableConv2d", function() { return /* reexport */ separableConv2d; }); +__webpack_require__.d(__webpack_exports__, "spaceToBatchND", function() { return /* reexport */ spaceToBatchND; }); +__webpack_require__.d(__webpack_exports__, "split", function() { return /* reexport */ split; }); +__webpack_require__.d(__webpack_exports__, "square", function() { return /* reexport */ square; }); +__webpack_require__.d(__webpack_exports__, "squaredDifference", function() { return /* reexport */ squaredDifference; }); +__webpack_require__.d(__webpack_exports__, "sub", function() { return /* reexport */ sub; }); +__webpack_require__.d(__webpack_exports__, "tile", function() { return /* reexport */ tile; }); +__webpack_require__.d(__webpack_exports__, "truncatedNormal", function() { return /* reexport */ truncatedNormal; }); +__webpack_require__.d(__webpack_exports__, "booleanMaskAsync", function() { return /* reexport */ booleanMaskAsync; }); +__webpack_require__.d(__webpack_exports__, "reverse", function() { return /* reexport */ reverse_reverse; }); +__webpack_require__.d(__webpack_exports__, "reverse1d", function() { return /* reexport */ reverse1d; }); +__webpack_require__.d(__webpack_exports__, "reverse2d", function() { return /* reexport */ reverse2d; }); +__webpack_require__.d(__webpack_exports__, "reverse3d", function() { return /* reexport */ reverse3d; }); +__webpack_require__.d(__webpack_exports__, "reverse4d", function() { return /* reexport */ reverse4d; }); +__webpack_require__.d(__webpack_exports__, "slice", function() { return /* reexport */ slice; }); +__webpack_require__.d(__webpack_exports__, "slice1d", function() { return /* reexport */ slice1d; }); 
+__webpack_require__.d(__webpack_exports__, "slice2d", function() { return /* reexport */ slice2d; }); +__webpack_require__.d(__webpack_exports__, "slice3d", function() { return /* reexport */ slice3d; }); +__webpack_require__.d(__webpack_exports__, "slice4d", function() { return /* reexport */ slice4d; }); +__webpack_require__.d(__webpack_exports__, "abs", function() { return /* reexport */ abs; }); +__webpack_require__.d(__webpack_exports__, "acos", function() { return /* reexport */ acos; }); +__webpack_require__.d(__webpack_exports__, "acosh", function() { return /* reexport */ acosh; }); +__webpack_require__.d(__webpack_exports__, "asin", function() { return /* reexport */ asin; }); +__webpack_require__.d(__webpack_exports__, "asinh", function() { return /* reexport */ asinh; }); +__webpack_require__.d(__webpack_exports__, "atan", function() { return /* reexport */ atan; }); +__webpack_require__.d(__webpack_exports__, "atanh", function() { return /* reexport */ atanh; }); +__webpack_require__.d(__webpack_exports__, "ceil", function() { return /* reexport */ ceil; }); +__webpack_require__.d(__webpack_exports__, "clipByValue", function() { return /* reexport */ clipByValue; }); +__webpack_require__.d(__webpack_exports__, "cos", function() { return /* reexport */ cos; }); +__webpack_require__.d(__webpack_exports__, "cosh", function() { return /* reexport */ cosh; }); +__webpack_require__.d(__webpack_exports__, "erf", function() { return /* reexport */ erf; }); +__webpack_require__.d(__webpack_exports__, "exp", function() { return /* reexport */ unary_ops_exp; }); +__webpack_require__.d(__webpack_exports__, "expm1", function() { return /* reexport */ expm1; }); +__webpack_require__.d(__webpack_exports__, "floor", function() { return /* reexport */ floor; }); +__webpack_require__.d(__webpack_exports__, "log", function() { return /* reexport */ log; }); +__webpack_require__.d(__webpack_exports__, "log1p", function() { return /* reexport */ log1p; }); 
+__webpack_require__.d(__webpack_exports__, "logSigmoid", function() { return /* reexport */ logSigmoid; }); +__webpack_require__.d(__webpack_exports__, "neg", function() { return /* reexport */ neg; }); +__webpack_require__.d(__webpack_exports__, "reciprocal", function() { return /* reexport */ reciprocal; }); +__webpack_require__.d(__webpack_exports__, "round", function() { return /* reexport */ round; }); +__webpack_require__.d(__webpack_exports__, "rsqrt", function() { return /* reexport */ rsqrt; }); +__webpack_require__.d(__webpack_exports__, "sigmoid", function() { return /* reexport */ sigmoid; }); +__webpack_require__.d(__webpack_exports__, "sign", function() { return /* reexport */ sign; }); +__webpack_require__.d(__webpack_exports__, "isNaN", function() { return /* reexport */ unary_ops_isNaN; }); +__webpack_require__.d(__webpack_exports__, "isInf", function() { return /* reexport */ isInf; }); +__webpack_require__.d(__webpack_exports__, "isFinite", function() { return /* reexport */ unary_ops_isFinite; }); +__webpack_require__.d(__webpack_exports__, "sin", function() { return /* reexport */ sin; }); +__webpack_require__.d(__webpack_exports__, "sinh", function() { return /* reexport */ sinh; }); +__webpack_require__.d(__webpack_exports__, "softplus", function() { return /* reexport */ softplus; }); +__webpack_require__.d(__webpack_exports__, "sqrt", function() { return /* reexport */ sqrt; }); +__webpack_require__.d(__webpack_exports__, "step", function() { return /* reexport */ unary_ops_step; }); +__webpack_require__.d(__webpack_exports__, "tan", function() { return /* reexport */ tan; }); +__webpack_require__.d(__webpack_exports__, "tanh", function() { return /* reexport */ tanh; }); +__webpack_require__.d(__webpack_exports__, "all", function() { return /* reexport */ reduction_ops_all; }); +__webpack_require__.d(__webpack_exports__, "any", function() { return /* reexport */ any; }); +__webpack_require__.d(__webpack_exports__, "argMax", function() { 
return /* reexport */ argMax; }); +__webpack_require__.d(__webpack_exports__, "argMin", function() { return /* reexport */ argMin; }); +__webpack_require__.d(__webpack_exports__, "logSumExp", function() { return /* reexport */ logSumExp; }); +__webpack_require__.d(__webpack_exports__, "mean", function() { return /* reexport */ reduction_ops_mean; }); +__webpack_require__.d(__webpack_exports__, "min", function() { return /* reexport */ reduction_ops_min; }); +__webpack_require__.d(__webpack_exports__, "moments", function() { return /* reexport */ moments; }); +__webpack_require__.d(__webpack_exports__, "sum", function() { return /* reexport */ sum; }); +__webpack_require__.d(__webpack_exports__, "prod", function() { return /* reexport */ reduction_ops_prod; }); +__webpack_require__.d(__webpack_exports__, "equalStrict", function() { return /* reexport */ equalStrict; }); +__webpack_require__.d(__webpack_exports__, "greaterEqualStrict", function() { return /* reexport */ greaterEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "greaterStrict", function() { return /* reexport */ greaterStrict; }); +__webpack_require__.d(__webpack_exports__, "lessEqualStrict", function() { return /* reexport */ lessEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "lessStrict", function() { return /* reexport */ lessStrict; }); +__webpack_require__.d(__webpack_exports__, "notEqualStrict", function() { return /* reexport */ notEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "addStrict", function() { return /* reexport */ addStrict; }); +__webpack_require__.d(__webpack_exports__, "divStrict", function() { return /* reexport */ divStrict; }); +__webpack_require__.d(__webpack_exports__, "maximumStrict", function() { return /* reexport */ maximumStrict; }); +__webpack_require__.d(__webpack_exports__, "minimumStrict", function() { return /* reexport */ minimumStrict; }); +__webpack_require__.d(__webpack_exports__, "modStrict", function() { return /* reexport 
*/ modStrict; }); +__webpack_require__.d(__webpack_exports__, "mulStrict", function() { return /* reexport */ mulStrict; }); +__webpack_require__.d(__webpack_exports__, "powStrict", function() { return /* reexport */ powStrict; }); +__webpack_require__.d(__webpack_exports__, "squaredDifferenceStrict", function() { return /* reexport */ squaredDifferenceStrict; }); +__webpack_require__.d(__webpack_exports__, "subStrict", function() { return /* reexport */ subStrict; }); +__webpack_require__.d(__webpack_exports__, "logicalAnd", function() { return /* reexport */ logicalAnd; }); +__webpack_require__.d(__webpack_exports__, "logicalNot", function() { return /* reexport */ logicalNot; }); +__webpack_require__.d(__webpack_exports__, "logicalOr", function() { return /* reexport */ logicalOr; }); +__webpack_require__.d(__webpack_exports__, "logicalXor", function() { return /* reexport */ logicalXor; }); +__webpack_require__.d(__webpack_exports__, "where", function() { return /* reexport */ where; }); +__webpack_require__.d(__webpack_exports__, "whereAsync", function() { return /* reexport */ whereAsync; }); +__webpack_require__.d(__webpack_exports__, "buffer", function() { return /* reexport */ array_ops_buffer; }); +__webpack_require__.d(__webpack_exports__, "print", function() { return /* reexport */ print; }); +__webpack_require__.d(__webpack_exports__, "cast", function() { return /* reexport */ cast; }); +__webpack_require__.d(__webpack_exports__, "expandDims", function() { return /* reexport */ expandDims; }); +__webpack_require__.d(__webpack_exports__, "reshape", function() { return /* reexport */ reshape; }); +__webpack_require__.d(__webpack_exports__, "squeeze", function() { return /* reexport */ squeeze; }); +__webpack_require__.d(__webpack_exports__, "stack", function() { return /* reexport */ stack; }); +__webpack_require__.d(__webpack_exports__, "unstack", function() { return /* reexport */ unstack; }); +__webpack_require__.d(__webpack_exports__, 
"setdiff1dAsync", function() { return /* reexport */ setdiff1dAsync; }); +__webpack_require__.d(__webpack_exports__, "linspace", function() { return /* reexport */ tensor_ops["a" /* linspace */]; }); +__webpack_require__.d(__webpack_exports__, "ones", function() { return /* reexport */ tensor_ops["b" /* ones */]; }); +__webpack_require__.d(__webpack_exports__, "range", function() { return /* reexport */ tensor_ops["d" /* range */]; }); +__webpack_require__.d(__webpack_exports__, "scalar", function() { return /* reexport */ tensor_ops["e" /* scalar */]; }); +__webpack_require__.d(__webpack_exports__, "tensor", function() { return /* reexport */ tensor_ops["f" /* tensor */]; }); +__webpack_require__.d(__webpack_exports__, "tensor1d", function() { return /* reexport */ tensor_ops["g" /* tensor1d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor2d", function() { return /* reexport */ tensor_ops["h" /* tensor2d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor3d", function() { return /* reexport */ tensor_ops["i" /* tensor3d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor4d", function() { return /* reexport */ tensor_ops["j" /* tensor4d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor5d", function() { return /* reexport */ tensor_ops["k" /* tensor5d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor6d", function() { return /* reexport */ tensor_ops["l" /* tensor6d */]; }); +__webpack_require__.d(__webpack_exports__, "variable", function() { return /* reexport */ tensor_ops["m" /* variable */]; }); +__webpack_require__.d(__webpack_exports__, "zeros", function() { return /* reexport */ tensor_ops["n" /* zeros */]; }); +__webpack_require__.d(__webpack_exports__, "onesLike", function() { return /* reexport */ tensor_ops["c" /* onesLike */]; }); +__webpack_require__.d(__webpack_exports__, "zerosLike", function() { return /* reexport */ tensor_ops["o" /* zerosLike */]; }); +__webpack_require__.d(__webpack_exports__, 
"transpose", function() { return /* reexport */ transpose; }); +__webpack_require__.d(__webpack_exports__, "softmax", function() { return /* reexport */ softmax; }); +__webpack_require__.d(__webpack_exports__, "logSoftmax", function() { return /* reexport */ logSoftmax; }); +__webpack_require__.d(__webpack_exports__, "norm", function() { return /* reexport */ norm_norm; }); +__webpack_require__.d(__webpack_exports__, "gather", function() { return /* reexport */ gather; }); +__webpack_require__.d(__webpack_exports__, "unsortedSegmentSum", function() { return /* reexport */ unsortedSegmentSum; }); +__webpack_require__.d(__webpack_exports__, "basicLSTMCell", function() { return /* reexport */ basicLSTMCell; }); +__webpack_require__.d(__webpack_exports__, "multiRNNCell", function() { return /* reexport */ multiRNNCell; }); +__webpack_require__.d(__webpack_exports__, "movingAverage", function() { return /* reexport */ movingAverage; }); +__webpack_require__.d(__webpack_exports__, "stridedSlice", function() { return /* reexport */ stridedSlice; }); +__webpack_require__.d(__webpack_exports__, "topk", function() { return /* reexport */ topk; }); +__webpack_require__.d(__webpack_exports__, "scatterND", function() { return /* reexport */ scatterND; }); +__webpack_require__.d(__webpack_exports__, "fft", function() { return /* reexport */ fft; }); +__webpack_require__.d(__webpack_exports__, "ifft", function() { return /* reexport */ ifft; }); +__webpack_require__.d(__webpack_exports__, "rfft", function() { return /* reexport */ rfft; }); +__webpack_require__.d(__webpack_exports__, "irfft", function() { return /* reexport */ irfft; }); +__webpack_require__.d(__webpack_exports__, "sparseToDense", function() { return /* reexport */ sparseToDense; }); +__webpack_require__.d(__webpack_exports__, "gatherND", function() { return /* reexport */ gatherND; }); +__webpack_require__.d(__webpack_exports__, "dropout", function() { return /* reexport */ dropout; }); 
+__webpack_require__.d(__webpack_exports__, "hannWindow", function() { return /* reexport */ hannWindow; }); +__webpack_require__.d(__webpack_exports__, "hammingWindow", function() { return /* reexport */ hammingWindow; }); +__webpack_require__.d(__webpack_exports__, "frame", function() { return /* reexport */ signal_ops_frame; }); +__webpack_require__.d(__webpack_exports__, "stft", function() { return /* reexport */ stft; }); +__webpack_require__.d(__webpack_exports__, "inTopKAsync", function() { return /* reexport */ inTopKAsync; }); +__webpack_require__.d(__webpack_exports__, "op", function() { return /* reexport */ operation["a" /* op */]; }); +__webpack_require__.d(__webpack_exports__, "image", function() { return /* reexport */ image_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "linalg", function() { return /* reexport */ linalg_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "losses", function() { return /* reexport */ loss_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "spectral", function() { return /* reexport */ spectral_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "fused", function() { return /* reexport */ fused_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "signal", function() { return /* reexport */ signal_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "Reduction", function() { return /* reexport */ Reduction; }); +__webpack_require__.d(__webpack_exports__, "train", function() { return /* reexport */ train; }); +__webpack_require__.d(__webpack_exports__, "enableProdMode", function() { return /* reexport */ enableProdMode; }); +__webpack_require__.d(__webpack_exports__, "enableDebugMode", function() { return /* reexport */ enableDebugMode; }); +__webpack_require__.d(__webpack_exports__, "disableDeprecationWarnings", function() { return /* reexport */ disableDeprecationWarnings; }); 
+__webpack_require__.d(__webpack_exports__, "deprecationWarn", function() { return /* reexport */ deprecationWarn; }); +__webpack_require__.d(__webpack_exports__, "disposeVariables", function() { return /* reexport */ disposeVariables; }); +__webpack_require__.d(__webpack_exports__, "engine", function() { return /* reexport */ globals_engine; }); +__webpack_require__.d(__webpack_exports__, "memory", function() { return /* reexport */ memory; }); +__webpack_require__.d(__webpack_exports__, "profile", function() { return /* reexport */ profile; }); +__webpack_require__.d(__webpack_exports__, "tidy", function() { return /* reexport */ tidy; }); +__webpack_require__.d(__webpack_exports__, "dispose", function() { return /* reexport */ dispose; }); +__webpack_require__.d(__webpack_exports__, "keep", function() { return /* reexport */ keep; }); +__webpack_require__.d(__webpack_exports__, "time", function() { return /* reexport */ time; }); +__webpack_require__.d(__webpack_exports__, "setBackend", function() { return /* reexport */ setBackend; }); +__webpack_require__.d(__webpack_exports__, "ready", function() { return /* reexport */ ready; }); +__webpack_require__.d(__webpack_exports__, "getBackend", function() { return /* reexport */ getBackend; }); +__webpack_require__.d(__webpack_exports__, "removeBackend", function() { return /* reexport */ removeBackend; }); +__webpack_require__.d(__webpack_exports__, "findBackend", function() { return /* reexport */ findBackend; }); +__webpack_require__.d(__webpack_exports__, "findBackendFactory", function() { return /* reexport */ findBackendFactory; }); +__webpack_require__.d(__webpack_exports__, "registerBackend", function() { return /* reexport */ registerBackend; }); +__webpack_require__.d(__webpack_exports__, "backend", function() { return /* reexport */ globals_backend; }); +__webpack_require__.d(__webpack_exports__, "setPlatform", function() { return /* reexport */ setPlatform; }); +__webpack_require__.d(__webpack_exports__, 
"getKernel", function() { return /* reexport */ kernel_registry["b" /* getKernel */]; }); +__webpack_require__.d(__webpack_exports__, "getGradient", function() { return /* reexport */ kernel_registry["a" /* getGradient */]; }); +__webpack_require__.d(__webpack_exports__, "getKernelsForBackend", function() { return /* reexport */ kernel_registry["c" /* getKernelsForBackend */]; }); +__webpack_require__.d(__webpack_exports__, "registerKernel", function() { return /* reexport */ kernel_registry["e" /* registerKernel */]; }); +__webpack_require__.d(__webpack_exports__, "registerGradient", function() { return /* reexport */ kernel_registry["d" /* registerGradient */]; }); +__webpack_require__.d(__webpack_exports__, "unregisterKernel", function() { return /* reexport */ kernel_registry["g" /* unregisterKernel */]; }); +__webpack_require__.d(__webpack_exports__, "unregisterGradient", function() { return /* reexport */ kernel_registry["f" /* unregisterGradient */]; }); +__webpack_require__.d(__webpack_exports__, "customGrad", function() { return /* reexport */ customGrad; }); +__webpack_require__.d(__webpack_exports__, "grad", function() { return /* reexport */ gradients_grad; }); +__webpack_require__.d(__webpack_exports__, "grads", function() { return /* reexport */ gradients_grads; }); +__webpack_require__.d(__webpack_exports__, "valueAndGrad", function() { return /* reexport */ valueAndGrad; }); +__webpack_require__.d(__webpack_exports__, "valueAndGrads", function() { return /* reexport */ valueAndGrads; }); +__webpack_require__.d(__webpack_exports__, "variableGrads", function() { return /* reexport */ variableGrads; }); +__webpack_require__.d(__webpack_exports__, "Environment", function() { return /* reexport */ environment["b" /* Environment */]; }); +__webpack_require__.d(__webpack_exports__, "env", function() { return /* reexport */ environment["c" /* env */]; }); +__webpack_require__.d(__webpack_exports__, "ENV", function() { return /* reexport */ environment["a" 
/* ENV */]; }); +__webpack_require__.d(__webpack_exports__, "version_core", function() { return /* reexport */ version; }); +__webpack_require__.d(__webpack_exports__, "nextFrame", function() { return /* reexport */ browser_util["a" /* nextFrame */]; }); +__webpack_require__.d(__webpack_exports__, "browser", function() { return /* reexport */ browser_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "io", function() { return /* reexport */ io_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "math", function() { return /* reexport */ math_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "serialization", function() { return /* reexport */ serialization_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "test_util", function() { return /* reexport */ test_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "util", function() { return /* reexport */ util; }); +__webpack_require__.d(__webpack_exports__, "backend_util", function() { return /* reexport */ backend_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "tensor_util", function() { return /* reexport */ tensor_util; }); +__webpack_require__.d(__webpack_exports__, "slice_util", function() { return /* reexport */ slice_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "gather_util", function() { return /* reexport */ gather_nd_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "scatter_util", function() { return /* reexport */ scatter_nd_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "device_util", function() { return /* reexport */ device_util; }); +__webpack_require__.d(__webpack_exports__, "kernel_impls", function() { return /* reexport */ kernel_impls_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "KernelBackend", function() { return /* reexport */ KernelBackend; }); +__webpack_require__.d(__webpack_exports__, "DataStorage", function() 
{ return /* reexport */ DataStorage; }); +__webpack_require__.d(__webpack_exports__, "Add", function() { return /* reexport */ kernel_names["a" /* Add */]; }); +__webpack_require__.d(__webpack_exports__, "AddN", function() { return /* reexport */ kernel_names["b" /* AddN */]; }); +__webpack_require__.d(__webpack_exports__, "Atan2", function() { return /* reexport */ kernel_names["c" /* Atan2 */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool", function() { return /* reexport */ kernel_names["d" /* AvgPool */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPoolBackprop", function() { return /* reexport */ kernel_names["g" /* AvgPoolBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool3D", function() { return /* reexport */ kernel_names["e" /* AvgPool3D */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool3DBackprop", function() { return /* reexport */ kernel_names["f" /* AvgPool3DBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "BatchMatMul", function() { return /* reexport */ kernel_names["h" /* BatchMatMul */]; }); +__webpack_require__.d(__webpack_exports__, "BatchToSpaceND", function() { return /* reexport */ kernel_names["i" /* BatchToSpaceND */]; }); +__webpack_require__.d(__webpack_exports__, "BroadcastTo", function() { return /* reexport */ kernel_names["j" /* BroadcastTo */]; }); +__webpack_require__.d(__webpack_exports__, "Complex", function() { return /* reexport */ kernel_names["k" /* Complex */]; }); +__webpack_require__.d(__webpack_exports__, "Concat", function() { return /* reexport */ kernel_names["l" /* Concat */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2D", function() { return /* reexport */ kernel_names["m" /* Conv2D */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2DBackpropFilter", function() { return /* reexport */ kernel_names["n" /* Conv2DBackpropFilter */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2DBackpropInput", function() { return /* reexport */ 
kernel_names["o" /* Conv2DBackpropInput */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3D", function() { return /* reexport */ kernel_names["p" /* Conv3D */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3DBackpropFilterV2", function() { return /* reexport */ kernel_names["q" /* Conv3DBackpropFilterV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3DBackpropInputV2", function() { return /* reexport */ kernel_names["r" /* Conv3DBackpropInputV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Cumsum", function() { return /* reexport */ kernel_names["s" /* Cumsum */]; }); +__webpack_require__.d(__webpack_exports__, "DepthToSpace", function() { return /* reexport */ kernel_names["t" /* DepthToSpace */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNative", function() { return /* reexport */ kernel_names["u" /* DepthwiseConv2dNative */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNativeBackpropFilter", function() { return /* reexport */ kernel_names["v" /* DepthwiseConv2dNativeBackpropFilter */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNativeBackpropInput", function() { return /* reexport */ kernel_names["w" /* DepthwiseConv2dNativeBackpropInput */]; }); +__webpack_require__.d(__webpack_exports__, "Diag", function() { return /* reexport */ kernel_names["x" /* Diag */]; }); +__webpack_require__.d(__webpack_exports__, "Div", function() { return /* reexport */ kernel_names["y" /* Div */]; }); +__webpack_require__.d(__webpack_exports__, "Elu", function() { return /* reexport */ kernel_names["z" /* Elu */]; }); +__webpack_require__.d(__webpack_exports__, "EluGrad", function() { return /* reexport */ kernel_names["A" /* EluGrad */]; }); +__webpack_require__.d(__webpack_exports__, "Equal", function() { return /* reexport */ kernel_names["B" /* Equal */]; }); +__webpack_require__.d(__webpack_exports__, "FloorDiv", function() { return /* reexport */ kernel_names["D" /* FloorDiv 
*/]; }); +__webpack_require__.d(__webpack_exports__, "Fill", function() { return /* reexport */ kernel_names["C" /* Fill */]; }); +__webpack_require__.d(__webpack_exports__, "FusedBatchNorm", function() { return /* reexport */ kernel_names["F" /* FusedBatchNorm */]; }); +__webpack_require__.d(__webpack_exports__, "GatherNd", function() { return /* reexport */ kernel_names["G" /* GatherNd */]; }); +__webpack_require__.d(__webpack_exports__, "Greater", function() { return /* reexport */ kernel_names["H" /* Greater */]; }); +__webpack_require__.d(__webpack_exports__, "GreaterEqual", function() { return /* reexport */ kernel_names["I" /* GreaterEqual */]; }); +__webpack_require__.d(__webpack_exports__, "Identity", function() { return /* reexport */ kernel_names["J" /* Identity */]; }); +__webpack_require__.d(__webpack_exports__, "Imag", function() { return /* reexport */ kernel_names["K" /* Imag */]; }); +__webpack_require__.d(__webpack_exports__, "Less", function() { return /* reexport */ kernel_names["N" /* Less */]; }); +__webpack_require__.d(__webpack_exports__, "LessEqual", function() { return /* reexport */ kernel_names["O" /* LessEqual */]; }); +__webpack_require__.d(__webpack_exports__, "LRN", function() { return /* reexport */ kernel_names["L" /* LRN */]; }); +__webpack_require__.d(__webpack_exports__, "LRNBackprop", function() { return /* reexport */ kernel_names["M" /* LRNBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "Max", function() { return /* reexport */ kernel_names["P" /* Max */]; }); +__webpack_require__.d(__webpack_exports__, "Maximum", function() { return /* reexport */ kernel_names["V" /* Maximum */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool", function() { return /* reexport */ kernel_names["Q" /* MaxPool */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPoolBackprop", function() { return /* reexport */ kernel_names["T" /* MaxPoolBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool3D", 
function() { return /* reexport */ kernel_names["R" /* MaxPool3D */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool3DBackprop", function() { return /* reexport */ kernel_names["S" /* MaxPool3DBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPoolWithArgmax", function() { return /* reexport */ kernel_names["U" /* MaxPoolWithArgmax */]; }); +__webpack_require__.d(__webpack_exports__, "Minimum", function() { return /* reexport */ kernel_names["W" /* Minimum */]; }); +__webpack_require__.d(__webpack_exports__, "Mod", function() { return /* reexport */ kernel_names["X" /* Mod */]; }); +__webpack_require__.d(__webpack_exports__, "Multiply", function() { return /* reexport */ kernel_names["Y" /* Multiply */]; }); +__webpack_require__.d(__webpack_exports__, "NotEqual", function() { return /* reexport */ kernel_names["bb" /* NotEqual */]; }); +__webpack_require__.d(__webpack_exports__, "NonMaxSuppressionV3", function() { return /* reexport */ kernel_names["Z" /* NonMaxSuppressionV3 */]; }); +__webpack_require__.d(__webpack_exports__, "NonMaxSuppressionV5", function() { return /* reexport */ kernel_names["ab" /* NonMaxSuppressionV5 */]; }); +__webpack_require__.d(__webpack_exports__, "OneHot", function() { return /* reexport */ kernel_names["cb" /* OneHot */]; }); +__webpack_require__.d(__webpack_exports__, "PadV2", function() { return /* reexport */ kernel_names["db" /* PadV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Pool", function() { return /* reexport */ kernel_names["eb" /* Pool */]; }); +__webpack_require__.d(__webpack_exports__, "Pow", function() { return /* reexport */ kernel_names["fb" /* Pow */]; }); +__webpack_require__.d(__webpack_exports__, "Prelu", function() { return /* reexport */ kernel_names["gb" /* Prelu */]; }); +__webpack_require__.d(__webpack_exports__, "Real", function() { return /* reexport */ kernel_names["hb" /* Real */]; }); +__webpack_require__.d(__webpack_exports__, "Relu", function() { return /* reexport 
*/ kernel_names["ib" /* Relu */]; }); +__webpack_require__.d(__webpack_exports__, "Relu6", function() { return /* reexport */ kernel_names["jb" /* Relu6 */]; }); +__webpack_require__.d(__webpack_exports__, "SelectV2", function() { return /* reexport */ kernel_names["kb" /* SelectV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Selu", function() { return /* reexport */ kernel_names["lb" /* Selu */]; }); +__webpack_require__.d(__webpack_exports__, "SpaceToBatchND", function() { return /* reexport */ kernel_names["mb" /* SpaceToBatchND */]; }); +__webpack_require__.d(__webpack_exports__, "SplitV", function() { return /* reexport */ kernel_names["nb" /* SplitV */]; }); +__webpack_require__.d(__webpack_exports__, "SquaredDifference", function() { return /* reexport */ kernel_names["pb" /* SquaredDifference */]; }); +__webpack_require__.d(__webpack_exports__, "Square", function() { return /* reexport */ kernel_names["ob" /* Square */]; }); +__webpack_require__.d(__webpack_exports__, "Sub", function() { return /* reexport */ kernel_names["qb" /* Sub */]; }); +__webpack_require__.d(__webpack_exports__, "Tile", function() { return /* reexport */ kernel_names["rb" /* Tile */]; }); +__webpack_require__.d(__webpack_exports__, "Transpose", function() { return /* reexport */ kernel_names["sb" /* Transpose */]; }); +__webpack_require__.d(__webpack_exports__, "FromPixels", function() { return /* reexport */ kernel_names["E" /* FromPixels */]; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice_util.js +var slice_util_namespaceObject = {}; +__webpack_require__.r(slice_util_namespaceObject); +__webpack_require__.d(slice_util_namespaceObject, "assertParamsValid", function() { return assertParamsValid; }); +__webpack_require__.d(slice_util_namespaceObject, "maskToAxes", function() { return maskToAxes; }); +__webpack_require__.d(slice_util_namespaceObject, "computeOutShape", function() { return slice_util_computeOutShape; }); 
+__webpack_require__.d(slice_util_namespaceObject, "stridesWithElidedDims", function() { return stridesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "startIndicesWithElidedDims", function() { return startIndicesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "stopIndicesWithElidedDims", function() { return stopIndicesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "stridesForAxis", function() { return stridesForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "startForAxis", function() { return startForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "stopForAxis", function() { return stopForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "isSliceContinous", function() { return isSliceContinous; }); +__webpack_require__.d(slice_util_namespaceObject, "computeFlatOffset", function() { return computeFlatOffset; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/io/io.js +var io_namespaceObject = {}; +__webpack_require__.r(io_namespaceObject); +__webpack_require__.d(io_namespaceObject, "copyModel", function() { return copyModel; }); +__webpack_require__.d(io_namespaceObject, "listModels", function() { return listModels; }); +__webpack_require__.d(io_namespaceObject, "moveModel", function() { return moveModel; }); +__webpack_require__.d(io_namespaceObject, "removeModel", function() { return removeModel; }); +__webpack_require__.d(io_namespaceObject, "browserFiles", function() { return browserFiles; }); +__webpack_require__.d(io_namespaceObject, "browserHTTPRequest", function() { return browserHTTPRequest; }); +__webpack_require__.d(io_namespaceObject, "concatenateArrayBuffers", function() { return io_utils["d" /* concatenateArrayBuffers */]; }); +__webpack_require__.d(io_namespaceObject, "decodeWeights", function() { return io_utils["e" /* decodeWeights */]; }); +__webpack_require__.d(io_namespaceObject, "encodeWeights", function() { return 
io_utils["f" /* encodeWeights */]; }); +__webpack_require__.d(io_namespaceObject, "fromMemory", function() { return fromMemory; }); +__webpack_require__.d(io_namespaceObject, "getLoadHandlers", function() { return getLoadHandlers; }); +__webpack_require__.d(io_namespaceObject, "getModelArtifactsInfoForJSON", function() { return io_utils["g" /* getModelArtifactsInfoForJSON */]; }); +__webpack_require__.d(io_namespaceObject, "getSaveHandlers", function() { return getSaveHandlers; }); +__webpack_require__.d(io_namespaceObject, "http", function() { return http; }); +__webpack_require__.d(io_namespaceObject, "isHTTPScheme", function() { return isHTTPScheme; }); +__webpack_require__.d(io_namespaceObject, "loadWeights", function() { return loadWeights; }); +__webpack_require__.d(io_namespaceObject, "registerLoadRouter", function() { return registerLoadRouter; }); +__webpack_require__.d(io_namespaceObject, "registerSaveRouter", function() { return registerSaveRouter; }); +__webpack_require__.d(io_namespaceObject, "weightsLoaderFactory", function() { return weightsLoaderFactory; }); +__webpack_require__.d(io_namespaceObject, "withSaveHandler", function() { return withSaveHandler; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/math.js +var math_namespaceObject = {}; +__webpack_require__.r(math_namespaceObject); +__webpack_require__.d(math_namespaceObject, "confusionMatrix", function() { return confusionMatrix; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/browser.js +var browser_namespaceObject = {}; +__webpack_require__.r(browser_namespaceObject); +__webpack_require__.d(browser_namespaceObject, "toPixels", function() { return toPixels; }); +__webpack_require__.d(browser_namespaceObject, "fromPixels", function() { return fromPixels; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/gather_nd_util.js +var gather_nd_util_namespaceObject = {}; +__webpack_require__.r(gather_nd_util_namespaceObject); 
+__webpack_require__.d(gather_nd_util_namespaceObject, "prepareAndValidate", function() { return prepareAndValidate; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/scatter_nd_util.js +var scatter_nd_util_namespaceObject = {}; +__webpack_require__.r(scatter_nd_util_namespaceObject); +__webpack_require__.d(scatter_nd_util_namespaceObject, "validateUpdateShape", function() { return validateUpdateShape; }); +__webpack_require__.d(scatter_nd_util_namespaceObject, "validateInput", function() { return validateInput; }); +__webpack_require__.d(scatter_nd_util_namespaceObject, "calculateShapes", function() { return calculateShapes; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/serialization.js +var serialization_namespaceObject = {}; +__webpack_require__.r(serialization_namespaceObject); +__webpack_require__.d(serialization_namespaceObject, "Serializable", function() { return Serializable; }); +__webpack_require__.d(serialization_namespaceObject, "SerializationMap", function() { return SerializationMap; }); +__webpack_require__.d(serialization_namespaceObject, "registerClass", function() { return registerClass; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/test_util.js +var test_util_namespaceObject = {}; +__webpack_require__.r(test_util_namespaceObject); +__webpack_require__.d(test_util_namespaceObject, "TEST_EPSILON_FLOAT16", function() { return TEST_EPSILON_FLOAT16; }); +__webpack_require__.d(test_util_namespaceObject, "expectArraysClose", function() { return expectArraysClose; }); +__webpack_require__.d(test_util_namespaceObject, "testEpsilon", function() { return testEpsilon; }); +__webpack_require__.d(test_util_namespaceObject, "expectPromiseToFail", function() { return expectPromiseToFail; }); +__webpack_require__.d(test_util_namespaceObject, "expectArraysEqual", function() { return expectArraysEqual; }); +__webpack_require__.d(test_util_namespaceObject, "expectNumbersClose", function() { 
return expectNumbersClose; }); +__webpack_require__.d(test_util_namespaceObject, "expectValuesInRange", function() { return expectValuesInRange; }); +__webpack_require__.d(test_util_namespaceObject, "expectArrayBuffersEqual", function() { return expectArrayBuffersEqual; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/segment_util.js +var segment_util_namespaceObject = {}; +__webpack_require__.r(segment_util_namespaceObject); +__webpack_require__.d(segment_util_namespaceObject, "segOpComputeOptimalWindowSize", function() { return segOpComputeOptimalWindowSize; }); +__webpack_require__.d(segment_util_namespaceObject, "computeOutShape", function() { return segment_util_computeOutShape; }); +__webpack_require__.d(segment_util_namespaceObject, "collectGatherOpShapeInfo", function() { return collectGatherOpShapeInfo; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/spectral_ops.js +var spectral_ops_namespaceObject = {}; +__webpack_require__.r(spectral_ops_namespaceObject); +__webpack_require__.d(spectral_ops_namespaceObject, "fft", function() { return fft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "ifft", function() { return ifft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "rfft", function() { return rfft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "irfft", function() { return irfft; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/signal_ops.js +var signal_ops_namespaceObject = {}; +__webpack_require__.r(signal_ops_namespaceObject); +__webpack_require__.d(signal_ops_namespaceObject, "hannWindow", function() { return hannWindow; }); +__webpack_require__.d(signal_ops_namespaceObject, "hammingWindow", function() { return hammingWindow; }); +__webpack_require__.d(signal_ops_namespaceObject, "frame", function() { return signal_ops_frame; }); +__webpack_require__.d(signal_ops_namespaceObject, "stft", function() { return stft; }); + +// NAMESPACE OBJECT: 
./node_modules/@tensorflow/tfjs-core/dist/ops/loss_ops.js +var loss_ops_namespaceObject = {}; +__webpack_require__.r(loss_ops_namespaceObject); +__webpack_require__.d(loss_ops_namespaceObject, "Reduction", function() { return Reduction; }); +__webpack_require__.d(loss_ops_namespaceObject, "absoluteDifference", function() { return absoluteDifference; }); +__webpack_require__.d(loss_ops_namespaceObject, "computeWeightedLoss", function() { return computeWeightedLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "cosineDistance", function() { return cosineDistance; }); +__webpack_require__.d(loss_ops_namespaceObject, "hingeLoss", function() { return hingeLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "huberLoss", function() { return huberLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "logLoss", function() { return logLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "meanSquaredError", function() { return meanSquaredError; }); +__webpack_require__.d(loss_ops_namespaceObject, "sigmoidCrossEntropy", function() { return sigmoidCrossEntropy; }); +__webpack_require__.d(loss_ops_namespaceObject, "softmaxCrossEntropy", function() { return softmaxCrossEntropy; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/linalg_ops.js +var linalg_ops_namespaceObject = {}; +__webpack_require__.r(linalg_ops_namespaceObject); +__webpack_require__.d(linalg_ops_namespaceObject, "bandPart", function() { return bandPart; }); +__webpack_require__.d(linalg_ops_namespaceObject, "gramSchmidt", function() { return gramSchmidt; }); +__webpack_require__.d(linalg_ops_namespaceObject, "qr", function() { return qr; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/image_ops.js +var image_ops_namespaceObject = {}; +__webpack_require__.r(image_ops_namespaceObject); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppression", function() { return nonMaxSuppression; }); 
+__webpack_require__.d(image_ops_namespaceObject, "resizeBilinear", function() { return resizeBilinear; }); +__webpack_require__.d(image_ops_namespaceObject, "resizeNearestNeighbor", function() { return resizeNearestNeighbor; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionAsync", function() { return nonMaxSuppressionAsync; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionWithScore", function() { return nonMaxSuppressionWithScore; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionWithScoreAsync", function() { return nonMaxSuppressionWithScoreAsync; }); +__webpack_require__.d(image_ops_namespaceObject, "cropAndResize", function() { return cropAndResize; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/fused_ops.js +var fused_ops_namespaceObject = {}; +__webpack_require__.r(fused_ops_namespaceObject); +__webpack_require__.d(fused_ops_namespaceObject, "matMul", function() { return fused_ops_matMul; }); +__webpack_require__.d(fused_ops_namespaceObject, "conv2d", function() { return fused_ops_conv2d; }); +__webpack_require__.d(fused_ops_namespaceObject, "depthwiseConv2d", function() { return fused_ops_depthwiseConv2d; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/ops.js +var ops_namespaceObject = {}; +__webpack_require__.r(ops_namespaceObject); +__webpack_require__.d(ops_namespaceObject, "add", function() { return add; }); +__webpack_require__.d(ops_namespaceObject, "addN", function() { return addN; }); +__webpack_require__.d(ops_namespaceObject, "atan2", function() { return atan2; }); +__webpack_require__.d(ops_namespaceObject, "avgPool", function() { return avgPool; }); +__webpack_require__.d(ops_namespaceObject, "avgPool3d", function() { return avgPool3d; }); +__webpack_require__.d(ops_namespaceObject, "batchToSpaceND", function() { return batchToSpaceND; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm", function() { return batchNorm; 
}); +__webpack_require__.d(ops_namespaceObject, "batchNorm2d", function() { return batchNorm2d; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm3d", function() { return batchNorm3d; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm4d", function() { return batchNorm4d; }); +__webpack_require__.d(ops_namespaceObject, "broadcastTo", function() { return broadcastTo; }); +__webpack_require__.d(ops_namespaceObject, "clone", function() { return clone; }); +__webpack_require__.d(ops_namespaceObject, "complex", function() { return complex["a" /* complex */]; }); +__webpack_require__.d(ops_namespaceObject, "concat", function() { return concat; }); +__webpack_require__.d(ops_namespaceObject, "concat1d", function() { return concat1d; }); +__webpack_require__.d(ops_namespaceObject, "concat2d", function() { return concat2d; }); +__webpack_require__.d(ops_namespaceObject, "concat3d", function() { return concat3d; }); +__webpack_require__.d(ops_namespaceObject, "concat4d", function() { return concat4d; }); +__webpack_require__.d(ops_namespaceObject, "conv1d", function() { return conv1d; }); +__webpack_require__.d(ops_namespaceObject, "conv2d", function() { return conv2d; }); +__webpack_require__.d(ops_namespaceObject, "conv2dTranspose", function() { return conv2dTranspose; }); +__webpack_require__.d(ops_namespaceObject, "conv3d", function() { return conv3d; }); +__webpack_require__.d(ops_namespaceObject, "conv3dTranspose", function() { return conv3dTranspose; }); +__webpack_require__.d(ops_namespaceObject, "cumsum", function() { return cumsum; }); +__webpack_require__.d(ops_namespaceObject, "depthToSpace", function() { return depthToSpace; }); +__webpack_require__.d(ops_namespaceObject, "depthwiseConv2d", function() { return depthwiseConv2d; }); +__webpack_require__.d(ops_namespaceObject, "diag", function() { return diag; }); +__webpack_require__.d(ops_namespaceObject, "div", function() { return div; }); +__webpack_require__.d(ops_namespaceObject, "divNoNan", 
function() { return divNoNan; }); +__webpack_require__.d(ops_namespaceObject, "dot", function() { return dot; }); +__webpack_require__.d(ops_namespaceObject, "elu", function() { return elu; }); +__webpack_require__.d(ops_namespaceObject, "equal", function() { return equal; }); +__webpack_require__.d(ops_namespaceObject, "eye", function() { return eye; }); +__webpack_require__.d(ops_namespaceObject, "fill", function() { return fill; }); +__webpack_require__.d(ops_namespaceObject, "floorDiv", function() { return floorDiv; }); +__webpack_require__.d(ops_namespaceObject, "greater", function() { return greater; }); +__webpack_require__.d(ops_namespaceObject, "greaterEqual", function() { return greaterEqual; }); +__webpack_require__.d(ops_namespaceObject, "imag", function() { return imag["a" /* imag */]; }); +__webpack_require__.d(ops_namespaceObject, "leakyRelu", function() { return leakyRelu; }); +__webpack_require__.d(ops_namespaceObject, "less", function() { return less; }); +__webpack_require__.d(ops_namespaceObject, "lessEqual", function() { return lessEqual; }); +__webpack_require__.d(ops_namespaceObject, "localResponseNormalization", function() { return localResponseNormalization; }); +__webpack_require__.d(ops_namespaceObject, "matMul", function() { return matMul; }); +__webpack_require__.d(ops_namespaceObject, "max", function() { return max_max; }); +__webpack_require__.d(ops_namespaceObject, "maxPool", function() { return maxPool; }); +__webpack_require__.d(ops_namespaceObject, "maxPool3d", function() { return maxPool3d; }); +__webpack_require__.d(ops_namespaceObject, "maxPoolWithArgmax", function() { return maxPoolWithArgmax; }); +__webpack_require__.d(ops_namespaceObject, "maximum", function() { return maximum; }); +__webpack_require__.d(ops_namespaceObject, "minimum", function() { return minimum; }); +__webpack_require__.d(ops_namespaceObject, "mod", function() { return mod; }); +__webpack_require__.d(ops_namespaceObject, "mul", function() { return mul; }); 
+__webpack_require__.d(ops_namespaceObject, "multinomial", function() { return multinomial; }); +__webpack_require__.d(ops_namespaceObject, "notEqual", function() { return notEqual; }); +__webpack_require__.d(ops_namespaceObject, "oneHot", function() { return oneHot; }); +__webpack_require__.d(ops_namespaceObject, "outerProduct", function() { return outerProduct; }); +__webpack_require__.d(ops_namespaceObject, "pad", function() { return pad_pad; }); +__webpack_require__.d(ops_namespaceObject, "pad1d", function() { return pad1d; }); +__webpack_require__.d(ops_namespaceObject, "pad2d", function() { return pad2d; }); +__webpack_require__.d(ops_namespaceObject, "pad3d", function() { return pad3d; }); +__webpack_require__.d(ops_namespaceObject, "pad4d", function() { return pad4d; }); +__webpack_require__.d(ops_namespaceObject, "pool", function() { return pool; }); +__webpack_require__.d(ops_namespaceObject, "pow", function() { return pow; }); +__webpack_require__.d(ops_namespaceObject, "prelu", function() { return prelu; }); +__webpack_require__.d(ops_namespaceObject, "rand", function() { return rand; }); +__webpack_require__.d(ops_namespaceObject, "randomGamma", function() { return randomGamma; }); +__webpack_require__.d(ops_namespaceObject, "randomNormal", function() { return randomNormal; }); +__webpack_require__.d(ops_namespaceObject, "randomUniform", function() { return randomUniform; }); +__webpack_require__.d(ops_namespaceObject, "real", function() { return real["a" /* real */]; }); +__webpack_require__.d(ops_namespaceObject, "relu", function() { return relu; }); +__webpack_require__.d(ops_namespaceObject, "relu6", function() { return relu6; }); +__webpack_require__.d(ops_namespaceObject, "selu", function() { return selu; }); +__webpack_require__.d(ops_namespaceObject, "separableConv2d", function() { return separableConv2d; }); +__webpack_require__.d(ops_namespaceObject, "spaceToBatchND", function() { return spaceToBatchND; }); 
+__webpack_require__.d(ops_namespaceObject, "split", function() { return split; }); +__webpack_require__.d(ops_namespaceObject, "square", function() { return square; }); +__webpack_require__.d(ops_namespaceObject, "squaredDifference", function() { return squaredDifference; }); +__webpack_require__.d(ops_namespaceObject, "sub", function() { return sub; }); +__webpack_require__.d(ops_namespaceObject, "tile", function() { return tile; }); +__webpack_require__.d(ops_namespaceObject, "truncatedNormal", function() { return truncatedNormal; }); +__webpack_require__.d(ops_namespaceObject, "booleanMaskAsync", function() { return booleanMaskAsync; }); +__webpack_require__.d(ops_namespaceObject, "reverse", function() { return reverse_reverse; }); +__webpack_require__.d(ops_namespaceObject, "reverse1d", function() { return reverse1d; }); +__webpack_require__.d(ops_namespaceObject, "reverse2d", function() { return reverse2d; }); +__webpack_require__.d(ops_namespaceObject, "reverse3d", function() { return reverse3d; }); +__webpack_require__.d(ops_namespaceObject, "reverse4d", function() { return reverse4d; }); +__webpack_require__.d(ops_namespaceObject, "slice", function() { return slice; }); +__webpack_require__.d(ops_namespaceObject, "slice1d", function() { return slice1d; }); +__webpack_require__.d(ops_namespaceObject, "slice2d", function() { return slice2d; }); +__webpack_require__.d(ops_namespaceObject, "slice3d", function() { return slice3d; }); +__webpack_require__.d(ops_namespaceObject, "slice4d", function() { return slice4d; }); +__webpack_require__.d(ops_namespaceObject, "abs", function() { return abs; }); +__webpack_require__.d(ops_namespaceObject, "acos", function() { return acos; }); +__webpack_require__.d(ops_namespaceObject, "acosh", function() { return acosh; }); +__webpack_require__.d(ops_namespaceObject, "asin", function() { return asin; }); +__webpack_require__.d(ops_namespaceObject, "asinh", function() { return asinh; }); 
+__webpack_require__.d(ops_namespaceObject, "atan", function() { return atan; }); +__webpack_require__.d(ops_namespaceObject, "atanh", function() { return atanh; }); +__webpack_require__.d(ops_namespaceObject, "ceil", function() { return ceil; }); +__webpack_require__.d(ops_namespaceObject, "clipByValue", function() { return clipByValue; }); +__webpack_require__.d(ops_namespaceObject, "cos", function() { return cos; }); +__webpack_require__.d(ops_namespaceObject, "cosh", function() { return cosh; }); +__webpack_require__.d(ops_namespaceObject, "erf", function() { return erf; }); +__webpack_require__.d(ops_namespaceObject, "exp", function() { return unary_ops_exp; }); +__webpack_require__.d(ops_namespaceObject, "expm1", function() { return expm1; }); +__webpack_require__.d(ops_namespaceObject, "floor", function() { return floor; }); +__webpack_require__.d(ops_namespaceObject, "log", function() { return log; }); +__webpack_require__.d(ops_namespaceObject, "log1p", function() { return log1p; }); +__webpack_require__.d(ops_namespaceObject, "logSigmoid", function() { return logSigmoid; }); +__webpack_require__.d(ops_namespaceObject, "neg", function() { return neg; }); +__webpack_require__.d(ops_namespaceObject, "reciprocal", function() { return reciprocal; }); +__webpack_require__.d(ops_namespaceObject, "round", function() { return round; }); +__webpack_require__.d(ops_namespaceObject, "rsqrt", function() { return rsqrt; }); +__webpack_require__.d(ops_namespaceObject, "sigmoid", function() { return sigmoid; }); +__webpack_require__.d(ops_namespaceObject, "sign", function() { return sign; }); +__webpack_require__.d(ops_namespaceObject, "isNaN", function() { return unary_ops_isNaN; }); +__webpack_require__.d(ops_namespaceObject, "isInf", function() { return isInf; }); +__webpack_require__.d(ops_namespaceObject, "isFinite", function() { return unary_ops_isFinite; }); +__webpack_require__.d(ops_namespaceObject, "sin", function() { return sin; }); 
+__webpack_require__.d(ops_namespaceObject, "sinh", function() { return sinh; }); +__webpack_require__.d(ops_namespaceObject, "softplus", function() { return softplus; }); +__webpack_require__.d(ops_namespaceObject, "sqrt", function() { return sqrt; }); +__webpack_require__.d(ops_namespaceObject, "step", function() { return unary_ops_step; }); +__webpack_require__.d(ops_namespaceObject, "tan", function() { return tan; }); +__webpack_require__.d(ops_namespaceObject, "tanh", function() { return tanh; }); +__webpack_require__.d(ops_namespaceObject, "all", function() { return reduction_ops_all; }); +__webpack_require__.d(ops_namespaceObject, "any", function() { return any; }); +__webpack_require__.d(ops_namespaceObject, "argMax", function() { return argMax; }); +__webpack_require__.d(ops_namespaceObject, "argMin", function() { return argMin; }); +__webpack_require__.d(ops_namespaceObject, "logSumExp", function() { return logSumExp; }); +__webpack_require__.d(ops_namespaceObject, "mean", function() { return reduction_ops_mean; }); +__webpack_require__.d(ops_namespaceObject, "min", function() { return reduction_ops_min; }); +__webpack_require__.d(ops_namespaceObject, "moments", function() { return moments; }); +__webpack_require__.d(ops_namespaceObject, "sum", function() { return sum; }); +__webpack_require__.d(ops_namespaceObject, "prod", function() { return reduction_ops_prod; }); +__webpack_require__.d(ops_namespaceObject, "equalStrict", function() { return equalStrict; }); +__webpack_require__.d(ops_namespaceObject, "greaterEqualStrict", function() { return greaterEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "greaterStrict", function() { return greaterStrict; }); +__webpack_require__.d(ops_namespaceObject, "lessEqualStrict", function() { return lessEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "lessStrict", function() { return lessStrict; }); +__webpack_require__.d(ops_namespaceObject, "notEqualStrict", function() { return 
notEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "addStrict", function() { return addStrict; }); +__webpack_require__.d(ops_namespaceObject, "divStrict", function() { return divStrict; }); +__webpack_require__.d(ops_namespaceObject, "maximumStrict", function() { return maximumStrict; }); +__webpack_require__.d(ops_namespaceObject, "minimumStrict", function() { return minimumStrict; }); +__webpack_require__.d(ops_namespaceObject, "modStrict", function() { return modStrict; }); +__webpack_require__.d(ops_namespaceObject, "mulStrict", function() { return mulStrict; }); +__webpack_require__.d(ops_namespaceObject, "powStrict", function() { return powStrict; }); +__webpack_require__.d(ops_namespaceObject, "squaredDifferenceStrict", function() { return squaredDifferenceStrict; }); +__webpack_require__.d(ops_namespaceObject, "subStrict", function() { return subStrict; }); +__webpack_require__.d(ops_namespaceObject, "logicalAnd", function() { return logicalAnd; }); +__webpack_require__.d(ops_namespaceObject, "logicalNot", function() { return logicalNot; }); +__webpack_require__.d(ops_namespaceObject, "logicalOr", function() { return logicalOr; }); +__webpack_require__.d(ops_namespaceObject, "logicalXor", function() { return logicalXor; }); +__webpack_require__.d(ops_namespaceObject, "where", function() { return where; }); +__webpack_require__.d(ops_namespaceObject, "whereAsync", function() { return whereAsync; }); +__webpack_require__.d(ops_namespaceObject, "buffer", function() { return array_ops_buffer; }); +__webpack_require__.d(ops_namespaceObject, "print", function() { return print; }); +__webpack_require__.d(ops_namespaceObject, "cast", function() { return cast; }); +__webpack_require__.d(ops_namespaceObject, "expandDims", function() { return expandDims; }); +__webpack_require__.d(ops_namespaceObject, "reshape", function() { return reshape; }); +__webpack_require__.d(ops_namespaceObject, "squeeze", function() { return squeeze; }); 
+__webpack_require__.d(ops_namespaceObject, "stack", function() { return stack; }); +__webpack_require__.d(ops_namespaceObject, "unstack", function() { return unstack; }); +__webpack_require__.d(ops_namespaceObject, "setdiff1dAsync", function() { return setdiff1dAsync; }); +__webpack_require__.d(ops_namespaceObject, "linspace", function() { return tensor_ops["a" /* linspace */]; }); +__webpack_require__.d(ops_namespaceObject, "ones", function() { return tensor_ops["b" /* ones */]; }); +__webpack_require__.d(ops_namespaceObject, "range", function() { return tensor_ops["d" /* range */]; }); +__webpack_require__.d(ops_namespaceObject, "scalar", function() { return tensor_ops["e" /* scalar */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor", function() { return tensor_ops["f" /* tensor */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor1d", function() { return tensor_ops["g" /* tensor1d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor2d", function() { return tensor_ops["h" /* tensor2d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor3d", function() { return tensor_ops["i" /* tensor3d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor4d", function() { return tensor_ops["j" /* tensor4d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor5d", function() { return tensor_ops["k" /* tensor5d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor6d", function() { return tensor_ops["l" /* tensor6d */]; }); +__webpack_require__.d(ops_namespaceObject, "variable", function() { return tensor_ops["m" /* variable */]; }); +__webpack_require__.d(ops_namespaceObject, "zeros", function() { return tensor_ops["n" /* zeros */]; }); +__webpack_require__.d(ops_namespaceObject, "onesLike", function() { return tensor_ops["c" /* onesLike */]; }); +__webpack_require__.d(ops_namespaceObject, "zerosLike", function() { return tensor_ops["o" /* zerosLike */]; }); +__webpack_require__.d(ops_namespaceObject, "transpose", function() { 
return transpose; }); +__webpack_require__.d(ops_namespaceObject, "softmax", function() { return softmax; }); +__webpack_require__.d(ops_namespaceObject, "logSoftmax", function() { return logSoftmax; }); +__webpack_require__.d(ops_namespaceObject, "norm", function() { return norm_norm; }); +__webpack_require__.d(ops_namespaceObject, "gather", function() { return gather; }); +__webpack_require__.d(ops_namespaceObject, "unsortedSegmentSum", function() { return unsortedSegmentSum; }); +__webpack_require__.d(ops_namespaceObject, "basicLSTMCell", function() { return basicLSTMCell; }); +__webpack_require__.d(ops_namespaceObject, "multiRNNCell", function() { return multiRNNCell; }); +__webpack_require__.d(ops_namespaceObject, "movingAverage", function() { return movingAverage; }); +__webpack_require__.d(ops_namespaceObject, "stridedSlice", function() { return stridedSlice; }); +__webpack_require__.d(ops_namespaceObject, "topk", function() { return topk; }); +__webpack_require__.d(ops_namespaceObject, "scatterND", function() { return scatterND; }); +__webpack_require__.d(ops_namespaceObject, "fft", function() { return fft; }); +__webpack_require__.d(ops_namespaceObject, "ifft", function() { return ifft; }); +__webpack_require__.d(ops_namespaceObject, "rfft", function() { return rfft; }); +__webpack_require__.d(ops_namespaceObject, "irfft", function() { return irfft; }); +__webpack_require__.d(ops_namespaceObject, "sparseToDense", function() { return sparseToDense; }); +__webpack_require__.d(ops_namespaceObject, "gatherND", function() { return gatherND; }); +__webpack_require__.d(ops_namespaceObject, "dropout", function() { return dropout; }); +__webpack_require__.d(ops_namespaceObject, "hannWindow", function() { return hannWindow; }); +__webpack_require__.d(ops_namespaceObject, "hammingWindow", function() { return hammingWindow; }); +__webpack_require__.d(ops_namespaceObject, "frame", function() { return signal_ops_frame; }); +__webpack_require__.d(ops_namespaceObject, 
"stft", function() { return stft; }); +__webpack_require__.d(ops_namespaceObject, "inTopKAsync", function() { return inTopKAsync; }); +__webpack_require__.d(ops_namespaceObject, "op", function() { return operation["a" /* op */]; }); +__webpack_require__.d(ops_namespaceObject, "image", function() { return image_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "linalg", function() { return linalg_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "losses", function() { return loss_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "spectral", function() { return spectral_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "fused", function() { return fused_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "signal", function() { return signal_ops_namespaceObject; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/backends/backend_util.js +var backend_util_namespaceObject = {}; +__webpack_require__.r(backend_util_namespaceObject); +__webpack_require__.d(backend_util_namespaceObject, "axesAreInnerMostDims", function() { return axesAreInnerMostDims; }); +__webpack_require__.d(backend_util_namespaceObject, "combineLocations", function() { return combineLocations; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOutAndReduceShapes", function() { return computeOutAndReduceShapes; }); +__webpack_require__.d(backend_util_namespaceObject, "expandShapeToKeepDim", function() { return expandShapeToKeepDim; }); +__webpack_require__.d(backend_util_namespaceObject, "assertAxesAreInnerMostDims", function() { return assertAxesAreInnerMostDims; }); +__webpack_require__.d(backend_util_namespaceObject, "getAxesPermutation", function() { return getAxesPermutation; }); +__webpack_require__.d(backend_util_namespaceObject, "getUndoAxesPermutation", function() { return getUndoAxesPermutation; }); +__webpack_require__.d(backend_util_namespaceObject, "getInnerMostAxes", 
function() { return getInnerMostAxes; }); +__webpack_require__.d(backend_util_namespaceObject, "getBroadcastDims", function() { return getBroadcastDims; }); +__webpack_require__.d(backend_util_namespaceObject, "getReductionAxes", function() { return getReductionAxes; }); +__webpack_require__.d(backend_util_namespaceObject, "assertAndGetBroadcastShape", function() { return assertAndGetBroadcastShape; }); +__webpack_require__.d(backend_util_namespaceObject, "assertParamsConsistent", function() { return assertParamsConsistent; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOutShape", function() { return computeOutShape; }); +__webpack_require__.d(backend_util_namespaceObject, "computePool2DInfo", function() { return computePool2DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computePool3DInfo", function() { return computePool3DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeConv2DInfo", function() { return computeConv2DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeConv3DInfo", function() { return computeConv3DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeDefaultPad", function() { return computeDefaultPad; }); +__webpack_require__.d(backend_util_namespaceObject, "tupleValuesAreOne", function() { return tupleValuesAreOne; }); +__webpack_require__.d(backend_util_namespaceObject, "eitherStridesOrDilationsAreOne", function() { return eitherStridesOrDilationsAreOne; }); +__webpack_require__.d(backend_util_namespaceObject, "convertConv2DDataFormat", function() { return convertConv2DDataFormat; }); +__webpack_require__.d(backend_util_namespaceObject, "PARALLELIZE_THRESHOLD", function() { return PARALLELIZE_THRESHOLD; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOptimalWindowSize", function() { return computeOptimalWindowSize; }); +__webpack_require__.d(backend_util_namespaceObject, "nonMaxSuppressionV3", function() { return nonMaxSuppressionV3; }); 
+__webpack_require__.d(backend_util_namespaceObject, "nonMaxSuppressionV5", function() { return nonMaxSuppressionV5; }); +__webpack_require__.d(backend_util_namespaceObject, "upcastType", function() { return dist_types["c" /* upcastType */]; }); +__webpack_require__.d(backend_util_namespaceObject, "getReshaped", function() { return getReshaped; }); +__webpack_require__.d(backend_util_namespaceObject, "getPermuted", function() { return getPermuted; }); +__webpack_require__.d(backend_util_namespaceObject, "getReshapedPermuted", function() { return getReshapedPermuted; }); +__webpack_require__.d(backend_util_namespaceObject, "getSliceBeginCoords", function() { return getSliceBeginCoords; }); +__webpack_require__.d(backend_util_namespaceObject, "getSliceSize", function() { return getSliceSize; }); +__webpack_require__.d(backend_util_namespaceObject, "prepareAndValidate", function() { return prepareAndValidate; }); +__webpack_require__.d(backend_util_namespaceObject, "validateUpdateShape", function() { return validateUpdateShape; }); +__webpack_require__.d(backend_util_namespaceObject, "validateInput", function() { return validateInput; }); +__webpack_require__.d(backend_util_namespaceObject, "calculateShapes", function() { return calculateShapes; }); +__webpack_require__.d(backend_util_namespaceObject, "SELU_SCALEALPHA", function() { return SELU_SCALEALPHA; }); +__webpack_require__.d(backend_util_namespaceObject, "SELU_SCALE", function() { return SELU_SCALE; }); +__webpack_require__.d(backend_util_namespaceObject, "shouldFuse", function() { return shouldFuse; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_P", function() { return ERF_P; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A1", function() { return ERF_A1; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A2", function() { return ERF_A2; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A3", function() { return ERF_A3; }); 
+__webpack_require__.d(backend_util_namespaceObject, "ERF_A4", function() { return ERF_A4; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A5", function() { return ERF_A5; }); +__webpack_require__.d(backend_util_namespaceObject, "warn", function() { return warn; }); +__webpack_require__.d(backend_util_namespaceObject, "log", function() { return log_log; }); +__webpack_require__.d(backend_util_namespaceObject, "mergeRealAndImagArrays", function() { return mergeRealAndImagArrays; }); +__webpack_require__.d(backend_util_namespaceObject, "splitRealAndImagArrays", function() { return splitRealAndImagArrays; }); +__webpack_require__.d(backend_util_namespaceObject, "complexWithEvenIndex", function() { return complexWithEvenIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "complexWithOddIndex", function() { return complexWithOddIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "getComplexWithIndex", function() { return getComplexWithIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "assignToTypedArray", function() { return assignToTypedArray; }); +__webpack_require__.d(backend_util_namespaceObject, "exponents", function() { return exponents; }); +__webpack_require__.d(backend_util_namespaceObject, "exponent", function() { return exponent; }); +__webpack_require__.d(backend_util_namespaceObject, "segment_util", function() { return segment_util_namespaceObject; }); +__webpack_require__.d(backend_util_namespaceObject, "castTensor", function() { return castTensor; }); +__webpack_require__.d(backend_util_namespaceObject, "reshapeTensor", function() { return reshapeTensor; }); +__webpack_require__.d(backend_util_namespaceObject, "linspaceImpl", function() { return linspaceImpl; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/backends/kernel_impls.js +var kernel_impls_namespaceObject = {}; +__webpack_require__.r(kernel_impls_namespaceObject); +__webpack_require__.d(kernel_impls_namespaceObject, 
"nonMaxSuppressionV3", function() { return nonMaxSuppressionV3; }); +__webpack_require__.d(kernel_impls_namespaceObject, "nonMaxSuppressionV5", function() { return nonMaxSuppressionV5; }); +__webpack_require__.d(kernel_impls_namespaceObject, "split", function() { return split_shared_split; }); +__webpack_require__.d(kernel_impls_namespaceObject, "tile", function() { return tile_impl_tile; }); +__webpack_require__.d(kernel_impls_namespaceObject, "topkImpl", function() { return topkImpl; }); +__webpack_require__.d(kernel_impls_namespaceObject, "whereImpl", function() { return whereImpl; }); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/engine.js + 2 modules +var engine = __webpack_require__(5); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/flags.js +var flags = __webpack_require__(61); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/kernel_names.js +var kernel_names = __webpack_require__(6); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/broadcast_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Returns the dimensions in the input shape that are broadcasted to + * produce the provided output shape. + * + * The returned dimensions are 0-indexed and sorted. 
An example: + * inShape = [4, 1, 3] + * outShape = [5, 4, 3, 3] + * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3. + */ +function getBroadcastDims(inShape, outShape) { + const inRank = inShape.length; + const dims = []; + for (let i = 0; i < inRank; i++) { + const dim = inRank - 1 - i; + const a = inShape[dim] || 1; + const b = outShape[outShape.length - 1 - i] || 1; + if (b > 1 && a === 1) { + dims.unshift(dim); + } + } + return dims; +} +/** + * Returns the axes in the output space that should be reduced to produce + * the input space. + */ +function getReductionAxes(inShape, outShape) { + const result = []; + for (let i = 0; i < outShape.length; i++) { + const inDim = inShape[inShape.length - i - 1]; + const outAxis = outShape.length - i - 1; + const outDim = outShape[outAxis]; + if (inDim == null || (inDim === 1 && outDim > 1)) { + result.unshift(outAxis); + } + } + return result; +} +function assertAndGetBroadcastShape(shapeA, shapeB) { + const result = []; + const l = Math.max(shapeA.length, shapeB.length); + for (let i = 0; i < l; i++) { + let a = shapeA[shapeA.length - i - 1]; + if (a == null) { + a = 1; + } + let b = shapeB[shapeB.length - i - 1]; + if (b == null) { + b = 1; + } + if (a === 1) { + result.unshift(b); + } + else if (b === 1) { + result.unshift(a); + } + else if (a !== b) { + const errMsg = `Operands could not be broadcast together with shapes ` + + `${shapeA} and ${shapeB}.`; + throw Error(errMsg); + } + else { + result.unshift(a); + } + } + return result; +} +//# sourceMappingURL=broadcast_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Add_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const addGradConfig = { + kernelName: kernel_names["a" /* Add */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + let res = dy; + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes); + } + return res.reshape(a.shape); + }; + const derB = () => { + let res = dy; + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes); + } + return res.reshape(b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Add_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AddN_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +const addNGradConfig = { + kernelName: kernel_names["b" /* AddN */], + saveAllInputs: true, + gradFunc: (dy, saved) => { + const ders = {}; + saved.forEach((_, i) => { + ders[i] = () => dy.clone(); + }); + return ders; + } +}; +//# sourceMappingURL=AddN_grad.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor_util.js +var tensor_util = __webpack_require__(11); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor_util_env.js +var tensor_util_env = __webpack_require__(3); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/operation.js +var operation = __webpack_require__(4); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/add.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Adds two `tf.Tensor`s element-wise, A + B. Supports broadcasting. + * + * + * ```js + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.tensor1d([10, 20, 30, 40]); + * + * a.add(b).print(); // or tf.add(a, b) + * ``` + * + * ```js + * // Broadcast add a with b. + * const a = tf.scalar(5); + * const b = tf.tensor1d([10, 20, 30, 40]); + * + * a.add(b).print(); // or tf.add(a, b) + * ``` + * @param a The first `tf.Tensor` to add. 
+ * @param b The second `tf.Tensor` to add. Must have the same type as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function add_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'add'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'add'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.add($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["a" /* Add */]); +} +const add = Object(operation["a" /* op */])({ add_ }); +//# sourceMappingURL=add.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor.js + 1 modules +var dist_tensor = __webpack_require__(7); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/util.js +var util = __webpack_require__(1); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/concat_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function assertParamsConsistent(shapes, axis) { + const rank = shapes[0].length; + shapes.forEach((shape, i) => { + util["assert"](shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` + + `as the rank of the rest (${rank})`); + }); + util["assert"](axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`); + const firstShape = shapes[0]; + shapes.forEach((shape, i) => { + for (let r = 0; r < rank; r++) { + util["assert"]((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` + + `does not match the shape of the rest (${firstShape}) ` + + `along the non-concatenated axis ${i}.`); + } + }); +} +function computeOutShape(shapes, axis) { + const outputShape = shapes[0].slice(); + for (let i = 1; i < shapes.length; i++) { + outputShape[axis] += shapes[i][axis]; + } + return outputShape; +} +//# sourceMappingURL=concat_util.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/tensor_ops.js +var tensor_ops = __webpack_require__(8); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/concat.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + +/** + * Concatenates a list of `tf.Tensor`s along a given axis. + * + * The tensors ranks and types must match, and their sizes must match in all + * dimensions except `axis`. + * + * Also available are stricter rank-specific methods that assert that + * `tensors` are of the given rank: + * - `tf.concat1d` + * - `tf.concat2d` + * - `tf.concat3d` + * - `tf.concat4d` + * + * Except `tf.concat1d` (which does not have axis param), all methods have + * same signature as this method. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * a.concat(b).print(); // or a.concat(b) + * ``` + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * const c = tf.tensor1d([5, 6]); + * tf.concat([a, b, c]).print(); + * ``` + * + * ```js + * const a = tf.tensor2d([[1, 2], [10, 20]]); + * const b = tf.tensor2d([[3, 4], [30, 40]]); + * const axis = 1; + * tf.concat([a, b], axis).print(); + * ``` + * @param tensors A list of tensors to concatenate. + * @param axis The axis to concate along. Defaults to 0 (the first dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function concat_(tensors, axis = 0) { + Object(util["assert"])(tensors.length >= 1, () => 'Pass at least one tensor to concat'); + let $tensors = Object(tensor_util_env["b" /* convertToTensorArray */])(tensors, 'tensors', 'concat'); + if ($tensors[0].dtype === 'complex64') { + $tensors.forEach(tensor => { + if (tensor.dtype !== 'complex64') { + throw new Error(`Cannot concatenate complex64 tensors with a tensor + with dtype ${tensor.dtype}. 
`); + } + }); + } + const $axis = Object(util["parseAxisParam"])(axis, $tensors[0].shape)[0]; + const outShape = computeOutShape($tensors.map(t => t.shape), $axis); + if (Object(util["sizeFromShape"])(outShape) === 0) { + return Object(tensor_ops["f" /* tensor */])([], outShape); + } + // Keep only non-empty tensors (ignore tensors with 0 in their shape). + $tensors = $tensors.filter(t => t.size > 0); + if ($tensors.length === 1) { + return $tensors[0]; + } + const shapes = $tensors.map(t => t.shape); + assertParamsConsistent(shapes, $axis); + const forward = (backend, save) => { + const res = backend.concat($tensors, $axis); + save($tensors); + return res; + }; + const inputs = $tensors; + const attr = { axis }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["l" /* Concat */], attr); +} +const concat = Object(operation["a" /* op */])({ concat_ }); +//# sourceMappingURL=concat.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/array_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Reshapes a `tf.Tensor` to a given shape. + * + * Given an input tensor, returns a new tensor with the same values as the + * input tensor with shape `shape`. 
+ * + * If one component of shape is the special value -1, the size of that + * dimension is computed so that the total size remains constant. In + * particular, a shape of [-1] flattens into 1-D. At most one component of + * shape can be -1. + * + * If shape is 1-D or higher, then the operation returns a tensor with shape + * shape filled with the values of tensor. In this case, the number of + * elements implied by shape must be the same as the number of elements in + * tensor. + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * x.reshape([2, 2]).print(); + * ``` + * + * @param x The input tensor to be reshaped. + * @param shape An array of integers defining the output tensor shape. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function reshape_(x, shape) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'reshape', null); + shape = util["inferFromImplicitShape"](shape, $x.size); + util["assert"]($x.size === util["sizeFromShape"](shape), () => 'new shape and old shape must have the same number of elements.'); + const grad = (dy) => { + return { x: () => dy.reshape($x.shape) }; + }; + const attrs = { shape }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.reshape($x, shape), { x: $x }, grad, 'Reshape', attrs); +} +/** + * Removes dimensions of size 1 from the shape of a `tf.Tensor`. + * + * ```js + * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]); + * x.squeeze().print(); + * ``` + * + * @param x The input tensor to be squeezed. + * @param axis An optional list of numbers. If specified, only + * squeezes the dimensions listed. The dimension index starts at 0. It + * is an error to squeeze a dimension that is not 1. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function squeeze_(x, axis) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'squeeze'); + return reshape($x, util["squeezeShape"]($x.shape, axis).newShape); +} +/** + * Casts a `tf.Tensor` to a new dtype. + * + * ```js + * const x = tf.tensor1d([1.5, 2.5, 3]); + * tf.cast(x, 'int32').print(); + * ``` + * @param x The input tensor to be casted. + * @param dtype The dtype to cast the input tensor to. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function cast_(x, dtype) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cast'); + // Sanity checks. + if (!util["isValidDtype"](dtype)) { + throw new Error(`Failed to cast to unknown dtype ${dtype}`); + } + if (dtype === 'string' && $x.dtype !== 'string' || + dtype !== 'string' && $x.dtype === 'string') { + throw new Error('Only strings can be casted to strings'); + } + const grad = (dy) => { + return { x: () => dy.clone() }; + }; + const attrs = { dtype }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.cast($x, dtype), { x: $x }, grad, 'Cast', attrs); +} +/** + * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * const c = tf.tensor1d([5, 6]); + * tf.stack([a, b, c]).print(); + * ``` + * + * @param tensors A list of tensor objects with the same shape and dtype. + * @param axis The axis to stack along. Defaults to 0 (the first dim). 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function stack_(tensors, axis = 0) { + const $tensors = Object(tensor_util_env["b" /* convertToTensorArray */])(tensors, 'tensors', 'stack'); + util["assert"]($tensors.length >= 1, () => 'Pass at least one tensor to tf.stack'); + if ($tensors.length === 1) { + return $tensors[0].expandDims(axis); + } + const rank = $tensors[0].rank; + const shape = $tensors[0].shape; + const dtype = $tensors[0].dtype; + util["assert"](axis <= rank, () => 'Axis must be <= rank of the tensor'); + $tensors.forEach(t => { + util["assertShapesMatch"](shape, t.shape, 'All tensors passed to stack must have matching shapes'); + }); + $tensors.forEach(t => { + util["assert"](dtype === t.dtype, () => 'All tensors passed to stack must have matching dtypes'); + }); + const expandedTensors = $tensors.map(t => t.expandDims(axis)); + return concat(expandedTensors, axis); +} +/** + * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s. + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * tf.unstack(a).forEach(tensor => tensor.print()); + * ``` + * + * @param x A tensor object. + * @param axis The axis to unstack along. Defaults to 0 (the first dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function unstack_(x, axis = 0) { + axis = axis || 0; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'unstack'); + util["assert"](axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`); + if (axis < 0) { + axis += $x.shape.length; + } + const grad = (dy) => { + return { x: () => stack(dy, axis) }; + }; + const attrs = { axis }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.unstack($x, axis), { x: $x }, grad, 'Unpack', attrs); +} +/** + * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension + * into the tensor's shape. 
+ * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * const axis = 1; + * x.expandDims(axis).print(); + * ``` + * + * @param x The input tensor whose dimensions to be expanded. + * @param axis The dimension index at which to insert shape of `1`. Defaults + * to 0 (the first dimension). + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function expandDims_(x, axis = 0) { + const parseAs = null; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'expandDims', parseAs); + util["assert"](axis <= $x.rank, () => 'Axis must be <= rank of the tensor'); + const newShape = $x.shape.slice(); + if (axis < 0) { + // Negative value is counted from the tail of rank. + util["assert"](-($x.rank + 1) <= axis, () => `Axis must be in the interval [${-($x.rank + 1)}, ${$x.rank}]`); + axis = $x.rank + axis + 1; + } + newShape.splice(axis, 0, 1); + return reshape($x, newShape); +} +/** + * Computes the difference between two lists of numbers. + * + * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out` + * that represents all values that are in `x` but not in `y`. The returned + * Tensor `out` is sorted in the same order that the numbers appear in `x` + * (duplicates are preserved). This operation also returns a Tensor indices that + * represents the position of each out element in `x`. In other words: + * + * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]` + * + * ```js + * const x = [1, 2, 3, 4, 5, 6]; + * const y = [1, 3, 5]; + * + * const [out, indices] = await tf.setdiff1dAsync(x, y); + * out.print(); // [2, 4, 6] + * indices.print(); // [1, 3, 5] + * ``` + * + * @param x 1-D Tensor. Values to keep. + * @param y 1-D Tensor. Must have the same type as x. Values to exclude in the + * output. + * @returns Promise of Tensor tuple [out, indices]. + * out: Tensor with the same type as x. + * indices: A Tensor of type int32. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +async function setdiff1dAsync_(x, y) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'setdiff1d'); + const $y = Object(tensor_util_env["a" /* convertToTensor */])(y, 'y', 'setdiff1d'); + util["assert"]($x.dtype === $y.dtype, () => `x and y should have the same dtype, but got x (${$x.dtype}) and y (${$y.dtype}).`); + util["assert"]($x.rank === 1, () => `x should be 1D tensor, but got x (${$x.shape}).`); + util["assert"]($y.rank === 1, () => `y should be 1D tensor, but got y (${$y.shape}).`); + const xVals = await $x.data(); + const yVals = await $y.data(); + const ySet = new Set(yVals); + let outputSize = 0; + for (let i = 0; i < xVals.length; i++) { + if (!ySet.has(xVals[i])) { + outputSize++; + } + } + const buffer = new dist_tensor["b" /* TensorBuffer */]([outputSize], $x.dtype); + const indices = new dist_tensor["b" /* TensorBuffer */]([outputSize], 'int32'); + for (let i = 0, p = 0; i < xVals.length; i++) { + if (!ySet.has(xVals[i])) { + buffer.values[p] = xVals[i]; + indices.values[p] = i; + p++; + } + } + return [buffer.toTensor(), indices.toTensor()]; +} +/** + * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`. + * + * The values are stored in CPU as `TypedArray`. Fill the buffer using + * `buffer.set()`, or by modifying directly `buffer.values`. + * + * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with + * those values. + * + * ```js + * // Create a buffer and set values at particular indices. + * const buffer = tf.buffer([2, 2]); + * buffer.set(3, 0, 0); + * buffer.set(5, 1, 0); + * + * // Convert the buffer back to a tensor. + * buffer.toTensor().print(); + * ``` + * + * @param shape An array of integers defining the output tensor shape. + * @param dtype The dtype of the buffer. Defaults to 'float32'. + * @param values The values of the buffer as `TypedArray`. Defaults to + * zeros. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function array_ops_buffer(shape, dtype = 'float32', values) { + dtype = dtype || 'float32'; + util["assertNonNegativeIntegerDimensions"](shape); + return new dist_tensor["b" /* TensorBuffer */](shape, dtype, values); +} +/** + * Prints information about the `tf.Tensor` including its data. + * + * ```js + * const verbose = true; + * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose); + * ``` + * @param x The tensor to be printed. + * @param verbose Whether to print verbose information about the ` Tensor`, + * including dtype and size. + */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function print(x, verbose = false) { + console.log(x.toString(verbose)); +} + +const cast = Object(operation["a" /* op */])({ cast_ }); +const expandDims = Object(operation["a" /* op */])({ expandDims_ }); +const reshape = Object(operation["a" /* op */])({ reshape_ }); +const squeeze = Object(operation["a" /* op */])({ squeeze_ }); +const stack = Object(operation["a" /* op */])({ stack_ }); +const unstack = Object(operation["a" /* op */])({ unstack_ }); +const setdiff1dAsync = setdiff1dAsync_; +//# sourceMappingURL=array_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/floorDiv.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +/** + * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. + * The result is rounded with floor function. + * + * + * ```js + * const a = tf.tensor1d([1, 4, 9, 16]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.floorDiv(b).print(); // or tf.div(a, b) + * ``` + * + * ```js + * // Broadcast div a with b. + * const a = tf.tensor1d([2, 4, 6, 8]); + * const b = tf.scalar(2); + * + * a.floorDiv(b).print(); // or tf.floorDiv(a, b) + * ``` + * + * @param a The first tensor as the numerator. + * @param b The second tensor as the denominator. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function floorDiv_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'floorDiv'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'floorDiv'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.floorDiv($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["D" /* FloorDiv */]); +} +const floorDiv = Object(operation["a" /* op */])({ floorDiv_ }); +//# sourceMappingURL=floorDiv.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/div.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 4, 9, 16]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.div(b).print(); // or tf.div(a, b) + * ``` + * + * ```js + * // Broadcast div a with b. + * const a = tf.tensor1d([2, 4, 6, 8]); + * const b = tf.scalar(2); + * + * a.div(b).print(); // or tf.div(a, b) + * ``` + * + * @param a The first tensor as the numerator. + * @param b The second tensor as the denominator. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function div_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'div'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'div'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + if ($a.dtype === 'int32' && $b.dtype === 'int32') { + return floorDiv($a, $b); + } + const forward = (backend, save) => { + const res = backend.realDivide($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + const attrs = {}; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["y" /* Div */], attrs); +} +const div = Object(operation["a" /* op */])({ div_ }); +//# sourceMappingURL=div.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/mul.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Multiplies two `tf.Tensor`s element-wise, A * B. Supports broadcasting. + * + * We also expose `tf.mulStrict` which has the same signature as this op and + * asserts that `a` and `b` are the same shape (does not broadcast). + * + * ```js + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.tensor1d([2, 3, 4, 5]); + * + * a.mul(b).print(); // or tf.mul(a, b) + * ``` + * + * ```js + * // Broadcast mul a with b. + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.scalar(5); + * + * a.mul(b).print(); // or tf.mul(a, b) + * ``` + * @param a The first tensor to multiply. + * @param b The second tensor to multiply. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function mul_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'mul'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'mul'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.multiply($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["Y" /* Multiply */]); +} +const mul = Object(operation["a" /* op */])({ mul_ }); +//# sourceMappingURL=mul.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the + * gradient of `f(x)` with respect to `x`. + * + * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to + * `x` is computed instead. `f(x)` must take a single tensor `x` and return a + * single tensor `y`. If `f()` takes multiple inputs, use `tf.grads` instead. 
+ * + * ```js + * // f(x) = x ^ 2 + * const f = x => x.square(); + * // f'(x) = 2x + * const g = tf.grad(f); + * + * const x = tf.tensor1d([2, 3]); + * g(x).print(); + * ``` + * + * ```js + * // f(x) = x ^ 3 + * const f = x => x.pow(tf.scalar(3, 'int32')); + * // f'(x) = 3x ^ 2 + * const g = tf.grad(f); + * // f''(x) = 6x + * const gg = tf.grad(g); + * + * const x = tf.tensor1d([2, 3]); + * gg(x).print(); + * ``` + * + * @param f The function f(x), to compute gradient for. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function gradients_grad(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in grad(f) must be a function'); + return (x, dy) => { + // x can be of any dtype, thus null as the last argument. + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tf.grad', null); + const $dy = (dy != null) ? Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'tf.grad') : null; + return engine["a" /* ENGINE */].tidy(() => { + const { value, grads } = engine["a" /* ENGINE */].gradients(() => f($x), [$x], $dy); + if ($dy != null) { + util["assertShapesMatch"](value.shape, $dy.shape, 'The shape of dy passed in grad(f)(x, dy) must match the shape ' + + 'returned by f(x)'); + } + checkGrads(grads); + return grads[0]; + }); + }; +} +/** + * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`, + * which gives an array of gradients of `f()` with respect to each input + * [`x1`,`x2`,...]. + * + * If `dy` is passed when calling `g()`, the gradient of + * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead. + * The provided `f` must take one or more tensors and return a single tensor + * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead. 
+ * + * ```js + * // f(a, b) = a * b + * const f = (a, b) => a.mul(b); + * // df / da = b, df / db = a + * const g = tf.grads(f); + * + * const a = tf.tensor1d([2, 3]); + * const b = tf.tensor1d([-2, -3]); + * const [da, db] = g([a, b]); + * console.log('da'); + * da.print(); + * console.log('db'); + * db.print(); + * ``` + * + * @param f The function `f(x1, x2,...)` to compute gradients for. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function gradients_grads(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in grads(f) must be a function'); + return (args, dy) => { + util["assert"](Array.isArray(args), () => 'The args passed in grads(f)(args) must be an array ' + + 'of `Tensor`s or `TensorLike`s'); + // args can be of any dtype, thus null as the last argument. + const $args = Object(tensor_util_env["b" /* convertToTensorArray */])(args, 'args', 'tf.grads', null); + const $dy = (dy != null) ? Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'tf.grads') : null; + return engine["a" /* ENGINE */].tidy(() => { + const { value, grads } = engine["a" /* ENGINE */].gradients(() => f(...$args), $args, $dy); + if ($dy != null) { + util["assertShapesMatch"](value.shape, $dy.shape, 'The shape of dy passed in grads(f)([x1,...], dy) must ' + + 'match the shape returned by f([x1,...])'); + } + checkGrads(grads); + return grads; + }); + }; +} +/** + * Like `tf.grad`, but also returns the value of `f()`. Useful when `f()` + * returns a metric you want to show. + * + * The result is a rich object with the following properties: + * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`). + * - value: The value returned by `f(x)`. 
+ * + * ```js + * // f(x) = x ^ 2 + * const f = x => x.square(); + * // f'(x) = 2x + * const g = tf.valueAndGrad(f); + * + * const x = tf.tensor1d([2, 3]); + * const {value, grad} = g(x); + * + * console.log('value'); + * value.print(); + * console.log('grad'); + * grad.print(); + * ``` + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function valueAndGrad(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in valueAndGrad(f) must be a function'); + return (x, dy) => { + util["assert"](x instanceof dist_tensor["a" /* Tensor */], () => 'The x passed in valueAndGrad(f)(x) must be a tensor'); + util["assert"](dy == null || dy instanceof dist_tensor["a" /* Tensor */], () => 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor'); + const { grads, value } = engine["a" /* ENGINE */].gradients(() => f(x), [x], dy); + checkGrads(grads); + return { grad: grads[0], value }; + }; +} +/** + * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()` + * returns a metric you want to show. + * + * The result is a rich object with the following properties: + * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`). + * - value: The value returned by `f(x)`. 
+ * + * ```js + * // f(a, b) = a * b + * const f = (a, b) => a.mul(b); + * // df/da = b, df/db = a + * const g = tf.valueAndGrads(f); + * + * const a = tf.tensor1d([2, 3]); + * const b = tf.tensor1d([-2, -3]); + * const {value, grads} = g([a, b]); + * + * const [da, db] = grads; + * + * console.log('value'); + * value.print(); + * + * console.log('da'); + * da.print(); + * console.log('db'); + * db.print(); + * ``` + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function valueAndGrads(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in valueAndGrads(f) must be a function'); + return (args, dy) => { + util["assert"](Array.isArray(args) && args.every(arg => arg instanceof dist_tensor["a" /* Tensor */]), () => 'The args passed in valueAndGrads(f)(args) must be array of ' + + 'tensors'); + util["assert"](dy == null || dy instanceof dist_tensor["a" /* Tensor */], () => 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor'); + const res = engine["a" /* ENGINE */].gradients(() => f(...args), args, dy); + if (dy != null) { + util["assertShapesMatch"](res.value.shape, dy.shape, 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' + + 'match the shape returned by f([x1,...])'); + } + checkGrads(res.grads); + return res; + }; +} +/** + * Computes and returns the gradient of f(x) with respect to the list of + * trainable variables provided by `varList`. If no list is provided, it + * defaults to all trainable variables. + * + * ```js + * const a = tf.variable(tf.tensor1d([3, 4])); + * const b = tf.variable(tf.tensor1d([5, 6])); + * const x = tf.tensor1d([1, 2]); + * + * // f(a, b) = a * x ^ 2 + b * x + * const f = () => a.mul(x.square()).add(b.mul(x)).sum(); + * // df/da = x ^ 2, df/db = x + * const {value, grads} = tf.variableGrads(f); + * + * Object.keys(grads).forEach(varName => grads[varName].print()); + * ``` + * + * @param f The function to execute. f() should return a scalar. 
+ * @param varList The list of variables to compute the gradients with respect + * to. Defaults to all trainable variables. + * @returns An object with the following keys and values: + * - `value`: The value of the function `f`. + * - `grads`: A map from the names of the variables to the gradients. + * If the `varList` argument is provided explicitly and contains a subset of + * non-trainable variables, this map in the return value will contain keys + * that map the names of the non-trainable variables to `null`. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function variableGrads(f, varList) { + util["assert"](util["isFunction"](f), () => 'The f passed in variableGrads(f) must be a function'); + util["assert"](varList == null || + Array.isArray(varList) && varList.every(v => v instanceof dist_tensor["c" /* Variable */]), () => 'The varList passed in variableGrads(f, varList) must be an array ' + + 'of variables'); + const specifiedVarList = varList != null; + if (!specifiedVarList) { + // Get all of the trainable variables. + varList = []; + for (const varName in engine["a" /* ENGINE */].registeredVariables) { + varList.push(engine["a" /* ENGINE */].registeredVariables[varName]); + } + } + const specifiedNonTrainable = specifiedVarList ? varList.filter(variable => !variable.trainable) : null; + // Prune non-trainable variables. + const originalVarCount = varList.length; + varList = varList.filter(variable => variable.trainable); + util["assert"](varList.length > 0, () => `variableGrads() expects at least one of the input variables to ` + + `be trainable, but none of the ${originalVarCount} variables is ` + + `trainable.`); + const allowNoGradients = true; + const { value, grads } = engine["a" /* ENGINE */].gradients(f, varList, null, allowNoGradients); + util["assert"](grads.some(g => g != null), () => 'Cannot find a connection between any variable and the result of ' + + 'the loss function y=f(x). 
Please make sure the operations that ' + + 'use variables are inside the function f passed to minimize().'); + util["assert"](value.rank === 0, () => `The f passed in variableGrads(f) must return a scalar, but it ` + + `returned a rank-${value.rank} tensor`); + const namedGrads = {}; + varList.forEach((v, i) => { + if (grads[i] != null) { + namedGrads[v.name] = grads[i]; + } + }); + if (specifiedNonTrainable != null) { + // If varList is explicitly provided and contains non-trainable values, + // add them to the returned gradients with `null` values. + specifiedNonTrainable.forEach(v => namedGrads[v.name] = null); + } + return { value, grads: namedGrads }; +} +/** + * Overrides the gradient computation of a function `f`. + * + * Takes a function + * `f(...inputs, save) => {value: Tensor, gradFunc: (dy, saved) => Tensor[]}` + * and returns another function `g(...inputs)` which takes the same inputs as + * `f`. When called, `g` returns `f().value`. In backward mode, custom gradients + * with respect to each input of `f` are computed using `f().gradFunc`. + * + * The `save` function passsed to `f` should be used for saving tensors needed + * in the gradient. And the `saved` passed to the `gradFunc` is a + * `NamedTensorMap`, which contains those saved tensor. + * + * ```js + * const customOp = tf.customGrad((x, save) => { + * // Save x to make sure it's available later for the gradient. + * save([x]); + * // Override gradient of our custom x ^ 2 op to be dy * abs(x); + * return { + * value: x.square(), + * // Note `saved.x` which points to the `x` we saved earlier. 
+ * gradFunc: (dy, saved) => [dy.mul(saved[0].abs())] + * }; + * }); + * + * const x = tf.tensor1d([-1, -2, 3]); + * const dx = tf.grad(x => customOp(x)); + * + * console.log(`f(x):`); + * customOp(x).print(); + * console.log(`f'(x):`); + * dx(x).print(); + * ``` + * + * @param f The function to evaluate in forward mode, which should return + * `{value: Tensor, gradFunc: (dy, saved) => Tensor[]}`, where `gradFunc` + * returns the custom gradients of `f` with respect to its inputs. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function customGrad(f) { + return engine["a" /* ENGINE */].customGrad(f); +} +function checkGrads(grads) { + const numNullGradients = grads.filter(g => g == null).length; + if (numNullGradients > 0) { + throw new Error(`Cannot compute gradient of y=f(x) with respect to x. Make sure that + the f you passed encloses all operations that lead from x to y.`); + } +} + +//# sourceMappingURL=gradients.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/axis_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Returns true if the axis specifies the inner most dimensions of the + * array. 
+ */ +function axesAreInnerMostDims(axes, rank) { + for (let i = 0; i < axes.length; ++i) { + if (axes[axes.length - i - 1] !== rank - 1 - i) { + return false; + } + } + return true; +} +function combineLocations(outputLoc, reduceLoc, axes) { + const rank = outputLoc.length + reduceLoc.length; + const loc = []; + let outIdx = 0; + let reduceIdx = 0; + for (let dim = 0; dim < rank; dim++) { + if (axes.indexOf(dim) === -1) { + loc.push(outputLoc[outIdx++]); + } + else { + loc.push(reduceLoc[reduceIdx++]); + } + } + return loc; +} +function computeOutAndReduceShapes(aShape, axes) { + const outShape = []; + const rank = aShape.length; + for (let dim = 0; dim < rank; dim++) { + if (axes.indexOf(dim) === -1) { + outShape.push(aShape[dim]); + } + } + const reduceShape = axes.map(dim => aShape[dim]); + return [outShape, reduceShape]; +} +function expandShapeToKeepDim(shape, axes) { + const reduceSubShape = axes.map(x => 1); + return combineLocations(shape, reduceSubShape, axes); +} +function assertAxesAreInnerMostDims(msg, axes, rank) { + util["assert"](axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` + + `Got axes ${axes} and rank-${rank} input.`); +} +/** + * Returns the axes permutation to be used with `tf.transpose`, if such + * permutation is necessary. Otherwise it returns null. This method is used by + * operations that operate only on inner-most axes. + */ +function getAxesPermutation(axes, rank) { + if (axesAreInnerMostDims(axes, rank)) { + return null; + } + const result = []; + for (let i = 0; i < rank; ++i) { + if (axes.indexOf(i) === -1) { + result.push(i); + } + } + axes.forEach(axis => result.push(axis)); + return result; +} +/** Returns the axes permutation that undoes the original permutation. 
*/ +function getUndoAxesPermutation(axes) { + return axes.map((axis, i) => [i, axis]) + .sort((a, b) => a[1] - b[1]) + .map(x => x[0]); +} +function getInnerMostAxes(numAxes, rank) { + const res = []; + for (let i = rank - numAxes; i < rank; ++i) { + res.push(i); + } + return res; +} +//# sourceMappingURL=axis_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/reduction_ops_util.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Gradient helper function for the min and max operations. + */ +function gradForMinAndMax(dy, y, xOrig, origAxes, permutedAxes) { + if (y.rank < xOrig.rank) { + y = y.reshape(expandShapeToKeepDim(y.shape, origAxes)); + } + if (dy.rank < xOrig.rank) { + dy = dy.reshape(expandShapeToKeepDim(dy.shape, origAxes)); + } + return { + x: () => { + const dx = dy.mul(xOrig.equal(y).cast(dy.dtype)); + return permutedAxes == null ? dx : dx.transpose(permutedAxes); + } + }; +} +//# sourceMappingURL=reduction_ops_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/reduction_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +/** + * Computes the log(sum(exp(elements across the reduction dimensions)). + * + * Reduces the input along the dimensions given in `axis`. Unless `keepDims` + * is true, the rank of the array is reduced by 1 for each entry in `axis`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axis` has no entries, all dimensions are reduced, and an array with a + * single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.logSumExp().print(); // or tf.logSumExp(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis) + * ``` + * @param x The input tensor. + * @param axis The dimension(s) to reduce. If null (the default), + * reduces all dimensions. + * @param keepDims If true, retains reduced dimensions with length + * of 1. Defaults to false. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function logSumExp_(x, axis = null, keepDims = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logSumExp'); + const axes = util["parseAxisParam"](axis, $x.shape); + const xMax = $x.max(axes, true /* keepDims */); + const a = $x.sub(xMax); + const b = a.exp(); + const c = b.sum(axes); + const d = c.log(); + const res = xMax.reshape(d.shape).add(d); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, axes); + return res.reshape(newShape); + } + return res; +} +/** + * Computes the sum of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If axes has no entries, all dimensions are reduced, and a + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.sum().print(); // or tf.sum(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.sum(axis).print(); // or tf.sum(x, axis) + * ``` + * + * @param x The input tensor to compute the sum over. If the dtype is `bool` + * it will be converted to `int32` and the output dtype will be `int32`. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function sum_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sum'); + if ($x.dtype === 'bool') { + $x = $x.toInt(); + } + const axes = util["parseAxisParam"](axis, $x.shape); + // Use a custom gradient to bypass 2 gradient backprops since sum is used + // extremely often. 
+ const customOp = customGrad((x) => { + const permutation = getAxesPermutation(axes, x.rank); + let reductionAxes = axes; + let permutedX = x; + if (permutation != null) { + permutedX = x.transpose(permutation); + reductionAxes = getInnerMostAxes(reductionAxes.length, x.rank); + } + const gradFunc = (dy) => { + const expandedDyShape = x.shape.slice(); + axes.forEach(axis => { + expandedDyShape[axis] = 1; + }); + const expandedDy = dy.reshape(expandedDyShape); + const derX = expandedDy.mul(Object(tensor_ops["b" /* ones */])(x.shape, 'float32')); + return derX; + }; + const gradInputs = (dy) => { + return { x: () => gradFunc(dy) }; + }; + const attrs = { axes: reductionAxes }; + let value = engine["a" /* ENGINE */].runKernelFunc(backend => backend.sum(permutedX, reductionAxes), { x: permutedX }, gradInputs, 'Sum', attrs); + if (keepDims) { + const newShape = expandShapeToKeepDim(value.shape, axes); + value = value.reshape(newShape); + } + return { value, gradFunc }; + }); + return customOp($x); +} +/** + * Computes the product of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and a + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.prod().print(); // or tf.prod(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.prod(axis).print(); // or tf.prod(x, axis) + * ``` + * + * @param x The input tensor to compute the product over. If the dtype is `bool` + * it will be converted to `int32` and the output dtype will be `int32`. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. 
+ * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function prod_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'prod'); + if ($x.dtype === 'bool') { + $x = $x.toInt(); + } + const axes = util["parseAxisParam"](axis, $x.shape); + const permutation = getAxesPermutation(axes, $x.rank); + let reductionAxes = axes; + let permutedX = $x; + if (permutation != null) { + permutedX = $x.transpose(permutation); + reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank); + } + let value = engine["a" /* ENGINE */].runKernelFunc(backend => backend.prod(permutedX, reductionAxes), { permutedX }); + if (keepDims) { + const newShape = expandShapeToKeepDim(value.shape, axes); + value = value.reshape(newShape); + } + return value; +} +/** + * Computes the mean of elements across dimensions of a `tf.Tensor`. + * + * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is + * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with + * a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.mean().print(); // or tf.mean(a) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.mean(axis).print(); // or tf.mean(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function mean_(x, axis = null, keepDims = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'mean'); + const axes = util["parseAxisParam"](axis, $x.shape); + const shapes = computeOutAndReduceShapes($x.shape, axes); + const reduceShape = shapes[1]; + const reduceSize = util["sizeFromShape"](reduceShape); + // Use a custom gradient to bypass 2 gradient backprops since mean is used + // extremely often. + const customOp = customGrad((x) => { + const reduceSizeScalar = Object(tensor_ops["e" /* scalar */])(reduceSize); + // Cast if needed. + const xReduce = reduceSizeScalar.dtype === x.dtype ? x : x.cast(reduceSizeScalar.dtype); + const res = xReduce.div(reduceSizeScalar); + const value = res.sum(axis, keepDims); + const gradFunc = (dy) => { + const expandedDyShape = x.shape.slice(); + axes.forEach(axis => { + expandedDyShape[axis] = 1; + }); + const expandedDy = dy.reshape(expandedDyShape); + const derX = expandedDy.mul(Object(tensor_ops["b" /* ones */])(x.shape, 'float32')).div(reduceSize); + return derX; + }; + return { value, gradFunc }; + }); + return customOp($x); +} +/** + * Computes the minimum value from the input. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the array is reduced by 1 for each entry in `axes`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axes` has no entries, all dimensions are reduced, and an array with a + * single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.min().print(); // or tf.min(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.min(axis).print(); // or tf.min(x, axis) + * ``` + * + * @param x The input Tensor. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. 
+ * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function min_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'min'); + const xOrig = $x; + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => gradForMinAndMax(dy, saved[1], saved[0], origAxes, permutedAxes); + const inputsToSave = [$x]; + const outputsToSave = [true]; + let res = engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.min($x, axes); + save([xOrig, y]); + return y; + }, { x: $x }, grad, 'Min', { axes }, inputsToSave, outputsToSave); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + res = res.reshape(newShape); + } + return res; +} +/** + * Returns the indices of the minimum values along an `axis`. + * + * The result has the same shape as `input` with the dimension along `axis` + * removed. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.argMin().print(); // or tf.argMin(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]); + * + * const axis = 1; + * x.argMin(axis).print(); // or tf.argMin(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension). 
+ * + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function argMin_(x, axis = 0) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'argMin'); + if (axis == null) { + axis = 0; + } + let axes = util["parseAxisParam"](axis, $x.shape); + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => Object(tensor_ops["o" /* zerosLike */])($x) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.argMin($x, axes[0]); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Returns the indices of the maximum values along an `axis`. + * + * The result has the same shape as `input` with the dimension along `axis` + * removed. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.argMax().print(); // or tf.argMax(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]); + * + * const axis = 1; + * x.argMax(axis).print(); // or tf.argMax(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension). 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function argMax_(x, axis = 0) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'argMax'); + if (axis == null) { + axis = 0; + } + let axes = util["parseAxisParam"](axis, $x.shape); + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => Object(tensor_ops["o" /* zerosLike */])($x) }; + }; + const attrs = { axis: axes[0] }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.argMax($x, axes[0]); + save([$x]); + return res; + }, { x: $x }, grad, 'ArgMax', attrs, inputsToSave); +} +/** + * Computes the logical and of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and an + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 1, 1], 'bool'); + * + * x.all().print(); // or tf.all(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool'); + * + * const axis = 1; + * x.all(axis).print(); // or tf.all(x, axis) + * ``` + * + * @param x The input tensor. Must be of dtype bool. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function all_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'all', 'bool'); + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const res = engine["a" /* ENGINE */].runKernelFunc(backend => backend.all($x, axes), { $x }); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + return res.reshape(newShape); + } + return res; +} +/** + * Computes the logical or of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and an + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 1, 1], 'bool'); + * + * x.any().print(); // or tf.any(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool'); + * + * const axis = 1; + * x.any(axis).print(); // or tf.any(x, axis) + * ``` + * + * @param x The input tensor. Must be of dtype bool. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function any_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'any', 'bool'); + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const res = engine["a" /* ENGINE */].runKernelFunc(backend => backend.any($x, axes), { $x }); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + return res.reshape(newShape); + } + return res; +} +/** + * Calculates the mean and variance of `x`. The mean and variance are + * calculated by aggregating the contents of `x` across `axes`. If `x` is + * 1-D and `axes = [0]` this is just the mean and variance of a vector. + * + * @param x The input tensor. + * @param axis The dimension(s) along with to compute mean and + * variance. By default it reduces all dimensions. + * @param keepDims If true, the moments have the same dimensionality as the + * input. + * @return An object with two keys: `mean` and `variance`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Normalization'} */ +function moments_(x, axis = null, keepDims = false) { + x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'moments'); + const axes = util["parseAxisParam"](axis, x.shape); + const mean = x.mean(axes, keepDims); + let keepDimsShape = mean.shape; + if (!keepDims) { + keepDimsShape = expandShapeToKeepDim(mean.shape, axes); + } + const devSquared = x.toFloat().sub(mean.reshape(keepDimsShape)).square(); + const variance = devSquared.mean(axes, keepDims); + return { mean, variance }; +} +const reduction_ops_all = Object(operation["a" /* op */])({ all_ }); +// tslint:disable-next-line:variable-name +const any = Object(operation["a" /* op */])({ any_ }); +const argMax = Object(operation["a" /* op */])({ argMax_ }); +const argMin = Object(operation["a" /* op */])({ argMin_ }); +const logSumExp = Object(operation["a" /* op */])({ logSumExp_ }); +const reduction_ops_mean = Object(operation["a" /* op */])({ mean_ }); +const reduction_ops_min = Object(operation["a" /* op */])({ min_ }); +const moments = Object(operation["a" /* op */])({ moments_ }); +const sum = Object(operation["a" /* op */])({ sum_ }); +const reduction_ops_prod = Object(operation["a" /* op */])({ prod_ }); +//# sourceMappingURL=reduction_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/square.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + +/** + * Computes square of `x` element-wise: `x ^ 2` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]); + * + * x.square().print(); // or tf.square(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function square_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'square'); + const attrs = {}; + const inputsToSave = [$x]; + const outputsToSave = []; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + save([$x]); + return backend.square($x); + }, { x: $x }, null /* grad */, 'Square', attrs, inputsToSave, outputsToSave); +} +const square = Object(operation["a" /* op */])({ square_ }); +//# sourceMappingURL=square.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/unary_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Computes `-1 * x` element-wise. + * + * ```js + * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]); + * + * x.neg().print(); // or tf.neg(x) + * ``` + * + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function neg_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'neg'); + const grad = (dy) => { + return { x: () => dy.neg() }; + }; + const attrs = {}; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.neg($x), { x: $x }, grad, 'Neg', attrs, inputsToSave); +} +/** + * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)` + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.ceil().print(); // or tf.ceil(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function ceil_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'ceil'); + // TODO(manrajgrover): Return null for gradients when backprop supports it. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.ceil($x), { $x }, grad); +} +/** + * Computes floor of input `tf.Tensor` element-wise: `floor(x)`. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.floor().print(); // or tf.floor(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function floor_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'floor'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.floor($x), { $x }, grad); +} +/** + * Returns an element-wise indication of the sign of a number. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]); + * + * x.sign().print(); // or tf.sign(x) + * ``` + * @param x The input Tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sign_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sign'); + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.sign($x), { $x }, grad); +} +/** + * RReturns which elements of x are NaN. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isNaN().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isNaN_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isNaN'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isNaN($x), { $x }, grad); +} +/** + * Returns which elements of x are Infinity or -Infinity. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isInf().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isInf_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isInf'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isInf($x), { $x }, grad); +} +/** + * Returns which elements of x are finite. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isFinite().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isFinite_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isFinite'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isFinite($x), { $x }, grad); +} +/** + * Computes round of input `tf.Tensor` element-wise: `round(x)`. + * It implements banker's rounding. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.round().print(); // or tf.round(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function round_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'round'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.round($x), { $x }, grad); +} +/** + * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x` + * + * ```js + * const x = tf.tensor1d([1, 2, -3]); + * + * x.exp().print(); // or tf.exp(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function exp_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'exp'); + const bck = (dy, saved) => { + // tslint:disable-next-line: no-unnecessary-type-assertion + return { x: () => dy.mul(saved[0]) }; + }; + const attrs = {}; + const inputsToSave = []; + const outputsToSave = [true]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.exp($x); + save([y]); + return y; + }, { x: $x }, bck, 'Exp', attrs, inputsToSave, outputsToSave); +} +/** + * Computes exponential of the input `tf.Tensor` minus one element-wise. + * `e ^ x - 1` + * + * ```js + * const x = tf.tensor1d([1, 2, -3]); + * + * x.expm1().print(); // or tf.expm1(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function expm1_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'expm1'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.exp()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.expm1($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.E]); + * + * x.log().print(); // or tf.log(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function log_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'log'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.toFloat()) }; + }; + const attrs = {}; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.log($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Log', attrs, inputsToSave); +} +/** + * Computes natural logarithm of the input `tf.Tensor` plus one + * element-wise: `ln(1 + x)` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.E - 1]); + * + * x.log1p().print(); // or tf.log1p(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function log1p_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'log1p'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.add(1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.log1p($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, 4, -1]); + * + * x.sqrt().print(); // or tf.sqrt(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sqrt_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sqrt'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.toFloat().sqrt().mul(2)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sqrt($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Sqrt', {}); +} +/** + * Computes reciprocal of square root of the input `tf.Tensor` element-wise: + * `y = 1 / sqrt(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, 4, -1]); + * + * x.rsqrt().print(); // or tf.rsqrt(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function rsqrt_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'rsqrt'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.pow(1.5).mul(2)).neg() }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.rsqrt($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Rsqrt', {} /* attrs */, inputsToSave); +} +/** + * Computes reciprocal of x element-wise: `1 / x` + * + * ```js + * const x = tf.tensor1d([0, 1, 2]); + * + * x.reciprocal().print(); // or tf.reciprocal(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function reciprocal_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'reciprocal'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.square().neg()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.reciprocal($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes absolute value element-wise: `abs(x)` + * + * ```js + * const x = tf.tensor1d([-1, 2, -3, 4]); + * + * x.abs().print(); // or tf.abs(x) + * ``` + * @param x The input `tf.Tensor`. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function abs_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'abs'); + if ($x.dtype === 'complex64') { + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.complexAbs($x), { $x }); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.mul($x.toFloat().step(-1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.abs($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Abs'); +} +/** + * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)` + * + * ```js + * const x = tf.tensor1d([-1, 2, -3, 4]); + * + * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3) + * ``` + * @param x The input tensor. + * @param clipValueMin Lower-bound of range to be clipped to. + * @param clipValueMax Upper-bound of range to be clipped to. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function clipByValue_(x, clipValueMin, clipValueMax) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'clipByValue'); + util["assert"]((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` + + `less than or equal to max (${clipValueMax}).`); + const grad = (dy, saved) => { + const [$x] = saved; + return { + x: () => dy.where($x.greaterEqual(clipValueMin) + .logicalAnd($x.lessEqual(clipValueMax)), Object(tensor_ops["o" /* zerosLike */])(dy)), + }; + }; + const inputsToSave = [$x]; + const attr = { min: clipValueMin, max: clipValueMax }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.clip($x, clipValueMin, clipValueMax); + save([$x]); + return res; + }, { x: $x }, grad, 'ClipByValue', attr, inputsToSave); +} +/** + * Computes sigmoid element-wise, `1 / (1 + exp(-x))` + * + * ```js + * const x = tf.tensor1d([0, -1, 2, -3]); + * + * x.sigmoid().print(); // or tf.sigmoid(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sigmoid_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sigmoid'); + const grad = (dy, saved) => { + const [y] = saved; + return { x: () => dy.mul(y.mul(Object(tensor_ops["e" /* scalar */])(1).sub(y))) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.sigmoid($x); + save([y]); + return y; + }, { x: $x }, grad, 'Sigmoid'); +} +/** + * Computes log sigmoid of the input `tf.Tensor` element-wise: + * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`. + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.logSigmoid().print(); // or tf.logSigmoid(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function logSigmoid_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logSigmoid'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.neg().sigmoid()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.softplus($x.neg()).neg(); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.softplus().print(); // or tf.softplus(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function softplus_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'softplus'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.sigmoid()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.softplus($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes sin of the input Tensor element-wise: `sin(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.sin().print(); // or tf.sin(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sin_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sin'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => $x.toFloat().cos().mul(dy) }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sin($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Sin', {} /* attrs */, inputsToSave); +} +/** + * Computes cos of the input `tf.Tensor` element-wise: `cos(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.cos().print(); // or tf.cos(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function cos_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cos'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => $x.toFloat().sin().neg().mul(dy) }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.cos($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Cos', {} /* attrs */, inputsToSave); +} +/** + * Computes tan of the input `tf.Tensor` element-wise, `tan(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.tan().print(); // or tf.tan(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function tan_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tan'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.cos().square()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.tan($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes asin of the input `tf.Tensor` element-wise: `asin(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.asin().print(); // or tf.asin(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function asin_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'asin'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + // tslint:disable-next-line: no-unnecessary-type-assertion + $x: () => dy.div(Object(tensor_ops["e" /* scalar */])(1).sub($x.toFloat().square()).sqrt()) + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.asin($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes acos of the input `tf.Tensor` element-wise: `acos(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.acos().print(); // or tf.acos(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function acos_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'acos'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = $x.toFloat().square(); + const b = Object(tensor_ops["e" /* scalar */])(1).sub(a).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(b).neg(); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.acos($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes atan of the input `tf.Tensor` element-wise: `atan(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.atan().print(); // or tf.atan(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function atan_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'atan'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.toFloat().square().add(1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.atan($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.sinh().print(); // or tf.sinh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sinh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sinh'); + const grad = (dy, saved) => { + const [$x] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { $x: () => $x.toFloat().cosh().mul(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sinh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.cosh().print(); // or tf.cosh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function cosh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cosh'); + const grad = (dy, saved) => { + const [$x] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { $x: () => $x.toFloat().sinh().mul(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.cosh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, 70]); + * + * x.tanh().print(); // or tf.tanh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function tanh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tanh'); + const grad = (dy, saved) => { + const [y] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { x: () => Object(tensor_ops["e" /* scalar */])(1).sub(y.square()).mul(dy) }; + }; + const outputsToSave = [true]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.tanh($x); + save([y]); + return y; + }, { x: $x }, grad, 'Tanh', {} /* attrs */, null /* inputsToSave */, outputsToSave); +} +/** + * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise: + * `asinh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.asinh().print(); // or tf.asinh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function asinh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'asinh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = Object(tensor_ops["e" /* scalar */])(1).add($x.toFloat().square()).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(a); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.asinh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise: + * `acosh(x)` + * + * ```js + * const x = tf.tensor1d([10, 1, 3, 5.7]); + * + * x.acosh().print(); // or tf.acosh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function acosh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'acosh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = $x.toFloat().square().sub(1).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(a); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.acosh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise: + * `atanh(x)` + * + * ```js + * const x = tf.tensor1d([0, .1, -.1, .7]); + * + * x.atanh().print(); // or tf.atanh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function atanh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'atanh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div(Object(tensor_ops["e" /* scalar */])(1).sub($x.toFloat().square())) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.atanh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes gause error function of the input `tf.Tensor` element-wise: + * `erf(x)` + * + * ```js + * const x = tf.tensor1d([0, .1, -.1, .7]); + * + * x.erf().print(); // or tf.erf(x); + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function erf_(x) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'erf'); + util["assert"]($x.dtype === 'int32' || $x.dtype === 'float32', () => 'Input dtype must be `int32` or `float32`.'); + if ($x.dtype === 'int32') { + $x = $x.toFloat(); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => dy.mul($x.square().neg().exp().mul(2 / Math.sqrt(Math.PI))) + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.erf($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x` + * + * ```js + * const x = tf.tensor1d([0, 2, -1, -3]); + * + * x.step(.5).print(); // or tf.step(x, .5) + * ``` + * @param x The input tensor. + * @param alpha The gradient when input is negative. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function step_(x, alpha = 0.0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'step'); + // TODO(manrajgrover): Return null for gradients when backprop supports + // it. 
+ const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.step($x, alpha), { $x }, grad); +} +const abs = Object(operation["a" /* op */])({ abs_ }); +const acos = Object(operation["a" /* op */])({ acos_ }); +const acosh = Object(operation["a" /* op */])({ acosh_ }); +const asin = Object(operation["a" /* op */])({ asin_ }); +const asinh = Object(operation["a" /* op */])({ asinh_ }); +const atan = Object(operation["a" /* op */])({ atan_ }); +const atanh = Object(operation["a" /* op */])({ atanh_ }); +const ceil = Object(operation["a" /* op */])({ ceil_ }); +const clipByValue = Object(operation["a" /* op */])({ clipByValue_ }); +const cos = Object(operation["a" /* op */])({ cos_ }); +const cosh = Object(operation["a" /* op */])({ cosh_ }); +const erf = Object(operation["a" /* op */])({ erf_ }); +const unary_ops_exp = Object(operation["a" /* op */])({ exp_ }); +const expm1 = Object(operation["a" /* op */])({ expm1_ }); +const floor = Object(operation["a" /* op */])({ floor_ }); +const log = Object(operation["a" /* op */])({ log_ }); +const log1p = Object(operation["a" /* op */])({ log1p_ }); +const logSigmoid = Object(operation["a" /* op */])({ logSigmoid_ }); +const neg = Object(operation["a" /* op */])({ neg_ }); +const reciprocal = Object(operation["a" /* op */])({ reciprocal_ }); +const round = Object(operation["a" /* op */])({ round_ }); +const rsqrt = Object(operation["a" /* op */])({ rsqrt_ }); +const sigmoid = Object(operation["a" /* op */])({ sigmoid_ }); +const sign = Object(operation["a" /* op */])({ sign_ }); +const unary_ops_isNaN = Object(operation["a" /* op */])({ isNaN_ }); +const isInf = Object(operation["a" /* op */])({ isInf_ }); +const unary_ops_isFinite = Object(operation["a" /* op */])({ isFinite_ }); +const sin = Object(operation["a" /* op */])({ sin_ }); +const sinh = Object(operation["a" /* op */])({ sinh_ }); +const softplus = 
Object(operation["a" /* op */])({ softplus_ }); +const sqrt = Object(operation["a" /* op */])({ sqrt_ }); +const unary_ops_step = Object(operation["a" /* op */])({ step_ }); +const tan = Object(operation["a" /* op */])({ tan_ }); +const tanh = Object(operation["a" /* op */])({ tanh_ }); +//# sourceMappingURL=unary_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Atan2_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + + +const atan2GradConfig = { + kernelName: kernel_names["c" /* Atan2 */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const d = add(square(a), square(b)); + let res = mul(dy, div(b, d)); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, a.shape); + }; + const derB = () => { + const d = add(square(a), square(b)); + let res = neg(mul(dy, div(a, d))); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Atan2_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') { + const [filterHeight, filterWidth] = parseTupleParam(filterSize); + let filterShape; + if (dataFormat === 'channelsLast') { + filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]]; + } + else if (dataFormat === 'channelsFirst') { + filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]]; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat); +} +/** + * Computes the information for a forward pass of a pooling3D operation. + */ +function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') { + const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize); + let filterShape; + let $dataFormat; + if (dataFormat === 'NDHWC') { + $dataFormat = 'channelsLast'; + filterShape = + [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]]; + } + else if (dataFormat === 'NCDHW') { + $dataFormat = 'channelsFirst'; + filterShape = + [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]]; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode); +} +/** + * Computes the information for a forward pass of a convolution/pooling + * operation. 
+ */ +function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') { + let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1]; + if (dataFormat === 'channelsLast') { + [batchSize, inHeight, inWidth, inChannels] = inShape; + } + else if (dataFormat === 'channelsFirst') { + [batchSize, inChannels, inHeight, inWidth] = inShape; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + const [filterHeight, filterWidth, , filterChannels] = filterShape; + const [strideHeight, strideWidth] = parseTupleParam(strides); + const [dilationHeight, dilationWidth] = parseTupleParam(dilations); + const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight); + const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth); + const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat); + const outChannels = depthwise ? filterChannels * inChannels : filterChannels; + let outShape; + if (dataFormat === 'channelsFirst') { + outShape = [batchSize, outChannels, outHeight, outWidth]; + } + else if (dataFormat === 'channelsLast') { + outShape = [batchSize, outHeight, outWidth, outChannels]; + } + return { + batchSize, + dataFormat, + inHeight, + inWidth, + inChannels, + outHeight, + outWidth, + outChannels, + padInfo, + strideHeight, + strideWidth, + filterHeight, + filterWidth, + effectiveFilterHeight, + effectiveFilterWidth, + dilationHeight, + dilationWidth, + inShape, + outShape, + filterShape + }; +} +/** + * Computes the information for a forward pass of a 3D convolution/pooling + * operation. 
+ */ +function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) { + let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1]; + if (dataFormat === 'channelsLast') { + [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape; + } + else if (dataFormat === 'channelsFirst') { + [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape; + const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides); + const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations); + const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth); + const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight); + const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth); + const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode); + const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels; + let outShape; + if (dataFormat === 'channelsFirst') { + outShape = [batchSize, outChannels, outDepth, outHeight, outWidth]; + } + else if (dataFormat === 'channelsLast') { + outShape = [batchSize, outDepth, outHeight, outWidth, outChannels]; + } + return { + batchSize, + dataFormat, + inDepth, + inHeight, + inWidth, + inChannels, + outDepth, + outHeight, + outWidth, + outChannels, + padInfo, + strideDepth, + strideHeight, + strideWidth, + filterDepth, + filterHeight, + filterWidth, + effectiveFilterDepth, + effectiveFilterHeight, + effectiveFilterWidth, + dilationDepth, + dilationHeight, + dilationWidth, + inShape, + outShape, + filterShape + }; +} +function computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) { + if (zeroPad == null) { + zeroPad = computeDefaultPad(inShape, fieldSize, stride); + } + const inputRows = inShape[0]; + const inputCols = inShape[1]; + const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` + + `Change the stride and/or zero pad parameters`); + return [outputRows, outputCols]; +} +function computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) { + if (zeroPad == null) { + zeroPad = computeDefaultPad(inShape, fieldSize, stride); + } + const inputDepth = inShape[0]; + const inputRows = inShape[1]; + const inputCols = inShape[2]; + const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + return [outputDepths, outputRows, outputCols, outChannels]; +} +function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) { + const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation); + return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2); +} +function parseTupleParam(param) { + if (typeof param === 'number') { + return [param, param, param]; + } + if (param.length === 2) { + return [param[0], param[1], 1]; + } + return param; +} +function parse3TupleParam(param) { + return typeof param === 'number' ? 
[param, param, param] : param; +} +/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d + * Atrous convolution is equivalent to standard convolution with upsampled + * filters with effective_filter_height = + * filter_height + (filter_height - 1) * (dilation - 1) + * and effective_filter_width = + * filter_width + (filter_width - 1) * (dilation - 1), + * produced by inserting dilation - 1 zeros along consecutive elements across + * the filters' spatial dimensions. + * When there is a dilation, this converts a filter dimension to the + * effective filter dimension, so it can be used in a standard convolution. + */ +function getEffectiveFilterSize(filterSize, dilation) { + if (dilation <= 1) { + return filterSize; + } + return filterSize + (filterSize - 1) * (dilation - 1); +} +function getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) { + let padInfo; + let outHeight; + let outWidth; + if (typeof pad === 'number') { + const padType = (pad === 0) ? 
'VALID' : 'NUMBER'; + padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType }; + const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode); + outHeight = outShape[0]; + outWidth = outShape[1]; + } + else if (pad === 'same') { + outHeight = Math.ceil(inHeight / strideHeight); + outWidth = Math.ceil(inWidth / strideWidth); + const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight); + const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth); + const top = Math.floor(padAlongHeight / 2); + const bottom = padAlongHeight - top; + const left = Math.floor(padAlongWidth / 2); + const right = padAlongWidth - left; + padInfo = { top, bottom, left, right, type: 'SAME' }; + } + else if (pad === 'valid') { + padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' }; + outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight); + outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth); + } + else if (typeof pad === 'object') { + const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0]; + const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1]; + const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0]; + const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1]; + const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ? 
+ 'VALID' : + 'EXPLICIT'; + padInfo = { top, bottom, left, right, type: padType }; + outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode); + outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode); + } + else { + throw Error(`Unknown padding parameter: ${pad}`); + } + return { padInfo, outHeight, outWidth }; +} +function get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) { + let padInfo; + let outDepth; + let outHeight; + let outWidth; + if (typeof pad === 'number') { + const padType = (pad === 0) ? 'VALID' : 'NUMBER'; + padInfo = { + top: pad, + bottom: pad, + left: pad, + right: pad, + front: pad, + back: pad, + type: padType + }; + const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode); + outDepth = outShape[0]; + outHeight = outShape[1]; + outWidth = outShape[2]; + } + else if (pad === 'same') { + outDepth = Math.ceil(inDepth / strideDepth); + outHeight = Math.ceil(inHeight / strideHeight); + outWidth = Math.ceil(inWidth / strideWidth); + const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth; + const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight; + const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth; + const front = Math.floor(padAlongDepth / 2); + const back = padAlongDepth - front; + const top = Math.floor(padAlongHeight / 2); + const bottom = padAlongHeight - top; + const left = Math.floor(padAlongWidth / 2); + const right = padAlongWidth - left; + padInfo = { top, bottom, left, right, front, back, type: 'SAME' }; + } + else if (pad === 'valid') { + padInfo = { + top: 0, + bottom: 0, + left: 0, + right: 0, + front: 0, + back: 0, + type: 'VALID' + }; + outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth); + outHeight = 
Math.ceil((inHeight - filterHeight + 1) / strideHeight); + outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth); + } + else { + throw Error(`Unknown padding parameter: ${pad}`); + } + return { padInfo, outDepth, outHeight, outWidth }; +} +/** + * Rounds a value depending on the rounding mode + * @param value + * @param roundingMode + */ +function conditionalRound(value, roundingMode) { + if (!roundingMode) { + return value; + } + switch (roundingMode) { + case 'round': + // used for Caffe Conv + return Math.round(value); + case 'ceil': + // used for Caffe Pool + return Math.ceil(value); + case 'floor': + return Math.floor(value); + default: + throw new Error(`Unknown roundingMode ${roundingMode}`); + } +} +function tupleValuesAreOne(param) { + const [dimA, dimB, dimC] = parseTupleParam(param); + return dimA === 1 && dimB === 1 && dimC === 1; +} +function eitherStridesOrDilationsAreOne(strides, dilations) { + return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations); +} +/** + * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to + * 'channelsLast'|'channelsFirst' + * @param dataFormat in 'NHWC'|'NCHW' mode + * @return dataFormat in 'channelsLast'|'channelsFirst' mode + * @throws unknown dataFormat + */ +function convertConv2DDataFormat(dataFormat) { + if (dataFormat === 'NHWC') { + return 'channelsLast'; + } + else if (dataFormat === 'NCHW') { + return 'channelsFirst'; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } +} +//# sourceMappingURL=conv_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_3d_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of a 3d avg pool. + * + * @param dy The dy error, of rank 5 of shape + * [batchSize, depth, height, width, channels]. + * assumed. + * @param input The original input image, of rank 5 or rank4 of shape + * [batchSize, depth, height, width, channels]. + * @param filterSize The filter size: + * `[filterDepth, filterHeight, filterWidth]`. + * `filterSize` is a single number, + * then `filterDepth == filterHeight == filterWidth`. + * @param strides The strides of the pooling: + * `[strideDepth, strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param dilations Deprecated, this field will be gone in v3.0.0. The dilation + * rates: `[dilationDepth, dilationHeight, dilationWidth]` + * in which we sample input values across the depth, height and width + * dimensions in dilated pooling. + * Defaults to `[1, 1, 1]`. If `dilations` is a single number, + * then `dilationDepth == dilationHeight == dilationWidth`. + * If it is greater than 1, then all values of `strides` must be 1. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. 
+ */ +function avgPool3dBackprop_(dy, input, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'avgPool3dBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'avgPool3dBackprop'); + let dy5D = $dy; + let input5D = $input; + let reshapedTo5D = false; + if ($input.rank === 4) { + reshapedTo5D = true; + dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]); + input5D = reshape($input, [ + 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3] + ]); + } + util["assert"](dy5D.rank === 5, () => `Error in avgPool3dBackprop: dy must be rank 5 but got rank ` + + `${dy5D.rank}.`); + util["assert"](input5D.rank === 5, () => `Error in avgPool3dBackprop: input must be rank 5 but got rank ` + + `${input5D.rank}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3dBackprop: Either strides or dilations ' + + `must be 1. 
Got strides ${strides} and dilations '${dilations}'`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` + + `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode); + return backend.avgPool3dBackprop(dy5D, input5D, convInfo); + }; + const inputs = { dy: dy5D, input: input5D }; + const attrs = { filterSize, strides, dilations, pad, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["f" /* AvgPool3DBackprop */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const avgPool3dBackprop = Object(operation["a" /* op */])({ avgPool3dBackprop_ }); +//# sourceMappingURL=avg_pool_3d_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const avgPool3DGradConfig = { + kernelName: kernel_names["e" /* AvgPool3D */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? [1, 1, 1] : dilations; + return { + x: () => avgPool3dBackprop(dy, x, filterSize, strides, $dilations, pad, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=AvgPool3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of an 2D avg pool. + * + * @param dy The dy error, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param input The input image, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param filterSize The filter size: `[filterHeight, filterWidth]`. If + * `filterSize` is a single number, then `filterHeight == filterWidth`. + * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. 
If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + */ +function avgPoolBackprop_(dy, input, filterSize, strides, pad) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'avgPoolBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'avgPoolBackprop'); + util["assert"]($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`); + let input4D = $input; + let dy4D = $dy; + let reshapedTo4D = false; + if ($input.rank === 3) { + reshapedTo4D = true; + input4D = + reshape($input, [1, $input.shape[0], $input.shape[1], $input.shape[2]]); + dy4D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2]]); + } + util["assert"](dy4D.rank === 4, () => `Error in avgPoolBackprop: dy must be rank 4 but got rank ` + + `${dy4D.rank}.`); + util["assert"](input4D.rank === 4, () => `Error in avgPoolBackprop: input must be rank 4 but got rank ` + + `${input4D.rank}.`); + const forward = backend => { + const convInfo = computePool2DInfo(input4D.shape, filterSize, strides, 1 /* dilations */, pad); + return backend.avgPoolBackprop(dy4D, input4D, convInfo); + }; + const inputs = { dy: dy4D, input: input4D }; + const attrs = { filterSize, strides, pad }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["g" /* AvgPoolBackprop */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const avgPoolBackprop = Object(operation["a" /* op */])({ avgPoolBackprop_ }); +//# sourceMappingURL=avg_pool_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const avgPoolGradConfig = { + kernelName: kernel_names["d" /* AvgPool */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { filterSize, strides, pad } = attrs; + return { + x: () => avgPoolBackprop(dy, x, filterSize, strides, pad) + }; + } +}; +//# sourceMappingURL=AvgPool_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/mat_mul.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the dot product of two matrices, A * B. These must be matrices. 
+ * + * ```js + * const a = tf.tensor2d([1, 2], [1, 2]); + * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * a.matMul(b).print(); // or tf.matMul(a, b) + * ``` + * @param a First matrix in dot product operation. + * @param b Second matrix in dot product operation. + * @param transposeA If true, `a` is transposed before multiplication. + * @param transposeB If true, `b` is transposed before multiplication. + */ +/** @doc {heading: 'Operations', subheading: 'Matrices'} */ +function matMul_(a, b, transposeA = false, transposeB = false) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'matMul'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'matMul'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + util["assert"]($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in matMul: inputs must have the same rank of at least 2, ` + + `got ranks ${$a.rank} and ${$b.rank}.`); + const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1]; + const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2]; + const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2]; + const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1]; + const outerDimsA = $a.shape.slice(0, -2); + const outerDimsB = $b.shape.slice(0, -2); + const batchDimA = util["sizeFromShape"](outerDimsA); + const batchDimB = util["sizeFromShape"](outerDimsB); + util["assert"](util["arraysEqual"](outerDimsA, outerDimsB), () => `Error in matMul: outer dimensions (${outerDimsA}) and (` + + `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` + + `${$b.shape} must match.`); + util["assert"](innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` + + `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` + + `${$b.shape} and transposeA=${transposeA}` + + ` and transposeB=${transposeB} must match.`); + const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]); + const a3D = transposeA ? reshape($a, [batchDimA, innerShapeA, outerShapeA]) : + reshape($a, [batchDimA, outerShapeA, innerShapeA]); + const b3D = transposeB ? reshape($b, [batchDimB, outerShapeB, innerShapeB]) : + reshape($b, [batchDimB, innerShapeB, outerShapeB]); + const forward = (backend, save) => { + save([a3D, b3D]); + return backend.batchMatMul(a3D, b3D, transposeA, transposeB); + }; + const inputs = { a: a3D, b: b3D }; + const attrs = { transposeA, transposeB }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["h" /* BatchMatMul */], attrs); + return reshape(res, outShape); +} +const matMul = Object(operation["a" /* op */])({ matMul_ }); +//# sourceMappingURL=mat_mul.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BatchMatMul_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const batchMatMulGradConfig = { + kernelName: kernel_names["h" /* BatchMatMul */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved, attrs) => { + const [a, b] = saved; + const { transposeA, transposeB } = attrs; + if (!transposeA && !transposeB) { + return { + a: () => matMul(dy, b, false, true), + b: () => matMul(a, dy, true, false) + }; + } + else if (!transposeA && transposeB) { + return { + a: () => matMul(dy, b, false, false), + b: () => matMul(dy, a, true, false) + }; + } + else if (transposeA && !transposeB) { + return { + a: () => matMul(b, dy, false, true), + b: () => matMul(a, dy, false, false) + }; + } + else { + return { + a: () => matMul(b, dy, true, true), + b: () => matMul(dy, a, true, true) + }; + } + } +}; +//# sourceMappingURL=BatchMatMul_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/space_to_batch_nd.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * This operation divides "spatial" dimensions `[1, ..., M]` of the input into + * a grid of blocks of shape `blockShape`, and interleaves these blocks with + * the "batch" dimension (0) such that in the output, the spatial + * dimensions `[1, ..., M]` correspond to the position within the grid, + * and the batch dimension combines both the position within a spatial block + * and the original batch position. Prior to division into blocks, + * the spatial dimensions of the input are optionally zero padded + * according to `paddings`. See below for a precise description. + * + * ```js + * const x = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]); + * const blockShape = [2, 2]; + * const paddings = [[0, 0], [0, 0]]; + * + * x.spaceToBatchND(blockShape, paddings).print(); + * ``` + * + * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape + + * remainingShape`, where spatialShape has `M` dimensions. + * @param blockShape A 1-D array. Must have shape `[M]`, all values must + * be >= 1. + * @param paddings A 2-D array. Must have shape `[M, 2]`, all values must be >= + * 0. `paddings[i] = [padStart, padEnd]` specifies the amount to zero-pad + * from input dimension `i + 1`, which corresponds to spatial dimension `i`. It + * is required that + * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0` + * + * This operation is equivalent to the following steps: + * + * 1. Zero-pad the start and end of dimensions `[1, ..., M]` of the input + * according to `paddings` to produce `padded` of shape paddedShape. + * + * 2. Reshape `padded` to `reshapedPadded` of shape: + * `[batch] + [paddedShape[1] / blockShape[0], blockShape[0], ..., + * paddedShape[M] / blockShape[M-1], blockShape[M-1]] + remainingShape` + * + * 3. 
Permute dimensions of `reshapedPadded` to produce `permutedReshapedPadded` + * of shape: `blockShape + [batch] + [paddedShape[1] / blockShape[0], ..., + * paddedShape[M] / blockShape[M-1]] + remainingShape` + * + * 4. Reshape `permutedReshapedPadded` to flatten `blockShape` into the + * batch dimension, producing an output tensor of shape: + * `[batch * prod(blockShape)] + [paddedShape[1] / blockShape[0], ..., + * paddedShape[M] / blockShape[M-1]] + remainingShape` + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function spaceToBatchND_(x, blockShape, paddings) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'spaceToBatchND'); + util["assert"]($x.rank >= 1 + blockShape.length, () => `input rank ${$x.rank} should be > than [blockShape] ${blockShape.length}`); + util["assert"](paddings.length === blockShape.length, () => `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${blockShape.length}`); + util["assert"]($x.shape.reduce((a, b, i) => { + if (i > 0 && i <= blockShape.length) { + return a && + ((b + paddings[i - 1][0] + paddings[i - 1][1]) % + blockShape[i - 1] === + 0); + } + return a; + }, true), () => `input spatial dimensions ${$x.shape.slice(1)} with paddings ${paddings.toString()} must be divisible by blockShapes ${blockShape.toString()}`); + const forward = backend => backend.spaceToBatchND($x, blockShape, paddings); + const inputs = { x: $x }; + const attrs = { blockShape, paddings }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["mb" /* SpaceToBatchND */], attrs); +} +const spaceToBatchND = Object(operation["a" /* op */])({ spaceToBatchND_ }); +//# sourceMappingURL=space_to_batch_nd.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BatchToSpaceND_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const batchToSpaceNDGradConfig = { + kernelName: kernel_names["i" /* BatchToSpaceND */], + gradFunc: (dy, saved, attrs) => { + const { blockShape, crops } = attrs; + return { x: () => spaceToBatchND(dy, blockShape, crops) }; + } +}; +//# sourceMappingURL=BatchToSpaceND_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BroadcastTo_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const broadcastToGradConfig = { + kernelName: kernel_names["j" /* BroadcastTo */], + gradFunc: (dy, saved, attrs) => { + const broadCastToAttrs = attrs; + const inputShape = broadCastToAttrs.inputShape; + const outputShape = broadCastToAttrs.shape; + const reps = Array.from(outputShape); + for (let i = inputShape.length - 1; i >= 0; i--) { + if (inputShape[i] === outputShape[i]) { + reps[i] = 1; + } + else if (inputShape[i] !== 1) { + throw new Error(`broadcastTo(): [${inputShape}] cannot be broadcast to [${outputShape}].`); + } + } + const axes = []; + for (let i = 0; i < reps.length; i++) { + if (reps[i] > 1) { + axes.push(i); + } + } + return { x: () => sum(dy, axes, true /* keepDims */) }; + } +}; +//# sourceMappingURL=BroadcastTo_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/split.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Splits a `tf.Tensor` into sub tensors. + * + * If `numOrSizeSplits` is a number, splits `x` along dimension `axis` + * into `numOrSizeSplits` smaller tensors. + * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`. + * + * If `numOrSizeSplits` is a number array, splits `x` into + * `numOrSizeSplits.length` pieces. 
The shape of the `i`-th piece has the + * same size as `x` except along dimension `axis` where the size is + * `numOrSizeSplits[i]`. + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]); + * const [a, b] = tf.split(x, 2, 1); + * a.print(); + * b.print(); + * + * const [c, d, e] = tf.split(x, [1, 2, 1], 1); + * c.print(); + * d.print(); + * e.print(); + * ``` + * + * @param x The input tensor to split. + * @param numOrSizeSplits Either an integer indicating the number of + * splits along the axis or an array of integers containing the sizes of + * each output tensor along the axis. If a number then it must evenly divide + * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`. + * @param axis The dimension along which to split. Defaults to 0 (the first + * dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function split_(x, numOrSizeSplits, axis = 0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'split'); + const $axis = Object(util["parseAxisParam"])(axis, $x.shape)[0]; + let splitSizes; + if (typeof (numOrSizeSplits) === 'number') { + Object(util["assert"])($x.shape[$axis] % numOrSizeSplits === 0, () => 'Number of splits must evenly divide the axis.'); + splitSizes = + new Array(numOrSizeSplits).fill($x.shape[$axis] / numOrSizeSplits); + } + else { + Object(util["assert"])($x.shape[$axis] === numOrSizeSplits.reduce((a, b) => a + b), () => 'The sum of sizes must match the size of the axis dimension.'); + splitSizes = numOrSizeSplits; + } + const forward = (backend, _) => { + return backend.split($x, splitSizes, $axis); + }; + const inputs = { x: $x }; + const attr = { numOrSizeSplits, axis }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["nb" /* SplitV */], attr); +} +const split = Object(operation["a" /* op */])({ split_ }); +//# sourceMappingURL=split.js.map +// CONCATENATED MODULE: 
./node_modules/@tensorflow/tfjs-core/dist/gradients/Concat_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + +const concatGradConfig = { + kernelName: kernel_names["l" /* Concat */], + saveAllInputs: true, + gradFunc: (dy, saved, attrs) => { + const shapes = saved.map(t => t.shape); + const { axis } = attrs; + const $axis = Object(util["parseAxisParam"])(axis, saved[0].shape)[0]; + const sizeSplits = shapes.map(s => s[$axis]); + const derTensors = split(dy, sizeSplits, $axis); + return derTensors.map(t => () => t); + } +}; +//# sourceMappingURL=Concat_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the filter of a 2D convolution. + * + * @param x The input tensor, of rank 4 or rank 3 of shape + * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed. + * @param dy The dy image, of rank 4 or rank 3, of shape + * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed. + * @param filterShape The shape of the filter, length 4, + * [filterHeight, filterWidth, inDepth, outDepth]. + * @param strides The strides of the convolution: [strideHeight, + * strideWidth]. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) { + let x4D = x; + if (x.rank === 3) { + x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]); + } + let dy4D = dy; + if (dy4D.rank === 3) { + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + util["assert"](x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` + + `${x4D.shape}.`); + util["assert"](dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` + + `${dy4D.shape}.`); + util["assert"](filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` + + `${filterShape}.`); + const inDepth = dataFormat === 'NHWC' ? 
x4D.shape[3] : x4D.shape[1]; + const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1]; + util["assert"](inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` + + `match input depth in filter (${filterShape[2]}.`); + util["assert"](outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` + + `match output depth for filter (${filterShape[3]}).`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const dilations = 1; + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + return backend.conv2dDerFilter(x4D, dy4D, convInfo); + }; + const inputs = { x: x4D, dy: dy4D }; + const attrs = { strides, pad, dataFormat, dimRoundingMode }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["n" /* Conv2DBackpropFilter */], attrs); +} +const conv2DBackpropFilter = Object(operation["a" /* op */])({ conv2DBackpropFilter_ }); +//# sourceMappingURL=conv2d_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the input of a 2D convolution. + * + * @param xShape The shape of the input: [batch, height, width, inDepth]. + * If length of 3, batch of 1 is assumed. + * @param dy The derivative of the output, of rank 4 or rank 3 of shape + * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is + * assumed. + * @param filter The filter, rank 4, of shape + * `[filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm used: + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dimRoundingMode The rounding mode used when computing output + * dimensions if pad is a number. If none is provided, it will not round + * and error if the output is of fractional size. 
+ */ +function conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) { + util["assert"](xShape.length === dy.rank, () => `Length of inShape ` + + `(${xShape.length}) and rank of dy (${dy.rank}) must match`); + let xShape4D = xShape; + let dy4D = dy; + let reshapedTo4D = false; + if (dy.rank === 3) { + reshapedTo4D = true; + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + xShape4D = [1, xShape[0], xShape[1], xShape[2]]; + } + util["assert"](xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` + + `${xShape4D.length}.`); + util["assert"](dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` + + `rank ${dy4D.rank}`); + util["assert"](filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` + + `rank ${filter.rank}`); + const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1]; + const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1]; + util["assert"](inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` + + `match input depth for filter ${filter.shape[2]}.`); + util["assert"](outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` + + `match output depth for filter ${filter.shape[3]}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = (backend, save) => { + const dilations = 1; + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + const res = backend.conv2dDerInput(dy4D, filter, convInfo); + save([dy4D, filter]); + return res; + }; + const inputs = { dy: dy4D, filter }; + const attrs = { strides, pad, 
dataFormat, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["o" /* Conv2DBackpropInput */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const conv2DBackpropInput = Object(operation["a" /* op */])({ conv2DBackpropInput_ }); +//# sourceMappingURL=conv2d_backprop_input.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const conv2DGradConfig = { + kernelName: kernel_names["m" /* Conv2D */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const [x4D, $filter] = saved; + const { dilations, strides, pad, dataFormat } = attrs; + util["assert"](tupleValuesAreOne(dilations), () => 'Error in gradient of conv2D: dilation rates greater than 1 ' + + `are not yet supported in gradients. 
Got dilations '${dilations}'`); + return { + x: () => conv2DBackpropInput(x4D.shape, dy, $filter, strides, pad, dataFormat), + filter: () => conv2DBackpropFilter(x4D, dy, $filter.shape, strides, pad, dataFormat) + }; + } +}; +//# sourceMappingURL=Conv2D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes a 2D convolution over the input x. + * + * @param x The input tensor, of rank 4 or rank 3, of shape + * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is + * assumed. + * @param filter The filter, rank 4, of shape + * `[filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm. + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. + * - For more info, see this guide: + * [https://www.tensorflow.org/api_guides/python/nn#Convolution]( + * https://www.tensorflow.org/api_guides/python/nn#Convolution) + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". 
Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dilations The dilation rates: `[dilationHeight, dilationWidth]` + * in which we sample input values across the height and width dimensions + * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single + * number, then `dilationHeight == dilationWidth`. If it is greater than + * 1, then all values of `strides` must be 1. + * @param dimRoundingMode The rounding mode used when computing output + * dimensions if pad is a number. If none is provided, it will not round + * and error if the output is of fractional size. + */ +/** @doc {heading: 'Operations', subheading: 'Convolution'} */ +function conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'conv2d'); + const $filter = Object(tensor_util_env["a" /* convertToTensor */])(filter, 'filter', 'conv2d'); + let x4D = $x; + let reshapedTo4D = false; + if ($x.rank === 3) { + reshapedTo4D = true; + x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]); + } + util["assert"](x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`); + util["assert"]($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` + + `${$filter.rank}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2d: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const inDepth = dataFormat === 'NHWC' ? 
x4D.shape[3] : x4D.shape[1]; + util["assert"](inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` + + `input depth for filter ${$filter.shape[2]}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' + + `Got strides ${strides} and dilations '${dilations}'`); + const forward = (backend, save) => { + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + const res = backend.conv2d(x4D, $filter, convInfo); + save([x4D, $filter]); + return res; + }; + const inputs = { x: x4D, filter: $filter }; + const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["m" /* Conv2D */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const conv2d = Object(operation["a" /* op */])({ conv2d_ }); +//# sourceMappingURL=conv2d.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2DBackpropInput_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +const conv2DBackpropInputGradConfig = { + kernelName: kernel_names["o" /* Conv2DBackpropInput */], + inputsToSave: ['dy', 'filter'], + gradFunc: (ddx, saved, attrs) => { + const [dy, filter] = saved; + const { strides, pad, dataFormat, dimRoundingMode } = attrs; + return { + dy: () => conv2d(ddx, filter, strides, pad, dataFormat, 1 /* dilations */, dimRoundingMode), + filter: () => conv2DBackpropFilter(ddx, dy, filter.shape, strides, pad, dataFormat, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=Conv2DBackpropInput_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the filter of a 3D convolution. + * + * @param x The input tensor, of rank 5 or rank 4 of shape + * [batch, depth, height, width, inChannels]. If rank 4, batch of 1 is + * assumed. + * @param dy The dy image, of rank 5 or rank 4, of shape + * [batch, depth, height, width, outDepth]. If rank 4, batch of 1 is + * assumed. + * @param filterShape The shape of the filter, length 5, + * [filterDepth, filterHeight, filterWidth, inDepth, outDepth]. 
+ * @param strides The strides of the convolution: [strideDepth, strideHeight, + * strideWidth]. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + */ +function conv3DBackpropFilter_(x, dy, filterShape, strides, pad) { + let x5D = x; + if (x.rank === 4) { + x5D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2], x.shape[3]]); + } + let dy5D = dy; + if (dy5D.rank === 4) { + dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]); + } + util["assert"](x5D.rank === 5, () => `Error in conv3dDerFilter: input must be rank 5, but got shape ` + + `${x5D.shape}.`); + util["assert"](dy5D.rank === 5, () => `Error in conv3dDerFilter: dy must be rank 5, but got shape ` + + `${dy5D.shape}.`); + util["assert"](filterShape.length === 5, () => `Error in conv3dDerFilter: filterShape must be length 5, but got ` + + `${filterShape}.`); + util["assert"](x5D.shape[4] === filterShape[3], () => `Error in conv3dDerFilter: depth of input ${x5D.shape[4]}) must ` + + `match input depth in filter (${filterShape[3]}.`); + util["assert"](dy5D.shape[4] === filterShape[4], () => `Error in conv3dDerFilter: depth of dy (${dy5D.shape[4]}) must ` + + `match output depth for filter (${filterShape[4]}).`); + const forward = backend => { + const dilations = 1; + const convInfo = computeConv3DInfo(x5D.shape, filterShape, strides, dilations, pad); + return backend.conv3dDerFilter(x5D, dy5D, convInfo); + }; + const inputs = { x: x5D, y: dy5D }; + const attrs = { strides, pad }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["q" /* Conv3DBackpropFilterV2 */], attrs); +} +const conv3DBackpropFilter = Object(operation["a" /* op */])({ conv3DBackpropFilter_ }); +//# sourceMappingURL=conv3d_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the input of a 3D convolution. + * + * @param xShape The shape of the input: [batch, depth, height, width, + * in_channels]. If length of 4, batch of 1 is assumed. + * @param dy The derivative of the output, of rank 5 or rank 4 of shape + * `[batch, outDepth, outHeight, outWidth, in_channels]`. + * If rank 4, batch of 1 is assumed. + * @param filter The filter, rank 5, of shape + * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideDepth, strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm used: + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. 
+ */ +function conv3DBackpropInput_(xShape, dy, filter, strides, pad) { + util["assert"](xShape.length === dy.rank, () => `Length of inShape ` + + `(${xShape.length}) and rank of dy (${dy.rank}) must match`); + let xShape5D = xShape; + let dy5D = dy; + let reshapedTo5D = false; + if (dy.rank === 4) { + reshapedTo5D = true; + dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]); + xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]]; + } + const inDepth = xShape5D[4]; + const outDepth = dy5D.shape[4]; + util["assert"](xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` + + `${xShape5D.length}.`); + util["assert"](dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` + + `rank ${dy5D.rank}`); + util["assert"](filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` + + `rank ${filter.rank}`); + util["assert"](inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` + + `match input depth for filter ${filter.shape[3]}.`); + util["assert"](outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` + + `match output depth for filter ${filter.shape[4]}.`); + const forward = backend => { + const dilations = 1; + const convInfo = computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad); + return backend.conv3dDerInput(dy5D, filter, convInfo); + }; + const inputs = { dy: dy5D }; + const attrs = { pad }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["r" /* Conv3DBackpropInputV2 */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const conv3DBackpropInput = Object(operation["a" /* op */])({ conv3DBackpropInput_ }); +//# sourceMappingURL=conv3d_backprop_input.js.map +// CONCATENATED MODULE: 
./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const conv3DGradConfig = { + kernelName: kernel_names["p" /* Conv3D */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const { dilations, strides, pad } = attrs; + util["assert"](tupleValuesAreOne(dilations), () => 'Error in gradient of conv3D: dilation rates greater than 1 are ' + + `not yet supported in gradients. Got dilations '${dilations}'`); + const [x5D, $filter] = saved; + return { + x: () => conv3DBackpropInput(x5D.shape, dy, $filter, strides, pad), + filter: () => conv3DBackpropFilter(x5D, dy, $filter.shape, strides, pad) + }; + } +}; +//# sourceMappingURL=Conv3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/transpose.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Transposes the `tf.Tensor`. Permutes the dimensions according to `perm`. + * + * The returned `tf.Tensor`'s dimension `i` will correspond to the input + * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`, + * where `n` is the rank of the input `tf.Tensor`. Hence by default, this + * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s. + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]); + * + * a.transpose().print(); // or tf.transpose(a) + * ``` + * + * @param x The tensor to transpose. + * @param perm The permutation of the dimensions of a. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Matrices'} */ +function transpose_(x, perm) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'transpose'); + if (perm == null) { + perm = $x.shape.map((s, i) => i).reverse(); + } + util["assert"]($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} ` + + `must match length of perm ${perm}.`); + perm.forEach(axis => { + util["assert"](axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1}` + + ` but got ${perm}`); + }); + if ($x.rank <= 1) { + return $x.clone(); + } + const attrs = { perm }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.transpose($x, perm), { x: $x }, null /* gradient */, 'Transpose', attrs); +} +const transpose = Object(operation["a" /* op */])({ transpose_ }); +//# sourceMappingURL=transpose.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/cumsum.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the cumulative sum of a `tf.Tensor` along `axis`. + * + * ```js + * const x = tf.tensor([1, 2, 3, 4]); + * x.cumsum().print(); + * ``` + * ```js + * const x = tf.tensor([[1, 2], [3, 4]]); + * x.cumsum().print(); + * ``` + * + * @param x The input tensor to be summed. 
+ * @param axis The axis along which to sum. Optional. Defaults to 0. + * @param exclusive Whether to perform exclusive cumulative sum. Optional. + * Defaults to false. If set to true then the sum of each tensor entry + * does not include its own value, but only the values previous to it + * along the specified axis. + * @param reverse Whether to sum in the opposite direction. Optional. + * Defaults to false. + */ +/** @doc {heading: 'Operations', subheading: 'Scan'} */ +function cumsum_(x, axis = 0, exclusive = false, reverse = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cumsum'); + const forward = (backend, save) => { + const permutation = getAxesPermutation([axis], $x.rank); + let permutedX = $x; + if (permutation != null) { + permutedX = transpose($x, permutation); + } + const permutedAxis = getInnerMostAxes(1, $x.rank)[0]; + let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse); + save([$x]); + if (permutation != null) { + value = transpose(value, permutation); + } + return value; + }; + const inputs = { x: $x }; + const attrs = { axis, exclusive, reverse }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["s" /* Cumsum */], attrs); +} +const cumsum = Object(operation["a" /* op */])({ cumsum_ }); +//# sourceMappingURL=cumsum.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Cumsum_grad.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const cumsumGradConfig = { + kernelName: kernel_names["s" /* Cumsum */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { axis, exclusive, reverse } = attrs; + return { + x: () => { + const permutation = getAxesPermutation([axis], x.rank); + let out = cumsum(dy, axis, exclusive, !reverse); + if (permutation != null) { + out = transpose(out, permutation); + } + return out; + } + }; + } +}; +//# sourceMappingURL=Cumsum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +function depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, convInfo) { + let x4D = x; + if (x.rank === 3) { + x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]); + } + let dy4D = dy; + if (dy4D.rank === 3) { + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + const forward = backend => backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo); + const inputs = { x: x4D, dy: dy4D }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["v" /* DepthwiseConv2dNativeBackpropFilter */]); +} +const depthwiseConv2dNativeBackpropFilter = Object(operation["a" /* op */])({ depthwiseConv2dNativeBackpropFilter_ }); +//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +function depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, convInfo) { + let dy4D = dy; + let reshapedTo4D = false; + if (dy.rank === 3) { + reshapedTo4D = true; + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + const forward = backend => backend.depthwiseConv2DDerInput(dy4D, filter, convInfo); + const inputs = { dy: dy4D }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["w" /* DepthwiseConv2dNativeBackpropInput */]); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const depthwiseConv2dNativeBackpropInput = Object(operation["a" /* op */])({ depthwiseConv2dNativeBackpropInput_ }); +//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/DepthwiseConv2dNative_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const depthwiseConv2dNativeGradConfig = { + kernelName: kernel_names["u" /* DepthwiseConv2dNative */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const { dilations, strides, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? 
[1, 1] : dilations; + util["assert"](tupleValuesAreOne($dilations), () => 'Error in gradient of depthwiseConv2dNative: dilation rates ' + + `greater than 1 are not yet supported. Got dilations ` + + `'${$dilations}'`); + const [x, filter] = saved; + util["assert"](x.rank === 4, () => `Error in gradient of depthwiseConv2dNative: input must be ` + + `rank 4, but got rank ${x.rank}.`); + util["assert"](filter.rank === 4, () => `Error in gradient of depthwiseConv2dNative: filter must be ` + + `rank 4, but got rank ${filter.rank}.`); + util["assert"](x.shape[3] === filter.shape[2], () => `Error in gradient of depthwiseConv2d: number of input ` + + `channels (${x.shape[3]}) must match the inChannels dimension ` + + `in filter ${filter.shape[2]}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in gradient of depthwiseConv2d: Either strides or ' + + `dilations must be 1. Got strides ${strides} and dilations ` + + `'${$dilations}'.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const convInfo = computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */); + return { + x: () => depthwiseConv2dNativeBackpropInput(x.shape, dy, filter, convInfo), + filter: () => depthwiseConv2dNativeBackpropFilter(x, dy, filter.shape, convInfo), + }; + } +}; +//# sourceMappingURL=DepthwiseConv2dNative_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Div_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +const divGradConfig = { + kernelName: kernel_names["y" /* Div */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = div(dy, b.toFloat()); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return sum(res, reduceAxes).reshape(a.shape); + } + return res; + }; + const derB = () => { + let res = mul(dy, a.toFloat()); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = reshape(sum(res, reduceAxes), b.shape); + } + const tmp = square(b); + return neg(div(res, tmp.toFloat())); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Div_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Elu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const eluGradConfig = { + kernelName: kernel_names["z" /* Elu */], + outputsToSave: [true], + gradFunc: (dy, saved) => { + const [y] = saved; + const backPropKernelFunc = (backend) => { + return backend.eluDer(dy, y); + }; + const inputs = { dy, y }; + return { + x: () => engine["a" /* ENGINE */].runKernelFunc(backPropKernelFunc, inputs, null /* grad */, kernel_names["A" /* EluGrad */]) + }; + } +}; +//# sourceMappingURL=Elu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/FloorDiv_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const floorDivGradConfig = { + kernelName: kernel_names["D" /* FloorDiv */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = dy.div(b.toFloat()); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return res.sum(reduceAxes).reshape(a.shape); + } + return res; + }; + const derB = () => { + let res = dy.mul(a.toFloat()); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes).reshape(b.shape); + } + const tmp = b.square(); + return res.div(tmp.toFloat()).neg(); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=FloorDiv_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/sub.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([10, 20, 30, 40]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.sub(b).print(); // or tf.sub(a, b) + * ``` + * + * ```js + * // Broadcast subtract a with b. 
+ * const a = tf.tensor1d([10, 20, 30, 40]); + * const b = tf.scalar(5); + * + * a.sub(b).print(); // or tf.sub(a, b) + * ``` + * @param a The first `tf.Tensor` to subtract from. + * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function sub_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'sub'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'sub'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.subtract($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["qb" /* Sub */]); +} +const sub = Object(operation["a" /* op */])({ sub_ }); +//# sourceMappingURL=sub.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/tile.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Construct a tensor by repeating it the number of times given by reps. + * + * This operation creates a new tensor by replicating `input` `reps` + * times. 
The output tensor's i'th dimension has `input.shape[i] * + * reps[i]` elements, and the values of `input` are replicated + * `reps[i]` times along the i'th dimension. For example, tiling + * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * + * a.tile([2]).print(); // or a.tile([2]) + * ``` + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * a.tile([1, 2]).print(); // or a.tile([1, 2]) + * ``` + * @param x The tensor to tile. + * @param reps Determines the number of replications per dimension. + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function tile_(x, reps) { + const parseAs = null; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tile', parseAs); + util["assert"]($x.rank === reps.length, () => `Error in transpose: rank of input ${$x.rank} ` + + `must match length of reps ${reps}.`); + const forward = (backend, save) => { + const res = backend.tile($x, reps); + save([$x]); + return res; + }; + const inputsToSave = [$x]; + const inputs = { x: $x }; + const attrs = { reps }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["rb" /* Tile */], attrs, inputsToSave); +} +const tile = Object(operation["a" /* op */])({ tile_ }); +//# sourceMappingURL=tile.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/FusedBatchNorm_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + + + +const fusedBatchNormGradConfig = { + kernelName: kernel_names["F" /* FusedBatchNorm */], + inputsToSave: ['x', 'mean', 'variance', 'scale'], + gradFunc: (dy, saved, attrs) => { + const { varianceEpsilon } = attrs; + const [x, mean, variance, scale] = saved; + const scaleValue = scale == null ? Object(tensor_ops["e" /* scalar */])(1) : scale; + const reductionAxes = getReductionAxes(mean.shape, x.shape); + const tileShape = []; + if (mean.rank === 1) { + for (let i = 0; i < x.shape.length - 1; ++i) { + tileShape.push(x.shape[i]); + } + tileShape.push(1); + } + const xMinusMean = sub(x, mean); + const dyTimesScaleValue = mul(dy, scaleValue); + const oneOverSqrtVariance = rsqrt(add(variance, Object(tensor_ops["e" /* scalar */])(varianceEpsilon))); + const minusHalfRCube = mul(mul(mul(oneOverSqrtVariance, oneOverSqrtVariance), oneOverSqrtVariance), Object(tensor_ops["e" /* scalar */])(-0.5)); + const derX = () => { + if (mean.rank === 1) { + return reshape(mul(mul(dy, tile(oneOverSqrtVariance.as4D(1, 1, 1, mean.shape[0]), tileShape)), scaleValue), x.shape); + } + else { + return reshape(mul(mul(dy, oneOverSqrtVariance), scaleValue), x.shape); + } + }; + const derMean = () => { + let meanDer = mul(mul(oneOverSqrtVariance, Object(tensor_ops["e" /* scalar */])(-1)), dyTimesScaleValue); + if (mean.rank === 1) { + meanDer = sum(meanDer, reductionAxes); + } + return reshape(meanDer, mean.shape); + }; + const derVariance = () => { + let varianceDer = mul(mul(minusHalfRCube, xMinusMean), dyTimesScaleValue); + if (mean.rank === 1) { + varianceDer = sum(varianceDer, reductionAxes); + } + return reshape(varianceDer, mean.shape); + }; + const derScale = () => { + const xMinusMean2TimesRsqrt = mul(xMinusMean, oneOverSqrtVariance); + let scaleDer = mul(dy, 
xMinusMean2TimesRsqrt); + if (mean.rank === 1) { + scaleDer = sum(scaleDer, reductionAxes); + } + return reshape(scaleDer, mean.shape); + }; + const derOffset = () => { + let offsetDer = dy; + if (mean.rank === 1) { + offsetDer = sum(offsetDer, reductionAxes); + } + return reshape(offsetDer, mean.shape); + }; + return { + x: derX, + mean: derMean, + variance: derVariance, + scale: derScale, + offset: derOffset + }; + } +}; +//# sourceMappingURL=FusedBatchNorm_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/GreaterEqual_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const greaterEqualGradConfig = { + kernelName: kernel_names["I" /* GreaterEqual */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + return { a: () => Object(tensor_ops["o" /* zerosLike */])(a), b: () => Object(tensor_ops["o" /* zerosLike */])(b) }; + } +}; +//# sourceMappingURL=GreaterEqual_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Identity_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +const identityGradConfig = { + kernelName: kernel_names["J" /* Identity */], + gradFunc: (dy) => { + return { x: () => dy.toFloat() }; + } +}; +//# sourceMappingURL=Identity_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/local_response_normalization_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +function localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) { + const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta); + const inputs = { x, y, dy }; + const attrs = { depthRadius, bias, alpha, beta }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["M" /* LRNBackprop */], attrs); +} +const localResponseNormalizationBackprop = Object(operation["a" /* op */])({ localResponseNormalizationBackprop_ }); +//# sourceMappingURL=local_response_normalization_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/LRN_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const lrnGradConfig = { + kernelName: kernel_names["L" /* LRN */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { depthRadius, bias, alpha, beta } = attrs; + return { + x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta) + }; + } +}; +//# sourceMappingURL=LRN_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Max_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const maxGradConfig = { + kernelName: kernel_names["P" /* Max */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const maxAttrs = attrs; + const { reductionIndices } = maxAttrs; + const [x, y] = saved; + const origAxes = util["parseAxisParam"](reductionIndices, x.shape); + const permutedAxes = getAxesPermutation(origAxes, x.rank); + const maxGrad = gradForMinAndMax(dy, y, x, origAxes, permutedAxes); + return { + x: () => { + let out = maxGrad['x'](); + if (permutedAxes != null) { + out = transpose(out); + } + return out; + } + }; + } +}; +//# sourceMappingURL=Max_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/greater_equal.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a >= b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.greaterEqual(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function greaterEqual_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'greaterEqual'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'greaterEqual'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = (backend, save) => { + const res = backend.greaterEqual($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["I" /* GreaterEqual */]); +} +const greaterEqual = Object(operation["a" /* op */])({ greaterEqual_ }); +//# sourceMappingURL=greater_equal.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/less.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a < b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.less(b).print(); + * ``` + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function less_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'less'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'less'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = backend => backend.less($a, $b); + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["N" /* Less */]); +} +const less = Object(operation["a" /* op */])({ less_ }); +//# sourceMappingURL=less.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Maximum_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const maximumGradConfig = { + kernelName: kernel_names["V" /* Maximum */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32')); + const derB = () => mul(dy, cast(less(a, b), 'float32')); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Maximum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_3d_backprop.js +/** + * @license + * Copyright 2020 Google Inc. 
All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of a 3d max pool. + * + * @param dy The dy error, of rank 5 of shape + * [batchSize, depth, height, width, channels]. + * assumed. + * @param input The original input image, of rank 5 or rank 4 of shape + * [batchSize, depth, height, width, channels]. + * @param output The original output image, of rank 5 of shape + * [batchSize, outDepth, outHeight, outWidth, channels]. + * @param filterSize The filter size: + * `[filterDepth, filterHeight, filterWidth]`. + * `filterSize` is a single number, + * then `filterDepth == filterHeight == filterWidth`. + * @param strides The strides of the pooling: + * `[strideDepth, strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param dilations Deprecated, this field will be gone in v3.0.0. + * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]` + * in which we sample input values across the depth, height and width + * dimensions in dilated pooling. + * Defaults to `[1, 1, 1]`. If `dilations` is a single number, + * then `dilationDepth == dilationHeight == dilationWidth`. + * If it is greater than 1, then all values of `strides` must be 1. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. 
+ * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'maxPool3dBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'maxPool3dBackprop'); + const $output = Object(tensor_util_env["a" /* convertToTensor */])(output, 'output', 'maxPool3dBackprop'); + let dy5D = $dy; + let input5D = $input; + let output5D = $output; + let reshapedTo5D = false; + if ($input.rank === 4) { + reshapedTo5D = true; + dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]); + input5D = reshape($input, [ + 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3] + ]); + output5D = reshape($output, [ + 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3] + ]); + } + util["assert"](dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` + + `${dy5D.rank}.`); + util["assert"](input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` + + `${input5D.rank}.`); + util["assert"](output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` + + `${output5D.rank}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' + + `must be 1. 
Got strides ${strides} and dilations '${dilations}'`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` + + `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode); + return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo); + }; + const inputs = { dy: dy5D, input: input5D, output: output5D }; + const attrs = { filterSize, strides, dilations, pad, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["S" /* MaxPool3DBackprop */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const maxPool3dBackprop = Object(operation["a" /* op */])({ maxPool3dBackprop_ }); +//# sourceMappingURL=max_pool_3d_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const maxPool3DGradConfig = { + kernelName: kernel_names["R" /* MaxPool3D */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? [1, 1, 1] : dilations; + return { + x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=MaxPool3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the backprop of a 2D max pool. + * + * @param dy The dy error, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param input The original input image, of rank 4, of shape + * [batchSize, height, width, channels]. + * @param output The original output image, of rank 4, of shape + * [batchSize, outHeight, outWidth, channels]. + * @param filterSize The filter size: `[filterHeight, filterWidth]`. If + * `filterSize` is a single number, then `filterHeight == filterWidth`. 
+ * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'maxPoolBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'maxPoolBackprop'); + const $output = Object(tensor_util_env["a" /* convertToTensor */])(output, 'output', 'maxPoolBackprop'); + util["assert"]($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` + + `(${$dy.rank})`); + util["assert"]($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` + + `${$dy.rank}.`); + util["assert"]($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` + + `${$input.rank}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode); + return backend.maxPoolBackprop($dy, $input, $output, convInfo); + }; + const inputs = { dy: $dy, input: $input, output: $output }; + const attrs = { filterSize, strides, pad, dimRoundingMode }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["T" /* MaxPoolBackprop */], attrs); +} +const maxPoolBackprop = 
Object(operation["a" /* op */])({ maxPoolBackprop_ }); +//# sourceMappingURL=max_pool_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const maxPoolGradConfig = { + kernelName: kernel_names["Q" /* MaxPool */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { filterSize, strides, pad } = attrs; + return { + x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad) + }; + } +}; +//# sourceMappingURL=MaxPool_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/greater.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a > b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.greater(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function greater_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'greater'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'greater'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = backend => backend.greater($a, $b); + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["H" /* Greater */]); +} +const greater = Object(operation["a" /* op */])({ greater_ }); +//# sourceMappingURL=greater.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/less_equal.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a <= b) element-wise. Supports broadcasting. 
+ * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.lessEqual(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function lessEqual_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'lessEqual'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'lessEqual'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = (backend, save) => { + const res = backend.lessEqual($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["O" /* LessEqual */]); +} +const lessEqual = Object(operation["a" /* op */])({ lessEqual_ }); +//# sourceMappingURL=less_equal.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Minimum_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const minimumGradConfig = { + kernelName: kernel_names["W" /* Minimum */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const derA = () => mul(dy, cast(lessEqual(a, b), 'float32')); + const derB = () => mul(dy, cast(greater(a, b), 'float32')); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Minimum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Mod_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + +const modGradConfig = { + kernelName: kernel_names["X" /* Mod */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(dy, reduceAxes), a.shape); + } + return dy; + }; + const derB = () => { + const res = mul(dy, neg(floor(div(a, b)))); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), b.shape); + } + return res; + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Mod_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Multiply_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const multiplyGradConfig = { + kernelName: kernel_names["Y" /* Multiply */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = mul(dy, cast(b, 'float32')); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), a.shape); + } + return res; + }; + const derB = () => { + const res = mul(dy, cast(a, 'float32')); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), b.shape); + } + return res; + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Multiply_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/OneHot_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const oneHotGradConfig = { + kernelName: kernel_names["cb" /* OneHot */], + inputsToSave: ['indices'], + gradFunc: (dy, saved) => { + const indices = saved[0]; + return { indices: () => Object(tensor_ops["n" /* zeros */])(indices.shape, 'float32') }; + } +}; +//# sourceMappingURL=OneHot_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/PadV2_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +const padV2GradConfig = { + kernelName: kernel_names["db" /* PadV2 */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + // Pad introduces values around the original tensor, so the gradient + // slices the original shape out of the gradient. + const x = saved[0]; + const { paddings } = attrs; + const begin = paddings.map(p => p[0]); + return { x: () => dy.slice(begin, x.shape) }; + } +}; +//# sourceMappingURL=PadV2_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/backends/where_impl.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** An implementation of the Where kernel shared between cpu and webgl */ + +function whereImpl(condShape, condVals) { + const indices = []; + for (let i = 0; i < condVals.length; i++) { + if (condVals[i]) { + indices.push(i); + } + } + const inBuffer = array_ops_buffer(condShape, 'int32'); + const out = array_ops_buffer([indices.length, condShape.length], 'int32'); + for (let i = 0; i < indices.length; i++) { + const loc = inBuffer.indexToLoc(indices[i]); + const offset = i * condShape.length; + out.values.set(loc, offset); + } + return out.toTensor(); +} +//# sourceMappingURL=where_impl.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/logical_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +/** + * Returns the truth value of `NOT x` element-wise. 
+ * + * ```js + * const a = tf.tensor1d([false, true], 'bool'); + * + * a.logicalNot().print(); + * ``` + * + * @param x The input tensor. Must be of dtype 'bool'. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalNot_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logicalNot', 'bool'); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalNot($x), { $x }); +} +/** + * Returns the truth value of `a AND b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalAnd(b).print(); + * ``` + * + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalAnd_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalAnd', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalAnd', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalAnd($a, $b), { a: $a, b: $b }, null /* grad */, 'LogicalAnd'); +} +/** + * Returns the truth value of `a OR b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalOr(b).print(); + * ``` + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalOr_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalOr', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalOr', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalOr($a, $b), { $a, $b }); +} +/** + * Returns the truth value of `a XOR b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalXor(b).print(); + * ``` + * + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalXor_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalXor', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalXor', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + // x ^ y = (x | y) & ~(x & y) + return logicalOr(a, b).logicalAnd(logicalAnd(a, b).logicalNot()); +} +/** + * Returns the elements, either `a` or `b` depending on the `condition`. + * + * If the condition is true, select from `a`, otherwise select from `b`. + * + * ```js + * const cond = tf.tensor1d([false, false, true], 'bool'); + * const a = tf.tensor1d([1 , 2, 3]); + * const b = tf.tensor1d([-1, -2, -3]); + * + * a.where(cond, b).print(); + * ``` + * + * @param condition The input condition. Must be of dtype bool. + * @param a If `condition` is rank 1, `a` may have a higher rank but + * its first dimension must match the size of `condition`. + * @param b A tensor with the same shape and type as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function where_(condition, a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'where'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'where'); + const $condition = Object(tensor_util_env["a" /* convertToTensor */])(condition, 'condition', 'where', 'bool'); + Object(util["assertShapesMatch"])($a.shape, $b.shape, 'Error in where: '); + if ($condition.rank === 1) { + // If condition rank is 1, then the first dimension must match the size of + // condition. + Object(util["assert"])($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.'); + } + else { + // A must have the same shape as condition. + Object(util["assertShapesMatch"])($condition.shape, $b.shape, 'Error in where: '); + } + // TODO(julianoks): Return null for condition gradient + // when backprop supports it. + const grad = (dy, saved) => { + const [$condition] = saved; + return { + condition: () => Object(tensor_ops["o" /* zerosLike */])($condition).toFloat(), + t: () => dy.mul($condition.cast(dy.dtype)), + e: () => dy.mul($condition.logicalNot().cast(dy.dtype)) + }; + }; + const inputs = { condition: $condition, t: $a, e: $b }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.select($condition, $a, $b); + save([$condition]); + return res; + }, inputs, grad, kernel_names["kb" /* SelectV2 */]); +} +/** + * Returns the coordinates of true elements of condition. + * + * The coordinates are returned in a 2-D tensor where the first dimension (rows) + * represents the number of true elements, and the second dimension (columns) + * represents the coordinates of the true elements. Keep in mind, the shape of + * the output tensor can vary depending on how many true values there are in + * input. Indices are output in row-major order. 
The resulting tensor has the + * shape `[numTrueElems, condition.rank]`. + * + * This is analogous to calling the python `tf.where(cond)` without an x or y. + * + * ```js + * const cond = tf.tensor1d([false, false, true], 'bool'); + * const result = await tf.whereAsync(cond); + * result.print(); + * ``` + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +async function whereAsync_(condition) { + const $condition = Object(tensor_util_env["a" /* convertToTensor */])(condition, 'condition', 'whereAsync', 'bool'); + const vals = await $condition.data(); + const res = whereImpl($condition.shape, vals); + if (condition !== $condition) { + $condition.dispose(); + } + return res; +} +const logicalAnd = Object(operation["a" /* op */])({ logicalAnd_ }); +const logicalNot = Object(operation["a" /* op */])({ logicalNot_ }); +const logicalOr = Object(operation["a" /* op */])({ logicalOr_ }); +const logicalXor = Object(operation["a" /* op */])({ logicalXor_ }); +const where = Object(operation["a" /* op */])({ where_ }); +const whereAsync = whereAsync_; +//# sourceMappingURL=logical_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/pow.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Computes the power of one `tf.Tensor` to another. Supports broadcasting. 
+ * + * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for + * corresponding elements in x and y. The result's dtype will be the upcasted + * type of the `base` and `exp` dtypes. + * + * ```js + * const a = tf.tensor([[2, 3], [4, 5]]) + * const b = tf.tensor([[1, 2], [3, 0]]).toInt(); + * + * a.pow(b).print(); // or tf.pow(a, b) + * ``` + * + * ```js + * const a = tf.tensor([[1, 2], [3, 4]]) + * const b = tf.tensor(2).toInt(); + * + * a.pow(b).print(); // or tf.pow(a, b) + * ``` + * We also expose `powStrict` which has the same signature as this op and + * asserts that `base` and `exp` are the same shape (does not broadcast). + * + * @param base The base `tf.Tensor` to pow element-wise. + * @param exp The exponent `tf.Tensor` to pow element-wise. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function pow_(base, exp) { + let $base = Object(tensor_util_env["a" /* convertToTensor */])(base, 'base', 'pow'); + let $exp = Object(tensor_util_env["a" /* convertToTensor */])(exp, 'exp', 'pow'); + [$base, $exp] = Object(tensor_util["makeTypesMatch"])($base, $exp); + const inputs = { a: $base, b: $exp }; + const forward = (backend, save) => { + const y = backend.pow($base, $exp); + save([$base, $exp, y]); + return y; + }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["fb" /* Pow */]); +} +const pow = Object(operation["a" /* op */])({ pow_ }); +//# sourceMappingURL=pow.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Pow_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + + + + +const powGradConfig = { + kernelName: kernel_names["fb" /* Pow */], + inputsToSave: ['a', 'b'], + outputsToSave: [true], + gradFunc: (dy, saved) => { + const [a, b, y] = saved; + const base = a; + const exp = b; + const outShape = assertAndGetBroadcastShape(base.shape, exp.shape); + const derBase = () => { + const expFloat = cast(exp, 'float32'); + let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, Object(tensor_ops["e" /* scalar */])(1))))); + const reduceAxes = getReductionAxes(base.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, base.shape); + }; + const derExp = () => { + const condition = greater(base, 0); + const logBase = where(condition, log(base), Object(tensor_ops["o" /* zerosLike */])(base)); + let res = mul(dy, mul(y, logBase)); + const reduceAxes = getReductionAxes(exp.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, exp.shape); + }; + return { a: derBase, b: derExp }; + } +}; +//# sourceMappingURL=Pow_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Prelu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +const preluGradConfig = { + kernelName: kernel_names["gb" /* Prelu */], + inputsToSave: ['x', 'alpha'], + gradFunc: (dy, saved) => { + const [x, alpha] = saved; + const mask = greater(x, 0); + return { + x: () => where(mask, dy, mul(dy, alpha)), + alpha: () => { + let res = where(mask, Object(tensor_ops["o" /* zerosLike */])(dy), mul(dy, x)); + const reduceAxes = getReductionAxes(alpha.shape, dy.shape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, alpha.shape); + } + }; + } +}; +//# sourceMappingURL=Prelu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Relu6_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const relu6GradConfig = { + kernelName: kernel_names["jb" /* Relu6 */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + const mask = mul(lessEqual(x, 6), unary_ops_step(x)); + return { x: () => mul(dy, cast(mask, 'float32')) }; + } +}; +//# sourceMappingURL=Relu6_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Relu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const reluGradConfig = { + kernelName: kernel_names["ib" /* Relu */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { x: () => mul(dy, cast(unary_ops_step(x), 'float32')) }; + } +}; +//# sourceMappingURL=Relu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/selu_util.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +const SELU_SCALEALPHA = 1.7580993408473768599402175208123; +const SELU_SCALE = 1.0507009873554804934193349852946; +//# sourceMappingURL=selu_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Selu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + +const seluGradConfig = { + kernelName: kernel_names["lb" /* Selu */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { + x: () => { + const mask = greater(x, Object(tensor_ops["e" /* scalar */])(0)); + const scaleAlpha = Object(tensor_ops["e" /* scalar */])(SELU_SCALEALPHA); + const scale = Object(tensor_ops["e" /* scalar */])(SELU_SCALE); + const greaterThanZeroDer = mul(dy, scale); + const lessEqualZeroDer = mul(mul(dy, scaleAlpha), unary_ops_exp(cast(x, 'float32'))); + return where(mask, greaterThanZeroDer, lessEqualZeroDer); + } + }; + } +}; +//# sourceMappingURL=Selu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/batch_to_space_nd.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * This operation reshapes the "batch" dimension 0 into `M + 1` dimensions of + * shape `blockShape + [batch]`, interleaves these blocks back into the grid + * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with + * the same rank as the input. The spatial dimensions of this intermediate + * result are then optionally cropped according to `crops` to produce the + * output. This is the reverse of `tf.spaceToBatchND`. 
See below for a precise + * description. + * + * ```js + * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]); + * const blockShape = [2, 2]; + * const crops = [[0, 0], [0, 0]]; + * + * x.batchToSpaceND(blockShape, crops).print(); + * ``` + * + * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape + + * remainingShape`, where spatialShape has `M` dimensions. + * @param blockShape A 1-D array. Must have shape `[M]`, all values must + * be >= 1. + * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0. + * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input + * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required + * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]` + * + * This operation is equivalent to the following steps: + * + * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ..., + * blockShape[M-1], batch / prod(blockShape), x.shape[1], ..., + * x.shape[N-1]]` + * + * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch / + * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M], + * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]` + * + * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch / + * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] * + * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]` + * + * 4. 
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted` + * according to `crops` to produce the output of shape: `[batch / + * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1], + * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] - + * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]` + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function batchToSpaceND_(x, blockShape, crops) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'batchToSpaceND'); + const prod = blockShape.reduce((a, b) => a * b); + util["assert"]($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`); + util["assert"](crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`); + util["assert"]($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` + + `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`); + const forward = backend => { + return backend.batchToSpaceND($x, blockShape, crops); + }; + const inputs = { x: $x }; + const attrs = { blockShape, crops }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["i" /* BatchToSpaceND */], attrs); +} +const batchToSpaceND = Object(operation["a" /* op */])({ batchToSpaceND_ }); +//# sourceMappingURL=batch_to_space_nd.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SpaceToBatchND_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const spaceToBatchNDGradConfig = { + kernelName: kernel_names["mb" /* SpaceToBatchND */], + gradFunc: (dy, saved, attrs) => { + const { blockShape, paddings } = attrs; + return { x: () => batchToSpaceND(dy, blockShape, paddings) }; + } +}; +//# sourceMappingURL=SpaceToBatchND_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SplitV_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const splitVGradConfig = { + kernelName: kernel_names["nb" /* SplitV */], + gradFunc: (dy, saved, attrs) => { + const { axis } = attrs; + return { x: () => concat(dy, axis) }; + } +}; +//# sourceMappingURL=SplitV_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Square_grad.js +/** + * @license + * Copyright 2019 Google Inc. 
All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const squareGradConfig = { + kernelName: kernel_names["ob" /* Square */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { x: () => mul(dy, mul(x.toFloat(), 2)) }; + } +}; +//# sourceMappingURL=Square_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SquaredDifference_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +const squaredDifferenceGradConfig = { + kernelName: kernel_names["pb" /* SquaredDifference */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const two = Object(tensor_ops["e" /* scalar */])(2); + const derA = () => mul(dy, mul(two, sub(a, b))); + const derB = () => mul(dy, mul(two, sub(b, a))); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=SquaredDifference_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Sub_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const subGradConfig = { + kernelName: kernel_names["qb" /* Sub */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + let res = dy; + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, a.shape); + }; + const derB = () => { + let res = dy; + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(neg(res), b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Sub_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/pad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Pads a `tf.Tensor` with a given value and paddings. + * + * This operation currently only implements the `CONSTANT` mode. + * + * Also available are stricter rank-specific methods with the same signature + * as this method that assert that `paddings` is of given length. 
+ * - `tf.pad1d` + * - `tf.pad2d` + * - `tf.pad3d` + * - `tf.pad4d` + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * x.pad([[1, 2]]).print(); + * ``` + * @param x The tensor to pad. + * @param paddings An array of length `R` (the rank of the tensor), where + * each element is a length-2 tuple of ints `[padBefore, padAfter]`, + * specifying how much to pad along each dimension of the tensor. + * @param constantValue The pad value to use. Defaults to 0. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function pad_(x, paddings, constantValue = 0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'pad'); + if ($x.rank === 0) { + throw new Error('pad(scalar) is not defined. Pass non-scalar to pad'); + } + const forward = (backend, save) => { + save([$x]); + return backend.pad($x, paddings, constantValue); + }; + const attrs = { paddings, constantValue }; + const inputs = { x: $x }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["db" /* PadV2 */], attrs); +} +const pad_pad = Object(operation["a" /* op */])({ pad_ }); +//# sourceMappingURL=pad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function assertParamsValid(input, begin, size) { + util["assert"](input.rank === begin.length, () => `Error in slice${input.rank}D: Length of begin ${begin} must ` + + `match the rank of the array (${input.rank}).`); + util["assert"](input.rank === size.length, () => `Error in slice${input.rank}D: Length of size ${size} must ` + + `match the rank of the array (${input.rank}).`); + for (let i = 0; i < input.rank; ++i) { + util["assert"](begin[i] + size[i] <= input.shape[i], () => `Error in slice${input.rank}D: begin[${i}] + size[${i}] ` + + `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`); + } +} +/** Converts a binary mask to an array of axes. Used in stridedSlice(). */ +function maskToAxes(mask) { + const axes = []; + let axis = 0; + while (mask > 0) { + if (mask & 1) { + axes.push(axis); + } + mask /= 2; + axis++; + } + return axes; +} +/** Computes the output shape given the strided slice params. */ +function slice_util_computeOutShape(begin, end, strides) { + const size = []; + for (let axis = 0; axis < begin.length; axis++) { + size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]); + } + return size; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current stride value. Otherwise, insert. +function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes) { + const newStrides = [...strides]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newStrides[ellipsisInsertionIndex] = 1; + } + else { + newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */); + newStrides.pop(); + } + } + return newStrides; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current start value. Otherwise, insert. 
+function startIndicesWithElidedDims(startIndices, ellipsisInsertionIndex, numElidedAxes) { + const newIndices = [...startIndices]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newIndices[ellipsisInsertionIndex] = 0; + } + else { + newIndices.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 0 /* element to add */); + newIndices.pop(); + } + } + return newIndices; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current stop value. Otherwise, insert. +function stopIndicesWithElidedDims(stopIndices, ellipsisInsertionIndex, numElidedAxes, inputShape) { + const newIndices = [...stopIndices]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newIndices[ellipsisInsertionIndex] = Number.MAX_SAFE_INTEGER; + } + else { + newIndices.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, Number.MAX_SAFE_INTEGER /* element to add */); + newIndices.pop(); + } + } + for (let i = 0; i < newIndices.length; i++) { + newIndices[i] = util["clamp"](0, newIndices[i], inputShape[i]); + } + return newIndices; +} +function stridesForAxis(strides, axis, ellipsisMask) { + let stride = strides[axis]; + if (ellipsisMask & (1 << axis) || stride == null) { + stride = 1; + } + return stride; +} +function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) { + // Begin with the specified index + let start = startIndices[axis]; + const stride = strides[axis] || 1; + // Check the axis bit from right of masked axes, or the begin index is not set + // for the axis. + if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) { + if (stride > 0) { + // Forward iteration - use the first element. 
These values will get + // clamped below (Note: We could have set them to 0 and axis_size-1, but + // use lowest() and max() to maintain symmetry with StopForAxis()) + start = Number.MIN_SAFE_INTEGER; + } + else { + // Backward iteration - use the last element. + start = Number.MAX_SAFE_INTEGER; + } + } + // Handle negative indices + const axisSize = inputShape[axis]; + if (start < 0) { + start += axisSize; + } + // Clamping + start = util["clamp"](0, start, axisSize - 1); + return start; +} +function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) { + // Begin with the specified index + let stop = stopIndices[axis]; + const stride = strides[axis] || 1; + // Check the axis bit from right of masked axes, or if the stop index is not + // set for this axis. + if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) { + if (stride > 0) { + // Forward iteration - use the last element. These values will get + // clamped below + stop = Number.MAX_SAFE_INTEGER; + } + else { + // Backward iteration - use the first element. + stop = Number.MIN_SAFE_INTEGER; + } + } + // Handle negative indices + const axisSize = inputShape[axis]; + if (stop < 0) { + stop += axisSize; + } + // Clamping + // Because the end index points one past the last element, we need slightly + // different clamping ranges depending on the direction. + if (stride > 0) { + // Forward iteration + stop = util["clamp"](0, stop, axisSize); + } + else { + // Backward iteration + stop = util["clamp"](-1, stop, axisSize - 1); + } + return stop; +} +/** + * Returns true if the slice occupies a continous set of elements in the + * 'flat' space. + */ +function isSliceContinous(shape, begin, size) { + // Index of the first axis that has size > 1. 
+ let firstNonOneAxis = size.length; + for (let i = 0; i < size.length; i++) { + if (size[i] > 1) { + firstNonOneAxis = i; + break; + } + } + for (let i = firstNonOneAxis + 1; i < size.length; i++) { + if (begin[i] > 0 || size[i] !== shape[i]) { + return false; + } + } + return true; +} +function computeFlatOffset(begin, strides) { + let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1; + for (let i = 0; i < begin.length - 1; i++) { + flatOffset += begin[i] * strides[i]; + } + return flatOffset; +} +//# sourceMappingURL=slice_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Extracts a 1D slice from 1D array starting at coordinates `begin` and is + * of length `size`. See `slice` for details. + */ +function slice1d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice1d'); + util["assert"]($x.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, [begin], [size]); +} +/** + * Extracts a 2D slice from a 2D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. 
+ */ +function slice2d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice2d'); + util["assert"]($x.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a 3D slice from a 3D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. + */ +function slice3d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice3d'); + util["assert"]($x.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a 4D slice from a 4D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. + */ +function slice4d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice4d'); + util["assert"]($x.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a slice from a `tf.Tensor` starting at coordinates `begin` + * and is of size `size`. + * + * Also available are stricter rank-specific methods with the same signature + * as this method that assert that `x` is of the given rank: + * - `tf.slice1d` + * - `tf.slice2d` + * - `tf.slice3d` + * - `tf.slice4d` + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * + * x.slice([1], [2]).print(); + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * x.slice([1, 0], [1, 2]).print(); + * ``` + * @param x The input `tf.Tensor` to slice from. + * @param begin The coordinates to start the slice from. The length can be + * less than the rank of x - the rest of the axes will have implicit 0 as + * start. Can also be a single number, in which case it specifies the + * first axis. + * @param size The size of the slice. 
The length can be less than the rank of + * x - the rest of the axes will have implicit -1. A value of -1 requests + * the rest of the dimensions in the axis. Can also be a single number, + * in which case it specifies the size of the first axis. + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function slice_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice'); + if ($x.rank === 0) { + throw new Error('Slicing scalar is not possible'); + } + // The following logic allows for more ergonomic calls. + let begin_; + if (typeof begin === 'number') { + begin_ = [begin, ...new Array($x.rank - 1).fill(0)]; + } + else if (begin.length < $x.rank) { + begin_ = begin.concat(new Array($x.rank - begin.length).fill(0)); + } + else { + begin_ = begin.slice(); + } + begin_.forEach(d => { + util["assert"](d !== -1, () => 'slice() does not support negative begin indexing.'); + }); + let size_; + if (size == null) { + size_ = new Array($x.rank).fill(-1); + } + else if (typeof size === 'number') { + size_ = [size, ...new Array($x.rank - 1).fill(-1)]; + } + else if (size.length < $x.rank) { + size_ = size.concat(new Array($x.rank - size.length).fill(-1)); + } + else { + size_ = size; + } + size_ = size_.map((d, i) => { + if (d >= 0) { + return d; + } + else { + util["assert"](d === -1, () => `Negative size values should be exactly -1 but got ` + + `${d} for the slice() size at index ${i}.`); + return $x.shape[i] - begin_[i]; + } + }); + assertParamsValid($x, begin_, size_); + const inputShape = $x.shape; + const grad = (dy) => { + // Create an Nx2 padding where the first column represents how many + // zeros are prepended (at start) for each dimension, and the second + // column indicates how many zeros are appended (at end). + // The number of zeros to append is the shape of the input + // elementwise-subtracted by both the begin vector and sizes vector. 
+ const paddings = []; + for (let i = 0; i < dy.rank; i++) { + paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]); + } + return { x: () => pad_pad(dy, paddings) }; + }; + const attrs = { begin: begin_, size: size_ }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.slice($x, begin_, size_), { x: $x }, grad, 'Slice', attrs); +} +const slice = Object(operation["a" /* op */])({ slice_ }); +const slice1d = Object(operation["a" /* op */])({ slice1d_ }); +const slice2d = Object(operation["a" /* op */])({ slice2d_ }); +const slice3d = Object(operation["a" /* op */])({ slice3d_ }); +const slice4d = Object(operation["a" /* op */])({ slice4d_ }); +//# sourceMappingURL=slice.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Tile_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const tileGradConfig = { + kernelName: kernel_names["rb" /* Tile */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { reps } = attrs; + const derX = () => { + let xGrad = Object(tensor_ops["o" /* zerosLike */])(x); + // TODO(cais): Maybe reduce memory footprint by avoiding repeated + // slicing. 
+ if (x.rank === 1) { + for (let i = 0; i < reps[0]; ++i) { + xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]])); + } + } + else if (x.rank === 2) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [ + x.shape[0], x.shape[1] + ])); + } + } + } + else if (x.rank === 3) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + for (let k = 0; k < reps[2]; ++k) { + xGrad = + add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]])); + } + } + } + } + else if (x.rank === 4) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + for (let k = 0; k < reps[2]; ++k) { + for (let l = 0; l < reps[3]; ++l) { + xGrad = + add(xGrad, slice(dy, [ + i * x.shape[0], j * x.shape[1], k * x.shape[2], + l * x.shape[3] + ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]])); + } + } + } + } + } + else { + throw new Error(`Gradient for tile operation is not implemented for rank-` + + `${x.rank} tensors yet.`); + } + return xGrad; + }; + return { x: derX }; + }, +}; +//# sourceMappingURL=Tile_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Transpose_grad.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +const transposeGradConfig = { + kernelName: kernel_names["sb" /* Transpose */], + gradFunc: (dy, saved, attrs) => { + const transposeAttrs = attrs; + const { perm } = transposeAttrs; + const undoPerm = getUndoAxesPermutation(perm); + return { x: () => transpose(dy, undoPerm) }; + } +}; +//# sourceMappingURL=Transpose_grad.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/kernel_registry.js +var kernel_registry = __webpack_require__(17); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/register_all_gradients.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +// Export all kernel configs here so that the package can auto register them +const gradConfigs = [ + addGradConfig, + addNGradConfig, + atan2GradConfig, + avgPoolGradConfig, + avgPool3DGradConfig, + batchMatMulGradConfig, + batchToSpaceNDGradConfig, + broadcastToGradConfig, + concatGradConfig, + conv2DGradConfig, + conv2DBackpropInputGradConfig, + conv3DGradConfig, + cumsumGradConfig, + depthwiseConv2dNativeGradConfig, + divGradConfig, + eluGradConfig, + floorDivGradConfig, + fusedBatchNormGradConfig, + greaterEqualGradConfig, + identityGradConfig, + lrnGradConfig, + oneHotGradConfig, + padV2GradConfig, + splitVGradConfig, + maxGradConfig, + spaceToBatchNDGradConfig, + maxGradConfig, + maximumGradConfig, + maxPoolGradConfig, + maxPool3DGradConfig, + minimumGradConfig, + modGradConfig, + multiplyGradConfig, + oneHotGradConfig, + padV2GradConfig, + powGradConfig, + preluGradConfig, + reluGradConfig, + relu6GradConfig, + seluGradConfig, + spaceToBatchNDGradConfig, + splitVGradConfig, + squareGradConfig, + squaredDifferenceGradConfig, + tileGradConfig, + transposeGradConfig, + subGradConfig +]; +for (const gradientConfig of gradConfigs) { + Object(kernel_registry["d" /* registerGradient */])(gradientConfig); +} +//# sourceMappingURL=register_all_gradients.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/environment.js +var environment = __webpack_require__(10); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/platforms/platform_browser.js +/** + * @license + * Copyright 2019 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +class PlatformBrowser { + fetch(path, init) { + return fetch(path, init); + } + now() { + return performance.now(); + } + encode(text, encoding) { + if (encoding !== 'utf-8' && encoding !== 'utf8') { + throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`); + } + if (this.textEncoder == null) { + this.textEncoder = new TextEncoder(); + } + return this.textEncoder.encode(text); + } + decode(bytes, encoding) { + return new TextDecoder(encoding).decode(bytes); + } +} +if (Object(environment["c" /* env */])().get('IS_BROWSER')) { + Object(environment["c" /* env */])().setPlatform('browser', new PlatformBrowser()); +} +//# sourceMappingURL=platform_browser.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/platforms/platform_node.js +var platform_node = __webpack_require__(62); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/io_utils.js +var io_utils = __webpack_require__(13); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/router_registry.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +class IORouterRegistry { + constructor() { + this.saveRouters = []; + this.loadRouters = []; + } + static getInstance() { + if (IORouterRegistry.instance == null) { + IORouterRegistry.instance = new IORouterRegistry(); + } + return IORouterRegistry.instance; + } + /** + * Register a save-handler router. + * + * @param saveRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `save` method defined or `null`. + */ + static registerSaveRouter(saveRouter) { + IORouterRegistry.getInstance().saveRouters.push(saveRouter); + } + /** + * Register a load-handler router. + * + * @param loadRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `load` method defined or `null`. + */ + static registerLoadRouter(loadRouter) { + IORouterRegistry.getInstance().loadRouters.push(loadRouter); + } + /** + * Look up IOHandler for saving, given a URL-like string. + * + * @param url + * @returns If only one match is found, an instance of IOHandler with the + * `save` method defined. If no match is found, `null`. + * @throws Error, if more than one match is found. + */ + static getSaveHandlers(url) { + return IORouterRegistry.getHandlers(url, 'save'); + } + /** + * Look up IOHandler for loading, given a URL-like string. + * + * @param url + * @param loadOptions Optional, custom load options. + * @returns All valid handlers for `url`, given the currently registered + * handler routers. 
+ */ + static getLoadHandlers(url, loadOptions) { + return IORouterRegistry.getHandlers(url, 'load', loadOptions); + } + static getHandlers(url, handlerType, loadOptions) { + const validHandlers = []; + const routers = handlerType === 'load' ? + IORouterRegistry.getInstance().loadRouters : + IORouterRegistry.getInstance().saveRouters; + routers.forEach(router => { + const handler = router(url, loadOptions); + if (handler !== null) { + validHandlers.push(handler); + } + }); + return validHandlers; + } +} +const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter); +const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter); +const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url); +const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions); +//# sourceMappingURL=router_registry.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/model_management.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Classes and functions for model management across multiple storage mediums. + * + * Supported client actions: + * - Listing models on all registered storage mediums. + * - Remove model by URL from any registered storage mediums, by using URL + * string. 
+ * - Moving or copying model from one path to another in the same medium or from + * one medium to another, by using URL strings. + */ + + +const URL_SCHEME_SUFFIX = '://'; +class model_management_ModelStoreManagerRegistry { + constructor() { + this.managers = {}; + } + static getInstance() { + if (model_management_ModelStoreManagerRegistry.instance == null) { + model_management_ModelStoreManagerRegistry.instance = new model_management_ModelStoreManagerRegistry(); + } + return model_management_ModelStoreManagerRegistry.instance; + } + /** + * Register a save-handler router. + * + * @param saveRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `save` method defined or `null`. + */ + static registerManager(scheme, manager) { + Object(util["assert"])(scheme != null, () => 'scheme must not be undefined or null.'); + if (scheme.endsWith(URL_SCHEME_SUFFIX)) { + scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX)); + } + Object(util["assert"])(scheme.length > 0, () => 'scheme must not be an empty string.'); + const registry = model_management_ModelStoreManagerRegistry.getInstance(); + Object(util["assert"])(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`); + registry.managers[scheme] = manager; + } + static getManager(scheme) { + const manager = this.getInstance().managers[scheme]; + if (manager == null) { + throw new Error(`Cannot find model manager for scheme '${scheme}'`); + } + return manager; + } + static getSchemes() { + return Object.keys(this.getInstance().managers); + } +} +/** + * Helper method for parsing a URL string into a scheme and a path. + * + * @param url E.g., 'localstorage://my-model' + * @returns A dictionary with two fields: scheme and path. + * Scheme: e.g., 'localstorage' in the example above. + * Path: e.g., 'my-model' in the example above. 
+ */ +function parseURL(url) { + if (url.indexOf(URL_SCHEME_SUFFIX) === -1) { + throw new Error(`The url string provided does not contain a scheme. ` + + `Supported schemes are: ` + + `${model_management_ModelStoreManagerRegistry.getSchemes().join(',')}`); + } + return { + scheme: url.split(URL_SCHEME_SUFFIX)[0], + path: url.split(URL_SCHEME_SUFFIX)[1], + }; +} +async function cloneModelInternal(sourceURL, destURL, deleteSource = false) { + Object(util["assert"])(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`); + const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL); + Object(util["assert"])(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`); + Object(util["assert"])(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` + + `load handlers for source URL ${sourceURL}.`); + const loadHandler = loadHandlers[0]; + const saveHandlers = IORouterRegistry.getSaveHandlers(destURL); + Object(util["assert"])(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` + + `URL ${destURL}.`); + Object(util["assert"])(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` + + `save handlers for destination URL ${destURL}.`); + const saveHandler = saveHandlers[0]; + const sourceScheme = parseURL(sourceURL).scheme; + const sourcePath = parseURL(sourceURL).path; + const sameMedium = sourceScheme === parseURL(sourceURL).scheme; + const modelArtifacts = await loadHandler.load(); + // If moving within the same storage medium, remove the old model as soon as + // the loading is done. Without doing this, it is possible that the combined + // size of the two models will cause the cloning to fail. 
+ if (deleteSource && sameMedium) { + await model_management_ModelStoreManagerRegistry.getManager(sourceScheme) + .removeModel(sourcePath); + } + const saveResult = await saveHandler.save(modelArtifacts); + // If moving between mediums, the deletion is done after the save succeeds. + // This guards against the case in which saving to the destination medium + // fails. + if (deleteSource && !sameMedium) { + await model_management_ModelStoreManagerRegistry.getManager(sourceScheme) + .removeModel(sourcePath); + } + return saveResult.modelArtifactsInfo; +} +/** + * List all models stored in registered storage mediums. + * + * For a web browser environment, the registered mediums are Local Storage and + * IndexedDB. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @returns A `Promise` of a dictionary mapping URLs of existing models to + * their model artifacts info. URLs include medium-specific schemes, e.g., + * 'indexeddb://my/model/1'. Model artifacts info include type of the + * model's topology, byte sizes of the topology, weights, etc. 
+ */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function listModels() { + const schemes = model_management_ModelStoreManagerRegistry.getSchemes(); + const out = {}; + for (const scheme of schemes) { + const schemeOut = await model_management_ModelStoreManagerRegistry.getManager(scheme).listModels(); + for (const path in schemeOut) { + const url = scheme + URL_SCHEME_SUFFIX + path; + out[url] = schemeOut[path]; + } + } + return out; +} +/** + * Remove a model specified by URL from a reigstered storage medium. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @param url A URL to a stored model, with a scheme prefix, e.g., + * 'localstorage://my-model-1', 'indexeddb://my/model/2'. + * @returns ModelArtifactsInfo of the deleted model (if and only if deletion + * is successful). + * @throws Error if deletion fails, e.g., if no model exists at `path`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function removeModel(url) { + const schemeAndPath = parseURL(url); + const manager = model_management_ModelStoreManagerRegistry.getManager(schemeAndPath.scheme); + return manager.removeModel(schemeAndPath.path); +} +/** + * Copy a model from one URL to another. + * + * This function supports: + * + * 1. Copying within a storage medium, e.g., + * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')` + * 2. 
Copying between two storage mediums, e.g., + * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Copy the model, from Local Storage to IndexedDB. + * await tf.io.copyModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove both models. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of copying. + * @param destURL Destination URL of copying. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function copyModel(sourceURL, destURL) { + const deleteSource = false; + return cloneModelInternal(sourceURL, destURL, deleteSource); +} +/** + * Move a model from one URL to another. + * + * This function supports: + * + * 1. Moving within a storage medium, e.g., + * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Moving between two storage mediums, e.g., + * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. 
+ * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Move the model, from Local Storage to IndexedDB. + * await tf.io.moveModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove the moved model. + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of moving. + * @param destURL Destination URL of moving. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function moveModel(sourceURL, destURL) { + const deleteSource = true; + return cloneModelInternal(sourceURL, destURL, deleteSource); +} + +//# sourceMappingURL=model_management.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/indexed_db.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const DATABASE_NAME = 'tensorflowjs'; +const DATABASE_VERSION = 1; +// Model data and ModelArtifactsInfo (metadata) are stored in two separate +// stores for efficient access of the list of stored models and their metadata. +// 1. The object store for model data: topology, weights and weight manifests. +const MODEL_STORE_NAME = 'models_store'; +// 2. The object store for ModelArtifactsInfo, including meta-information such +// as the type of topology (JSON vs binary), byte size of the topology, byte +// size of the weights, etc. +const INFO_STORE_NAME = 'model_info_store'; +/** + * Delete the entire database for tensorflow.js, including the models store. + */ +async function deleteDatabase() { + const idbFactory = getIndexedDBFactory(); + return new Promise((resolve, reject) => { + const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME); + deleteRequest.onsuccess = () => resolve(); + deleteRequest.onerror = error => reject(error); + }); +} +function getIndexedDBFactory() { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // TODO(cais): Add more info about what IOHandler subtypes are available. + // Maybe point to a doc page on the web and/or automatically determine + // the available IOHandlers and print them in the error message. + throw new Error('Failed to obtain IndexedDB factory because the current environment' + + 'is not a web browser.'); + } + // tslint:disable-next-line:no-any + const theWindow = typeof window === 'undefined' ? 
self : window; + const factory = theWindow.indexedDB || theWindow.mozIndexedDB || + theWindow.webkitIndexedDB || theWindow.msIndexedDB || + theWindow.shimIndexedDB; + if (factory == null) { + throw new Error('The current browser does not appear to support IndexedDB.'); + } + return factory; +} +function setUpDatabase(openRequest) { + const db = openRequest.result; + db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' }); + db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' }); +} +/** + * IOHandler subclass: Browser IndexedDB. + * + * See the doc string of `browserIndexedDB` for more details. + */ +class indexed_db_BrowserIndexedDB { + constructor(modelPath) { + this.indexedDB = getIndexedDBFactory(); + if (modelPath == null || !modelPath) { + throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.'); + } + this.modelPath = modelPath; + } + async save(modelArtifacts) { + // TODO(cais): Support saving GraphDef models. + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserLocalStorage.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + return this.databaseAction(this.modelPath, modelArtifacts); + } + async load() { + return this.databaseAction(this.modelPath); + } + /** + * Perform database action to put model artifacts into or read model artifacts + * from IndexedDB object store. + * + * Whether the action is put or get depends on whether `modelArtifacts` is + * specified. If it is specified, the action will be put; otherwise the action + * will be get. + * + * @param modelPath A unique string path for the model. + * @param modelArtifacts If specified, it will be the model artifacts to be + * stored in IndexedDB. + * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise` + * of `ModelArtifacts`, if the action is get. 
+ */ + databaseAction(modelPath, modelArtifacts) { + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + if (modelArtifacts == null) { + // Read model out from object store. + const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const getRequest = modelStore.get(this.modelPath); + getRequest.onsuccess = () => { + if (getRequest.result == null) { + db.close(); + return reject(new Error(`Cannot find model with path '${this.modelPath}' ` + + `in IndexedDB.`)); + } + else { + resolve(getRequest.result.modelArtifacts); + } + }; + getRequest.onerror = error => { + db.close(); + return reject(getRequest.error); + }; + modelTx.oncomplete = () => db.close(); + } + else { + // Put model into object store. + const modelArtifactsInfo = Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts); + // First, put ModelArtifactsInfo into info store. + const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite'); + let infoStore = infoTx.objectStore(INFO_STORE_NAME); + const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo }); + let modelTx; + putInfoRequest.onsuccess = () => { + // Second, put model data into model store. + modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const putModelRequest = modelStore.put({ + modelPath: this.modelPath, + modelArtifacts, + modelArtifactsInfo + }); + putModelRequest.onsuccess = () => resolve({ modelArtifactsInfo }); + putModelRequest.onerror = error => { + // If the put-model request fails, roll back the info entry as + // well. 
+ infoStore = infoTx.objectStore(INFO_STORE_NAME); + const deleteInfoRequest = infoStore.delete(this.modelPath); + deleteInfoRequest.onsuccess = () => { + db.close(); + return reject(putModelRequest.error); + }; + deleteInfoRequest.onerror = error => { + db.close(); + return reject(putModelRequest.error); + }; + }; + }; + putInfoRequest.onerror = error => { + db.close(); + return reject(putInfoRequest.error); + }; + infoTx.oncomplete = () => { + if (modelTx == null) { + db.close(); + } + else { + modelTx.oncomplete = () => db.close(); + } + }; + } + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } +} +indexed_db_BrowserIndexedDB.URL_SCHEME = 'indexeddb://'; +const indexedDBRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(indexed_db_BrowserIndexedDB.URL_SCHEME)) { + return browserIndexedDB(url.slice(indexed_db_BrowserIndexedDB.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(indexedDBRouter); +IORouterRegistry.registerLoadRouter(indexedDBRouter); +/** + * Creates a browser IndexedDB IOHandler for saving and loading models. + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save('indexeddb://MyModel')); + * console.log(saveResult); + * ``` + * + * @param modelPath A unique identifier for the model to be saved. Must be a + * non-empty string. + * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`), + * which can be used with, e.g., `tf.Model.save`. + */ +function browserIndexedDB(modelPath) { + return new indexed_db_BrowserIndexedDB(modelPath); +} +function maybeStripScheme(key) { + return key.startsWith(indexed_db_BrowserIndexedDB.URL_SCHEME) ? 
+ key.slice(indexed_db_BrowserIndexedDB.URL_SCHEME.length) : + key; +} +class BrowserIndexedDBManager { + constructor() { + this.indexedDB = getIndexedDBFactory(); + } + async listModels() { + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + const tx = db.transaction(INFO_STORE_NAME, 'readonly'); + const store = tx.objectStore(INFO_STORE_NAME); + // tslint:disable:max-line-length + // Need to cast `store` as `any` here because TypeScript's DOM + // library does not have the `getAll()` method even though the + // method is supported in the latest version of most mainstream + // browsers: + // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll + // tslint:enable:max-line-length + // tslint:disable-next-line:no-any + const getAllInfoRequest = store.getAll(); + getAllInfoRequest.onsuccess = () => { + const out = {}; + for (const item of getAllInfoRequest.result) { + out[item.modelPath] = item.modelArtifactsInfo; + } + resolve(out); + }; + getAllInfoRequest.onerror = error => { + db.close(); + return reject(getAllInfoRequest.error); + }; + tx.oncomplete = () => db.close(); + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } + async removeModel(path) { + path = maybeStripScheme(path); + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite'); + const infoStore = infoTx.objectStore(INFO_STORE_NAME); + const getInfoRequest = infoStore.get(path); + let modelTx; + getInfoRequest.onsuccess = () => { + if (getInfoRequest.result == null) { + db.close(); + return reject(new 
Error(`Cannot find model with path '${path}' ` + + `in IndexedDB.`)); + } + else { + // First, delete the entry in the info store. + const deleteInfoRequest = infoStore.delete(path); + const deleteModelData = () => { + // Second, delete the entry in the model store. + modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const deleteModelRequest = modelStore.delete(path); + deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo); + deleteModelRequest.onerror = error => reject(getInfoRequest.error); + }; + // Proceed with deleting model data regardless of whether deletion + // of info data succeeds or not. + deleteInfoRequest.onsuccess = deleteModelData; + deleteInfoRequest.onerror = error => { + deleteModelData(); + db.close(); + return reject(getInfoRequest.error); + }; + } + }; + getInfoRequest.onerror = error => { + db.close(); + return reject(getInfoRequest.error); + }; + infoTx.oncomplete = () => { + if (modelTx == null) { + db.close(); + } + else { + modelTx.oncomplete = () => db.close(); + } + }; + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } +} +if (Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // Wrap the construction and registration, to guard against browsers that + // don't support Local Storage. + try { + model_management_ModelStoreManagerRegistry.registerManager(indexed_db_BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager()); + } + catch (err) { + } +} +//# sourceMappingURL=indexed_db.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/local_storage.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const PATH_SEPARATOR = '/'; +const PATH_PREFIX = 'tensorflowjs_models'; +const INFO_SUFFIX = 'info'; +const MODEL_TOPOLOGY_SUFFIX = 'model_topology'; +const WEIGHT_SPECS_SUFFIX = 'weight_specs'; +const WEIGHT_DATA_SUFFIX = 'weight_data'; +const MODEL_METADATA_SUFFIX = 'model_metadata'; +/** + * Purge all tensorflow.js-saved model artifacts from local storage. + * + * @returns Paths of the models purged. + */ +function purgeLocalStorageArtifacts() { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER') || + typeof window === 'undefined' || + typeof window.localStorage === 'undefined') { + throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' + + 'unavailable in the current environment.'); + } + const LS = window.localStorage; + const purgedModelPaths = []; + for (let i = 0; i < LS.length; ++i) { + const key = LS.key(i); + const prefix = PATH_PREFIX + PATH_SEPARATOR; + if (key.startsWith(prefix) && key.length > prefix.length) { + LS.removeItem(key); + const modelName = getModelPathFromKey(key); + if (purgedModelPaths.indexOf(modelName) === -1) { + purgedModelPaths.push(modelName); + } + } + } + return purgedModelPaths; +} +function getModelKeys(path) { + return { + info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR), + topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR), + weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR), + weightData: [PATH_PREFIX, path, 
WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR), + modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR) + }; +} +/** + * Get model path from a local-storage key. + * + * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1' + * + * @param key + */ +function getModelPathFromKey(key) { + const items = key.split(PATH_SEPARATOR); + if (items.length < 3) { + throw new Error(`Invalid key format: ${key}`); + } + return items.slice(1, items.length - 1).join(PATH_SEPARATOR); +} +function local_storage_maybeStripScheme(key) { + return key.startsWith(local_storage_BrowserLocalStorage.URL_SCHEME) ? + key.slice(local_storage_BrowserLocalStorage.URL_SCHEME.length) : + key; +} +/** + * IOHandler subclass: Browser Local Storage. + * + * See the doc string to `browserLocalStorage` for more details. + */ +class local_storage_BrowserLocalStorage { + constructor(modelPath) { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER') || + typeof window === 'undefined' || + typeof window.localStorage === 'undefined') { + // TODO(cais): Add more info about what IOHandler subtypes are + // available. + // Maybe point to a doc page on the web and/or automatically determine + // the available IOHandlers and print them in the error message. + throw new Error('The current environment does not support local storage.'); + } + this.LS = window.localStorage; + if (modelPath == null || !modelPath) { + throw new Error('For local storage, modelPath must not be null, undefined or empty.'); + } + this.modelPath = modelPath; + this.keys = getModelKeys(this.modelPath); + } + /** + * Save model artifacts to browser local storage. + * + * See the documentation to `browserLocalStorage` for details on the saved + * artifacts. + * + * @param modelArtifacts The model artifacts to be stored. + * @returns An instance of SaveResult. 
+ */ + async save(modelArtifacts) { + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserLocalStorage.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + else { + const topology = JSON.stringify(modelArtifacts.modelTopology); + const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs); + const modelArtifactsInfo = Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts); + try { + this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo)); + this.LS.setItem(this.keys.topology, topology); + this.LS.setItem(this.keys.weightSpecs, weightSpecs); + this.LS.setItem(this.keys.weightData, Object(io_utils["a" /* arrayBufferToBase64String */])(modelArtifacts.weightData)); + this.LS.setItem(this.keys.modelMetadata, JSON.stringify({ + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + userDefinedMetadata: modelArtifacts.userDefinedMetadata + })); + return { modelArtifactsInfo }; + } + catch (err) { + // If saving failed, clean up all items saved so far. + this.LS.removeItem(this.keys.info); + this.LS.removeItem(this.keys.topology); + this.LS.removeItem(this.keys.weightSpecs); + this.LS.removeItem(this.keys.weightData); + this.LS.removeItem(this.keys.modelMetadata); + throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` + + `size quota being exceeded is a possible cause of this failure: ` + + `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` + + `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` + + `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`); + } + } + } + /** + * Load a model from local storage. + * + * See the documentation to `browserLocalStorage` for details on the saved + * artifacts. + * + * @returns The loaded model (if loading succeeds). 
+ */ + async load() { + const info = JSON.parse(this.LS.getItem(this.keys.info)); + if (info == null) { + throw new Error(`In local storage, there is no model with name '${this.modelPath}'`); + } + if (info.modelTopologyType !== 'JSON') { + throw new Error('BrowserLocalStorage does not support loading non-JSON model ' + + 'topology yet.'); + } + const out = {}; + // Load topology. + const topology = JSON.parse(this.LS.getItem(this.keys.topology)); + if (topology == null) { + throw new Error(`In local storage, the topology of model '${this.modelPath}' ` + + `is missing.`); + } + out.modelTopology = topology; + // Load weight specs. + const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs)); + if (weightSpecs == null) { + throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` + + `are missing.`); + } + out.weightSpecs = weightSpecs; + // Load meta-data fields. + const metadataString = this.LS.getItem(this.keys.modelMetadata); + if (metadataString != null) { + const metadata = JSON.parse(metadataString); + out.format = metadata['format']; + out.generatedBy = metadata['generatedBy']; + out.convertedBy = metadata['convertedBy']; + out.userDefinedMetadata = metadata['userDefinedMetadata']; + } + // Load weight data. 
+ const weightDataBase64 = this.LS.getItem(this.keys.weightData); + if (weightDataBase64 == null) { + throw new Error(`In local storage, the binary weight values of model ` + + `'${this.modelPath}' are missing.`); + } + out.weightData = Object(io_utils["b" /* base64StringToArrayBuffer */])(weightDataBase64); + return out; + } +} +local_storage_BrowserLocalStorage.URL_SCHEME = 'localstorage://'; +const localStorageRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(local_storage_BrowserLocalStorage.URL_SCHEME)) { + return browserLocalStorage(url.slice(local_storage_BrowserLocalStorage.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(localStorageRouter); +IORouterRegistry.registerLoadRouter(localStorageRouter); +/** + * Factory function for local storage IOHandler. + * + * This `IOHandler` supports both `save` and `load`. + * + * For each model's saved artifacts, four items are saved to local storage. + * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the + * model, such as date saved, type of the topology, size in bytes, etc. + * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. For Keras- + * style models, this is a stringized JSON. + * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the + * model, can be used to decode the saved binary weight values (see + * item below). + * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary + * weight values, stored as a base64-encoded string. + * + * Saving may throw an `Error` if the total size of the artifacts exceed the + * browser-specific quota. + * + * @param modelPath A unique identifier for the model to be saved. Must be a + * non-empty string. + * @returns An instance of `IOHandler`, which can be used with, e.g., + * `tf.Model.save`. 
+ */ +function browserLocalStorage(modelPath) { + return new local_storage_BrowserLocalStorage(modelPath); +} +class local_storage_BrowserLocalStorageManager { + constructor() { + Object(util["assert"])(Object(environment["c" /* env */])().getBool('IS_BROWSER'), () => 'Current environment is not a web browser'); + Object(util["assert"])(typeof window === 'undefined' || + typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage'); + this.LS = window.localStorage; + } + async listModels() { + const out = {}; + const prefix = PATH_PREFIX + PATH_SEPARATOR; + const suffix = PATH_SEPARATOR + INFO_SUFFIX; + for (let i = 0; i < this.LS.length; ++i) { + const key = this.LS.key(i); + if (key.startsWith(prefix) && key.endsWith(suffix)) { + const modelPath = getModelPathFromKey(key); + out[modelPath] = JSON.parse(this.LS.getItem(key)); + } + } + return out; + } + async removeModel(path) { + path = local_storage_maybeStripScheme(path); + const keys = getModelKeys(path); + if (this.LS.getItem(keys.info) == null) { + throw new Error(`Cannot find model at path '${path}'`); + } + const info = JSON.parse(this.LS.getItem(keys.info)); + this.LS.removeItem(keys.info); + this.LS.removeItem(keys.topology); + this.LS.removeItem(keys.weightSpecs); + this.LS.removeItem(keys.weightData); + return info; + } +} +if (Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // Wrap the construction and registration, to guard against browsers that + // don't support Local Storage. + try { + model_management_ModelStoreManagerRegistry.registerManager(local_storage_BrowserLocalStorage.URL_SCHEME, new local_storage_BrowserLocalStorageManager()); + } + catch (err) { + } +} +//# sourceMappingURL=local_storage.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/browser_files.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * IOHandlers related to files, such as browser-triggered file downloads, + * user-selected files in browser. + */ + + + +const DEFAULT_FILE_NAME_PREFIX = 'model'; +const DEFAULT_JSON_EXTENSION_NAME = '.json'; +const DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin'; +function defer(f) { + return new Promise(resolve => setTimeout(resolve)).then(f); +} +class browser_files_BrowserDownloads { + constructor(fileNamePrefix) { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // TODO(cais): Provide info on what IOHandlers are available under the + // current environment. 
+ throw new Error('browserDownloads() cannot proceed because the current environment ' + + 'is not a browser.'); + } + if (fileNamePrefix.startsWith(browser_files_BrowserDownloads.URL_SCHEME)) { + fileNamePrefix = fileNamePrefix.slice(browser_files_BrowserDownloads.URL_SCHEME.length); + } + if (fileNamePrefix == null || fileNamePrefix.length === 0) { + fileNamePrefix = DEFAULT_FILE_NAME_PREFIX; + } + this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME; + this.weightDataFileName = + fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME; + } + async save(modelArtifacts) { + if (typeof (document) === 'undefined') { + throw new Error('Browser downloads are not supported in ' + + 'this environment since `document` is not present'); + } + const weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' })); + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserDownloads.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + else { + const weightsManifest = [{ + paths: ['./' + this.weightDataFileName], + weights: modelArtifacts.weightSpecs + }]; + const modelTopologyAndWeightManifest = { + modelTopology: modelArtifacts.modelTopology, + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + weightsManifest + }; + const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' })); + // If anchor elements are not provided, create them without attaching them + // to parents, so that the downloaded file names can be controlled. + const jsonAnchor = this.jsonAnchor == null ? 
document.createElement('a') : + this.jsonAnchor; + jsonAnchor.download = this.modelTopologyFileName; + jsonAnchor.href = modelTopologyAndWeightManifestURL; + // Trigger downloads by evoking a click event on the download anchors. + // When multiple downloads are started synchronously, Firefox will only + // save the last one. + await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click'))); + if (modelArtifacts.weightData != null) { + const weightDataAnchor = this.weightDataAnchor == null ? + document.createElement('a') : + this.weightDataAnchor; + weightDataAnchor.download = this.weightDataFileName; + weightDataAnchor.href = weightsURL; + await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click'))); + } + return { modelArtifactsInfo: Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts) }; + } + } +} +browser_files_BrowserDownloads.URL_SCHEME = 'downloads://'; +class browser_files_BrowserFiles { + constructor(files) { + if (files == null || files.length < 1) { + throw new Error(`When calling browserFiles, at least 1 file is required, ` + + `but received ${files}`); + } + this.files = files; + } + async load() { + const jsonFile = this.files[0]; + const weightFiles = this.files.slice(1); + return new Promise((resolve, reject) => { + const jsonReader = new FileReader(); + jsonReader.onload = (event) => { + // tslint:disable-next-line:no-any + const modelJSON = JSON.parse(event.target.result); + const modelTopology = modelJSON.modelTopology; + if (modelTopology == null) { + reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`)); + return; + } + if (weightFiles.length === 0) { + resolve({ modelTopology }); + } + const weightsManifest = modelJSON.weightsManifest; + if (weightsManifest == null) { + reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`)); + return; + } + let pathToFile; + try { + pathToFile = + this.checkManifestAndWeightFiles(weightsManifest, weightFiles); + } + catch 
(err) { + reject(err); + return; + } + const weightSpecs = []; + const paths = []; + const perFileBuffers = []; + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + paths.push(path); + perFileBuffers.push(null); + }); + weightSpecs.push(...weightsGroup.weights); + }); + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + const weightFileReader = new FileReader(); + weightFileReader.onload = (event) => { + // tslint:disable-next-line:no-any + const weightData = event.target.result; + const index = paths.indexOf(path); + perFileBuffers[index] = weightData; + if (perFileBuffers.indexOf(null) === -1) { + resolve({ + modelTopology, + weightSpecs, + weightData: Object(io_utils["d" /* concatenateArrayBuffers */])(perFileBuffers), + format: modelJSON.format, + generatedBy: modelJSON.generatedBy, + convertedBy: modelJSON.convertedBy, + userDefinedMetadata: modelJSON.userDefinedMetadata + }); + } + }; + weightFileReader.onerror = error => reject(`Failed to weights data from file of path '${path}'.`); + weightFileReader.readAsArrayBuffer(pathToFile[path]); + }); + }); + }; + jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` + + `from file '${jsonFile.name}'. BrowserFiles supports loading ` + + `Keras-style tf.Model artifacts only.`); + jsonReader.readAsText(jsonFile); + }); + } + /** + * Check the compatibility between weights manifest and weight files. 
+ */ + checkManifestAndWeightFiles(manifest, files) { + const basenames = []; + const fileNames = files.map(file => Object(io_utils["c" /* basename */])(file.name)); + const pathToFile = {}; + for (const group of manifest) { + group.paths.forEach(path => { + const pathBasename = Object(io_utils["c" /* basename */])(path); + if (basenames.indexOf(pathBasename) !== -1) { + throw new Error(`Duplicate file basename found in weights manifest: ` + + `'${pathBasename}'`); + } + basenames.push(pathBasename); + if (fileNames.indexOf(pathBasename) === -1) { + throw new Error(`Weight file with basename '${pathBasename}' is not provided.`); + } + else { + pathToFile[path] = files[fileNames.indexOf(pathBasename)]; + } + }); + } + if (basenames.length !== files.length) { + throw new Error(`Mismatch in the number of files in weights manifest ` + + `(${basenames.length}) and the number of weight files provided ` + + `(${files.length}).`); + } + return pathToFile; + } +} +const browserDownloadsRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(browser_files_BrowserDownloads.URL_SCHEME)) { + return browserDownloads(url.slice(browser_files_BrowserDownloads.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(browserDownloadsRouter); +/** + * Creates an IOHandler that triggers file downloads from the browser. + * + * The returned `IOHandler` instance can be used as model exporting methods such + * as `tf.Model.save` and supports only saving. + * + * ```js + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * const saveResult = await model.save('downloads://mymodel'); + * // This will trigger downloading of two files: + * // 'mymodel.json' and 'mymodel.weights.bin'. 
+ * console.log(saveResult); + * ``` + * + * @param fileNamePrefix Prefix name of the files to be downloaded. For use with + * `tf.Model`, `fileNamePrefix` should follow either of the following two + * formats: + * 1. `null` or `undefined`, in which case the default file + * names will be used: + * - 'model.json' for the JSON file containing the model topology and + * weights manifest. + * - 'model.weights.bin' for the binary file containing the binary weight + * values. + * 2. A single string or an Array of a single string, as the file name prefix. + * For example, if `'foo'` is provided, the downloaded JSON + * file and binary weights file will be named 'foo.json' and + * 'foo.weights.bin', respectively. + * @param config Additional configuration for triggering downloads. + * @returns An instance of `BrowserDownloads` `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function browserDownloads(fileNamePrefix = 'model') { + return new browser_files_BrowserDownloads(fileNamePrefix); +} +/** + * Creates an IOHandler that loads model artifacts from user-selected files. + * + * This method can be used for loading from files such as user-selected files + * in the browser. + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * // Note: This code snippet won't run properly without the actual file input + * // elements in the HTML DOM. + * + * // Suppose there are two HTML file input (``) + * // elements. + * const uploadJSONInput = document.getElementById('upload-json'); + * const uploadWeightsInput = document.getElementById('upload-weights'); + * const model = await tf.loadLayersModel(tf.io.browserFiles( + * [uploadJSONInput.files[0], uploadWeightsInput.files[0]])); + * ``` + * + * @param files `File`s to load from. 
Currently, this function supports only + * loading from files that contain Keras-style models (i.e., `tf.Model`s), for + * which an `Array` of `File`s is expected (in that order): + * - A JSON file containing the model topology and weight manifest. + * - Optionally, One or more binary files containing the binary weights. + * These files must have names that match the paths in the `weightsManifest` + * contained by the aforementioned JSON file, or errors will be thrown + * during loading. These weights files have the same format as the ones + * generated by `tensorflowjs_converter` that comes with the `tensorflowjs` + * Python PIP package. If no weights files are provided, only the model + * topology will be loaded from the JSON file above. + * @returns An instance of `Files` `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function browserFiles(files) { + return new browser_files_BrowserFiles(files); +} +//# sourceMappingURL=browser_files.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/progress.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Monitor Promise.all progress, fire onProgress callback function. 
+ * + * @param promises Promise list going to be monitored + * @param onProgress Callback function. Fired when a promise resolved. + * @param startFraction Optional fraction start. Default to 0. + * @param endFraction Optional fraction end. Default to 1. + */ +function monitorPromisesProgress(promises, onProgress, startFraction, endFraction) { + checkPromises(promises); + startFraction = startFraction == null ? 0 : startFraction; + endFraction = endFraction == null ? 1 : endFraction; + checkFraction(startFraction, endFraction); + let resolvedPromise = 0; + const registerMonitor = (promise) => { + promise.then(value => { + const fraction = startFraction + + ++resolvedPromise / promises.length * (endFraction - startFraction); + // pass fraction as parameter to callback function. + onProgress(fraction); + return value; + }); + return promise; + }; + function checkPromises(promises) { + Object(util["assert"])(promises != null && Array.isArray(promises) && promises.length > 0, () => 'promises must be a none empty array'); + } + function checkFraction(startFraction, endFraction) { + Object(util["assert"])(startFraction >= 0 && startFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` + + `got startFraction ${startFraction}`); + Object(util["assert"])(endFraction >= 0 && endFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` + + `got endFraction ${endFraction}`); + Object(util["assert"])(endFraction >= startFraction, () => `startFraction must be no more than endFraction, but ` + + `got startFraction ${startFraction} and endFraction ` + + `${endFraction}`); + } + return Promise.all(promises.map(registerMonitor)); +} +//# sourceMappingURL=progress.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/types.js +var types = __webpack_require__(34); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/weights_loader.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Reads binary weights data from a number of URLs. + * + * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls. + * @param requestOptions RequestInit (options) for the HTTP requests. + * @param fetchFunc Optional overriding value for the `window.fetch` function. + * @param onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same + * length as `fetchURLs`. + */ +async function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) { + if (loadOptions == null) { + loadOptions = {}; + } + const fetchFunc = loadOptions.fetchFunc == null ? Object(environment["c" /* env */])().platform.fetch : + loadOptions.fetchFunc; + // Create the requests for all of the weights in parallel. + const requests = fetchURLs.map(fetchURL => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true })); + const fetchStartFraction = 0; + const fetchEndFraction = 0.5; + const responses = loadOptions.onProgress == null ? 
+ await Promise.all(requests) : + await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction); + const bufferPromises = responses.map(response => response.arrayBuffer()); + const bufferStartFraction = 0.5; + const bufferEndFraction = 1; + const buffers = loadOptions.onProgress == null ? + await Promise.all(bufferPromises) : + await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction); + return buffers; +} +/** + * Reads a weights manifest JSON configuration, fetches the weights and + * returns them as `Tensor`s. + * + * @param manifest The weights manifest JSON. + * @param filePathPrefix The path prefix for filenames given in the manifest. + * Defaults to the empty string. + * @param weightNames The names of the weights to be fetched. + */ +async function loadWeights(manifest, filePathPrefix = '', weightNames, requestInit) { + // TODO(nsthorat): Groups are currently fetched atomically. If you need a + // single weight from a group, the whole group will be fetched. At a future + // date, we should support fetching only the individual shards within a + // group that are needed to reconstruct the requested weight. + // TODO(cais): Use `decodeWeights` for implementation. + const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit }); + const loadWeights = weightsLoaderFactory(fetchWeights); + return loadWeights(manifest, filePathPrefix, weightNames); +} +/** + * Creates a function, which reads a weights manifest JSON configuration, + * fetches the weight files using the specified function and returns them as + * `Tensor`s. 
+ * + * ```js + * // example for creating a nodejs weight loader, which reads the weight files + * // from disk using fs.readFileSync + * + * import * as fs from 'fs' + * + * const fetchWeightsFromDisk = (filePaths: string[]) => + * filePaths.map(filePath => fs.readFileSync(filePath).buffer) + * + * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk) + * + * const manifest = JSON.parse( + * fs.readFileSync('./my_model-weights_manifest').toString() + * ) + * const weightMap = await loadWeights(manifest, './') + * ``` + * @param fetchWeightsFunction The function used for fetching the weight files. + * @returns Weight loading function. + */ +function weightsLoaderFactory(fetchWeightsFunction) { + return async (manifest, filePathPrefix = '', weightNames) => { + // Collect all the groups, weights, and their relative offsets to be + // fetched. + const groupIndicesToFetchMap = manifest.map(() => false); + const groupWeightsToFetch = {}; + const weightsFound = weightNames != null ? weightNames.map(() => false) : []; + const allManifestWeightNames = []; + manifest.forEach((manifestGroupConfig, groupIndex) => { + let groupOffset = 0; + manifestGroupConfig.weights.forEach(weightsEntry => { + const rawDtype = ('quantization' in weightsEntry) ? 
+ weightsEntry.quantization.dtype : + weightsEntry.dtype; + const weightsBytes = types["a" /* DTYPE_VALUE_SIZE_MAP */][rawDtype] * + util["sizeFromShape"](weightsEntry.shape); + const enqueueWeightsForFetchingFn = () => { + groupIndicesToFetchMap[groupIndex] = true; + if (groupWeightsToFetch[groupIndex] == null) { + groupWeightsToFetch[groupIndex] = []; + } + groupWeightsToFetch[groupIndex].push({ + manifestEntry: weightsEntry, + groupOffset, + sizeBytes: weightsBytes + }); + }; + if (weightNames != null) { + weightNames.forEach((weightName, weightIndex) => { + if (weightName === weightsEntry.name) { + enqueueWeightsForFetchingFn(); + weightsFound[weightIndex] = true; + } + }); + } + else { + enqueueWeightsForFetchingFn(); + } + allManifestWeightNames.push(weightsEntry.name); + groupOffset += weightsBytes; + }); + }); + if (!weightsFound.every(found => found)) { + const weightsNotFound = weightNames.filter((_, i) => !weightsFound[i]); + throw new Error(`Could not find weights in manifest with names: ` + + `${weightsNotFound.join(', ')}. \n` + + `Manifest JSON has weights with names: ` + + `${allManifestWeightNames.join(', ')}.`); + } + // Convert the one-hot boolean groupId => shouldFetch map to a list of group + // IDs. + const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => { + if (shouldFetch) { + accumulator.push(i); + } + return accumulator; + }, []); + const fetchUrls = []; + groupIndicesToFetch.forEach(i => { + manifest[i].paths.forEach(filepath => { + const fetchUrl = filePathPrefix + + (!filePathPrefix.endsWith('/') ? 
'/' : '') + filepath; + fetchUrls.push(fetchUrl); + }); + }); + const buffers = await fetchWeightsFunction(fetchUrls); + const weightsTensorMap = {}; + let bufferIndexOffset = 0; + groupIndicesToFetch.forEach(i => { + const numBuffers = manifest[i].paths.length; + let groupBytes = 0; + for (let i = 0; i < numBuffers; i++) { + groupBytes += buffers[bufferIndexOffset + i].byteLength; + } + // Create a buffer for the whole group. + const groupBuffer = new ArrayBuffer(groupBytes); + const groupByteBuffer = new Uint8Array(groupBuffer); + let groupBufferOffset = 0; + for (let i = 0; i < numBuffers; i++) { + const buffer = new Uint8Array(buffers[bufferIndexOffset + i]); + groupByteBuffer.set(buffer, groupBufferOffset); + groupBufferOffset += buffer.byteLength; + } + const weightsEntries = groupWeightsToFetch[i]; + weightsEntries.forEach(weightsEntry => { + const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes); + const nameToTensorMap = Object(io_utils["e" /* decodeWeights */])(byteBuffer, [weightsEntry.manifestEntry]); + for (const name in nameToTensorMap) { + weightsTensorMap[name] = nameToTensorMap[name]; + } + }); + bufferIndexOffset += numBuffers; + }); + return weightsTensorMap; + }; +} +//# sourceMappingURL=weights_loader.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/http.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * IOHandler implementations based on HTTP requests in the web browser. + * + * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API). + */ + + + + + +const OCTET_STREAM_MIME_TYPE = 'application/octet-stream'; +const JSON_TYPE = 'application/json'; +class http_HTTPRequest { + constructor(path, loadOptions) { + this.DEFAULT_METHOD = 'POST'; + if (loadOptions == null) { + loadOptions = {}; + } + this.weightPathPrefix = loadOptions.weightPathPrefix; + this.onProgress = loadOptions.onProgress; + if (loadOptions.fetchFunc != null) { + Object(util["assert"])(typeof loadOptions.fetchFunc === 'function', () => 'Must pass a function that matches the signature of ' + + '`fetch` (see ' + + 'https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)'); + this.fetch = loadOptions.fetchFunc; + } + else { + this.fetch = Object(environment["c" /* env */])().platform.fetch; + } + Object(util["assert"])(path != null && path.length > 0, () => 'URL path for http must not be null, undefined or ' + + 'empty.'); + if (Array.isArray(path)) { + Object(util["assert"])(path.length === 2, () => 'URL paths for http must have a length of 2, ' + + `(actual length is ${path.length}).`); + } + this.path = path; + if (loadOptions.requestInit != null && + loadOptions.requestInit.body != null) { + throw new Error('requestInit is expected to have no pre-existing body, but has one.'); + } + this.requestInit = loadOptions.requestInit || {}; + } + async save(modelArtifacts) { + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserHTTPRequest.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + const init = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit); + init.body = new FormData(); + const weightsManifest 
= [{ + paths: ['./model.weights.bin'], + weights: modelArtifacts.weightSpecs, + }]; + const modelTopologyAndWeightManifest = { + modelTopology: modelArtifacts.modelTopology, + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + userDefinedMetadata: modelArtifacts.userDefinedMetadata, + weightsManifest + }; + init.body.append('model.json', new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), 'model.json'); + if (modelArtifacts.weightData != null) { + init.body.append('model.weights.bin', new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), 'model.weights.bin'); + } + const response = await this.fetch(this.path, init); + if (response.ok) { + return { + modelArtifactsInfo: Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts), + responses: [response], + }; + } + else { + throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ` + + `${response.status}.`); + } + } + /** + * Load model artifacts via HTTP request(s). + * + * See the documentation to `tf.io.http` for details on the saved + * artifacts. + * + * @returns The loaded model artifacts (if loading succeeds). + */ + async load() { + const modelConfigRequest = await this.fetch(this.path, this.requestInit); + if (!modelConfigRequest.ok) { + throw new Error(`Request to ${this.path} failed with status code ` + + `${modelConfigRequest.status}. Please verify this URL points to ` + + `the model JSON of the model to load.`); + } + let modelConfig; + try { + modelConfig = await modelConfigRequest.json(); + } + catch (e) { + let message = `Failed to parse model JSON of response from ${this.path}.`; + // TODO(nsthorat): Remove this after some time when we're comfortable that + // .pb files are mostly gone. + if (this.path.endsWith('.pb')) { + message += ' Your path contains a .pb file extension. 
' + + 'Support for .pb models have been removed in TensorFlow.js 1.0 ' + + 'in favor of .json models. You can re-convert your Python ' + + 'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' + + 'or you can convert your.pb models with the \'pb2json\'' + + 'NPM script in the tensorflow/tfjs-converter repository.'; + } + else { + message += ' Please make sure the server is serving valid ' + + 'JSON for this request.'; + } + throw new Error(message); + } + const modelTopology = modelConfig.modelTopology; + const weightsManifest = modelConfig.weightsManifest; + const generatedBy = modelConfig.generatedBy; + const convertedBy = modelConfig.convertedBy; + const format = modelConfig.format; + const userDefinedMetadata = modelConfig.userDefinedMetadata; + // We do not allow both modelTopology and weightsManifest to be missing. + if (modelTopology == null && weightsManifest == null) { + throw new Error(`The JSON from HTTP path ${this.path} contains neither model ` + + `topology or manifest for weights.`); + } + let weightSpecs; + let weightData; + if (weightsManifest != null) { + const results = await this.loadWeights(weightsManifest); + [weightSpecs, weightData] = results; + } + return { + modelTopology, + weightSpecs, + weightData, + userDefinedMetadata, + generatedBy, + convertedBy, + format + }; + } + async loadWeights(weightsManifest) { + const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path; + const [prefix, suffix] = parseUrl(weightPath); + const pathPrefix = this.weightPathPrefix || prefix; + const weightSpecs = []; + for (const entry of weightsManifest) { + weightSpecs.push(...entry.weights); + } + const fetchURLs = []; + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + fetchURLs.push(pathPrefix + path + suffix); + }); + }); + const buffers = await loadWeightsAsArrayBuffer(fetchURLs, { + requestInit: this.requestInit, + fetchFunc: this.fetch, + onProgress: this.onProgress + }); + return [weightSpecs, Object(io_utils["d" /* concatenateArrayBuffers */])(buffers)]; + } +} +http_HTTPRequest.URL_SCHEME_REGEX = /^https?:\/\//; +/** + * Extract the prefix and suffix of the url, where the prefix is the path before + * the last file, and suffix is the search params after the last file. + * ``` + * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file' + * [prefix, suffix] = parseUrl(url) + * // prefix = 'http://tfhub.dev/model/1/' + * // suffix = '?tfjs-format=file' + * ``` + * @param url the model url to be parsed. + */ +function parseUrl(url) { + const lastSlash = url.lastIndexOf('/'); + const lastSearchParam = url.lastIndexOf('?'); + const prefix = url.substring(0, lastSlash); + const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : ''; + return [prefix + '/', suffix]; +} +function isHTTPScheme(url) { + return url.match(http_HTTPRequest.URL_SCHEME_REGEX) != null; +} +const httpRouter = (url, loadOptions) => { + if (typeof fetch === 'undefined' && + (loadOptions == null || loadOptions.fetchFunc == null)) { + // `http` uses `fetch` or `node-fetch`, if one wants to use it in + // an environment that is not the browser or node they have to setup a + // global fetch polyfill. 
+ return null; + } + else { + let isHTTP = true; + if (Array.isArray(url)) { + isHTTP = url.every(urlItem => isHTTPScheme(urlItem)); + } + else { + isHTTP = isHTTPScheme(url); + } + if (isHTTP) { + return http(url, loadOptions); + } + } + return null; +}; +IORouterRegistry.registerSaveRouter(httpRouter); +IORouterRegistry.registerLoadRouter(httpRouter); +/** + * Creates an IOHandler subtype that sends model artifacts to HTTP server. + * + * An HTTP request of the `multipart/form-data` mime type will be sent to the + * `path` URL. The form data includes artifacts that represent the topology + * and/or weights of the model. In the case of Keras-style `tf.Model`, two + * blobs (files) exist in form-data: + * - A JSON file consisting of `modelTopology` and `weightsManifest`. + * - A binary weights file consisting of the concatenated weight values. + * These files are in the same format as the one generated by + * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html). + * + * The following code snippet exemplifies the client-side code that uses this + * function: + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save(tf.io.http( + * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}})); + * console.log(saveResult); + * ``` + * + * If the default `POST` method is to be used, without any custom parameters + * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`: + * + * ```js + * const saveResult = await model.save('http://model-server:5000/upload'); + * ``` + * + * The following GitHub Gist + * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864 + * implements a server based on [flask](https://github.com/pallets/flask) that + * can receive the request. 
 Upon receiving the model artifacts via the request, + * this particular server reconstitutes instances of [Keras + * Models](https://keras.io/models/model/) in memory. + * + * + * @param path A URL path to the model. + * Can be an absolute HTTP path (e.g., + * 'http://localhost:8000/model-upload)') or a relative path (e.g., + * './model-upload'). + * @param requestInit Request configurations to be used when sending + * HTTP request to server using `fetch`. It can contain fields such as + * `method`, `credentials`, `headers`, `mode`, etc. See + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request + * for more information. `requestInit` must not have a body, because the + * body will be set by TensorFlow.js. File blobs representing the model + * topology (filename: 'model.json') and the weights of the model (filename: + * 'model.weights.bin') will be appended to the body. If `requestInit` has a + * `body`, an Error will be thrown. + * @param loadOptions Optional configuration for the loading. It includes the + * following fields: + * - weightPathPrefix Optional, this specifies the path prefix for weight + * files, by default this is calculated from the path param. + * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js, + * the `fetch` from node-fetch can be used here. + * - onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns An instance of `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function http(path, loadOptions) { + return new http_HTTPRequest(path, loadOptions); +} +/** + * Deprecated. Use `tf.io.http`. 
+ * @param path + * @param loadOptions + */ +function browserHTTPRequest(path, loadOptions) { + return http(path, loadOptions); +} +//# sourceMappingURL=http.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/passthrough.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +class PassthroughLoader { + constructor(modelArtifacts) { + this.modelArtifacts = modelArtifacts; + } + async load() { + return this.modelArtifacts; + } +} +class PassthroughSaver { + constructor(saveHandler) { + this.saveHandler = saveHandler; + } + async save(modelArtifacts) { + return this.saveHandler(modelArtifacts); + } +} +/** + * Creates an IOHandler that loads model artifacts from memory. + * + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * const model = await tf.loadLayersModel(tf.io.fromMemory( + * modelTopology, weightSpecs, weightData)); + * ``` + * + * @param modelArtifacts a object containing model topology (i.e., parsed from + * the JSON format). + * @param weightSpecs An array of `WeightsManifestEntry` objects describing the + * names, shapes, types, and quantization of the weight data. 
+ * @param weightData A single `ArrayBuffer` containing the weight data, + * concatenated in the order described by the weightSpecs. + * @param trainingConfig Model training configuration. Optional. + * + * @returns A passthrough `IOHandler` that simply loads the provided data. + */ +function fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) { + if (arguments.length === 1) { + const isModelArtifacts = modelArtifacts.modelTopology != null || + modelArtifacts.weightSpecs != null; + if (isModelArtifacts) { + return new PassthroughLoader(modelArtifacts); + } + else { + // Legacy support: with only modelTopology. + // TODO(cais): Remove this deprecated API. + console.warn('Please call tf.io.fromMemory() with only one argument. ' + + 'The argument should be of type ModelArtifacts. ' + + 'The multi-argument signature of tf.io.fromMemory() has been ' + + 'deprecated and will be removed in a future release.'); + return new PassthroughLoader({ modelTopology: modelArtifacts }); + } + } + else { + // Legacy support. + // TODO(cais): Remove this deprecated API. + console.warn('Please call tf.io.fromMemory() with only one argument. ' + + 'The argument should be of type ModelArtifacts. ' + + 'The multi-argument signature of tf.io.fromMemory() has been ' + + 'deprecated and will be removed in a future release.'); + return new PassthroughLoader({ + modelTopology: modelArtifacts, + weightSpecs, + weightData, + trainingConfig + }); + } +} +/** + * Creates an IOHandler that passes saved model artifacts to a callback. + * + * ```js + * function handleSave(artifacts) { + * // ... do something with the artifacts ... + * return {modelArtifactsInfo: {...}, ...}; + * } + * + * const saveResult = model.save(tf.io.withSaveHandler(handleSave)); + * ``` + * + * @param saveHandler A function that accepts a `ModelArtifacts` and returns a + * `SaveResult`. 
+ */ +function withSaveHandler(saveHandler) { + return new PassthroughSaver(saveHandler); +} +//# sourceMappingURL=passthrough.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/io.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +// Importing local_storage and indexed_db is necessary for the routers to be +// registered. + + + + + + + + + + +//# sourceMappingURL=io.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/one_hot.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Creates a one-hot `tf.Tensor`. 
The locations represented by `indices` take + * value `onValue` (defaults to 1), while all other locations take value + * `offValue` (defaults to 0). If `indices` is rank `R`, the output has rank + * `R+1` with the last axis of size `depth`. + * + * ```js + * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print(); + * ``` + * + * @param indices `tf.Tensor` of indices with dtype `int32`. + * @param depth The depth of the one hot dimension. + * @param onValue A number used to fill in the output when the index matches + * the location. + * @param offValue A number used to fill in the output when the index does + * not match the location. + */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function oneHot_(indices, depth, onValue = 1, offValue = 0) { + if (depth < 2) { + throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`); + } + let $indices = Object(tensor_util_env["a" /* convertToTensor */])(indices, 'indices', 'oneHot', 'int32'); + const outShape = [...$indices.shape, depth]; + $indices = $indices.flatten(); + const forward = (backend, save) => { + save([$indices]); + return reshape(backend.oneHot($indices, depth, onValue, offValue), outShape); + }; + const inputs = { indices: $indices }; + const attrs = { depth, onValue, offValue }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["cb" /* OneHot */], attrs); +} +const oneHot = Object(operation["a" /* op */])({ oneHot_ }); +//# sourceMappingURL=one_hot.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/confusion_matrix.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Computes the confusion matrix from true labels and predicted labels. + * + * ```js + * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32'); + * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32'); + * const numClasses = 3; + * const out = tf.math.confusionMatrix(labels, predictions, numClasses); + * out.print(); + * // Expected output matrix: + * // [[2, 0, 0], + * // [0, 1, 1], + * // [0, 0, 1]] + * ``` + * + * @param labels The target labels, assumed to be 0-based integers + * for the classes. The shape is `[numExamples]`, where + * `numExamples` is the number of examples included. + * @param predictions The predicted classes, assumed to be + * 0-based integers for the classes. Must have the same shape as `labels`. + * @param numClasses Number of all classes, as an integer. + * Its value must be larger than the largest element in `labels` and + * `predictions`. + * @returns The confusion matrix as a int32-type 2D tensor. The value at + * row `r` and column `c` is the number of times examples of actual class + * `r` were predicted as class `c`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Evaluation'} */ +function confusionMatrix_(labels, predictions, numClasses) { + const $labels = Object(tensor_util_env["a" /* convertToTensor */])(labels, 'labels', 'confusionMatrix'); + const $predictions = Object(tensor_util_env["a" /* convertToTensor */])(predictions, 'predictions', 'confusionMatrix'); + util["assert"](numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, ` + + `but got ${numClasses}`); + util["assert"]($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`); + util["assert"]($predictions.rank === 1, () => `Expected the rank of predictions to be 1, ` + + `but got ${$predictions.rank}`); + util["assert"]($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ` + + `${$labels.shape[0]} vs. ${$predictions.shape[0]}. ` + + `Labels and predictions should have the same number of elements.`); + util["assert"](numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ` + + `${numClasses}`); + // TODO(cais): In the future, if oneHot supports tensors inputs for + // `numClasses`, `confusionMatrix` can make `numClasses` optional. + const oneHotLabels = oneHot($labels.asType('int32'), numClasses); + const oneHotPredictions = oneHot($predictions.asType('int32'), numClasses); + const oneHotLabelsT = oneHotLabels.transpose(); + return oneHotLabelsT.matMul(oneHotPredictions).asType('int32'); +} +const confusionMatrix = Object(operation["a" /* op */])({ confusionMatrix_ }); +//# sourceMappingURL=confusion_matrix.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/math.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Exports under the tf.math.* namespace. + */ + + +//# sourceMappingURL=math.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/browser.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +let fromPixels2DContext; +/** + * Creates a `tf.Tensor` from an image. + * + * ```js + * const image = new ImageData(1, 1); + * image.data[0] = 100; + * image.data[1] = 150; + * image.data[2] = 200; + * image.data[3] = 255; + * + * tf.browser.fromPixels(image).print(); + * ``` + * + * @param pixels The input image to construct the tensor from. The + * supported image types are all 4-channel. 
You can also pass in an image + * object with following attributes: + * `{data: Uint8Array; width: number; height: number}` + * @param numChannels The number of channels of the output tensor. A + * numChannels value less than 4 allows you to ignore channels. Defaults to + * 3 (ignores alpha channel of input image). + */ +/** @doc {heading: 'Browser', namespace: 'browser', ignoreCI: true} */ +function fromPixels_(pixels, numChannels = 3) { + // Sanity checks. + if (numChannels > 4) { + throw new Error('Cannot construct Tensor with more than 4 channels from pixels.'); + } + if (pixels == null) { + throw new Error('pixels passed to tf.browser.fromPixels() can not be null'); + } + let isPixelData = false; + let isImageData = false; + let isVideo = false; + let isImage = false; + let isCanvasLike = false; + if (pixels.data instanceof Uint8Array) { + isPixelData = true; + } + else if (typeof (ImageData) !== 'undefined' && pixels instanceof ImageData) { + isImageData = true; + } + else if (typeof (HTMLVideoElement) !== 'undefined' && + pixels instanceof HTMLVideoElement) { + isVideo = true; + } + else if (typeof (HTMLImageElement) !== 'undefined' && + pixels instanceof HTMLImageElement) { + isImage = true; + // tslint:disable-next-line: no-any + } + else if (pixels.getContext != null) { + isCanvasLike = true; + } + else { + throw new Error('pixels passed to tf.browser.fromPixels() must be either an ' + + `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData ` + + `in browser, or OffscreenCanvas, ImageData in webworker` + + ` or {data: Uint32Array, width: number, height: number}, ` + + `but was ${pixels.constructor.name}`); + } + if (isVideo) { + const HAVE_CURRENT_DATA_READY_STATE = 2; + if (isVideo && + pixels.readyState < + HAVE_CURRENT_DATA_READY_STATE) { + throw new Error('The video element has not loaded data yet. Please wait for ' + + '`loadeddata` event on the
'; + + // construct each row of the maxdiff table + for (var i = 0; i < trial.alternatives.length; i++) { + var alternative = trial.alternatives[alternative_order[i]]; + // add alternative + maxdiff_table += ''; + maxdiff_table += ''; + maxdiff_table += ''; + } + maxdiff_table += '
' + trial.labels[0] + '' + trial.labels[1] + '

' + alternative + '


'; + html += maxdiff_table; + + // add submit button + var enable_submit = trial.required == true ? 'disabled = "disabled"' : ''; + html += ''; + html += ''; + + display_element.innerHTML = html; + + // function to control responses + // first checks that the same alternative cannot be endorsed in the left and right columns simultaneously. + // then enables the submit button if the trial is required. + const left_right = ["left", "right"] + left_right.forEach(function(p) { + // Get all elements either 'left' or 'right' + document.getElementsByName(p).forEach(function(alt) { + alt.addEventListener('click', function() { + // Find the opposite (if left, then right & vice versa) identified by the class (jspsych-maxdiff-alt-1, 2, etc) + var op = alt.name == 'left' ? 'right' : 'left'; + var n = document.getElementsByClassName(alt.className).namedItem(op); + // If it's checked, uncheck it. + if (n.checked) { + n.checked = false; + } + + // check response + if (trial.required){ + // Now check if one of both left and right have been enabled to allow submission + var left_checked = [...document.getElementsByName('left')].some(c => c.checked); + var right_checked = [...document.getElementsByName('right')].some(c => c.checked); + if (left_checked && right_checked) { + document.getElementById("jspsych-maxdiff-next").disabled = false; + } else { + document.getElementById("jspsych-maxdiff-next").disabled = true; + } + } + }); + }); + }); + + // Get the data once the submit button is clicked + // Get the data once the submit button is clicked + display_element.querySelector('#jspsych-maxdiff-form').addEventListener('submit', function(e){ + e.preventDefault(); + + // measure response time + var endTime = performance.now(); + var response_time = endTime - startTime; + + // get the alternative by the data-name attribute, allowing a null response if unchecked + get_response = function(side){ + var col = display_element.querySelectorAll('[name=\"' + side + '\"]:checked')[0]; + if (col 
=== undefined){ + return null; + } else { + var i = parseInt(col.getAttribute('data-name')); + return trial.alternatives[i]; + } + } + + // data saving + var trial_data = { + rt: response_time, + labels: {left: trial.labels[0], right: trial.labels[1]}, + response: {left: get_response('left'), right: get_response('right')} + }; + + // next trial + jsPsych.finishTrial(trial_data); + }); + + var startTime = performance.now(); + }; + + return plugin; +})(); \ No newline at end of file diff --git a/LetterDMS/jspsych/plugins/jspsych-preload.js b/LetterDMS/jspsych/plugins/jspsych-preload.js new file mode 100644 index 0000000..01623c2 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-preload.js @@ -0,0 +1,345 @@ +/** + * jspsych-preload + * documentation: docs.jspsych.org + **/ + +jsPsych.plugins['preload'] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'preload', + description: '', + parameters: { + auto_preload: { + type: jsPsych.plugins.parameterType.BOOL, + default: false, + description: 'Whether or not to automatically preload any media files based on the timeline passed to jsPsych.init.' + }, + trials: { + type: jsPsych.plugins.parameterType.TIMELINE, + default: [], + description: 'Array with a timeline of trials to automatically preload. If one or more trial objects is provided, '+ + 'then the plugin will attempt to preload the media files used in the trial(s).' + }, + images: { + type: jsPsych.plugins.parameterType.STRING, + default: [], + description: 'Array with one or more image files to load. This parameter is often used in cases where media files cannot '+ + 'be automatically preloaded based on the timeline, e.g. because the media files are passed into an image plugin/parameter with '+ + 'timeline variables or dynamic parameters, or because the image is embedded in an HTML string.' + }, + audio: { + type: jsPsych.plugins.parameterType.STRING, + default: [], + description: 'Array with one or more audio files to load. 
This parameter is often used in cases where media files cannot '+ + 'be automatically preloaded based on the timeline, e.g. because the media files are passed into an audio plugin/parameter with '+ + 'timeline variables or dynamic parameters, or because the audio is embedded in an HTML string.' + }, + video: { + type: jsPsych.plugins.parameterType.STRING, + default: [], + description: 'Array with one or more video files to load. This parameter is often used in cases where media files cannot '+ + 'be automatically preloaded based on the timeline, e.g. because the media files are passed into a video plugin/parameter with '+ + 'timeline variables or dynamic parameters, or because the video is embedded in an HTML string.' + }, + message: { + type: jsPsych.plugins.parameterType.HTML_STRING, + default: null, + description: 'HTML-formatted message to be shown above the progress bar while the files are loading.' + }, + show_progress_bar: { + type: jsPsych.plugins.parameterType.BOOL, + default: true, + description: 'Whether or not to show the loading progress bar.' + }, + continue_after_error: { + type: jsPsych.plugins.parameterType.BOOL, + default: false, + description: 'Whether or not to continue with the experiment if a loading error occurs. If false, then if a loading error occurs, '+ + 'the error_message will be shown on the page and the trial will not end. If true, then if if a loading error occurs, the trial will end '+ + 'and preloading failure will be logged in the trial data.' + }, + error_message: { + type: jsPsych.plugins.parameterType.HTML_STRING, + default: 'The experiment failed to load.', + description: 'Error message to show on the page in case of any loading errors. This parameter is only relevant when continue_after_error is false.' + }, + show_detailed_errors: { + type: jsPsych.plugins.parameterType.BOOL, + default: false, + description: 'Whether or not to show a detailed error message on the page. 
If true, then detailed error messages will be shown on the '+ + 'page for all files that failed to load, along with the general error_message. This parameter is only relevant when continue_after_error is false.' + }, + max_load_time: { + type: jsPsych.plugins.parameterType.INT, + default: null, + description: 'The maximum amount of time that the plugin should wait before stopping the preload and either ending the trial '+ + '(if continue_after_error is true) or stopping the experiment with an error message (if continue_after_error is false). '+ + 'If null, the plugin will wait indefintely for the files to load.' + }, + on_error: { + type: jsPsych.plugins.parameterType.FUNCTION, + default: null, + description: 'Function to be called after a file fails to load. The function takes the file name as its only argument.' + }, + on_success: { + type: jsPsych.plugins.parameterType.FUNCTION, + default: null, + description: 'Function to be called after a file loads successfully. The function takes the file name as its only argument.' 
+ } + } + } + + plugin.trial = function(display_element, trial) { + + var success = null; + var timeout = false; + var failed_images = []; + var failed_audio = []; + var failed_video = []; + var detailed_errors = []; + var in_safe_mode = jsPsych.getSafeModeStatus(); + + // create list of media to preload // + + var images = []; + var audio = []; + var video = []; + + if(trial.auto_preload){ + var auto_preload = jsPsych.pluginAPI.getAutoPreloadList(); + images = images.concat(auto_preload.images); + audio = audio.concat(auto_preload.audio); + video = video.concat(auto_preload.video); + } + + if(trial.trials.length > 0){ + var trial_preloads = jsPsych.pluginAPI.getAutoPreloadList(trial.trials); + images = images.concat(trial_preloads.images); + audio = audio.concat(trial_preloads.audio); + video = video.concat(trial_preloads.video); + } + + images = images.concat(trial.images); + audio = audio.concat(trial.audio); + video = video.concat(trial.video); + + images = jsPsych.utils.unique(jsPsych.utils.flatten(images)); + audio = jsPsych.utils.unique(jsPsych.utils.flatten(audio)); + video = jsPsych.utils.unique(jsPsych.utils.flatten(video)); + + if (in_safe_mode) { + // don't preload video if in safe mode (experiment is running via file protocol) + video = []; + } + + // render display of message and progress bar + + var html = ''; + + if(trial.message !== null){ + html += trial.message; + } + + if(trial.show_progress_bar){ + html += ` +
+
+
`; + } + + display_element.innerHTML = html; + + // do preloading + + if(trial.max_load_time !== null){ + jsPsych.pluginAPI.setTimeout(on_timeout, trial.max_load_time); + } + + var total_n = images.length + audio.length + video.length; + var loaded = 0; // success or error count + var loaded_success = 0; // success count + + if (total_n == 0) { + on_success(); + } else { + function load_video(cb){ + jsPsych.pluginAPI.preloadVideo(video, cb, file_loading_success, file_loading_error); + } + function load_audio(cb){ + jsPsych.pluginAPI.preloadAudio(audio, cb, file_loading_success, file_loading_error); + } + function load_images(cb){ + jsPsych.pluginAPI.preloadImages(images, cb, file_loading_success, file_loading_error); + } + if (video.length > 0) { load_video(function () { }) } + if (audio.length > 0) { load_audio(function () { }) } + if (images.length > 0) { load_images(function () { }) } + } + + // helper functions and callbacks + + function update_loading_progress_bar(){ + loaded++; + if(trial.show_progress_bar){ + var percent_loaded = (loaded/total_n)*100; + var preload_progress_bar = jsPsych.getDisplayElement().querySelector('#jspsych-loading-progress-bar'); + if (preload_progress_bar !== null) { + preload_progress_bar.style.width = percent_loaded+"%"; + } + } + } + + // called when a single file loading fails + function file_loading_error(e) { + // update progress bar even if there's an error + update_loading_progress_bar(); + // change success flag after first file loading error + if (success == null) { + success = false; + } + // add file to failed media list + var source = "unknown file"; + if (e.source) { + source = e.source; + } + if (e.error && e.error.path && e.error.path.length > 0) { + if (e.error.path[0].localName == "img") { + failed_images.push(source); + } else if (e.error.path[0].localName == "audio") { + failed_audio.push(source); + } else if (e.error.path[0].localName == "video") { + failed_video.push(source); + } + } + // construct detailed 
error message + var err_msg = '

Error loading file: '+source+'
'; + if (e.error.statusText) { + err_msg += 'File request response status: '+e.error.statusText+'
'; + } + if (e.error == "404") { + err_msg += '404 - file not found.
'; + } + if (typeof e.error.loaded !== 'undefined' && e.error.loaded !== null && e.error.loaded !== 0) { + err_msg += e.error.loaded+' bytes transferred.'; + } else { + err_msg += 'File did not begin loading. Check that file path is correct and reachable by the browser,
'+ + 'and that loading is not blocked by cross-origin resource sharing (CORS) errors.'; + } + err_msg += '

'; + detailed_errors.push(err_msg); + // call trial's on_error function + after_error(source); + // if this is the last file + if (loaded == total_n) { + if (trial.continue_after_error) { + // if continue_after_error is false, then stop with an error + end_trial(); + } else { + // otherwise end the trial and continue + stop_with_error_message(); + } + } + } + + // called when a single file loads successfully + function file_loading_success(source) { + update_loading_progress_bar(); + // call trial's on_success function + after_success(source); + loaded_success++; + if (loaded_success == total_n) { + // if this is the last file and all loaded successfully, call success function + on_success(); + } else if (loaded == total_n) { + // if this is the last file and there was at least one error + if (trial.continue_after_error) { + // end the trial and continue with experiment + end_trial(); + } else { + // if continue_after_error is false, then stop with an error + stop_with_error_message(); + } + } + } + + // called if all files load successfully + function on_success() { + if (typeof timeout !== 'undefined' && timeout === false) { + // clear timeout immediately after finishing, to handle race condition with max_load_time + jsPsych.pluginAPI.clearAllTimeouts(); + // need to call cancel preload function to clear global jsPsych preload_request list, even when they've all succeeded + jsPsych.pluginAPI.cancelPreloads(); + success = true; + end_trial(); + } + } + + // called if all_files haven't finished loading when max_load_time is reached + function on_timeout() { + //console.log('timeout fired'); + jsPsych.pluginAPI.cancelPreloads(); + if (typeof success !== 'undefined' && (success === false || success === null)) { + timeout = true; + if (loaded_success < total_n) { + success = false; + } + after_error('timeout'); // call trial's on_error event handler here, in case loading timed out with no file errors + detailed_errors.push('

Loading timed out.
'+ + 'Consider compressing your stimuli files, loading your files in smaller batches,
'+ + 'and/or increasing the max_load_time parameter.

'); + if (trial.continue_after_error) { + end_trial(); + } else { + stop_with_error_message(); + } + } + } + + function stop_with_error_message() { + jsPsych.pluginAPI.clearAllTimeouts(); + jsPsych.pluginAPI.cancelPreloads(); + // show error message + display_element.innerHTML = trial.error_message; + // show detailed errors, if necessary + if (trial.show_detailed_errors) { + display_element.innerHTML += '

Error details:

'; + detailed_errors.forEach(function(e) { + display_element.innerHTML += e; + }); + } + } + + function after_error(source) { + // call on_error function and pass file name + if (trial.on_error !== null) { + trial.on_error(source); + } + } + function after_success(source) { + // call on_success function and pass file name + if (trial.on_success !== null) { + trial.on_success(source); + } + } + + function end_trial(){ + // clear timeout again when end_trial is called, to handle race condition with max_load_time + jsPsych.pluginAPI.clearAllTimeouts(); + var trial_data = { + success: success, + timeout: timeout, + failed_images: failed_images, + failed_audio: failed_audio, + failed_video: failed_video + }; + // clear the display + display_element.innerHTML = ''; + jsPsych.finishTrial(trial_data); + } + }; + + return plugin; + })(); + \ No newline at end of file diff --git a/LetterDMS/jspsych/plugins/jspsych-rdk.js b/LetterDMS/jspsych/plugins/jspsych-rdk.js new file mode 100644 index 0000000..5b07d9d --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-rdk.js @@ -0,0 +1,1373 @@ +/* + + RDK plugin for JsPsych + ---------------------- + + This code was created in the Consciousness and Metacognition Lab at UCLA, + under the supervision of Brian Odegaard and Hakwan Lau + + We would appreciate it if you cited this paper when you use the RDK: + Rajananda, S., Lau, H. & Odegaard, B., (2018). A Random-Dot Kinematogram for Web-Based Vision Research. Journal of Open Research Software. 6(1), p.6. DOI: [http://doi.org/10.5334/jors.194] + + ---------------------- + + Copyright (C) 2017 Sivananda Rajananda + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. 
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +*/ + + +jsPsych.plugins["rdk"] = (function() { + + var plugin = {}; + + plugin.info = { + name: "rdk", + parameters: { + choices: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: "Choices", + default: jsPsych.ALL_KEYS, + array: true, + description: "The valid keys that the subject can press to indicate a response" + }, + correct_choice: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: "Correct choice", + default: undefined, + array: true, + description: "The correct keys for that trial" + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Trial duration", + default: 500, + description: "The length of stimulus presentation" + }, + response_ends_trial: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: "Response ends trial", + default: true, + description: "If true, then any valid key will end the trial" + }, + number_of_apertures: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Number of apertures", + default: 1, + description: "The number of RDK apertures (If more than one, make sure to separate them by setting aperture_center_x and aperture_center_y for each RDK)" + }, + number_of_dots: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Number of dots", + default: 300, + description: "The number of dots per set in the stimulus" + }, + number_of_sets: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Number of sets", + default: 1, + description: "The number of sets of dots to cycle through" + }, + coherent_direction: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Coherent direction", + default: 0, + 
description: "The direction of coherent motion in degrees" + }, + coherence: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: "Coherence", + default: 0.5, + description: "The percentage of dots moving in the coherent direction" + }, + opposite_coherence: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: "Opposite coherence", + default: 0, + description: "The percentage of dots moving in the direction opposite of the coherent direction" + }, + dot_radius: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Dot radius", + default: 2, + description: "The radius of the dots in pixels" + }, + dot_life: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Dot life", + default: -1, + description: "The number of frames that pass before each dot disappears and reappears somewhere else" + }, + move_distance: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Move distance", + default: 1, + description: "The distance in pixels each dot moves per frame" + }, + aperture_width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Aperture width", + default: 600, + description: "The width of the aperture in pixels" + }, + aperture_height: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Aperture height", + default: 400, + description: "The height of the aperture in pixels" + }, + dot_color: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: "Dot color", + default: "white", + description: "The color of the dots" + }, + background_color: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: "Background color", + default: "gray", + description: "The background of the stimulus" + }, + RDK_type: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "RDK type", + default: 3, + description: "The Type of RDK (refer to documentation for details)" + }, + aperture_type: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Aperture Type", + default: 2, + description: "The shape of the 
aperture" + }, + reinsert_type: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Reinsert type", + default: 2, + description: "The reinsertion rule for dots that move out of the aperture" + }, + aperture_center_x: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Aperture center X", + default: window.innerWidth/2, + description: "The x-coordinate of the center of the aperture" + }, + aperture_center_y: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Aperture center Y", + default: window.innerHeight/2, + description: "The y-coordinate of the center of the aperture" + }, + fixation_cross: { + type: jsPsych.plugins.parameterType.INT, //boolean + pretty_name: "Fixation cross", + default: false, + description: "If true, then a fixation cross will be present in the middle of the screen" + }, + fixation_cross_width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Fixation cross width", + default: 20, + description: "The width of the fixation cross in pixels" + }, + fixation_cross_height: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Fixation cross height", + default: 20, + description: "The height of the fixation cross in pixels" + }, + fixation_cross_color: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: "Fixation cross color", + default: "black", + description: "The color of the fixation cross" + }, + fixation_cross_thickness: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Fixation cross thickness", + default: 1, + description: "The thickness of the fixation cross" + }, + border: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: "Border", + default: false, + description: "The presence of a border around the aperture" + }, + border_thickness: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Border width", + default: 1, + description: "The thickness of the border in pixels" + }, + border_color: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 
"Border Color", + default: 1, + description: "The color of the border" + } + } + } + + + //BEGINNING OF TRIAL + plugin.trial = function(display_element, trial) { + + //-------------------------------------- + //---------SET PARAMETERS BEGIN--------- + //-------------------------------------- + + + //Note on '||' logical operator: If the first option is 'undefined', it evalutes to 'false' and the second option is returned as the assignment + trial.choices = assignParameterValue(trial.choices, []); + trial.correct_choice = assignParameterValue(trial.correct_choice, undefined); + trial.trial_duration = assignParameterValue(trial.trial_duration, 500); + trial.response_ends_trial = assignParameterValue(trial.response_ends_trial, true); + trial.number_of_apertures = assignParameterValue(trial.number_of_apertures, 1); + trial.number_of_dots = assignParameterValue(trial.number_of_dots, 300); + trial.number_of_sets = assignParameterValue(trial.number_of_sets, 1); + trial.coherent_direction = assignParameterValue(trial.coherent_direction, 0); + trial.coherence = assignParameterValue(trial.coherence, 0.5); + trial.opposite_coherence = assignParameterValue(trial.opposite_coherence, 0); + trial.dot_radius = assignParameterValue(trial.dot_radius, 2); + trial.dot_life = assignParameterValue(trial.dot_life, -1); + trial.move_distance = assignParameterValue(trial.move_distance, 1); + trial.aperture_width = assignParameterValue(trial.aperture_width, 600); + trial.aperture_height = assignParameterValue(trial.aperture_height, 400); + trial.dot_color = assignParameterValue(trial.dot_color, "white"); + trial.background_color = assignParameterValue(trial.background_color, "gray"); + trial.RDK_type = assignParameterValue(trial.RDK_type, 3); + trial.aperture_type = assignParameterValue(trial.aperture_type, 2); + trial.reinsert_type = assignParameterValue(trial.reinsert_type, 2); + trial.aperture_center_x = assignParameterValue(trial.aperture_center_x, window.innerWidth/2); + 
trial.aperture_center_y = assignParameterValue(trial.aperture_center_y, window.innerHeight/2); + trial.fixation_cross = assignParameterValue(trial.fixation_cross, false); + trial.fixation_cross_width = assignParameterValue(trial.fixation_cross_width, 20); + trial.fixation_cross_height = assignParameterValue(trial.fixation_cross_height, 20); + trial.fixation_cross_color = assignParameterValue(trial.fixation_cross_color, "black"); + trial.fixation_cross_thickness = assignParameterValue(trial.fixation_cross_thickness, 1); + trial.border = assignParameterValue(trial.border, false); + trial.border_thickness = assignParameterValue(trial.border_thickness, 1); + trial.border_color = assignParameterValue(trial.border_color, "black"); + + + //For square and circle, set the aperture height == aperture width + if (apertureType == 1 || apertureType == 3) { + trial.aperture_height = trial.aperture_width; + } + + //Convert the parameter variables to those that the code below can use + + var nApertures = trial.number_of_apertures; //The number of apertures + var nDots = trial.number_of_dots; //Number of dots per set (equivalent to number of dots per frame) + var nSets = trial.number_of_sets; //Number of sets to cycle through per frame + var coherentDirection = trial.coherent_direction; //The direction of the coherentDots in degrees. Starts at 3 o'clock and goes counterclockwise (0 == rightwards, 90 == upwards, 180 == leftwards, 270 == downwards), range 0 - 360 + var coherence = trial.coherence; //Proportion of dots to move together, range from 0 to 1 + var oppositeCoherence = trial.opposite_coherence; // The coherence for the dots going the opposite direction as the coherent dots + var dotRadius = trial.dot_radius; //Radius of each dot in pixels + var dotLife = trial.dot_life; //How many frames a dot will keep following its trajectory before it is redrawn at a random location. -1 denotes infinite life (the dot will only be redrawn if it reaches the end of the aperture). 
+ var moveDistance = trial.move_distance; //How many pixels the dots move per frame + var apertureWidth = trial.aperture_width; // How many pixels wide the aperture is. For square aperture this will be the both height and width. For circle, this will be the diameter. + var apertureHeight = trial.aperture_height; //How many pixels high the aperture is. Only relevant for ellipse and rectangle apertures. For circle and square, this is ignored. + var dotColor = trial.dot_color; //Color of the dots + var backgroundColor = trial.background_color; //Color of the background + var apertureCenterX = trial.aperture_center_x; // The x-coordinate of center of the aperture on the screen, in pixels + var apertureCenterY = trial.aperture_center_y; // The y-coordinate of center of the aperture on the screen, in pixels + + + /* RDK type parameter + ** See Fig. 1 in Scase, Braddick, and Raymond (1996) for a visual depiction of these different signal selection rules and noise types + + ------------------- + SUMMARY: + + Signal Selection rule: + -Same: Each dot is designated to be either a coherent dot (signal) or incoherent dot (noise) and will remain so throughout all frames in the display. Coherent dots will always move in the direction of coherent motion in all frames. + -Different: Each dot can be either a coherent dot (signal) or incoherent dot (noise) and will be designated randomly (weighted based on the coherence level) at each frame. Only the dots that are designated to be coherent dots will move in the direction of coherent motion, but only in that frame. In the next frame, each dot will be designated randomly again on whether it is a coherent or incoherent dot. + + Noise Type: + -Random position: The incoherent dots appear in a random location in the aperture in each frame + -Random walk: The incoherent dots will move in a random direction (designated randomly in each frame) in each frame. 
+ -Random direction: Each incoherent dot has its own alternative direction of motion (designated randomly at the beginning of the trial), and moves in that direction in each frame. + + ------------------- + + 1 - same && random position + 2 - same && random walk + 3 - same && random direction + 4 - different && random position + 5 - different && random walk + 6 - different && random direction */ + + var RDK = trial.RDK_type; + + + /* + Shape of aperture + 1 - Circle + 2 - Ellipse + 3 - Square + 4 - Rectangle + */ + var apertureType = trial.aperture_type; + + /* + Out of Bounds Decision + How we reinsert a dot that has moved outside the edges of the aperture: + 1 - Randomly appear anywhere in the aperture + 2 - Appear on the opposite edge of the aperture (Random if square or rectangle, reflected about origin in circle and ellipse) + */ + var reinsertType = trial.reinsert_type; + + //Fixation Cross Parameters + var fixationCross = trial.fixation_cross; //To display or not to display the cross + var fixationCrossWidth = trial.fixation_cross_width; //The width of the fixation cross in pixels + var fixationCrossHeight = trial.fixation_cross_height; //The height of the fixation cross in pixels + var fixationCrossColor = trial.fixation_cross_color; //The color of the fixation cross + var fixationCrossThickness = trial.fixation_cross_thickness; //The thickness of the fixation cross, must be positive number above 1 + + //Border Parameters + var border = trial.border; //To display or not to display the border + var borderThickness = trial.border_thickness; //The width of the border in pixels + var borderColor = trial.border_color; //The color of the border + + + + //-------------------------------------- + //----------SET PARAMETERS END---------- + //-------------------------------------- + + //--------Set up Canvas begin------- + + //Create a canvas element and append it to the DOM + var canvas = document.createElement("canvas"); + display_element.appendChild(canvas); + + + 
// The jsPsych display element is the page body; grab it so its styling can be
// temporarily overridden for the duration of the trial.
var body = document.getElementsByClassName("jspsych-display-element")[0];

// Remember the current styling so end_trial can restore it afterwards.
var originalMargin = body.style.margin;
var originalPadding = body.style.padding;
var originalBackgroundColor = body.style.backgroundColor;

// Strip margins/padding and match the page background to the canvas background
// so that removing the canvas at the end of the trial is not noticeable.
body.style.margin = 0;
body.style.padding = 0;
body.style.backgroundColor = backgroundColor;

// Same for the canvas itself; absolute positioning in the top-left corner
// gets rid of scroll bars.
canvas.style.margin = 0;
canvas.style.padding = 0;
canvas.style.position = 'absolute';
canvas.style.top = 0;
canvas.style.left = 0;

// 2D drawing context used for all painting below.
var ctx = canvas.getContext("2d");

// Size the canvas to the full window and keep the dimensions handy.
var canvasWidth = canvas.width = window.innerWidth;
var canvasHeight = canvas.height = window.innerHeight;

// Match the canvas background to the requested background color.
canvas.style.backgroundColor = backgroundColor;

//--------Set up Canvas end-------

//--------RDK variables and function calls begin--------

// Index of the aperture currently being initialized / updated / drawn.
var currentApertureNumber;

// 3D array holding the dots (1st dim: apertures, 2nd dim: sets, 3rd dim: dots).
var dotArray3d = [];

// Per-aperture parameter arrays (filled in by setUpMultipleApertures below).
var nDotsArray;
var nSetsArray;
var coherentDirectionArray;
var coherenceArray;
var oppositeCoherenceArray;
var dotRadiusArray;
var dotLifeArray;
var moveDistanceArray;
var apertureWidthArray;
var apertureHeightArray;
var dotColorArray;
var apertureCenterXArray;
var apertureCenterYArray;

// Expand every trial parameter into a per-aperture array and build the dots.
setUpMultipleApertures();

// Aperture half-axes, set per shape by initializeApertureDimensions below.
var horizontalAxis;
var verticalAxis;

// Per-frame x/y displacement of coherently moving dots.
var coherentJumpSizeX;
var coherentJumpSizeY;

// Counts of coherent, opposite-coherent, and incoherent dots in the current aperture.
var nCoherentDots;
var nOppositeCoherentDots;
var nIncoherentDots;

// 2D array (sets x dots) of dot objects for the current aperture.
var dotArray2d;

var dotArray;        // the dot set currently being processed
var currentSetArray; // per-aperture index of the set shown this frame

// Stopping condition for the animateDotMotion loop.
var stopDotMotion = false;

// The first frame follows different timing, so it is skipped in the animate loop.
var firstFrame = true;

// Set once the trial-duration timer has been started.
var timerHasStarted = false;

// Response data; default -1 values are kept if the trial times out
// without the subject pressing a valid key.
var response = {
  rt: -1,
  key: -1
}

// ID of the trial-duration timeout (set in animateDotMotion, cleared in after_response).
var timeoutID;

// Handle of the keyboard listener (set in startKeyboardListener, cancelled in end_trial).
var keyboardListener;

// Measured ms-per-frame intervals; collapsed to a single average in end_trial.
var frameRate = [];
+ + //variable to store how many frames were presented. + var numberOfFrames = 0; + + //This runs the dot motion simulation, updating it according to the frame refresh rate of the screen. + animateDotMotion(); + + + //--------RDK variables and function calls end-------- + + + + //------------------------------------- + //-----------FUNCTIONS BEGIN----------- + //------------------------------------- + + //----JsPsych Functions Begin---- + + + //Function to start the keyboard listener + function startKeyboardListener(){ + //Start the response listener if there are choices for keys + if (trial.choices != jsPsych.NO_KEYS) { + //Create the keyboard listener to listen for subjects' key response + keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, //Function to call once the subject presses a valid key + valid_responses: trial.choices, //The keys that will be considered a valid response and cause the callback function to be called + rt_method: 'performance', //The type of method to record timing information. + persist: false, //If set to false, keyboard listener will only trigger the first time a valid key is pressed. If set to true, it has to be explicitly cancelled by the cancelKeyboardResponse plugin API. + allow_held_key: false //Only register the key once, after this getKeyboardResponse function is called. (Check JsPsych docs for better info under 'jsPsych.pluginAPI.getKeyboardResponse'). 
+ }); + } + } + + //Function to end the trial proper + function end_trial() { + + //Stop the dot motion animation + stopDotMotion = true; + + //Store the number of frames + numberOfFrames = frameRate.length; + + //Variable to store the frame rate array + var frameRateArray = frameRate; + + //Calculate the average frame rate + if(frameRate.length > 0){//Check to make sure that the array is not empty + frameRate = frameRate.reduce((total,current) => total + current)/frameRate.length; //Sum up all the elements in the array + }else{ + frameRate = 0; //Set to zero if the subject presses an answer before a frame is shown (i.e. if frameRate is an empty array) + } + + //Kill the keyboard listener if keyboardListener has been defined + if (typeof keyboardListener !== 'undefined') { + jsPsych.pluginAPI.cancelKeyboardResponse(keyboardListener); + } + + //Place all the data to be saved from this trial in one data object + var trial_data = { + rt: response.rt, //The response time + response: response.key, //The key that the subject pressed + correct: correctOrNot(), //If the subject response was correct + choices: trial.choices, //The set of valid keys + correct_choice: trial.correct_choice, //The correct choice + trial_duration: trial.trial_duration, //The trial duration + response_ends_trial: trial.response_ends_trial, //If the response ends the trial + number_of_apertures: trial.number_of_apertures, + number_of_dots: trial.number_of_dots, + number_of_sets: trial.number_of_sets, + coherent_direction: trial.coherent_direction, + coherence: trial.coherence, + opposite_coherence: trial.opposite_coherence, + dot_radius: trial.dot_radius, + dot_life: trial.dot_life, + move_distance: trial.move_distance, + aperture_width: trial.aperture_width, + aperture_height: trial.aperture_height, + dot_color: trial.dot_color, + background_color: trial.background_color, + RDK_type: trial.RDK_type, + aperture_type: trial.aperture_type, + reinsert_type: trial.reinsert_type, + frame_rate: 
frameRate, //The average frame rate for the trial + frame_rate_array: frameRateArray, //The array of ms per frame in this trial + number_of_frames: numberOfFrames, //The number of frames in this trial + aperture_center_x: trial.aperture_center_x, + aperture_center_y: trial.aperture_center_y, + fixation_cross: trial.fixation_cross, + fixation_cross_width: trial.fixation_cross_width, + fixation_cross_height: trial.fixation_cross_height, + fixation_cross_color: trial.fixation_cross_color, + fixation_cross_thickness: trial.fixation_cross_thickness, + border: trial.border, + border_thickness: trial.border_thickness, + border_color: trial.border_color, + canvas_width: canvasWidth, + canvas_height: canvasHeight + } + + //Remove the canvas as the child of the display_element element + display_element.innerHTML=''; + + //Restore the settings to JsPsych defaults + body.style.margin = originalMargin; + body.style.padding = originalPadding; + body.style.backgroundColor = originalBackgroundColor + + //End this trial and move on to the next trial + jsPsych.finishTrial(trial_data); + + } //End of end_trial + + //Function to record the first response by the subject + function after_response(info) { + + //If the response has not been recorded, record it + if (response.key == -1) { + response = info; //Replace the response object created above + } + + //If the parameter is set such that the response ends the trial, then kill the timeout and end the trial + if (trial.response_ends_trial) { + window.clearTimeout(timeoutID); + end_trial(); + } + + } //End of after_response + + //Function that determines if the response is correct + function correctOrNot(){ + + //Check that the correct_choice has been defined + if(typeof trial.correct_choice !== 'undefined'){ + //If the correct_choice variable holds an array + if(trial.correct_choice.constructor === Array){ //If it is an array + //If the elements are characters + if(typeof trial.correct_choice[0] === 'string' || trial.correct_choice[0] 
instanceof String){ + var key_in_choices = trial.correct_choice.every(function(x) { + return jsPsych.pluginAPI.compareKeys(x,response.key); + }); + return key_in_choices; //If the response is included in the correct_choice array, return true. Else, return false. + } + //Else if the elements are numbers (javascript character codes) + else if (typeof trial.correct_choice[0] === 'number'){ + console.error('Error in RDK plugin: correct_choice value must be a string.'); + } + } + //Else compare the char with the response key + else{ + //If the element is a character + if(typeof trial.correct_choice === 'string' || trial.correct_choice instanceof String){ + //Return true if the user's response matches the correct answer. Return false otherwise. + return jsPsych.pluginAPI.compareKeys(response.key, trial.correct_choice); + } + //Else if the element is a number (javascript character codes) + else if (typeof trial.correct_choice === 'number'){ + console.error('Error in RDK plugin: correct_choice value must be a string.'); + } + } + } + } + + //----JsPsych Functions End---- + + //----RDK Functions Begin---- + + //Set up the variables for the apertures + function setUpMultipleApertures(){ + nDotsArray = setParameter(nDots); + nSetsArray = setParameter(nSets); + coherentDirectionArray = setParameter(coherentDirection); + coherenceArray = setParameter(coherence); + oppositeCoherenceArray = setParameter(oppositeCoherence); + dotRadiusArray = setParameter(dotRadius); + dotLifeArray = setParameter(dotLife); + moveDistanceArray = setParameter(moveDistance); + apertureWidthArray = setParameter(apertureWidth); + apertureHeightArray = setParameter(apertureHeight); + dotColorArray = setParameter(dotColor); + apertureCenterXArray = setParameter(apertureCenterX); + apertureCenterYArray = setParameter(apertureCenterY); + RDKArray = setParameter(RDK); + apertureTypeArray = setParameter(apertureType); + reinsertTypeArray = setParameter(reinsertType); + fixationCrossArray = 
setParameter(fixationCross); + fixationCrossWidthArray = setParameter(fixationCrossWidth); + fixationCrossHeightArray = setParameter(fixationCrossHeight); + fixationCrossColorArray = setParameter(fixationCrossColor); + fixationCrossThicknessArray = setParameter(fixationCrossThickness); + borderArray = setParameter(border); + borderThicknessArray = setParameter(borderThickness); + borderColorArray = setParameter(borderColor); + + currentSetArray = setParameter(0); //Always starts at zero + + + //Loop through the number of apertures to make the dots + for(currentApertureNumber = 0; currentApertureNumber < nApertures; currentApertureNumber++){ + + //Initialize the parameters to make the 2d dot array (one for each aperture); + initializeCurrentApertureParameters(); + + //Make each 2d array and push it into the 3d array + dotArray3d.push(makeDotArray2d()); + } + } + + //Function to set the parameters of the array + function setParameter(originalVariable){ + //Check if it is an array and its length matches the aperture then return the original array + if(originalVariable.constructor === Array && originalVariable.length === nApertures){ + return originalVariable; + } + //Else if it is not an array, we make it an array with duplicate values + else if(originalVariable.constructor !== Array){ + + var tempArray = []; + + //Make a for loop and duplicate the values + for(var i = 0; i < nApertures; i++){ + tempArray.push(originalVariable); + } + return tempArray; + } + //Else if the array is not long enough, then print out that error message + else if(originalVariable.constructor === Array && originalVariable.length !== nApertures){ + console.error("If you have more than one aperture, please ensure that arrays that are passed in as parameters are the same length as the number of apertures. Else you can use a single value without the array"); + } + //Else print a generic error + else{ + console.error("A parameter is incorrectly set. 
Please ensure that the nApertures parameter is set to the correct value (if using more than one aperture), and all others parameters are set correctly."); + } + } + + //Function to set the global variables to the current aperture so that the correct dots are updated and drawn + function initializeCurrentApertureParameters(){ + + //Set the global variables to that relevant to the current aperture + nDots = nDotsArray[currentApertureNumber]; + nSets = nSetsArray[currentApertureNumber]; + coherentDirection = coherentDirectionArray[currentApertureNumber]; + coherence = coherenceArray[currentApertureNumber]; + oppositeCoherence = oppositeCoherenceArray[currentApertureNumber]; + dotRadius = dotRadiusArray[currentApertureNumber]; + dotLife = dotLifeArray[currentApertureNumber]; + moveDistance = moveDistanceArray[currentApertureNumber]; + apertureWidth = apertureWidthArray[currentApertureNumber]; + apertureHeight = apertureHeightArray[currentApertureNumber]; + dotColor = dotColorArray[currentApertureNumber]; + apertureCenterX = apertureCenterXArray[currentApertureNumber]; + apertureCenterY = apertureCenterYArray[currentApertureNumber]; + RDK = RDKArray[currentApertureNumber]; + apertureType = apertureTypeArray[currentApertureNumber]; + reinsertType = reinsertTypeArray[currentApertureNumber]; + fixationCross = fixationCrossArray[currentApertureNumber]; + fixationCrossWidth = fixationCrossWidthArray[currentApertureNumber]; + fixationCrossHeight = fixationCrossHeightArray[currentApertureNumber]; + fixationCrossColor = fixationCrossColorArray[currentApertureNumber]; + fixationCrossThickness = fixationCrossThicknessArray[currentApertureNumber]; + border = borderArray[currentApertureNumber]; + borderThickness = borderThicknessArray[currentApertureNumber]; + borderColor = borderColorArray[currentApertureNumber]; + + //Calculate the x and y jump sizes for coherent dots + coherentJumpSizeX = calculateCoherentJumpSizeX(coherentDirection); + coherentJumpSizeY = 
calculateCoherentJumpSizeY(coherentDirection); + + //Initialize the aperture parameters + initializeApertureDimensions(); + + //Calculate the number of coherent, opposite coherent, and incoherent dots + nCoherentDots = nDots * coherence; + nOppositeCoherentDots = nDots * oppositeCoherence; + nIncoherentDots = nDots - (nCoherentDots + nOppositeCoherentDots); + + //If the 3d array has been made, then choose the 2d array and the current set + dotArray2d = dotArray3d.length !==0 ? dotArray3d[currentApertureNumber] : undefined; + + }// End of initializeCurrentApertureParameters + + //Calculate coherent jump size in the x direction + function calculateCoherentJumpSizeX(coherentDirection) { + var angleInRadians = coherentDirection * Math.PI / 180; + return moveDistance * Math.cos(angleInRadians); + } + + //Calculate coherent jump size in the y direction + function calculateCoherentJumpSizeY(coherentDirection) { + var angleInRadians = -coherentDirection * Math.PI / 180; //Negative sign because the y-axis is flipped on screen + return moveDistance * Math.sin(angleInRadians); + } + + //Initialize the parameters for the aperture for further calculation + function initializeApertureDimensions() { + //For circle and square + if (apertureType == 1 || apertureType == 3) { + horizontalAxis = verticalAxis = apertureWidth/2; + } + //For ellipse and rectangle + else if (apertureType == 2 || apertureType == 4) { + horizontalAxis = apertureWidth / 2; + verticalAxis = apertureHeight / 2; + } + } + + //Make the 2d array, which is an array of array of dots + function makeDotArray2d() { + //Declare an array to hold the sets of dot arrays + var tempArray = [] + //Loop for each set of dot array + for (var i = 0; i < nSets; i++) { + tempArray.push(makeDotArray()); //Make a dot array and push it into the 2d array + } + + return tempArray; + } + + //Make the dot array + function makeDotArray() { + var tempArray = [] + for (var i = 0; i < nDots; i++) { + //Initialize a dot to be modified and 
inserted into the array + var dot = { + x: 0, //x coordinate + y: 0, //y coordinate + vx: 0, //coherent x jumpsize (if any) + vy: 0, //coherent y jumpsize (if any) + vx2: 0, //incoherent (random) x jumpsize (if any) + vy2: 0, //incoherent (random) y jumpsize (if any) + latestXMove: 0, //Stores the latest x move direction for the dot (to be used in reinsertOnOppositeEdge function below) + latestYMove: 0, //Stores the latest y move direction for the dot (to be used in reinsertOnOppositeEdge function below) + lifeCount: Math.floor(randomNumberBetween(0, dotLife)), //Counter for the dot's life. Updates every time it is shown in a frame + updateType: "" //String to determine how this dot is updated + }; + + //randomly set the x and y coordinates + dot = resetLocation(dot); + + //For the same && random position RDK type + if (RDK == 1) { + //For coherent dots + if (i < nCoherentDots) { + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "constant direction"; + } + //For opposite coherent dots + else if(i >= nCoherentDots && i < (nCoherentDots + nOppositeCoherentDots)){ + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "opposite direction"; + } + //For incoherent dots + else { + dot.updateType = "random position"; + } + } //End of RDK==1 + + //For the same && random walk RDK type + if (RDK == 2) { + //For coherent dots + if (i < nCoherentDots) { + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "constant direction"; + } + //For opposite coherent dots + else if(i >= nCoherentDots && i < (nCoherentDots + nOppositeCoherentDots)){ + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "opposite direction"; + } + //For incoherent dots + else { + dot.updateType = "random walk"; + } + } //End of RDK==2 + + //For the same && random direction RDK type + if (RDK == 3) { + //For coherent dots + if (i < nCoherentDots) { + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "constant direction"; + } + //For opposite 
coherent dots + else if(i >= nCoherentDots && i < (nCoherentDots + nOppositeCoherentDots)){ + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "opposite direction"; + } + //For incoherent dots + else { + setvx2vy2(dot); // Set dot.vx2 and dot.vy2 + dot.updateType = "random direction"; + } + } //End of RDK==3 + + //For the different && random position RDK type + if (RDK == 4) { + //For all dots + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "constant direction or opposite direction or random position"; + } //End of RDK==4 + + //For the different && random walk RDK type + if (RDK == 5) { + //For all dots + dot = setvxvy(dot); // Set dot.vx and dot.vy + dot.updateType = "constant direction or opposite direction or random walk"; + } //End of RDK==5 + + //For the different && random direction RDK type + if (RDK == 6) { + //For all dots + dot = setvxvy(dot); // Set dot.vx and dot.vy + //Each dot will have its own alternate direction of motion + setvx2vy2(dot); // Set dot.vx2 and dot.vy2 + dot.updateType = "constant direction or opposite direction or random direction"; + } //End of RDK==6 + + tempArray.push(dot); + } //End of for loop + return tempArray; + } + + //Function to update all the dots all the apertures and then draw them + function updateAndDraw(){ + + //Three for loops that do things in sequence: clear, update, and draw dots. 
+ + // Clear all the current dots + for(currentApertureNumber = 0; currentApertureNumber < nApertures; currentApertureNumber++){ + + //Initialize the variables for each parameter + initializeCurrentApertureParameters(currentApertureNumber); + + //Clear the canvas by drawing over the current dots + clearDots(); + } + + // Update all the relevant dots + for(currentApertureNumber = 0; currentApertureNumber < nApertures; currentApertureNumber++){ + + //Initialize the variables for each parameter + initializeCurrentApertureParameters(currentApertureNumber); + + //Update the dots + updateDots(); + } + + // Draw all the relevant dots on the canvas + for(currentApertureNumber = 0; currentApertureNumber < nApertures; currentApertureNumber++){ + + //Initialize the variables for each parameter + initializeCurrentApertureParameters(currentApertureNumber); + + //Draw on the canvas + draw(); + } + } + + //Function that clears the dots on the canvas by drawing over it with the color of the baclground + function clearDots(){ + + //Load in the current set of dot array for easy handling + var dotArray = dotArray2d[currentSetArray[currentApertureNumber]]; + + //Loop through the dots one by one and draw them + for (var i = 0; i < nDots; i++) { + dot = dotArray[i]; + ctx.beginPath(); + ctx.arc(dot.x, dot.y, dotRadius+1, 0, Math.PI * 2); + ctx.fillStyle = backgroundColor; + ctx.fill(); + } + } + + //Draw the dots on the canvas after they're updated + function draw() { + + //Load in the current set of dot array for easy handling + var dotArray = dotArray2d[currentSetArray[currentApertureNumber]]; + + //Loop through the dots one by one and draw them + for (var i = 0; i < nDots; i++) { + dot = dotArray[i]; + ctx.beginPath(); + ctx.arc(dot.x, dot.y, dotRadius, 0, Math.PI * 2); + ctx.fillStyle = dotColor; + ctx.fill(); + } + + //Draw the fixation cross if we want it + if(fixationCross === true){ + //Horizontal line + ctx.beginPath(); + ctx.lineWidth = fixationCrossThickness; + 
ctx.moveTo(canvasWidth/2 - fixationCrossWidth, canvasHeight/2); + ctx.lineTo(canvasWidth/2 + fixationCrossWidth, canvasHeight/2); + ctx.strokeStyle = fixationCrossColor; + ctx.stroke(); + + //Vertical line + ctx.beginPath(); + ctx.lineWidth = fixationCrossThickness; + ctx.moveTo(canvasWidth/2, canvasHeight/2 - fixationCrossHeight); + ctx.lineTo(canvasWidth/2, canvasHeight/2 + fixationCrossHeight); + ctx.strokeStyle = fixationCrossColor; + ctx.stroke(); + } + + //Draw the border if we want it + if(border === true){ + + //For circle and ellipse + if(apertureType === 1 || apertureType === 2){ + ctx.lineWidth = borderThickness; + ctx.strokeStyle = borderColor; + ctx.beginPath(); + ctx.ellipse(apertureCenterX, apertureCenterY, horizontalAxis+(borderThickness/2), verticalAxis+(borderThickness/2), 0, 0, Math.PI*2); + ctx.stroke(); + }//End of if circle or ellipse + + //For square and rectangle + if(apertureType === 3 || apertureType === 4){ + ctx.lineWidth = borderThickness; + ctx.strokeStyle = borderColor; + ctx.strokeRect(apertureCenterX-horizontalAxis-(borderThickness/2), apertureCenterY-verticalAxis-(borderThickness/2), (horizontalAxis*2)+borderThickness, (verticalAxis*2)+borderThickness); + }//End of if square or + + }//End of if border === true + + }//End of draw + + //Update the dots with their new location + function updateDots() { + + //Cycle through to the next set of dots + if (currentSetArray[currentApertureNumber] == nSets - 1) { + currentSetArray[currentApertureNumber] = 0; + } else { + currentSetArray[currentApertureNumber] = currentSetArray[currentApertureNumber] + 1; + } + + //Load in the current set of dot array for easy handling + var dotArray = dotArray2d[currentSetArray[currentApertureNumber]]; + + //Load in the current set of dot array for easy handling + //dotArray = dotArray2d[currentSetArray[currentApertureNumber]]; //Global variable, so the draw function also uses this array + + //Loop through the dots one by one and update them accordingly + for 
(var i = 0; i < nDots; i++) { + var dot = dotArray[i]; //Load the current dot into the variable for easy handling + + //Generate a random value + var randomValue = Math.random(); + + //Update based on the dot's update type + if (dot.updateType == "constant direction") { + dot = constantDirectionUpdate(dot); + } else if (dot.updateType == "opposite direction") { + dot = oppositeDirectionUpdate(dot); + } else if (dot.updateType == "random position") { + dot = resetLocation(dot); + } else if (dot.updateType == "random walk") { + dot = randomWalkUpdate(dot); + } else if (dot.updateType == "random direction") { + dot = randomDirectionUpdate(dot); + } else if (dot.updateType == "constant direction or opposite direction or random position") { + + //Randomly select if the dot goes in a constant direction or random position, weighted based on the coherence level + if (randomValue < coherence) { + dot = constantDirectionUpdate(dot); + } else if(randomValue >= coherence && randomValue < (coherence + oppositeCoherence)){ + dot = oppositeDirectionUpdate(dot); + } else { + dot = resetLocation(dot); + } + } else if (dot.updateType == "constant direction or opposite direction or random walk") { + //Randomly select if the dot goes in a constant direction or random walk, weighted based on the coherence level + if (randomValue < coherence) { + dot = constantDirectionUpdate(dot); + } else if(randomValue >= coherence && randomValue < (coherence + oppositeCoherence)){ + dot = oppositeDirectionUpdate(dot); + } else { + dot = randomWalkUpdate(dot); + } + } else if (dot.updateType == "constant direction or opposite direction or random direction") { + //Randomly select if the dot goes in a constant direction or random direction, weighted based on the coherence level + if (randomValue < coherence) { + dot = constantDirectionUpdate(dot); + } else if(randomValue >= coherence && randomValue < (coherence + oppositeCoherence)){ + dot = oppositeDirectionUpdate(dot); + } else { + dot = 
randomDirectionUpdate(dot); + } + }//End of if dot.updateType == ... + + //Increment the life count + dot.lifeCount++; + + //Check if out of bounds or if life ended + if (lifeEnded(dot)) { + dot = resetLocation(dot); + } + + //If it goes out of bounds, do what is necessary (reinsert randomly or reinsert on the opposite edge) based on the parameter chosen + if (outOfBounds(dot)) { + switch (reinsertType) { + case 1: + dot = resetLocation(dot); + break; + case 2: + dot = reinsertOnOppositeEdge(dot); + break; + } //End of switch statement + } //End of if + + } //End of for loop + } //End of updateDots function + + //Function to check if dot life has ended + function lifeEnded(dot) { + //If we want infinite dot life + if (dotLife < 0) { + dot.lifeCount = 0; //resetting to zero to save memory. Otherwise it might increment to huge numbers. + return false; + } + //Else if the dot's life has reached its end + else if (dot.lifeCount >= dotLife) { + dot.lifeCount = 0; + return true; + } + //Else the dot's life has not reached its end + else { + return false; + } + } + + //Function to check if dot is out of bounds + function outOfBounds(dot) { + //For circle and ellipse + if (apertureType == 1 || apertureType == 2) { + if (dot.x < xValueNegative(dot.y) || dot.x > xValuePositive(dot.y) || dot.y < yValueNegative(dot.x) || dot.y > yValuePositive(dot.x)) { + return true; + } else { + return false; + } + } + //For square and rectangle + if (apertureType == 3 || apertureType == 4) { + if (dot.x < (apertureCenterX) - horizontalAxis || dot.x > (apertureCenterX) + horizontalAxis || dot.y < (apertureCenterY) - verticalAxis || dot.y > (apertureCenterY) + verticalAxis) { + return true; + } else { + return false; + } + } + + } + + //Set the vx and vy for the dot to the coherent jump sizes of the X and Y directions + function setvxvy(dot) { + dot.vx = coherentJumpSizeX; + dot.vy = coherentJumpSizeY; + return dot; + } + + //Set the vx2 and vy2 based on a random angle + function 
setvx2vy2(dot) { + //Generate a random angle of movement + var theta = randomNumberBetween(-Math.PI, Math.PI); + //Update properties vx2 and vy2 with the alternate directions + dot.vx2 = Math.cos(theta) * moveDistance; + dot.vy2 = -Math.sin(theta) * moveDistance; + return dot; + } + + //Updates the x and y coordinates by moving it in the x and y coherent directions + function constantDirectionUpdate(dot) { + dot.x += dot.vx; + dot.y += dot.vy; + dot.latestXMove = dot.vx; + dot.latestYMove = dot.vy; + return dot; + } + + //Updates the x and y coordinates by moving it in the opposite x and y coherent directions + function oppositeDirectionUpdate(dot) { + dot.x -= dot.vx; + dot.y -= dot.vy; + dot.latestXMove = -dot.vx; + dot.latestYMove = -dot.vy; + return dot; + } + + //Creates a new angle to move towards and updates the x and y coordinates + function randomWalkUpdate(dot) { + //Generate a random angle of movement + var theta = randomNumberBetween(-Math.PI, Math.PI); + //Generate the movement from the angle + dot.latestXMove = Math.cos(theta) * moveDistance; + dot.latestYMove = -Math.sin(theta) * moveDistance; + //Update x and y coordinates with the new location + dot.x += dot.latestXMove; + dot.y += dot.latestYMove; + return dot; + } + + //Updates the x and y coordinates with the alternative move direction + function randomDirectionUpdate(dot) { + dot.x += dot.vx2; + dot.y += dot.vy2; + dot.latestXMove = dot.vx2; + dot.latestYMove = dot.vy2; + return dot; + } + + //Calculates a random position on the opposite edge to reinsert the dot + function reinsertOnOppositeEdge(dot) { + //If it is a circle or ellipse + if (apertureType == 1 || apertureType == 2) { + //Bring the dot back into the aperture by moving back one step + dot.x -= dot.latestXMove; + dot.y -= dot.latestYMove; + + //Move the dot to the position relative to the origin to be reflected about the origin + dot.x -= apertureCenterX; + dot.y -= apertureCenterY; + + //Reflect the dot about the origin + dot.x = 
-dot.x; + dot.y = -dot.y; + + //Move the dot back to the center of the screen + dot.x += apertureCenterX; + dot.y += apertureCenterY; + + } //End of if apertureType == 1 | == 2 + + //If it is a square or rectangle, re-insert on one of the opposite edges + if (apertureType == 3 || apertureType == 4) { + + /* The formula for calculating whether a dot appears from the vertical edge (left or right edges) is dependent on the direction of the dot and the ratio of the vertical and horizontal edge lengths. + E.g. + Aperture is 100 px high and 200px wide + Dot is moving 3 px in x direction and 4px in y direction + Weight on vertical edge (sides) = (100/(100+200)) * (|3| / (|3| + |4|)) = 1/7 + Weight on horizontal edge (top or bottom) = (200/(100+200)) * (|4| / (|3| + |4|)) = 8/21 + + The weights above are the ratios to one another. + E.g. (cont.) + Ratio (vertical edge : horizontal edge) == (1/7 : 8/21) + Total probability space = 1/7 + 8/21 = 11/21 + Probability that dot appears on vertical edge = (1/7)/(11/21) = 3/11 + Probability that dot appears on horizontal edge = (8/21)/(11/21) = 8/11 + */ + + //Get the absolute values of the latest X and Y moves and store them in variables for easy handling. 
+ var absX = Math.abs(dot.latestXMove); + var absY = Math.abs(dot.latestYMove); + //Calculate the direction weights based on direction the dot was moving + var weightInXDirection = absX / (absX + absY); + var weightInYDirection = absY / (absX + absY); + //Calculate the weight of the edge the dot should appear from, based on direction of dot and ratio of the aperture edges + var weightOnVerticalEdge = (verticalAxis / (verticalAxis + horizontalAxis)) * weightInXDirection; + var weightOnHorizontalEdge = (horizontalAxis / (verticalAxis + horizontalAxis)) * weightInYDirection; + + + //Generate a bounded random number to determine if the dot should appear on the vertical edge or the horizontal edge + if (weightOnVerticalEdge > (weightOnHorizontalEdge + weightOnVerticalEdge) * Math.random()) { //If yes, appear on the left or right edge (vertical edge) + if (dot.latestXMove < 0) { //If dots move left, appear on right edge + dot.x = apertureCenterX + horizontalAxis; + dot.y = randomNumberBetween((apertureCenterY) - verticalAxis, (apertureCenterY) + verticalAxis); + } else { //Else dots move right, so they should appear on the left edge + dot.x = apertureCenterX - horizontalAxis; + dot.y = randomNumberBetween((apertureCenterY) - verticalAxis, (apertureCenterY) + verticalAxis); + } + } else { //Else appear on the top or bottom edge (horizontal edge) + if (dot.latestYMove < 0) { //If dots move upwards, then appear on bottom edge + dot.y = apertureCenterY + verticalAxis; + dot.x = randomNumberBetween((apertureCenterX) - horizontalAxis, (apertureCenterX) + horizontalAxis) + } else { //If dots move downwards, then appear on top edge + dot.y = apertureCenterY - verticalAxis; + dot.x = randomNumberBetween((apertureCenterX) - horizontalAxis, (apertureCenterX) + horizontalAxis) + } + } + } //End of apertureType == 3 + return dot; + } //End of reinsertOnOppositeEdge + + //Calculate the POSITIVE y value of a point on the edge of the ellipse given an x-value + function yValuePositive(x) 
{ + var x = x - (apertureCenterX); //Bring it back to the (0,0) center to calculate accurately (ignore the y-coordinate because it is not necessary for calculation) + return verticalAxis * Math.sqrt(1 - (Math.pow(x, 2) / Math.pow(horizontalAxis, 2))) + apertureCenterY; //Calculated the positive y value and added apertureCenterY to recenter it on the screen + } + + //Calculate the NEGATIVE y value of a point on the edge of the ellipse given an x-value + function yValueNegative(x) { + var x = x - (apertureCenterX); //Bring it back to the (0,0) center to calculate accurately (ignore the y-coordinate because it is not necessary for calculation) + return -verticalAxis * Math.sqrt(1 - (Math.pow(x, 2) / Math.pow(horizontalAxis, 2))) + apertureCenterY; //Calculated the negative y value and added apertureCenterY to recenter it on the screen + } + + //Calculate the POSITIVE x value of a point on the edge of the ellipse given a y-value + function xValuePositive(y) { + var y = y - (apertureCenterY); //Bring it back to the (0,0) center to calculate accurately (ignore the x-coordinate because it is not necessary for calculation) + return horizontalAxis * Math.sqrt(1 - (Math.pow(y, 2) / Math.pow(verticalAxis, 2))) + apertureCenterX; //Calculated the positive x value and added apertureCenterX to recenter it on the screen + } + + //Calculate the NEGATIVE x value of a point on the edge of the ellipse given a y-value + function xValueNegative(y) { + var y = y - (apertureCenterY); //Bring it back to the (0,0) center to calculate accurately (ignore the x-coordinate because it is not necessary for calculation) + return -horizontalAxis * Math.sqrt(1 - (Math.pow(y, 2) / Math.pow(verticalAxis, 2))) + apertureCenterX; //Calculated the negative x value and added apertureCenterX to recenter it on the screen + } + + //Calculate a random x and y coordinate in the ellipse + function resetLocation(dot) { + + //For circle and ellipse + if (apertureType == 1 || apertureType == 2) { + var phi = 
randomNumberBetween(-Math.PI, Math.PI); + var rho = Math.random(); + + x = Math.sqrt(rho) * Math.cos(phi); + y = Math.sqrt(rho) * Math.sin(phi); + + x = x * horizontalAxis + apertureCenterX; + y = y * verticalAxis + apertureCenterY; + + dot.x = x; + dot.y = y; + } + //For square and rectangle + else if (apertureType == 3 || apertureType == 4) { + dot.x = randomNumberBetween((apertureCenterX) - horizontalAxis, (apertureCenterX) + horizontalAxis); //Between the left and right edges of the square / rectangle + dot.y = randomNumberBetween((apertureCenterY) - verticalAxis, (apertureCenterY) + verticalAxis); //Between the top and bottom edges of the square / rectangle + } + + return dot; + } + + //Generates a random number (with decimals) between 2 values + function randomNumberBetween(lowerBound, upperBound) { + return lowerBound + Math.random() * (upperBound - lowerBound); + } + + //Function to make the dots move on the canvas + function animateDotMotion() { + //frameRequestID saves a long integer that is the ID of this frame request. The ID is then used to terminate the request below. 
+ var frameRequestID = window.requestAnimationFrame(animate);
+
+ //Start to listen to subject's key responses
+ startKeyboardListener();
+
+ //Declare a timestamp
+ var previousTimestamp;
+
+ function animate() {
+ //If stopping condition has been reached, then stop the animation
+ if (stopDotMotion) {
+ window.cancelAnimationFrame(frameRequestID); //Cancels the frame request
+ }
+ //Else continue with another frame request
+ else {
+ frameRequestID = window.requestAnimationFrame(animate); //Calls for another frame request
+
+ //If the timer has not been started and it is set, then start the timer
+ if ( (!timerHasStarted) && (trial.trial_duration > 0) ){
+ //If the trial duration is set, then set a timer to count down and call the end_trial function when the time is up
+ //(If the subject did not press a valid keyboard response within the trial duration, then this will end the trial)
+ timeoutID = window.setTimeout(end_trial,trial.trial_duration); //This timeoutID is then used to cancel the timeout should the subject press a valid key
+ //The timer has started, so we set the variable to true so it does not start more timers
+ timerHasStarted = true;
+ }
+
+ updateAndDraw(); //Update and draw each of the dots in their respective apertures
+
+ //If this is before the first frame, then start the timestamp
+ if(previousTimestamp === undefined){
+ previousTimestamp = performance.now();
+ }
+ //Else calculate the time and push it into the array
+ else{
+ var currentTimeStamp = performance.now(); //Variable to hold current timestamp
+ frameRate.push(currentTimeStamp - previousTimestamp); //Push the interval into the frameRate array
+ previousTimestamp = currentTimeStamp; //Reset the timestamp
+ }
+ }
+ }
+ }
+
+ //----RDK Functions End----
+
+ //----General Functions Begin//----
+
+ //Function to assign the default values for the staircase parameters
+ function assignParameterValue(argument, defaultValue){
+ return typeof argument !== 'undefined' ? 
argument : defaultValue; + } + + //----General Functions End//---- + + + //------------------------------------- + //-----------FUNCTIONS END------------- + //------------------------------------- + + + }; // END OF TRIAL + + //Return the plugin object which contains the trial + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-reconstruction.js b/LetterDMS/jspsych/plugins/jspsych-reconstruction.js new file mode 100644 index 0000000..e39bb18 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-reconstruction.js @@ -0,0 +1,134 @@ +/** + * jspsych-reconstruction + * a jspsych plugin for a reconstruction task where the subject recreates + * a stimulus from memory + * + * Josh de Leeuw + * + * documentation: docs.jspsych.org + * + */ + + +jsPsych.plugins['reconstruction'] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'reconstruction', + description: '', + parameters: { + stim_function: { + type: jsPsych.plugins.parameterType.FUNCTION, + pretty_name: 'Stimulus function', + default: undefined, + description: 'A function with a single parameter that returns an HTML-formatted string representing the stimulus.' + }, + starting_value: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Starting value', + default: 0.5, + description: 'The starting value of the stimulus parameter.' + }, + step_size: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Step size', + default: 0.05, + description: 'The change in the stimulus parameter caused by pressing one of the modification keys.' + }, + key_increase: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Key increase', + default: 'h', + description: 'The key to press for increasing the parameter value.' + }, + key_decrease: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Key decrease', + default: 'g', + description: 'The key to press for decreasing the parameter value.' 
+ }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + description: 'The text that appears on the button to finish the trial.' + } + } + } + + plugin.trial = function(display_element, trial) { + + // current param level + var param = trial.starting_value; + + // set-up key listeners + var after_response = function(info) { + + //console.log('fire'); + + var key_i = trial.key_increase; + var key_d = trial.key_decrease; + + // get new param value + if (jsPsych.pluginAPI.compareKeys(info.key, key_i)) { + param = param + trial.step_size; + } else if (jsPsych.pluginAPI.compareKeys(info.key, key_d)) { + param = param - trial.step_size; + } + param = Math.max(Math.min(1, param), 0); + + // refresh the display + draw(param); + } + + // listen for responses + var key_listener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: [trial.key_increase, trial.key_decrease], + rt_method: 'performance', + persist: true, + allow_held_key: true + }); + // draw first iteration + draw(param); + + function draw(param) { + + //console.log(param); + + display_element.innerHTML = '
'+trial.stim_function(param)+'
'; + + // add submit button + display_element.innerHTML += ''; + + display_element.querySelector('#jspsych-reconstruction-next').addEventListener('click', endTrial); + } + + function endTrial() { + // measure response time + var endTime =performance.now(); + var response_time = endTime - startTime; + + // clear keyboard response + jsPsych.pluginAPI.cancelKeyboardResponse(key_listener); + + // save data + var trial_data = { + rt: response_time, + final_value: param, + start_value: trial.starting_value + }; + + display_element.innerHTML = ''; + + // next trial + jsPsych.finishTrial(trial_data); + } + + var startTime = performance.now(); + + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-resize.js b/LetterDMS/jspsych/plugins/jspsych-resize.js new file mode 100644 index 0000000..833e7ae --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-resize.js @@ -0,0 +1,166 @@ +/** +* jspsych-resize +* Steve Chao +* +* plugin for controlling the real world size of the display +* +* documentation: docs.jspsych.org +* +**/ + +jsPsych.plugins["resize"] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'resize', + description: '', + parameters: { + item_height: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Item height', + default: 1, + description: 'The height of the item to be measured.' + }, + item_width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Item width', + default: 1, + description: 'The width of the item to be measured.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'The content displayed below the resizable box and above the button.' + }, + pixels_per_unit: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Pixels per unit', + default: 100, + description: 'After the scaling factor is applied, this many pixels will equal one unit of measurement.' 
+ }, + starting_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Starting size', + default: 100, + description: 'The initial size of the box, in pixels, along the larget dimension.' + }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + description: 'Label to display on the button to complete calibration.' + }, + } + } + + plugin.trial = function(display_element, trial) { + + var aspect_ratio = trial.item_width / trial.item_height; + + // variables to determine div size + if(trial.item_width >= trial.item_height){ + var start_div_width = trial.starting_size; + var start_div_height = Math.round(trial.starting_size / aspect_ratio); + } else { + var start_div_height = trial.starting_size; + var start_div_width = Math.round(trial.starting_size * aspect_ratio); + } + + // create html for display + var html ='
'; + html += '
'; + html += '
'; + if (trial.prompt !== null){ + html += trial.prompt; + } + html += ''+trial.button_label+''; + + // render + display_element.innerHTML = html; + + // listens for the click + document.getElementById("jspsych-resize-btn").addEventListener('click', function() { + scale(); + end_trial(); + }); + + var dragging = false; + var origin_x, origin_y; + var cx, cy; + + var mousedownevent = function(e){ + e.preventDefault(); + dragging = true; + origin_x = e.pageX; + origin_y = e.pageY; + cx = parseInt(scale_div.style.width); + cy = parseInt(scale_div.style.height); + } + + display_element.querySelector('#jspsych-resize-handle').addEventListener('mousedown', mousedownevent); + + var mouseupevent = function(e){ + dragging = false; + } + + document.addEventListener('mouseup', mouseupevent); + + var scale_div = display_element.querySelector('#jspsych-resize-div'); + + var resizeevent = function(e){ + if(dragging){ + var dx = (e.pageX - origin_x); + var dy = (e.pageY - origin_y); + + if(Math.abs(dx) >= Math.abs(dy)){ + scale_div.style.width = Math.round(Math.max(20, cx+dx*2)) + "px"; + scale_div.style.height = Math.round(Math.max(20, cx+dx*2) / aspect_ratio ) + "px"; + } else { + scale_div.style.height = Math.round(Math.max(20, cy+dy*2)) + "px"; + scale_div.style.width = Math.round(aspect_ratio * Math.max(20, cy+dy*2)) + "px"; + } + } + } + + document.addEventListener('mousemove', resizeevent); + + // scales the stimulus + var scale_factor; + var final_height_px, final_width_px; + function scale() { + final_width_px = scale_div.offsetWidth; + //final_height_px = scale_div.offsetHeight; + + var pixels_unit_screen = final_width_px / trial.item_width; + + scale_factor = pixels_unit_screen / trial.pixels_per_unit; + document.getElementById("jspsych-content").style.transform = "scale(" + scale_factor + ")"; + }; + + + // function to end trial + function end_trial() { + + // clear document event listeners + document.removeEventListener('mousemove', resizeevent); + 
document.removeEventListener('mouseup', mouseupevent); + + // clear the screen + display_element.innerHTML = ''; + + // finishes trial + + var trial_data = { + final_height_px: final_height_px, + final_width_px: final_width_px, + scale_factor: scale_factor + } + + jsPsych.finishTrial(trial_data); + } + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-same-different-html.js b/LetterDMS/jspsych/plugins/jspsych-same-different-html.js new file mode 100644 index 0000000..76e1d2a --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-same-different-html.js @@ -0,0 +1,168 @@ +/** + * jspsych-same-different + * Josh de Leeuw + * + * plugin for showing two stimuli sequentially and getting a same / different judgment + * + * documentation: docs.jspsych.org + * + */ + +jsPsych.plugins['same-different-html'] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'same-different-html', + description: '', + parameters: { + stimuli: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: 'Stimuli', + default: undefined, + array: true, + description: 'The HTML content to be displayed.' + }, + answer: { + type: jsPsych.plugins.parameterType.SELECT, + pretty_name: 'Answer', + options: ['same', 'different'], + default: undefined, + description: 'Either "same" or "different".' + }, + same_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Same key', + default: 'q', + description: '' + }, + different_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Different key', + default: 'p', + description: 'The key that subjects should press to indicate that the two stimuli are the same.' + }, + first_stim_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'First stimulus duration', + default: null, + description: 'How long to show the first stimulus for in milliseconds. If null, then the stimulus will remain on the screen until any keypress is made.' 
+ }, + gap_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Gap duration', + default: 500, + description: 'How long to show a blank screen in between the two stimuli.' + }, + second_stim_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Second stimulus duration', + default: null, + description: 'How long to show the second stimulus for in milliseconds. If null, then the stimulus will remain on the screen until a valid response is made.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'Any content here will be displayed below the stimulus.' + } + } + } + + plugin.trial = function(display_element, trial) { + + display_element.innerHTML = '
'+trial.stimuli[0]+'
'; + + var first_stim_info; + if (trial.first_stim_duration > 0) { + jsPsych.pluginAPI.setTimeout(function() { + showBlankScreen(); + }, trial.first_stim_duration); + } else { + function afterKeyboardResponse(info) { + first_stim_info = info; + showBlankScreen(); + } + jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: afterKeyboardResponse, + valid_responses: trial.advance_key, + rt_method: 'performance', + persist: false, + allow_held_key: false + }); + } + + function showBlankScreen() { + display_element.innerHTML = ''; + + jsPsych.pluginAPI.setTimeout(function() { + showSecondStim(); + }, trial.gap_duration); + } + + function showSecondStim() { + + var html = '
'+trial.stimuli[1]+'
'; + //show prompt here + if (trial.prompt !== null) { + html += trial.prompt; + } + display_element.innerHTML = html; + + if (trial.second_stim_duration > 0) { + jsPsych.pluginAPI.setTimeout(function() { + display_element.querySelector('.jspsych-same-different-stimulus').style.visibility = 'hidden'; + }, trial.second_stim_duration); + } + + + + var after_response = function(info) { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + var correct = false; + + var skey = trial.same_key; + var dkey = trial.different_key; + + if (jsPsych.pluginAPI.compareKeys(info.key, skey) && trial.answer == 'same') { + correct = true; + } + + if (jsPsych.pluginAPI.compareKeys(info.key, dkey) && trial.answer == 'different') { + correct = true; + } + + var trial_data = { + rt: info.rt, + answer: trial.answer, + correct: correct, + stimulus: [trial.stimuli[0], trial.stimuli[1]], + response: info.key + }; + if (first_stim_info) { + trial_data["rt_stim1"] = first_stim_info.rt; + trial_data["response_stim1"] = first_stim_info.key; + } + + display_element.innerHTML = ''; + + jsPsych.finishTrial(trial_data); + } + + jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: [trial.same_key, trial.different_key], + rt_method: 'performance', + persist: false, + allow_held_key: false + }); + + } + + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-same-different-image.js b/LetterDMS/jspsych/plugins/jspsych-same-different-image.js new file mode 100644 index 0000000..1c469d6 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-same-different-image.js @@ -0,0 +1,169 @@ +/** + * jspsych-same-different + * Josh de Leeuw + * + * plugin for showing two stimuli sequentially and getting a same / different judgment + * + * documentation: docs.jspsych.org + * + */ + +jsPsych.plugins['same-different-image'] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('same-different-image', 
'stimuli', 'image') + + plugin.info = { + name: 'same-different-image', + description: '', + parameters: { + stimuli: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Stimuli', + default: undefined, + array: true, + description: 'The images to be displayed.' + }, + answer: { + type: jsPsych.plugins.parameterType.SELECT, + pretty_name: 'Answer', + options: ['same', 'different'], + default: undefined, + description: 'Either "same" or "different".' + }, + same_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Same key', + default: 'q', + description: '' + }, + different_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Different key', + default: 'p', + description: 'The key that subjects should press to indicate that the two stimuli are the same.' + }, + first_stim_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'First stimulus duration', + default: null, + description: 'How long to show the first stimulus for in milliseconds. If null, then the stimulus will remain on the screen until any keypress is made.' + }, + gap_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Gap duration', + default: 500, + description: 'How long to show a blank screen in between the two stimuli.' + }, + second_stim_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Second stimulus duration', + default: null, + description: 'How long to show the second stimulus for in milliseconds. If null, then the stimulus will remain on the screen until a valid response is made.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'Any content here will be displayed below the stimulus.' 
+ } + } + } + + plugin.trial = function(display_element, trial) { + + display_element.innerHTML = ''; + + var first_stim_info; + if (trial.first_stim_duration > 0) { + jsPsych.pluginAPI.setTimeout(function() { + showBlankScreen(); + }, trial.first_stim_duration); + } else { + function afterKeyboardResponse(info) { + first_stim_info = info; + showBlankScreen(); + } + jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: afterKeyboardResponse, + valid_responses: trial.advance_key, + rt_method: 'performance', + persist: false, + allow_held_key: false + }); + } + + function showBlankScreen() { + display_element.innerHTML = ''; + + jsPsych.pluginAPI.setTimeout(function() { + showSecondStim(); + }, trial.gap_duration); + } + + function showSecondStim() { + + var html = ''; + //show prompt + if (trial.prompt !== null) { + html += trial.prompt; + } + + display_element.innerHTML = html; + + if (trial.second_stim_duration > 0) { + jsPsych.pluginAPI.setTimeout(function() { + display_element.querySelector('.jspsych-same-different-stimulus').style.visibility = 'hidden'; + }, trial.second_stim_duration); + } + + var after_response = function(info) { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + var correct = false; + + var skey = trial.same_key; + var dkey = trial.different_key; + + if (jsPsych.pluginAPI.compareKeys(info.key,skey) && trial.answer == 'same') { + correct = true; + } + + if (jsPsych.pluginAPI.compareKeys(info.key, dkey) && trial.answer == 'different') { + correct = true; + } + + var trial_data = { + rt: info.rt, + answer: trial.answer, + correct: correct, + stimulus: [trial.stimuli[0], trial.stimuli[1]], + response: info.key + }; + if (first_stim_info) { + trial_data["rt_stim1"] = first_stim_info.rt; + trial_data["response_stim1"] = first_stim_info.key; + } + + display_element.innerHTML = ''; + + jsPsych.finishTrial(trial_data); + } + + jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + 
valid_responses: [trial.same_key, trial.different_key], + rt_method: 'performance', + persist: false, + allow_held_key: false + }); + + } + + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-serial-reaction-time-mouse.js b/LetterDMS/jspsych/plugins/jspsych-serial-reaction-time-mouse.js new file mode 100644 index 0000000..18121f6 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-serial-reaction-time-mouse.js @@ -0,0 +1,212 @@ +/** + * jspsych-serial-reaction-time + * Josh de Leeuw + * + * plugin for running a serial reaction time task + * + * documentation: docs.jspsych.org + * + **/ + +jsPsych.plugins["serial-reaction-time-mouse"] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'serial-reaction-time-mouse', + description: '', + parameters: { + target: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Target', + array: true, + default: undefined, + description: 'The location of the target. The array should be the [row, column] of the target.' + }, + grid: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Grid', + array: true, + default: [[1,1,1,1]], + description: 'This array represents the grid of boxes shown on the screen.' + }, + grid_square_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Grid square size', + default: 100, + description: 'The width and height in pixels of each square in the grid.' + }, + target_color: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Target color', + default: "#999", + description: 'The color of the target square.' + }, + response_ends_trial: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response ends trial', + default: true, + description: 'If true, the trial ends after a mouse click.' + }, + pre_target_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Pre-target duration', + default: 0, + description: 'The number of milliseconds to display the grid before the target changes color.' 
+ }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: null, + description: 'How long to show the trial' + }, + fade_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Fade duration', + default: null, + description: 'If a positive number, the target will progressively change color at the start of the trial, with the transition lasting this many milliseconds.' + }, + allow_nontarget_responses: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Allow nontarget response', + default: false, + description: 'If true, then user can make nontarget response.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'Any content here will be displayed below the stimulus' + }, + } + } + + plugin.trial = function(display_element, trial) { + + var startTime = -1; + var response = { + rt: null, + row: null, + column: null + } + + // display stimulus + var stimulus = this.stimulus(trial.grid, trial.grid_square_size); + display_element.innerHTML = stimulus; + + + if(trial.pre_target_duration <= 0){ + showTarget(); + } else { + jsPsych.pluginAPI.setTimeout(function(){ + showTarget(); + }, trial.pre_target_duration); + } + + //show prompt if there is one + if (trial.prompt !== null) { + display_element.insertAdjacentHTML('beforeend', trial.prompt); + } + + function showTarget(){ + var resp_targets; + if(!trial.allow_nontarget_responses){ + resp_targets = [display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+trial.target[0]+'-'+trial.target[1])] + } else { + resp_targets = display_element.querySelectorAll('.jspsych-serial-reaction-time-stimulus-cell'); + } + for(var i=0; i"; + for(var i=0; i -1){ + flat_choices.splice(flat_choices.indexOf(''),1); + } + + // display stimulus + var stimulus = this.stimulus(trial.grid, trial.grid_square_size); + display_element.innerHTML = stimulus; + + if(trial.pre_target_duration 
<= 0){ + showTarget(); + } else { + jsPsych.pluginAPI.setTimeout(function(){ + showTarget(); + }, trial.pre_target_duration); + } + + //show prompt if there is one + if (trial.prompt !== null) { + display_element.innerHTML += trial.prompt; + } + + var keyboardListener = {}; + + var response = { + rt: null, + key: false, + correct: false + } + + function showTarget(){ + if(trial.fade_duration == null){ + display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+trial.target[0]+'-'+trial.target[1]).style.backgroundColor = trial.target_color; + } else { + display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+trial.target[0]+'-'+trial.target[1]).style.transition = "background-color "+trial.fade_duration; + display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+trial.target[0]+'-'+trial.target[1]).style.backgroundColor = trial.target_color; + } + + keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: flat_choices, + allow_held_key: false + }); + + if(trial.trial_duration > null){ + jsPsych.pluginAPI.setTimeout(showFeedback, trial.trial_duration); + } + + } + + function showFeedback() { + if(response.rt == null || trial.show_response_feedback == false){ + endTrial(); + } else { + var color = response.correct ? 
'#0f0' : '#f00'; + display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+response.responseLoc[0]+'-'+response.responseLoc[1]).style.transition = ""; + display_element.querySelector('#jspsych-serial-reaction-time-stimulus-cell-'+response.responseLoc[0]+'-'+response.responseLoc[1]).style.backgroundColor = color; + jsPsych.pluginAPI.setTimeout(endTrial, trial.feedback_duration); + } + } + + function endTrial() { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // kill keyboard listeners + if (typeof keyboardListener !== 'undefined') { + jsPsych.pluginAPI.cancelKeyboardResponse(keyboardListener); + } + + // gather the data to store for the trial + var trial_data = { + rt: response.rt, + response: response.key, + correct: response.correct, + grid: trial.grid, + target: trial.target + }; + + // clear the display + display_element.innerHTML = ''; + + // move on to the next trial + jsPsych.finishTrial(trial_data); + + }; + + // function to handle responses by the subject + function after_response(info) { + + // only record first response + response = response.rt == null ? info : response; + + // check if the response is correct + var responseLoc = []; + for(var i=0; i"; + for(var i=0; i tag must not be included and is generated by the plugin.' + }, + preamble: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Preamble', + default: null, + description: 'HTML formatted string to display at the top of the page above all the questions.' + }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + description: 'The text that appears on the button to finish the trial.' + }, + autofocus: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Element ID to focus', + default: '', + description: 'The HTML element ID of a form field to autofocus on.' 
+ }, + dataAsArray: { + type: jsPsych.plugins.parameterType.BOOLEAN, + pretty_name: 'Data As Array', + default: false, + description: 'Retrieve the data as an array e.g. [{name: "INPUT_NAME", value: "INPUT_VALUE"}, ...] instead of an object e.g. {INPUT_NAME: INPUT_VALUE, ...}.' + }, + autocomplete: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Allow autocomplete', + default: false, + description: "Setting this to true will enable browser auto-complete or auto-fill for the form." + } + } + } + + plugin.trial = function(display_element, trial) { + + var html = ''; + // show preamble text + if(trial.preamble !== null){ + html += '
'+trial.preamble+'
'; + } + // start form + if ( trial.autocomplete ) { + html += '
' + } else { + html += '' + } + + // add form HTML / input elements + html += trial.html; + + // add submit button + html += ''; + + html += '
'; + display_element.innerHTML = html; + + if ( trial.autofocus !== '' ) { + var focus_elements = display_element.querySelectorAll('#'+trial.autofocus); + if ( focus_elements.length === 0 ) { + console.warn('No element found with id: '+trial.autofocus); + } else if ( focus_elements.length > 1 ) { + console.warn('The id "'+trial.autofocus+'" is not unique so autofocus will not work.'); + } else { + focus_elements[0].focus(); + } + } + + display_element.querySelector('#jspsych-survey-html-form').addEventListener('submit', function(event) { + // don't submit form + event.preventDefault(); + + // measure response time + var endTime = performance.now(); + var response_time = endTime - startTime; + + var question_data = serializeArray(this); + + if (!trial.dataAsArray) { + question_data = objectifyForm(question_data); + } + + // save data + var trialdata = { + rt: response_time, + response: question_data + }; + + display_element.innerHTML = ''; + + // next trial + jsPsych.finishTrial(trialdata); + }); + + var startTime = performance.now(); + }; + + /*! 
+ * Serialize all form data into an array + * (c) 2018 Chris Ferdinandi, MIT License, https://gomakethings.com + * @param {Node} form The form to serialize + * @return {String} The serialized form data + */ + var serializeArray = function (form) { + // Setup our serialized data + var serialized = []; + + // Loop through each field in the form + for (var i = 0; i < form.elements.length; i++) { + var field = form.elements[i]; + + // Don't serialize fields without a name, submits, buttons, file and reset inputs, and disabled fields + if (!field.name || field.disabled || field.type === 'file' || field.type === 'reset' || field.type === 'submit' || field.type === 'button') continue; + + // If a multi-select, get all selections + if (field.type === 'select-multiple') { + for (var n = 0; n < field.options.length; n++) { + if (!field.options[n].selected) continue; + serialized.push({ + name: field.name, + value: field.options[n].value + }); + } + } + + // Convert field data to a query string + else if ((field.type !== 'checkbox' && field.type !== 'radio') || field.checked) { + serialized.push({ + name: field.name, + value: field.value + }); + } + } + + return serialized; + }; + + // from https://stackoverflow.com/questions/1184624/convert-form-data-to-javascript-object-with-jquery + function objectifyForm(formArray) {//serialize data function + var returnArray = {}; + for (var i = 0; i < formArray.length; i++){ + returnArray[formArray[i]['name']] = formArray[i]['value']; + } + return returnArray; + } + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-survey-likert.js b/LetterDMS/jspsych/plugins/jspsych-survey-likert.js new file mode 100644 index 0000000..9684ff8 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-survey-likert.js @@ -0,0 +1,195 @@ +/** + * jspsych-survey-likert + * a jspsych plugin for measuring items on a likert scale + * + * Josh de Leeuw + * + * documentation: docs.jspsych.org + * + */ + +jsPsych.plugins['survey-likert'] = 
(function() { + + var plugin = {}; + + plugin.info = { + name: 'survey-likert', + description: '', + parameters: { + questions: { + type: jsPsych.plugins.parameterType.COMPLEX, + array: true, + pretty_name: 'Questions', + nested: { + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: undefined, + description: 'Questions that are associated with the slider.' + }, + labels: { + type: jsPsych.plugins.parameterType.STRING, + array: true, + pretty_name: 'Labels', + default: undefined, + description: 'Labels to display for individual question.' + }, + required: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Required', + default: false, + description: 'Makes answering the question required.' + }, + name: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Question Name', + default: '', + description: 'Controls the name of data values associated with this question' + } + } + }, + randomize_question_order: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Randomize Question Order', + default: false, + description: 'If true, the order of the questions will be randomized' + }, + preamble: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Preamble', + default: null, + description: 'String to display at top of the page.' + }, + scale_width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Scale width', + default: null, + description: 'Width of the likert scales in pixels.' + }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + description: 'Label of the button.' + }, + autocomplete: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Allow autocomplete', + default: false, + description: "Setting this to true will enable browser auto-complete or auto-fill for the form." 
+ } + } + } + + plugin.trial = function(display_element, trial) { + + if(trial.scale_width !== null){ + var w = trial.scale_width + 'px'; + } else { + var w = '100%'; + } + + var html = ""; + // inject CSS for trial + html += ''; + + // show preamble text + if(trial.preamble !== null){ + html += '
'+trial.preamble+'
'; + } + + if ( trial.autocomplete ) { + html += '
'; + } else { + html += ''; + } + + // add likert scale questions /// + // generate question order. this is randomized here as opposed to randomizing the order of trial.questions + // so that the data are always associated with the same question regardless of order + var question_order = []; + for(var i=0; i'; + // add options + var width = 100 / question.labels.length; + var options_string = '
    '; + for (var j = 0; j < question.labels.length; j++) { + options_string += '
  • '; + } + options_string += '
'; + html += options_string; + } + + // add submit button + html += ''; + + html += '' + + display_element.innerHTML = html; + + display_element.querySelector('#jspsych-survey-likert-form').addEventListener('submit', function(e){ + e.preventDefault(); + // measure response time + var endTime = performance.now(); + var response_time = endTime - startTime; + + // create object to hold responses + var question_data = {}; + var matches = display_element.querySelectorAll('#jspsych-survey-likert-form .jspsych-survey-likert-opts'); + for(var index = 0; index < matches.length; index++){ + var id = matches[index].dataset['radioGroup']; + var el = display_element.querySelector('input[name="' + id + '"]:checked'); + if (el === null) { + var response = ""; + } else { + var response = parseInt(el.value); + } + var obje = {}; + if(matches[index].attributes['data-name'].value !== ''){ + var name = matches[index].attributes['data-name'].value; + } else { + var name = id; + } + obje[name] = response; + Object.assign(question_data, obje); + } + + // save data + var trial_data = { + rt: response_time, + response: question_data, + question_order: question_order + }; + + display_element.innerHTML = ''; + + // next trial + jsPsych.finishTrial(trial_data); + }); + + var startTime = performance.now(); + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-survey-multi-choice.js b/LetterDMS/jspsych/plugins/jspsych-survey-multi-choice.js new file mode 100644 index 0000000..540828f --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-survey-multi-choice.js @@ -0,0 +1,208 @@ +/** + * jspsych-survey-multi-choice + * a jspsych plugin for multiple choice survey questions + * + * Shane Martin + * + * documentation: docs.jspsych.org + * + */ + + +jsPsych.plugins['survey-multi-choice'] = (function() { + var plugin = {}; + + plugin.info = { + name: 'survey-multi-choice', + description: '', + parameters: { + questions: { + type: jsPsych.plugins.parameterType.COMPLEX, + array: 
true, + pretty_name: 'Questions', + nested: { + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: undefined, + description: 'The strings that will be associated with a group of options.' + }, + options: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Options', + array: true, + default: undefined, + description: 'Displays options for an individual question.' + }, + required: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Required', + default: false, + description: 'Subject will be required to pick an option for each question.' + }, + horizontal: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Horizontal', + default: false, + description: 'If true, then questions are centered and options are displayed horizontally.' + }, + name: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Question Name', + default: '', + description: 'Controls the name of data values associated with this question' + } + } + }, + randomize_question_order: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Randomize Question Order', + default: false, + description: 'If true, the order of the questions will be randomized' + }, + preamble: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Preamble', + default: null, + description: 'HTML formatted string to display at the top of the page above all the questions.' + }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + description: 'Label of the button.' + }, + autocomplete: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Allow autocomplete', + default: false, + description: "Setting this to true will enable browser auto-complete or auto-fill for the form." 
+ } + } + } + plugin.trial = function(display_element, trial) { + var plugin_id_name = "jspsych-survey-multi-choice"; + + var html = ""; + + // inject CSS for trial + html += ''; + + // show preamble text + if(trial.preamble !== null){ + html += '
'+trial.preamble+'
'; + } + + // form element + if ( trial.autocomplete ) { + html += '
'; + } else { + html += ''; + } + // generate question order. this is randomized here as opposed to randomizing the order of trial.questions + // so that the data are always associated with the same question regardless of order + var question_order = []; + for(var i=0; i'; + + // add question text + html += '

' + question.prompt + if(question.required){ + html += "*"; + } + html += '

'; + + // create option radio buttons + for (var j = 0; j < question.options.length; j++) { + // add label and question text + var option_id_name = "jspsych-survey-multi-choice-option-"+question_id+"-"+j; + var input_name = 'jspsych-survey-multi-choice-response-'+question_id; + var input_id = 'jspsych-survey-multi-choice-response-'+question_id+'-'+j; + + var required_attr = question.required ? 'required' : ''; + + // add radio button container + html += '
'; + html += ''; + html += '
'; + } + + html += ''; + } + + // add submit button + html += ''; + html += ''; + + // render + display_element.innerHTML = html; + + document.querySelector('form').addEventListener('submit', function(event) { + event.preventDefault(); + // measure response time + var endTime = performance.now(); + var response_time = endTime - startTime; + + // create object to hold responses + var question_data = {}; + for(var i=0; i'; + + // form element + var trial_form_id = _join(plugin_id_name, "form"); + display_element.innerHTML += '
'; + var trial_form = display_element.querySelector("#" + trial_form_id); + if ( !trial.autocomplete ) { + trial_form.setAttribute('autocomplete',"off"); + } + // show preamble text + var preamble_id_name = _join(plugin_id_name, 'preamble'); + if(trial.preamble !== null){ + trial_form.innerHTML += '
'+trial.preamble+'
'; + } + // generate question order. this is randomized here as opposed to randomizing the order of trial.questions + // so that the data are always associated with the same question regardless of order + var question_order = []; + for(var i=0; i'; + + var question_selector = _join(plugin_id_selector, question_id); + + // add question text + display_element.querySelector(question_selector).innerHTML += '

' + question.prompt + '

'; + + // create option check boxes + for (var j = 0; j < question.options.length; j++) { + var option_id_name = _join(plugin_id_name, "option", question_id, j); + + // add check box container + display_element.querySelector(question_selector).innerHTML += '
'; + + // add label and question text + var form = document.getElementById(option_id_name) + var input_name = _join(plugin_id_name, 'response', question_id); + var input_id = _join(plugin_id_name, 'response', question_id, j); + var label = document.createElement('label'); + label.setAttribute('class', plugin_id_name+'-text'); + label.innerHTML = question.options[j]; + label.setAttribute('for', input_id) + + // create checkboxes + var input = document.createElement('input'); + input.setAttribute('type', "checkbox"); + input.setAttribute('name', input_name); + input.setAttribute('id', input_id); + input.setAttribute('value', question.options[j]) + form.appendChild(label) + label.insertBefore(input, label.firstChild) + } + } + // add submit button + trial_form.innerHTML += '
' + trial_form.innerHTML += ''; + + // validation check on the data first for custom validation handling + // then submit the form + display_element.querySelector('#jspsych-survey-multi-select-next').addEventListener('click', function(){ + for(var i=0; i'; + } + // start form + if (trial.autocomplete) { + html += '
'; + } else { + html += ''; + } + // generate question order + var question_order = []; + for(var i=0; i'; + html += '

' + question.prompt + '

'; + var autofocus = i == 0 ? "autofocus" : ""; + var req = question.required ? "required" : ""; + if(question.rows == 1){ + html += ''; + } else { + html += ''; + } + html += ''; + } + + // add submit button + html += ''; + + html += '' + display_element.innerHTML = html; + + // backup in case autofocus doesn't work + display_element.querySelector('#input-'+question_order[0]).focus(); + + display_element.querySelector('#jspsych-survey-text-form').addEventListener('submit', function(e) { + e.preventDefault(); + // measure response time + var endTime = performance.now(); + var response_time = endTime - startTime; + + // create object to hold responses + var question_data = {}; + + for(var index=0; index < trial.questions.length; index++){ + var id = "Q" + index; + var q_element = document.querySelector('#jspsych-survey-text-'+index).querySelector('textarea, input'); + var val = q_element.value; + var name = q_element.attributes['data-name'].value; + if(name == ''){ + name = id; + } + var obje = {}; + obje[name] = val; + Object.assign(question_data, obje); + } + // save data + var trialdata = { + rt: response_time, + response: question_data + }; + + display_element.innerHTML = ''; + + // next trial + jsPsych.finishTrial(trialdata); + }); + + var startTime = performance.now(); + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-video-button-response.js b/LetterDMS/jspsych/plugins/jspsych-video-button-response.js new file mode 100644 index 0000000..54f50ef --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-video-button-response.js @@ -0,0 +1,335 @@ +/** + * jspsych-video-button-response + * Josh de Leeuw + * + * plugin for playing a video file and getting a button response + * + * documentation: docs.jspsych.org + * + **/ + +jsPsych.plugins["video-button-response"] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('video-button-response', 'stimulus', 'video'); + + plugin.info = { + name: 'video-button-response', + 
description: '', + parameters: { + stimulus: { + type: jsPsych.plugins.parameterType.VIDEO, + pretty_name: 'Video', + default: undefined, + description: 'The video file to play.' + }, + choices: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Choices', + default: undefined, + array: true, + description: 'The labels for the buttons.' + }, + button_html: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button HTML', + default: '', + array: true, + description: 'The html of the button. Can create own style.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'Any content here will be displayed below the buttons.' + }, + width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Width', + default: '', + description: 'The width of the video in pixels.' + }, + height: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Height', + default: '', + description: 'The height of the video display in pixels.' + }, + autoplay: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Autoplay', + default: true, + description: 'If true, the video will begin playing as soon as it has loaded.' + }, + controls: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Controls', + default: false, + description: 'If true, the subject will be able to pause the video or move the playback to any point in the video.' + }, + start: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Start', + default: null, + description: 'Time to start the clip.' + }, + stop: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Stop', + default: null, + description: 'Time to stop the clip.' + }, + rate: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Rate', + default: 1, + description: 'The playback rate of the video. 1 is normal, <1 is slower, >1 is faster.' 
+ }, + trial_ends_after_video: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'End trial after video finishes', + default: false, + description: 'If true, the trial will end immediately after the video finishes playing.' + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: null, + description: 'How long to show trial before it ends.' + }, + margin_vertical: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Margin vertical', + default: '0px', + description: 'The vertical margin of the button.' + }, + margin_horizontal: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Margin horizontal', + default: '8px', + description: 'The horizontal margin of the button.' + }, + response_ends_trial: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response ends trial', + default: true, + description: 'If true, the trial will end when subject makes a response.' + }, + response_allowed_while_playing: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response allowed while playing', + default: true, + description: 'If true, then responses are allowed while the video is playing. '+ + 'If false, then the video must finish playing before a response is accepted.' + } + } + } + + plugin.trial = function(display_element, trial) { + + // setup stimulus + var video_html = '
' + video_html += '"; + video_html += "
"; + + //display buttons + var buttons = []; + if (Array.isArray(trial.button_html)) { + if (trial.button_html.length == trial.choices.length) { + buttons = trial.button_html; + } else { + console.error('Error in video-button-response plugin. The length of the button_html array does not equal the length of the choices array'); + } + } else { + for (var i = 0; i < trial.choices.length; i++) { + buttons.push(trial.button_html); + } + } + video_html += '
'; + for (var i = 0; i < trial.choices.length; i++) { + var str = buttons[i].replace(/%choice%/g, trial.choices[i]); + video_html += '
'+str+'
'; + } + video_html += '
'; + + // add prompt if there is one + if (trial.prompt !== null) { + video_html += trial.prompt; + } + + display_element.innerHTML = video_html; + + var start_time = performance.now(); + + var video_element = display_element.querySelector('#jspsych-video-button-response-stimulus'); + + if(video_preload_blob){ + video_element.src = video_preload_blob; + } + + video_element.onended = function(){ + if(trial.trial_ends_after_video){ + end_trial(); + } else if (!trial.response_allowed_while_playing) { + enable_buttons(); + } + } + + video_element.playbackRate = trial.rate; + + // if video start time is specified, hide the video and set the starting time + // before showing and playing, so that the video doesn't automatically show the first frame + if(trial.start !== null){ + video_element.pause(); + video_element.currentTime = trial.start; + video_element.onseeked = function() { + video_element.style.visibility = "visible"; + if (trial.autoplay) { + video_element.play(); + } + } + } + + if(trial.stop !== null){ + video_element.addEventListener('timeupdate', function(e){ + var currenttime = video_element.currentTime; + if(currenttime >= trial.stop){ + video_element.pause(); + } + }) + } + + if(trial.response_allowed_while_playing){ + enable_buttons(); + } else { + disable_buttons(); + } + + // store response + var response = { + rt: null, + button: null + }; + + // function to end trial when it is time + function end_trial() { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // stop the video file if it is playing + // remove any remaining end event handlers + display_element.querySelector('#jspsych-video-button-response-stimulus').pause(); + display_element.querySelector('#jspsych-video-button-response-stimulus').onended = function() {}; + + // gather the data to store for the trial + var trial_data = { + rt: response.rt, + stimulus: trial.stimulus, + response: response.button + }; + + // clear the display + 
display_element.innerHTML = ''; + + // move on to the next trial + jsPsych.finishTrial(trial_data); + } + + // function to handle responses by the subject + function after_response(choice) { + + // measure rt + var end_time = performance.now(); + var rt = end_time - start_time; + response.button = parseInt(choice); + response.rt = rt; + + // after a valid response, the stimulus will have the CSS class 'responded' + // which can be used to provide visual feedback that a response was recorded + video_element.className += ' responded'; + + // disable all the buttons after a response + disable_buttons(); + + if (trial.response_ends_trial) { + end_trial(); + } + } + + function button_response(e){ + var choice = e.currentTarget.getAttribute('data-choice'); // don't use dataset for jsdom compatibility + after_response(choice); + } + + function disable_buttons() { + var btns = document.querySelectorAll('.jspsych-video-button-response-button'); + for (var i=0; i1 is faster.' + }, + trial_ends_after_video: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'End trial after video finishes', + default: false, + description: 'If true, the trial will end immediately after the video finishes playing.' + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: null, + description: 'How long to show trial before it ends.' + }, + response_ends_trial: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response ends trial', + default: true, + description: 'If true, the trial will end when subject makes a response.' + }, + response_allowed_while_playing: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response allowed while playing', + default: true, + description: 'If true, then responses are allowed while the video is playing. '+ + 'If false, then the video must finish playing before a response is accepted.' 
+ } + } + } + + plugin.trial = function(display_element, trial) { + + // setup stimulus + var video_html = '
' + video_html += '"; + video_html += "
"; + + // add prompt if there is one + if (trial.prompt !== null) { + video_html += trial.prompt; + } + + display_element.innerHTML = video_html; + + var video_element = display_element.querySelector('#jspsych-video-keyboard-response-stimulus'); + + if(video_preload_blob){ + video_element.src = video_preload_blob; + } + + video_element.onended = function(){ + if(trial.trial_ends_after_video){ + end_trial(); + } + if ((trial.response_allowed_while_playing == false) & (!trial.trial_ends_after_video)) { + // start keyboard listener + var keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: trial.choices, + rt_method: 'performance', + persist: false, + allow_held_key: false, + }); + } + } + + video_element.playbackRate = trial.rate; + + // if video start time is specified, hide the video and set the starting time + // before showing and playing, so that the video doesn't automatically show the first frame + if(trial.start !== null){ + video_element.pause(); + video_element.currentTime = trial.start; + video_element.onseeked = function() { + video_element.style.visibility = "visible"; + if (trial.autoplay) { + video_element.play(); + } + } + } + + if(trial.stop !== null){ + video_element.addEventListener('timeupdate', function(e){ + var currenttime = video_element.currentTime; + if(currenttime >= trial.stop){ + video_element.pause(); + } + }) + } + + // store response + var response = { + rt: null, + key: null + }; + + // function to end trial when it is time + function end_trial() { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // kill keyboard listeners + jsPsych.pluginAPI.cancelAllKeyboardResponses(); + + // stop the video file if it is playing + // remove end event listeners if they exist + display_element.querySelector('#jspsych-video-keyboard-response-stimulus').pause(); + display_element.querySelector('#jspsych-video-keyboard-response-stimulus').onended = 
function(){ }; + + // gather the data to store for the trial + var trial_data = { + rt: response.rt, + stimulus: trial.stimulus, + response: response.key + }; + + // clear the display + display_element.innerHTML = ''; + + // move on to the next trial + jsPsych.finishTrial(trial_data); + } + + // function to handle responses by the subject + var after_response = function(info) { + + // after a valid response, the stimulus will have the CSS class 'responded' + // which can be used to provide visual feedback that a response was recorded + display_element.querySelector('#jspsych-video-keyboard-response-stimulus').className += ' responded'; + + // only record the first response + if (response.key == null) { + response = info; + } + + if (trial.response_ends_trial) { + end_trial(); + } + }; + + // start the response listener + if ((trial.choices != jsPsych.NO_KEYS) & (trial.response_allowed_while_playing)) { + var keyboardListener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: trial.choices, + rt_method: 'performance', + persist: false, + allow_held_key: false, + }); + } + + // end trial if time limit is set + if (trial.trial_duration !== null) { + jsPsych.pluginAPI.setTimeout(function() { + end_trial(); + }, trial.trial_duration); + } + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-video-slider-response.js b/LetterDMS/jspsych/plugins/jspsych-video-slider-response.js new file mode 100644 index 0000000..7954ad9 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-video-slider-response.js @@ -0,0 +1,351 @@ +/** + * jspsych-video-slider-response + * Josh de Leeuw + * + * plugin for playing a video file and getting a slider response + * + * documentation: docs.jspsych.org + * + **/ + +jsPsych.plugins["video-slider-response"] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('video-slider-response', 'stimulus', 'video'); + + plugin.info = { + name: 
'video-slider-response', + description: '', + parameters: { + stimulus: { + type: jsPsych.plugins.parameterType.VIDEO, + pretty_name: 'Video', + default: undefined, + description: 'The video file to play.' + }, + prompt: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Prompt', + default: null, + description: 'Any content here will be displayed below the stimulus.' + }, + width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Width', + default: '', + description: 'The width of the video in pixels.' + }, + height: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Height', + default: '', + description: 'The height of the video display in pixels.' + }, + autoplay: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Autoplay', + default: true, + description: 'If true, the video will begin playing as soon as it has loaded.' + }, + controls: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Controls', + default: false, + description: 'If true, the subject will be able to pause the video or move the playback to any point in the video.' + }, + start: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Start', + default: null, + description: 'Time to start the clip.' + }, + stop: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Stop', + default: null, + description: 'Time to stop the clip.' + }, + rate: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: 'Rate', + default: 1, + description: 'The playback rate of the video. 1 is normal, <1 is slower, >1 is faster.' + }, + min: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Min slider', + default: 0, + description: 'Sets the minimum value of the slider.' 
+ }, + max: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Max slider', + default: 100, + description: 'Sets the maximum value of the slider', + }, + slider_start: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Slider starting value', + default: 50, + description: 'Sets the starting value of the slider', + }, + step: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Step', + default: 1, + description: 'Sets the step of the slider' + }, + labels: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name:'Labels', + default: [], + array: true, + description: 'Labels of the slider.', + }, + slider_width: { + type: jsPsych.plugins.parameterType.INT, + pretty_name:'Slider width', + default: null, + description: 'Width of the slider in pixels.' + }, + button_label: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: 'Button label', + default: 'Continue', + array: false, + description: 'Label of the button to advance.' + }, + require_movement: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Require movement', + default: false, + description: 'If true, the participant will have to move the slider before continuing.' + }, + trial_ends_after_video: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'End trial after video finishes', + default: false, + description: 'If true, the trial will end immediately after the video finishes playing.' + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: null, + description: 'How long to show trial before it ends.' + }, + response_ends_trial: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response ends trial', + default: true, + description: 'If true, the trial will end when subject makes a response.' 
+ }, + response_allowed_while_playing: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Response allowed while playing', + default: true, + description: 'If true, then responses are allowed while the video is playing. '+ + 'If false, then the video must finish playing before a response is accepted.' + } + } + } + + plugin.trial = function(display_element, trial) { + + // half of the thumb width value from jspsych.css, used to adjust the label positions + var half_thumb_width = 7.5; + + // setup stimulus + var video_html = '"; + + var html = '
'; + html += '
' + video_html + '
'; + html += '
'; + html += ''; + html += ''+trial.labels[j]+''; + html += '
' + } + html += '
'; + html += ''; + html += ''; + + // add prompt if there is one + if (trial.prompt !== null) { + html += '
'+trial.prompt+'
'; + } + + // add submit button + var next_disabled_attribute = ""; + if (trial.require_movement | !trial.response_allowed_while_playing) { + next_disabled_attribute = "disabled"; + } + html += ''; + + display_element.innerHTML = html; + + var video_element = display_element.querySelector('#jspsych-video-slider-response-stimulus-video'); + + if(video_preload_blob){ + video_element.src = video_preload_blob; + } + + video_element.onended = function(){ + if(trial.trial_ends_after_video){ + end_trial(); + } else if (!trial.response_allowed_while_playing) { + enable_slider(); + } + } + + video_element.playbackRate = trial.rate; + + // if video start time is specified, hide the video and set the starting time + // before showing and playing, so that the video doesn't automatically show the first frame + if(trial.start !== null){ + video_element.pause(); + video_element.currentTime = trial.start; + video_element.onseeked = function() { + video_element.style.visibility = "visible"; + if (trial.autoplay) { + video_element.play(); + } + } + } + + if(trial.stop !== null){ + video_element.addEventListener('timeupdate', function(e){ + var currenttime = video_element.currentTime; + if(currenttime >= trial.stop){ + video_element.pause(); + } + }) + } + + if(trial.require_movement){ + display_element.querySelector('#jspsych-video-slider-response-response').addEventListener('click', function(){ + display_element.querySelector('#jspsych-video-slider-response-next').disabled = false; + }); + } + + var startTime = performance.now(); + + // store response + var response = { + rt: null, + response: null + }; + + display_element.querySelector('#jspsych-video-slider-response-next').addEventListener('click', function() { + // measure response time + var endTime = performance.now(); + response.rt = endTime - startTime; + response.response = display_element.querySelector('#jspsych-video-slider-response-response').valueAsNumber; + + if(trial.response_ends_trial){ + end_trial(); + } else { + 
display_element.querySelector('#jspsych-video-slider-response-next').disabled = true; + } + + }); + + // function to end trial when it is time + function end_trial() { + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // stop the video file if it is playing + // remove any remaining end event handlers + display_element.querySelector('#jspsych-video-slider-response-stimulus-video').pause(); + display_element.querySelector('#jspsych-video-slider-response-stimulus-video').onended = function() {}; + + // gather the data to store for the trial + var trial_data = { + rt: response.rt, + stimulus: trial.stimulus, + start: trial.start, + slider_start: trial.slider_start, + response: response.response + }; + + // clear the display + display_element.innerHTML = ''; + + // move on to the next trial + jsPsych.finishTrial(trial_data); + }; + + // function to enable slider after video ends + function enable_slider() { + document.querySelector('#jspsych-video-slider-response-response').disabled = false; + if (!trial.require_movement) { + document.querySelector('#jspsych-video-slider-response-next').disabled = false; + } + } + + // end trial if time limit is set + if (trial.trial_duration !== null) { + jsPsych.pluginAPI.setTimeout(function() { + end_trial(); + }, trial.trial_duration); + } + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-virtual-chinrest.js b/LetterDMS/jspsych/plugins/jspsych-virtual-chinrest.js new file mode 100644 index 0000000..7899b1c --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-virtual-chinrest.js @@ -0,0 +1,471 @@ +/* + * virtual chinrest plugin for jsPsych, based on Qisheng Li 11/2019. /// https://github.com/QishengLi/virtual_chinrest + + Modified by Gustavo Juantorena 08/2020 // https://github.com/GEJ1 + + Contributions from Peter J. 
Kohler: https://github.com/pjkohler + */ + +jsPsych.plugins["virtual-chinrest"] = (function () { + var plugin = {}; + + plugin.info = { + name: "virtual-chinrest", + parameters: { + resize_units: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: "Resize units", + default: "none", + description: + 'What units to resize to? ["none"/"cm"/"inch"/"deg"]. If "none", no resizing will be done to the jsPsych content after this trial.', + }, + pixels_per_unit: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Pixels per unit", + default: 100, + description: + "After the scaling factor is applied, this many pixels will equal one unit of measurement.", + }, + // mouse_adjustment: { + // type: jsPsych.plugins.parameterType.BOOL, + // pretty_name: "Adjust Using Mouse?", + // default: true, + // }, + adjustment_prompt: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Adjustment prompt", + default: ` +
+

Click and drag the lower right corner of the image until it is the same size as a credit card held up to the screen.

+

You can use any card that is the same size as a credit card, like a membership card or driver's license.

+

If you do not have access to a real card you can use a ruler to measure the image width to 3.37 inches or 85.6 mm.

+
`, + description: + "Any content here will be displayed above the card stimulus.", + }, + adjustment_button_prompt: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Adjustment button prompt", + default: "Click here when the image is the correct size", + description: + " Content of the button displayed below the card stimulus.", + }, + item_path: { + type: jsPsych.plugins.parameterType.STRING, + pretty_name: "Item path", + default: "img/card.png", + description: "Path to an image to be shown in the resizable item div." + }, + item_height_mm: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: "Item height (mm)", + default: 53.98, + description: "The height of the item to be measured, in mm.", + }, + item_width_mm: { + type: jsPsych.plugins.parameterType.FLOAT, + pretty_name: "Item width (mm)", + default: 85.6, + description: "The width of the item to be measured, in mm.", + }, + item_init_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Initial Size", + default: 250, + description: + "The initial size of the card, in pixels, along the largest dimension.", + }, + blindspot_reps: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: "Blindspot measurement repetitions", + default: 5, + description: + "How many times to measure the blindspot location? If 0, blindspot will not be detected, and viewing distance and degree data not computed.", + }, + blindspot_prompt: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Blindspot prompt", + default: ` +

Now we will quickly measure how far away you are sitting.

+
+
    +
  1. Put your left hand on the space bar.
  2. +
  3. Cover your right eye with your right hand.
  4. +
  5. Using your left eye, focus on the black square. Keep your focus on the black square.
  6. +
  7. The red ball will disappear as it moves from right to left. Press the space bar as soon as the ball disappears.
  8. +
+
+

Press the space bar when you are ready to begin.

+ `, + description: "HTML-formatted prompt to be shown on the screen during blindspot estimates." + }, + // blindspot_start_prompt: { + // type: jsPsych.plugins.parameterType.HTML_STRING, + // pretty_name: "Blindspot start prompt", + // default: "Start", + // description: "Content of the start button for the blindspot tasks.", + // }, + blindspot_measurements_prompt: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Blindspot measurements prompt", + default: "Remaining measurements: ", + description: "Text accompanying the remaining measures counter", + }, + viewing_distance_report: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Viewing distance report", + default: "

Based on your responses, you are sitting about from the screen.

Does that seem about right?

", + description: + 'If "none" is given, viewing distance will not be reported to the participant', + }, + redo_measurement_button_label: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Re-do measurement button label", + default: 'No, that is not close. Try again.', + description: "Label for the button that can be clicked on the viewing distance report screen to re-do the blindspot estimate(s)." + }, + blindspot_done_prompt: { + type: jsPsych.plugins.parameterType.HTML_STRING, + pretty_name: "Blindspot done prompt", + default: "Yes", + description: "Label for the button that can be clicked on the viewing distance report screen to accept the viewing distance estimate.", + }, + }, + }; + + plugin.trial = function (display_element, trial) { + /* check parameter compatibility */ + if (!(trial.blindspot_reps > 0) && (trial.resize_units == "deg" || trial.resize_units == "degrees")) { + console.error("Blindspot repetitions set to 0, so resizing to degrees of visual angle is not possible!"); + return; + } + + /* some additional parameter configuration */ + let trial_data = { + item_width_mm: trial.item_width_mm, + item_height_mm: trial.item_height_mm, //card dimension: 85.60 × 53.98 mm (3.370 × 2.125 in) + }; + + let blindspot_config_data = { + ball_pos: [], + slider_clck: false, + }; + + let aspect_ratio = trial.item_width_mm / trial.item_height_mm; + + const start_div_height = + aspect_ratio < 1 + ? trial.item_init_size + : Math.round(trial.item_init_size / aspect_ratio); + const start_div_width = + aspect_ratio < 1 + ? Math.round(trial.item_init_size * aspect_ratio) + : trial.item_init_size; + const adjust_size = Math.round(start_div_width * 0.1); + + /* create content for first screen, resizing card */ + let pagesize_content = ` +
+
+
+
+
+ ${trial.adjustment_prompt} + +
+ ` + + /* create content for second screen, blind spot */ + let blindspot_content = ` +
+ ${trial.blindspot_prompt} +
+ + ${trial.blindspot_measurements_prompt} +
${trial.blindspot_reps}
+
` + + /* create content for final report screen */ + let report_content = ` +
+
+ ${trial.viewing_distance_report} +
+ + +
+ ` + + display_element.innerHTML = `
` + + const start_time = performance.now(); + startResizePhase(); + + function startResizePhase() { + display_element.querySelector('#content').innerHTML = pagesize_content; + + // Event listeners for mouse-based resize + let dragging = false; + let origin_x, origin_y; + let cx, cy; + const scale_div = display_element.querySelector("#item"); + + function mouseupevent() { + dragging = false; + }; + document.addEventListener("mouseup", mouseupevent); + + function mousedownevent(e) { + e.preventDefault(); + dragging = true; + origin_x = e.pageX; + origin_y = e.pageY; + cx = parseInt(scale_div.style.width); + cy = parseInt(scale_div.style.height); + }; + display_element.querySelector("#jspsych-resize-handle").addEventListener("mousedown", mousedownevent); + + function resizeevent(e) { + if (dragging) { + let dx = e.pageX - origin_x; + let dy = e.pageY - origin_y; + + if (Math.abs(dx) >= Math.abs(dy)) { + scale_div.style.width = + Math.round(Math.max(20, cx + dx * 2)) + "px"; + scale_div.style.height = + Math.round(Math.max(20, cx + dx * 2) / aspect_ratio) + "px"; + } else { + scale_div.style.height = + Math.round(Math.max(20, cy + dy * 2)) + "px"; + scale_div.style.width = + Math.round(aspect_ratio * Math.max(20, cy + dy * 2)) + "px"; + } + } + } + display_element.addEventListener("mousemove", resizeevent); + + display_element.querySelector("#end_resize_phase").addEventListener("click", finishResizePhase); + + } + + function finishResizePhase() { + // add item width info to data + const item_width_px = getScaledItemWidth(); + trial_data["item_width_px"] = Math.round(item_width_px); + const px2mm = convertPixelsToMM(item_width_px); + trial_data["px2mm"] = accurateRound(px2mm, 2); + // check what to do next + if (trial.blindspot_reps > 0) { + startBlindSpotPhase(); + } else { + endTrial(); + } + } + + function startBlindSpotPhase() { + // reset the config data in case we are redoing the measurement + blindspot_config_data = { + ball_pos: [], + slider_clck: false, + }; + 
// add the content to the page + document.querySelector("#content").innerHTML = blindspot_content; + // draw the ball and fixation square + drawBall(); + // wait for a spacebar to begin the animations + jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: startBall, + valid_responses: [' '], + rt_method: 'performance', + allow_held_keys: false, + persist: false + }) + } + + function startBall() { + ball_position_listener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: recordPosition, + valid_responses: [' '], + rt_method: 'performance', + allow_held_keys: false, + persist: true + }); + animateBall(); + } + + function finishBlindSpotPhase() { + ball.stop(); + + jsPsych.pluginAPI.cancelAllKeyboardResponses(); + + if(trial.viewing_distance_report == 'none'){ + endTrial(); + } else { + showReport(); + } + } + + function showReport() { + // Display data + display_element.querySelector("#content").innerHTML = report_content; + display_element.querySelector('#distance-estimate').innerHTML = ` + ${Math.round(trial_data["view_dist_mm"] / 10)} cm (${Math.round(trial_data["view_dist_mm"]*0.0393701)} inches) + ` + + display_element.querySelector("#redo_blindspot").addEventListener('click', startBlindSpotPhase) + display_element.querySelector("#proceed").addEventListener('click', endTrial); + } + + function computeTransformation() { + trial_data.item_width_deg = + (2 * + Math.atan( + trial_data["item_width_mm"] / 2 / trial_data["view_dist_mm"] + ) * + 180) / + Math.PI; + trial_data.px2deg = + trial_data["item_width_px"] / trial_data.item_width_deg; // size of item in pixels divided by size of item in degrees of visual angle + + let px2unit_scr = 0; + switch (trial.resize_units) { + case "cm": + case "centimeters": + px2unit_scr = trial_data["px2mm"] * 10; // pixels per centimeter + break; + case "inch": + case "inches": + px2unit_scr = trial_data["px2mm"] * 25.4; // pixels per inch + break; + case "deg": + case "degrees": + px2unit_scr = 
trial_data["px2deg"]; // pixels per degree of visual angle + break; + } + if (px2unit_scr > 0) { + // scale the window + scale_factor = px2unit_scr / trial.pixels_per_unit; + document.getElementById("jspsych-content").style.transform = + "scale(" + scale_factor + ")"; + // pixels have been scaled, so pixels per degree, pixels per mm and pixels per item_width needs to be updated + trial_data.px2deg = trial_data.px2deg / scale_factor; + trial_data.px2mm = trial_data.px2mm / scale_factor; + trial_data.item_width_px = + trial_data.item_width_px / scale_factor; + trial_data.scale_factor = scale_factor; + } + + if (trial.blindspot_reps > 0) { + trial_data.win_width_deg = window.innerWidth / trial_data.px2deg; + trial_data.win_height_deg = + window.innerHeight / trial_data.px2deg; + } else { + // delete degree related properties + delete trial_data.px2deg; + delete trial_data.item_width_deg; + } + } + + function endTrial() { + + // finish trial + trial_data.rt = performance.now() - start_time; + + // remove lingering event listeners, just in case + jsPsych.pluginAPI.cancelAllKeyboardResponses(); + + // compute final data + computeTransformation(); + + // clear the display + display_element.innerHTML = ""; + + // finish the trial + jsPsych.finishTrial(trial_data); + + } + + function getScaledItemWidth() { + return document.querySelector('#item').getBoundingClientRect().width; + } + + function drawBall(pos = 180) { + // pos: define where the fixation square should be. 
+ var mySVG = SVG("svgDiv"); + const rectX = trial_data["px2mm"] * pos; + const ballX = rectX * 0.6; // define where the ball is + var ball = mySVG.circle(30).move(ballX, 50).fill("#f00"); + window.ball = ball; + var square = mySVG.rect(30, 30).move(Math.min(rectX - 50, 950), 50); //square position + blindspot_config_data["square_pos"] = accurateRound(square.cx(), 2); + blindspot_config_data["rectX"] = rectX; + blindspot_config_data["ballX"] = ballX; + } + + function animateBall() { + ball + .animate(7000) + .during(function (pos) { + moveX = -pos * blindspot_config_data["ballX"]; + window.moveX = moveX; + moveY = 0; + ball.attr({ transform: "translate(" + moveX + "," + moveY + ")" }); //jqueryToVanilla: el.getAttribute(''); + }) + .loop(true, false) + .after(function () { + animateBall(); + }); + } + + function recordPosition() { + // angle: define horizontal blind spot entry point position in degrees. + const angle = 13.5; + + blindspot_config_data["ball_pos"].push(accurateRound(ball.cx() + moveX, 2)); + var sum = blindspot_config_data["ball_pos"].reduce((a, b) => a + b, 0); + var ballPosLen = blindspot_config_data["ball_pos"].length; + blindspot_config_data["avg_ball_pos"] = accurateRound(sum / ballPosLen, 2); + var ball_sqr_distance = + (blindspot_config_data["square_pos"] - blindspot_config_data["avg_ball_pos"]) / + trial_data["px2mm"]; + var viewDistance = ball_sqr_distance / Math.tan(Math.radians(angle)); + trial_data["view_dist_mm"] = accurateRound(viewDistance, 2); + + //counter and stop + var counter = Number(document.querySelector("#click").textContent); + counter = counter - 1; + document.querySelector("#click").textContent = Math.max(counter, 0); + if (counter <= 0) { + finishBlindSpotPhase(); + return; + } else { + ball.stop(); + animateBall(); + } + + } + + function convertPixelsToMM(item_width_px){ + const px2mm = item_width_px / trial_data["item_width_mm"]; + return px2mm; + } + + function accurateRound(value, decimals){ + return 
Number(Math.round(value+'e'+decimals)+'e-'+decimals); + } + + }; + + //helper function for radians + // Converts from degrees to radians. + Math.radians = function (degrees) { + return (degrees * Math.PI) / 180; + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-visual-search-circle.js b/LetterDMS/jspsych/plugins/jspsych-visual-search-circle.js new file mode 100644 index 0000000..46998fe --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-visual-search-circle.js @@ -0,0 +1,259 @@ +/** + * + * jspsych-visual-search-circle + * Josh de Leeuw + * + * display a set of objects, with or without a target, equidistant from fixation + * subject responds to whether or not the target is present + * + * based on code written for psychtoolbox by Ben Motz + * + * documentation: docs.jspsych.org + * + **/ + +jsPsych.plugins["visual-search-circle"] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('visual-search-circle', 'target', 'image'); + jsPsych.pluginAPI.registerPreload('visual-search-circle', 'foil', 'image'); + jsPsych.pluginAPI.registerPreload('visual-search-circle', 'fixation_image', 'image'); + + plugin.info = { + name: 'visual-search-circle', + description: '', + parameters: { + target: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Target', + default: undefined, + description: 'The image to be displayed.' + }, + foil: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Foil', + default: undefined, + description: 'Path to image file that is the foil/distractor.' + }, + fixation_image: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Fixation image', + default: undefined, + description: 'Path to image file that is a fixation target.' + }, + set_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Set size', + default: undefined, + description: 'How many items should be displayed?' 
+ }, + target_present: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Target present', + default: true, + description: 'Is the target present?' + }, + target_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Target size', + array: true, + default: [50, 50], + description: 'Two element array indicating the height and width of the search array element images.' + }, + fixation_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Fixation size', + array: true, + default: [16, 16], + description: 'Two element array indicating the height and width of the fixation image.' + }, + circle_diameter: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Circle diameter', + default: 250, + description: 'The diameter of the search array circle in pixels.' + }, + target_present_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Target present key', + default: 'j', + description: 'The key to press if the target is present in the search array.' + }, + target_absent_key: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Target absent key', + default: 'f', + description: 'The key to press if the target is not present in the search array.' + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: null, + description: 'The maximum duration to wait for a response.' + }, + fixation_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Fixation duration', + default: 1000, + description: 'How long to show the fixation image for before the search array (in milliseconds).' 
+ } + } + } + + plugin.trial = function(display_element, trial) { + + // circle params + var diam = trial.circle_diameter; // pixels + var radi = diam / 2; + var paper_size = diam + trial.target_size[0]; + + // stimuli width, height + var stimh = trial.target_size[0]; + var stimw = trial.target_size[1]; + var hstimh = stimh / 2; + var hstimw = stimw / 2; + + // fixation location + var fix_loc = [Math.floor(paper_size / 2 - trial.fixation_size[0] / 2), Math.floor(paper_size / 2 - trial.fixation_size[1] / 2)]; + + // possible stimulus locations on the circle + var display_locs = []; + var possible_display_locs = trial.set_size; + var random_offset = Math.floor(Math.random() * 360); + for (var i = 0; i < possible_display_locs; i++) { + display_locs.push([ + Math.floor(paper_size / 2 + (cosd(random_offset + (i * (360 / possible_display_locs))) * radi) - hstimw), + Math.floor(paper_size / 2 - (sind(random_offset + (i * (360 / possible_display_locs))) * radi) - hstimh) + ]); + } + + // get target to draw on + display_element.innerHTML += '
'; + var paper = display_element.querySelector("#jspsych-visual-search-circle-container"); + + // check distractors - array? + if(!Array.isArray(trial.foil)){ + fa = []; + for(var i=0; i"; + + // wait + jsPsych.pluginAPI.setTimeout(function() { + // after wait is over + show_search_array(); + }, trial.fixation_duration); + } + + function show_search_array() { + + var search_array_images = []; + + var to_present = []; + if(trial.target_present){ + to_present.push(trial.target); + } + to_present = to_present.concat(trial.foil); + + for (var i = 0; i < display_locs.length; i++) { + + paper.innerHTML += ""; + + } + + var trial_over = false; + + var after_response = function(info) { + + trial_over = true; + + var correct = false; + + if ((jsPsych.pluginAPI.compareKeys(info.key, trial.target_present_key)) && trial.target_present || + (jsPsych.pluginAPI.compareKeys(info.key, trial.target_absent_key)) && !trial.target_present) { + correct = true; + } + + clear_display(); + + end_trial(info.rt, correct, info.key); + + } + + var valid_keys = [trial.target_present_key, trial.target_absent_key]; + + key_listener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: valid_keys, + rt_method: 'performance', + persist: false, + allow_held_key: false + }); + + if (trial.trial_duration !== null) { + + jsPsych.pluginAPI.setTimeout(function() { + + if (!trial_over) { + + jsPsych.pluginAPI.cancelKeyboardResponse(key_listener); + + trial_over = true; + + var rt = null; + var correct = 0; + var key_press = null; + + clear_display(); + + end_trial(rt, correct, key_press); + } + }, trial.trial_duration); + + } + + function clear_display() { + display_element.innerHTML = ''; + } + } + + + function end_trial(rt, correct, key_press) { + + // data saving + var trial_data = { + correct: correct, + rt: rt, + response: key_press, + locations: display_locs, + target_present: trial.target_present, + set_size: trial.set_size + }; + + // go to next trial + 
jsPsych.finishTrial(trial_data); + } + }; + + // helper function for determining stimulus locations + + function cosd(num) { + return Math.cos(num / 180 * Math.PI); + } + + function sind(num) { + return Math.sin(num / 180 * Math.PI); + } + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-vsl-animate-occlusion.js b/LetterDMS/jspsych/plugins/jspsych-vsl-animate-occlusion.js new file mode 100644 index 0000000..55c0ca6 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-vsl-animate-occlusion.js @@ -0,0 +1,196 @@ +/** + * jsPsych plugin for showing animations that mimic the experiment described in + * + * Fiser, J., & Aslin, R. N. (2002). Statistical learning of higher-order + * temporal structure from visual shape sequences. Journal of Experimental + * Psychology: Learning, Memory, and Cognition, 28(3), 458. + * + * Josh de Leeuw + * + * documentation: docs.jspsych.org + * + */ + +jsPsych.plugins['vsl-animate-occlusion'] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('vsl-animate-occlusion', 'stimuli', 'image'); + + plugin.info = { + name: 'vsl-animate-occlusion', + description: '', + parameters: { + stimuli: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Stimuli', + default: undefined, + array: true, + description: 'A stimulus is a path to an image file.' + }, + choices: { + type: jsPsych.plugins.parameterType.KEY, + pretty_name: 'Choices', + array: true, + default: jsPsych.ALL_KEYS, + description: 'This array contains the keys that the subject is allowed to press in order to respond to the stimulus. ' + }, + canvas_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Canvas size', + array: true, + default: [400,400], + description: 'Array specifying the width and height of the area that the animation will display in.' 
+ }, + image_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Image size', + array: true, + default: [100,100], + description: 'Array specifying the width and height of the images to show.' + }, + initial_direction: { + type: jsPsych.plugins.parameterType.SELECT, + pretty_name: 'Initial direction', + choices: ['left','right'], + default: 'left', + description: 'Which direction the stimulus should move first.' + }, + occlude_center: { + type: jsPsych.plugins.parameterType.BOOL, + pretty_name: 'Occlude center', + default: true, + description: 'If true, display a rectangle in the center of the screen that is just wide enough to occlude the image completely as it passes behind.' + }, + cycle_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Cycle duration', + default: 1000, + description: 'How long it takes for a stimulus in the sequence to make a complete cycle.' + }, + pre_movement_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Pre movement duration', + default: 500, + description: 'How long to wait before the stimuli starts moving from behind the center rectangle.' + } + } + } + + plugin.trial = function(display_element, trial) { + + // variable to keep track of timing info and responses + var start_time = 0; + var responses = []; + + var directions = [ + [{ + params: { + x: trial.canvas_size[0] - trial.image_size[0] + }, + ms: trial.cycle_duration / 2 + }, { + params: { + x: trial.canvas_size[0] / 2 - trial.image_size[0] / 2 + }, + ms: trial.cycle_duration / 2 + }], + [{ + params: { + x: 0 + }, + ms: trial.cycle_duration / 2 + }, { + params: { + x: trial.canvas_size[0] / 2 - trial.image_size[0] / 2 + }, + ms: trial.cycle_duration / 2 + }] + ]; + + var which_image = 0; + var next_direction = (trial.initial_direction == "right") ? 0 : 1; + + function next_step() { + if (trial.stimuli.length == which_image) { + endTrial(); + } else { + + var d = directions[next_direction]; + next_direction === 0 ? 
next_direction = 1 : next_direction = 0; + var i = trial.stimuli[which_image]; + which_image++; + + c.animate(d[0].params, d[0].ms, mina.linear, function() { + c.animate(d[1].params, d[1].ms, mina.linear, function() { + next_step(); + }); + }); + + c.attr({ + href: i + }); + + // start timer for this trial + start_time = performance.now(); + } + } + + display_element.innerHTML = ""; + + var paper = Snap("#jspsych-vsl-animate-occlusion-canvas"); + + var c = paper.image(trial.stimuli[which_image], trial.canvas_size[0] / 2 - trial.image_size[0] / 2, trial.canvas_size[1] / 2 - trial.image_size[1] / 2, trial.image_size[0], trial.image_size[1]).attr({ + "id": 'jspsych-vsl-animate-occlusion-moving-image' + }); + + display_element.querySelector('#jspsych-vsl-animate-occlusion-moving-image').removeAttribute('preserveAspectRatio'); + + if (trial.occlude_center) { + paper.rect((trial.canvas_size[0] / 2) - (trial.image_size[0] / 2), 0, trial.image_size[0], trial.canvas_size[1]).attr({ + fill: "#000" + }); + } + + // add key listener + var after_response = function(info) { + responses.push({ + key: info.key, + stimulus: which_image - 1, + rt: info.rt + }); + } + + key_listener = jsPsych.pluginAPI.getKeyboardResponse({ + callback_function: after_response, + valid_responses: trial.choices, + rt_method: 'performance', + persist: true, + allow_held_key: false + }); + + if (trial.pre_movement_duration > 0) { + jsPsych.pluginAPI.setTimeout(function() { + next_step(); + }, trial.pre_movement_duration); + } else { + next_step(); + } + + function endTrial() { + + display_element.innerHTML = ''; + + jsPsych.pluginAPI.cancelKeyboardResponse(key_listener); + + var trial_data = { + stimuli: trial.stimuli, + response: responses + }; + + jsPsych.finishTrial(trial_data); + } + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-vsl-grid-scene.js b/LetterDMS/jspsych/plugins/jspsych-vsl-grid-scene.js new file mode 100644 index 0000000..7986ef6 --- /dev/null +++ 
b/LetterDMS/jspsych/plugins/jspsych-vsl-grid-scene.js @@ -0,0 +1,103 @@ +/** + * jsPsych plugin for showing scenes that mimic the experiments described in + * + * Fiser, J., & Aslin, R. N. (2001). Unsupervised statistical learning of + * higher-order spatial structures from visual scenes. Psychological science, + * 12(6), 499-504. + * + * Josh de Leeuw + * + * documentation: docs.jspsych.org + * + */ + +jsPsych.plugins['vsl-grid-scene'] = (function() { + + var plugin = {}; + + jsPsych.pluginAPI.registerPreload('vsl-grid-scene', 'stimuli', 'image'); + + plugin.info = { + name: 'vsl-grid-scene', + description: '', + parameters: { + stimuli: { + type: jsPsych.plugins.parameterType.IMAGE, + pretty_name: 'Stimuli', + array: true, + default: undefined, + description: 'An array that defines a grid.' + }, + image_size: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Image size', + array: true, + default: [100,100], + description: 'Array specifying the width and height of the images to show.' + }, + trial_duration: { + type: jsPsych.plugins.parameterType.INT, + pretty_name: 'Trial duration', + default: 2000, + description: 'How long to show the stimulus for in milliseconds.' + } + } + } + + plugin.trial = function(display_element, trial) { + + display_element.innerHTML = plugin.generate_stimulus(trial.stimuli, trial.image_size); + + jsPsych.pluginAPI.setTimeout(function() { + endTrial(); + }, trial.trial_duration); + + function endTrial() { + + display_element.innerHTML = ''; + + var trial_data = { + stimulus: trial.stimuli + }; + + jsPsych.finishTrial(trial_data); + } + }; + + plugin.generate_stimulus = function(pattern, image_size) { + var nrows = pattern.length; + var ncols = pattern[0].length; + + // create blank element to hold code that we generate + var html = '
'; + + // create table + html += ''; + + for (var row = 0; row < nrows; row++) { + html += ''; + + for (var col = 0; col < ncols; col++) { + html += ''; + } + html += ''; + } + + html += '
'+ + '
'; + if (pattern[row][col] !== 0) { + html += ''; + } + html += '
'; + html += '
'; + html += '
'; + + return html; + + }; + + return plugin; +})(); diff --git a/LetterDMS/jspsych/plugins/jspsych-webgazer-calibrate.js b/LetterDMS/jspsych/plugins/jspsych-webgazer-calibrate.js new file mode 100644 index 0000000..79cc368 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-webgazer-calibrate.js @@ -0,0 +1,161 @@ +/** + * jspsych-webgazer-calibrate + * Josh de Leeuw + **/ + +jsPsych.plugins["webgazer-calibrate"] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'webgazer-calibrate', + description: '', + parameters: { + calibration_points: { + type: jsPsych.plugins.parameterType.INT, + default: [[10,10], [10,50], [10,90], [50,10], [50,50], [50,90], [90,10], [90,50], [90,90]] + }, + calibration_mode: { + type: jsPsych.plugins.parameterType.STRING, + default: 'click', // options: 'click', 'view' + }, + point_size:{ + type: jsPsych.plugins.parameterType.INT, + default: 20 + }, + repetitions_per_point: { + type: jsPsych.plugins.parameterType.INT, + default: 1 + }, + randomize_calibration_order: { + type: jsPsych.plugins.parameterType.BOOL, + default: false + }, + time_to_saccade: { + type: jsPsych.plugins.parameterType.INT, + default: 1000 + }, + time_per_point: { + type: jsPsych.plugins.parameterType.STRING, + default: 1000 + } + } + } + + plugin.trial = function(display_element, trial) { + + var html = ` +
+
` + + display_element.innerHTML = html; + + var wg_container = display_element.querySelector('#webgazer-calibrate-container'); + + var reps_completed = 0; + var points_completed = -1; + var cal_points = null; + + calibrate(); + + function calibrate(){ + jsPsych.extensions['webgazer'].resume(); + if(trial.calibration_mode == 'click'){ + jsPsych.extensions['webgazer'].startMouseCalibration(); + } + next_calibration_round(); + } + + function next_calibration_round(){ + if(trial.randomize_calibration_order){ + cal_points = jsPsych.randomization.shuffle(trial.calibration_points); + } else { + cal_points = trial.calibration_points; + } + points_completed = -1; + next_calibration_point(); + } + + function next_calibration_point(){ + points_completed++; + if(points_completed == cal_points.length){ + reps_completed++; + if(reps_completed == trial.repetitions_per_point){ + calibration_done(); + } else { + next_calibration_round(); + } + } else { + var pt = cal_points[points_completed]; + calibration_display_gaze_only(pt); + } + } + + function calibration_display_gaze_only(pt){ + var pt_html = `
` + wg_container.innerHTML = pt_html; + + var pt_dom = wg_container.querySelector('#calibration-point'); + + if(trial.calibration_mode == 'click'){ + pt_dom.style.cursor = 'pointer'; + pt_dom.addEventListener('click', function(){ + next_calibration_point(); + }) + } + + if(trial.calibration_mode == 'view'){ + var br = pt_dom.getBoundingClientRect(); + var x = br.left + br.width / 2; + var y = br.top + br.height / 2; + + var pt_start_cal = performance.now() + trial.time_to_saccade; + var pt_finish = performance.now() + trial.time_to_saccade + trial.time_per_point; + + requestAnimationFrame(function watch_dot(){ + + if(performance.now() > pt_start_cal){ + jsPsych.extensions['webgazer'].calibratePoint(x,y,'click'); + } + if(performance.now() < pt_finish){ + requestAnimationFrame(watch_dot); + } else { + next_calibration_point(); + } + }) + } + } + + function calibration_done(){ + if(trial.calibration_mode == 'click'){ + jsPsych.extensions['webgazer'].stopMouseCalibration(); + } + wg_container.innerHTML = ""; + end_trial(); + } + + // function to end trial when it is time + function end_trial() { + jsPsych.extensions['webgazer'].pause(); + jsPsych.extensions['webgazer'].hidePredictions(); + jsPsych.extensions['webgazer'].hideVideo(); + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // gather the data to store for the trial + var trial_data = { + + }; + + // clear the display + display_element.innerHTML = ''; + + // move on to the next trial + jsPsych.finishTrial(trial_data); + }; + + }; + + return plugin; + })(); \ No newline at end of file diff --git a/LetterDMS/jspsych/plugins/jspsych-webgazer-init-camera.js b/LetterDMS/jspsych/plugins/jspsych-webgazer-init-camera.js new file mode 100644 index 0000000..1dc2640 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-webgazer-init-camera.js @@ -0,0 +1,139 @@ +/** + * jspsych-webgazer-init-camera + * Josh de Leeuw + **/ + + jsPsych.plugins["webgazer-init-camera"] = (function () { + 
+ var plugin = {}; + + plugin.info = { + name: 'webgazer-init-camera', + description: '', + parameters: { + instructions: { + type: jsPsych.plugins.parameterType.HTML_STRING, + default: ` +

Position your head so that the webcam has a good view of your eyes.

+

Center your face in the box and look directly towards the camera.

+

It is important that you try and keep your head reasonably still throughout the experiment, so please take a moment to adjust your setup to be comfortable.

+

When your face is centered in the box and the box is green, you can click to continue.

` + }, + button_text: { + type: jsPsych.plugins.parameterType.STRING, + default: 'Continue' + } + } + } + + plugin.trial = function (display_element, trial) { + + var start_time = performance.now(); + var load_time; + + if (!jsPsych.extensions.webgazer.isInitialized()) { + jsPsych.extensions.webgazer.start().then(function () { + showTrial(); + }).catch(function () { + display_element.innerHTML = `

The experiment cannot continue because the eye tracker failed to start.

+

This may be because of a technical problem or because you did not grant permission for the page to use your camera.

` + }); + } else { + showTrial(); + } + + function showTrial() { + + load_time = Math.round(performance.now() - start_time); + + var style = ` + + ` + document.querySelector('head').insertAdjacentHTML('beforeend', style); + + var html = ` +
+
` + + display_element.innerHTML = html; + + jsPsych.extensions['webgazer'].showVideo(); + jsPsych.extensions['webgazer'].resume(); + + var wg_container = display_element.querySelector('#webgazer-init-container'); + + + wg_container.innerHTML = ` +
+ ${trial.instructions} + +
` + + if(is_face_detect_green()){ + document.querySelector('#jspsych-wg-cont').disabled = false; + } else { + var observer = new MutationObserver(face_detect_event_observer); + observer.observe(document, { + attributes: true, + attributeFilter: ['style'], + subtree: true + }); + } + + document.querySelector('#jspsych-wg-cont').addEventListener('click', function () { + if(observer){ + observer.disconnect(); + } + end_trial(); + }); + } + + function is_face_detect_green(){ + if(document.querySelector("#webgazerFaceFeedbackBox")){ + return document.querySelector('#webgazerFaceFeedbackBox').style.borderColor == "green" + } else { + return false; + } + } + + function face_detect_event_observer(mutationsList, observer) { + if (mutationsList[0].target == document.querySelector('#webgazerFaceFeedbackBox')) { + if (mutationsList[0].type == 'attributes' && mutationsList[0].target.style.borderColor == "green") { + document.querySelector('#jspsych-wg-cont').disabled = false; + } + if (mutationsList[0].type == 'attributes' && mutationsList[0].target.style.borderColor == "red") { + document.querySelector('#jspsych-wg-cont').disabled = true; + } + } + } + + // function to end trial when it is time + function end_trial() { + + jsPsych.extensions['webgazer'].pause(); + jsPsych.extensions['webgazer'].hideVideo(); + + + // kill any remaining setTimeout handlers + jsPsych.pluginAPI.clearAllTimeouts(); + + // gather the data to store for the trial + var trial_data = { + load_time: load_time + }; + + // clear the display + display_element.innerHTML = ''; + + document.querySelector('#webgazer-center-style').remove(); + + // move on to the next trial + jsPsych.finishTrial(trial_data); + }; + + }; + + return plugin; +})(); \ No newline at end of file diff --git a/LetterDMS/jspsych/plugins/jspsych-webgazer-validate.js b/LetterDMS/jspsych/plugins/jspsych-webgazer-validate.js new file mode 100644 index 0000000..2229a64 --- /dev/null +++ b/LetterDMS/jspsych/plugins/jspsych-webgazer-validate.js 
@@ -0,0 +1,314 @@ +/** + * jspsych-webgazer-validate + * Josh de Leeuw + **/ + + jsPsych.plugins["webgazer-validate"] = (function() { + + var plugin = {}; + + plugin.info = { + name: 'webgazer-validate', + description: '', + parameters: { + validation_points: { + type: jsPsych.plugins.parameterType.INT, + default: [[10,10], [10,50], [10,90], [50,10], [50,50], [50,90], [90,10], [90,50], [90,90]] + }, + validation_point_coordinates: { + type: jsPsych.plugins.parameterType.STRING, + default: 'percent' // options: 'percent', 'center-offset-pixels' + }, + roi_radius: { + type: jsPsych.plugins.parameterType.INT, + default: 200 + }, + randomize_validation_order: { + type: jsPsych.plugins.parameterType.BOOL, + default: false + }, + time_to_saccade: { + type: jsPsych.plugins.parameterType.INT, + default: 1000 + }, + validation_duration: { + type: jsPsych.plugins.parameterType.INT, + default: 2000 + }, + point_size:{ + type: jsPsych.plugins.parameterType.INT, + default: 20 + }, + show_validation_data: { + type: jsPsych.plugins.parameterType.BOOL, + default: false + } + } + } + + plugin.trial = function(display_element, trial) { + + var trial_data = {} + trial_data.raw_gaze = []; + trial_data.percent_in_roi = []; + trial_data.average_offset = []; + trial_data.validation_points = null; + + var html = ` +
+
` + + display_element.innerHTML = html; + + var wg_container = display_element.querySelector('#webgazer-validate-container'); + + var points_completed = -1; + var val_points = null; + var start = performance.now(); + + validate(); + + function validate(){ + + if(trial.randomize_validation_order){ + val_points = jsPsych.randomization.shuffle(trial.validation_points); + } else { + val_points = trial.validation_points; + } + trial_data.validation_points = val_points; + points_completed = -1; + //jsPsych.extensions['webgazer'].resume(); + jsPsych.extensions.webgazer.startSampleInterval(); + //jsPsych.extensions.webgazer.showPredictions(); + next_validation_point(); + } + + function next_validation_point(){ + points_completed++; + if(points_completed == val_points.length){ + validation_done(); + } else { + var pt = val_points[points_completed]; + validation_display(pt); + } + } + + function validation_display(pt){ + var pt_html = drawValidationPoint(pt[0], pt[1]); + wg_container.innerHTML = pt_html; + + var pt_dom = wg_container.querySelector('.validation-point'); + + var br = pt_dom.getBoundingClientRect(); + var x = br.left + br.width / 2; + var y = br.top + br.height / 2; + + var pt_start_val = performance.now() + trial.time_to_saccade; + var pt_finish = pt_start_val + trial.validation_duration; + + var pt_data = []; + + var cancelGazeUpdate = jsPsych.extensions['webgazer'].onGazeUpdate(function(prediction){ + if(performance.now() > pt_start_val){ + pt_data.push({x: prediction.x, y: prediction.y, dx: prediction.x - x, dy: prediction.y - y, t: Math.round(prediction.t-start)}); + } + }); + + requestAnimationFrame(function watch_dot(){ + if(performance.now() < pt_finish){ + requestAnimationFrame(watch_dot); + } else { + trial_data.raw_gaze.push(pt_data); + cancelGazeUpdate(); + + next_validation_point(); + } + }); + + } + + function drawValidationPoint(x,y){ + if(trial.validation_point_coordinates == 'percent'){ + return drawValidationPoint_PercentMode(x,y); + } + 
if(trial.validation_point_coordinates == 'center-offset-pixels'){ + return drawValidationPoint_CenterOffsetMode(x,y); + } + } + + function drawValidationPoint_PercentMode(x,y){ + return `
` + } + + function drawValidationPoint_CenterOffsetMode(x,y){ + return `
` + } + + function drawCircle(target_x, target_y, dx, dy, r){ + if(trial.validation_point_coordinates == 'percent'){ + return drawCircle_PercentMode(target_x, target_y, dx, dy, r); + } + if(trial.validation_point_coordinates == 'center-offset-pixels'){ + return drawCircle_CenterOffsetMode(target_x, target_y, dx, dy, r); + } + } + + function drawCircle_PercentMode(target_x, target_y, dx, dy, r){ + var html = ` +
+ ` + return html; + } + + function drawCircle_CenterOffsetMode(target_x, target_y, dx, dy, r){ + var html = ` +
+ ` + return html; + } + + function drawRawDataPoint(target_x, target_y, dx, dy, ){ + if(trial.validation_point_coordinates == 'percent'){ + return drawRawDataPoint_PercentMode(target_x, target_y, dx, dy); + } + if(trial.validation_point_coordinates == 'center-offset-pixels'){ + return drawRawDataPoint_CenterOffsetMode(target_x, target_y, dx, dy); + } + } + + function drawRawDataPoint_PercentMode(target_x, target_y, dx, dy){ + var color = Math.sqrt(dx*dx + dy*dy) <= trial.roi_radius ? '#afa' : '#faa'; + return `
` + } + + function drawRawDataPoint_CenterOffsetMode(target_x, target_y, dx, dy){ + var color = Math.sqrt(dx*dx + dy*dy) <= trial.roi_radius ? '#afa' : '#faa'; + return `
` + } + + function median(arr){ + var mid = Math.floor(arr.length/2); + var sorted_arr = arr.sort((a,b) => a-b); + if(arr.length % 2 == 0){ + return sorted_arr[mid-1] + sorted_arr[mid] / 2; + } else { + return sorted_arr[mid]; + } + } + + function calculateGazeCentroid(gazeData){ + + var x_diff_m = gazeData.reduce(function(accumulator, currentValue, index){ + accumulator += currentValue.dx; + if(index == gazeData.length-1){ + return accumulator / gazeData.length; + } else { + return accumulator; + } + }, 0); + + var y_diff_m = gazeData.reduce(function(accumulator, currentValue, index){ + accumulator += currentValue.dy; + if(index == gazeData.length-1){ + return accumulator / gazeData.length; + } else { + return accumulator; + } + }, 0); + + var median_distance = median(gazeData.map(function(x){ return(Math.sqrt(Math.pow(x.dx-x_diff_m,2) + Math.pow(x.dy-y_diff_m,2)))})); + + return { + x: x_diff_m, + y: y_diff_m, + r: median_distance + } + } + + function calculatePercentInROI(gazeData){ + var distances = gazeData.map(function(p){ + return(Math.sqrt(Math.pow(p.dx,2) + Math.pow(p.dy,2))) + }); + var sum_in_roi = distances.reduce(function(accumulator, currentValue){ + if(currentValue <= trial.roi_radius){ + accumulator++; + } + return accumulator; + }, 0); + var percent = sum_in_roi / gazeData.length * 100; + return percent; + } + + function calculateSampleRate(gazeData){ + var mean_diff = []; + for(var i=0; i 1){ + var t_diff = []; + for(var j=1; j 0){ + return 1000 / (mean_diff.reduce(function(a,b) { return(a+b) }, 0) / mean_diff.length); + } else { + return null; + } + + } + + function validation_done(){ + trial_data.samples_per_sec = calculateSampleRate(trial_data.raw_gaze).toFixed(2); + for(var i=0; i + + + + + + + + ANT + + + + + + + + + + + \ No newline at end of file diff --git a/LetterDMS/testLoop.html b/LetterDMS/testLoop.html new file mode 100644 index 0000000..de15460 --- /dev/null +++ b/LetterDMS/testLoop.html @@ -0,0 +1,60 @@ + + + + + + + + + + + \ No 
newline at end of file diff --git a/Questionnaires/BLANK.html b/Questionnaires/BLANK.html new file mode 100644 index 0000000..31351cb --- /dev/null +++ b/Questionnaires/BLANK.html @@ -0,0 +1,103 @@ + + + + + + + + + Questionnaire: FILL-IN + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/CSS/LikertQuestionnaires.css b/Questionnaires/CSS/LikertQuestionnaires.css new file mode 100644 index 0000000..cfd0cd4 --- /dev/null +++ b/Questionnaires/CSS/LikertQuestionnaires.css @@ -0,0 +1,35 @@ +body {background-color: rgb(150, 150, 150)} + + +.jspsych-display-element { + font-size: 25px; + color: black; +} +.jspsych-content { + max-width: 100%; +} + +#jspsych-survey-likert-preamble { + color:black; +} + +#jspsych-survey-likert-opts { + color:black; +} + +.jspsych-survey-likert-opts li input[type=radio] { + display: block; + position: relative; + top: 0; + left: 50%; + margin-left: -6px; +} + +.jspsych-survey-multi-choice-response-0 input[type=radio] { + display: block; + position: relative; + top: 0; + left: 50%; + margin-left: -6px; +} + diff --git a/Questionnaires/CSS/jspsych.css b/Questionnaires/CSS/jspsych.css new file mode 100644 index 0000000..c82f2e4 --- /dev/null +++ b/Questionnaires/CSS/jspsych.css @@ -0,0 +1,206 @@ +/* + * CSS for jsPsych experiments. + * + * This stylesheet provides minimal styling to make jsPsych + * experiments look polished without any additional styles. 
+ */ + + @import url(https://fonts.googleapis.com/css?family=Open+Sans:400italic,700italic,400,700); + +/* Container holding jsPsych content */ + + .jspsych-display-element { + display: flex; + flex-direction: column; + overflow-y: auto; + } + + .jspsych-display-element:focus { + outline: none; + } + + .jspsych-content-wrapper { + display: flex; + margin: auto; + flex: 1 1 100%; + width: 100%; + } + + .jspsych-content { + max-width: 95%; /* this is mainly an IE 10-11 fix */ + text-align: center; + margin: auto; /* this is for overflowing content */ + } + + .jspsych-top { + align-items: flex-start; + } + + .jspsych-middle { + align-items: center; + } + +/* fonts and type */ + +.jspsych-display-element { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 18px; + line-height: 1.6em; +} + +/* Form elements like input fields and buttons */ + +.jspsych-display-element input[type="text"] { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 14px; +} + +/* borrowing Bootstrap style for btn elements, but combining styles a bit */ +.jspsych-btn { + display: inline-block; + padding: 6px 12px; + margin: 0px; + font-size: 14px; + font-weight: 400; + font-family: 'Open Sans', 'Arial', sans-serif; + cursor: pointer; + line-height: 1.4; + text-align: center; + white-space: nowrap; + vertical-align: middle; + background-image: none; + border: 1px solid transparent; + border-radius: 4px; + color: #333; + background-color: #fff; + border-color: #ccc; +} + +/* only apply the hover style on devices with a mouse/pointer that can hover - issue #977 */ +@media (hover: hover) { + .jspsych-btn:hover { + background-color: #ddd; + border-color: #aaa; + } +} + +.jspsych-btn:active { + background-color: #ddd; + border-color:#000000; +} + +.jspsych-btn:disabled { + background-color: #eee; + color: #aaa; + border-color: #ccc; + cursor: not-allowed; +} + +/* custom style for input[type="range] (slider) to improve alignment between positions and labels */ + +.jspsych-slider { + 
appearance: none; + -webkit-appearance: none; + -moz-appearance: none; + width: 100%; + background: transparent; +} +.jspsych-slider:focus { + outline: none; +} +/* track */ +.jspsych-slider::-webkit-slider-runnable-track { + appearance: none; + -webkit-appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +.jspsych-slider::-moz-range-track { + appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 5px solid #aaa; +} +.jspsych-slider::-ms-track { + appearance: none; + width: 99%; + height: 14px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 5px solid #aaa; +} +/* thumb */ +.jspsych-slider::-webkit-slider-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 15px; + background: #ffffff; + cursor: pointer; + -webkit-appearance: none; + margin-top: -9px; +} +.jspsych-slider::-moz-range-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 15px; + background: #ffffff; + cursor: pointer; +} +.jspsych-slider::-ms-thumb { + border: 1px solid #666; + height: 20px; + width: 45px; + border-radius: 15px; + background: #ffffff; + cursor: pointer; + margin-top: -2px; +} + +/* jsPsych progress bar */ + +#jspsych-progressbar-container { + color: #555; + border-bottom: 1px solid #dedede; + background-color: #f9f9f9; + margin-bottom: 1em; + text-align: center; + padding: 8px 0px; + width: 100%; + line-height: 1em; +} +#jspsych-progressbar-container span { + font-size: 14px; + padding-right: 14px; +} +#jspsych-progressbar-outer { + background-color: #eee; + width: 50%; + margin: auto; + height: 14px; + display: inline-block; + vertical-align: middle; + box-shadow: inset 0 1px 2px 
rgba(0,0,0,0.1); +} +#jspsych-progressbar-inner { + background-color: #aaa; + width: 0%; + height: 100%; +} + +/* Control appearance of jsPsych.data.displayData() */ +#jspsych-data-display { + text-align: left; +} diff --git a/Questionnaires/CUDITR.html b/Questionnaires/CUDITR.html new file mode 100644 index 0000000..477ecce --- /dev/null +++ b/Questionnaires/CUDITR.html @@ -0,0 +1,102 @@ + + + + + + + + + Questionnaire: CUDITR + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/LawtonIADL.html b/Questionnaires/LawtonIADL.html new file mode 100644 index 0000000..3eb31e2 --- /dev/null +++ b/Questionnaires/LawtonIADL.html @@ -0,0 +1,80 @@ + + + + + + + + + Questionnaire: BDI + + + + + + + + + + + + + + + diff --git a/Questionnaires/PANAS.html b/Questionnaires/PANAS.html new file mode 100644 index 0000000..9bc554b --- /dev/null +++ b/Questionnaires/PANAS.html @@ -0,0 +1,102 @@ + + + + + + + + + Questionnaire: PANAS + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/Readme.md b/Questionnaires/Readme.md new file mode 100644 index 0000000..f831217 --- /dev/null +++ b/Questionnaires/Readme.md @@ -0,0 +1,34 @@ +# This is a selection of questionnaires. + +## Organization +This folder is organized so that all of the questionnaire setup files are in the *assets* folder + +Each questionnaire uses multiple files: + +## NAME_setup_eng.js +_ These are the files in the *assets* folder +- This contains multiple parts stored as javascript (JS) variables. Each unique scale is made by building this file. This separates the code (in the HTML file described below) and the JS file. This also facilitates language translation of questionnaires. +- title +- scale +- if this is a Likert type questionnaires, these are the values used. 
There can be as many scales as needed for the questionnaire +- ReverseScoreDirection +- This allows for reverse scoring of some questions using a true/false flag +- items +- This is an array of objects containing the prompt for each question and the associated scale to use +- instructions + +## NAME.html file +- There is one HTML file for each questionnaire. This file is created with minor modifications from the BLANK.html file. This file contains the presentation of the questionnaire and provides all scoring of the questionnaire. The responses to all questions and the total score are saved as output. +- The only required modification is to change the last + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/assets/CUDITR_setup_eng.js b/Questionnaires/assets/CUDITR_setup_eng.js new file mode 100644 index 0000000..1410e71 --- /dev/null +++ b/Questionnaires/assets/CUDITR_setup_eng.js @@ -0,0 +1,46 @@ +var title = "The Cannabis Use Disorder Identification Test - Revised (CUDIT-R) " + +var scale01 = [ + "Never", + "Monthly or less", + "2-4 times a month", + "2-3 times a week", + "4 or more times a week" + ] + +var scale02 = [ + "Less than 1", + "1 or 2", + "3 or 4", + "5 or 6", + "7 or more" + ] + +var scale03 = [ + "Never", + "Less than monthly", + "Monthly", + "Weekly", + "Daily or almost daily"] + +var scale04 = [ + "Never", + "Yes, but not in the past 6 months", + "Yes, during the past 6 months"] + +var ReverseScoreDirection = [false] +// The last question has three positions which are scored as 0,2,4 +var QuestionScoreWeights = [1,1,1,1,1,1,1,2] + +var items = [ + {prompt: "How often do you use cannabis? 
", labels: scale01}, + {prompt: "How many hours were you “stoned” on a typical day when you had been using cannabis?", labels: scale02}, + {prompt: "How often during the past 6 months did you find that you were not able to stop using cannabis once you had started?",labels: scale03}, + {prompt: "How often during the past 6 months did you fail to do what was normally expected from you because of using cannabis?",labels: scale03}, + {prompt: "How often in the past 6 months have you devoted a great deal of your time to getting, using, or recovering from cannabis? ",labels: scale03}, + {prompt: "How often in the past 6 months have you had a problem with your memory or concentration after using cannabis?",labels: scale03}, + {prompt: "How often do you use cannabis in situations that could be physically hazardous, such as driving, operating machinery, or caring for children:",labels: scale03}, + {prompt: "Have you ever thought about cutting down, or stopping, your use of cannabis?",labels: scale04}, + ] + + var instructions = 'Please answer the following questions about your cannabis use. 
Circle the response that is most correct for you in relation to your cannabis use over the past six months' diff --git a/Questionnaires/assets/LawtonIADL.js b/Questionnaires/assets/LawtonIADL.js new file mode 100644 index 0000000..c092898 --- /dev/null +++ b/Questionnaires/assets/LawtonIADL.js @@ -0,0 +1,98 @@ +var title = "Instrumental Activities of Daily Living Scale" + +var instructions = "" + +var reference = "" + +var ReverseScoreDirection = [false] + +var items = [ + { + prompt: "Ability to Use Telephone", + name: "telephone", + options: ["Operates telephone on own initiative; looks up and dials numbers", + "Dials a few well-known numbers", + "Answers telephone, but does not dial", + "Does not use telephone at all" + ], + values: [1, 1, 1, 0], + required:true + }, + { + prompt: "Shopping", + name: "shopping", + options: ["Takes care of all shopping needs independently", + "Shops independently for small purchases", + "Needs to be accompanied on any shopping trip", + "Completely unable to shop" + ], + values: [1, 0, 0, 0], + required: true + }, + { + prompt: "Food Preparation", + name: "foodPreparation", + options:["Plans, prepares, and serves adequate meals independently", + "Prepares adequate meals if supplied with ingredients", + "Heats and serves prepared meals or prepares meals but does not maintain adequate diet", + "Needs to have meals prepared and served" + ], + values: [1, 0, 0, 0], + required: true + }, + { + prompt: "Housekeeping", + name: "housekeeping", + options: ["Maintains house alone with occasion assistance (heavy work)", + "Performs light daily tasks such as dishwashing, bed making", + "Performs light daily tasks, but cannot maintain acceptable level of cleanliness", + "Needs help with all home maintenance tasks", + "Does not participate in any housekeeping tasks" + ], + values: [1, 1, 1, 1, 0], + required: true + }, + { + prompt: "Laundry", + name: "laundry", + options: ["Does personal laundry completely", + "Launders small items, rinses 
socks, stockings, etc", + "All laundry must be done by others" + ], + values: [1, 1, 0], + required: true + }, + { + prompt: "Mode of Transportation", + name: "transportation", + options: ["Travels independently on public transportation or drives own car", + "Arranges own travel via taxi, but does not otherwise use public transportation", + "Travels on public transportation when assisted or accompanied by another", + "Travel limited to taxi or automobile with assistance of another", + "Does not travel at all" + ], + values: [1, 1, 1, 0, 0], + required: true + }, + { + prompt: "Responsibility for Own Medicine", + name: "medicine", + options: ["Is responsible for taking medication in correct dosages at correct time", + "Takes responsibility if medication is prepared in advance in separate dosages", + "Is not capable of dispensing own medication" + ], + values: [1, 0, 0], + required: true + }, + { + prompt: "Ability to Handle Finances", + name: "finances", + options: ["Manages financial matters independently (budgets, writes checks, pays rent and bills, goes to bank); collects and keeps track of income", + "Manages day-to-day purchases, but needs help with banking, major purchases, etc", + "Incapable of handling money" + ], + values: [1, 1, 0], + required: true + } + ] + diff --git a/Questionnaires/assets/aes_setup_eng.js b/Questionnaires/assets/aes_setup_eng.js new file mode 100644 index 0000000..3ffc1c0 --- /dev/null +++ b/Questionnaires/assets/aes_setup_eng.js @@ -0,0 +1,38 @@ +var title = "Apathy Evaluation Scale (AES)" + + +var scale01 = [ + "Not at all", + "Slightly", + "Somewhat", + "A lot" +] + +var ReverseScoreDirection = [false, false, false, false, false, true, false, false, false, true, true, false,false, false, false, false, false, false] + +var items = [ + {prompt: "I am interested in things.", labels: scale01}, + {prompt: "I get things done during the day.", labels: scale01}, + {prompt: "Getting things started on my own is important to me.", labels: 
scale01}, + {prompt: "I am interested in having new experiences.", labels: scale01}, + {prompt: "I am interested in learning new things.", labels: scale01}, + {prompt: "I put little effort into anything.", labels: scale01}, + {prompt: "I approach life with intensity.", labels: scale01}, + {prompt: "Seeing a job through to the end is important to me.", labels: scale01}, + {prompt: "I spend time doing things that interest me.", labels: scale01}, + {prompt: "Someone has to tell me what to do each day.", labels: scale01}, + {prompt: "I am less concerned about my problems than I should be.", labels: scale01}, + {prompt: "I have friends.", labels: scale01}, + {prompt: "Getting together with friends is important to me.", labels: scale01}, + {prompt: "When something good happens, I get excited.", labels: scale01}, + {prompt: "I have an accurate understanding of my problems.", labels: scale01}, + {prompt: "Getting things done during the day is important to me.", labels: scale01}, + {prompt: "I have initiative.", labels: scale01}, + {prompt: "I have motivation.", labels: scale01} + ] + +var instructions = 'For each statement, choose the answer that best describes the your thoughts, feelings, and activity in the past 4 weeks.' + +var references = "Marin, R. S., Biedrzycki, R. C., & Firinciogullari, S. (1991). Reliability and validity of the Apathy Evaluation Scale. Psychiatry research, 38(2), 143-162." + +var notes = "The Apathy Evaluation Scale (AES) (46): Apathy was quantified using the AES, consisting of 18 items relating to apathy, each scored on a 4-point Likert-type scale. The score range for the complete AES (total AES score) is 18 to 72, with a lower score indicating greater apathy." 
\ No newline at end of file diff --git a/Questionnaires/assets/bdi_setup_eng.js b/Questionnaires/assets/bdi_setup_eng.js new file mode 100644 index 0000000..d5d0d7c --- /dev/null +++ b/Questionnaires/assets/bdi_setup_eng.js @@ -0,0 +1,210 @@ +var title = "Beck Depression Index" + +var instructions = "This questionnaire consists of 21 groups of statements. Please read each group of statements carefully. And then pick out the one statement in each group that best describes the way you have been feeling during the past two weeks, including today. Circle the number beside the statement you have picked. If several statements in the group seem to apply equally well, circle the highest number for that group." + +var reference = "" + +var values = [0, 1, 2, 3] + +var items = [ + { + prompt: "Sadness", + name: "Sadness", + options: ["I do not feel sad.", + "I feel sad.", + "I am sad all the time and I can't snap out of it.", + "I am so sad and unhappy that I can't stand it."], + required:true + }, + { + prompt: "Pessimism", + name: "Pessimism", + options: ["I am not particularly discouraged about the future.", + "I feel discouraged about the future.", + "I feel I have nothing to look forward to.", + "I feel the future is hopeless and that things cannot improve."], + required: true + }, + { + prompt: "Past Failure", + name: "PastFailure", + options:["I do not feel like a failure.", + "I feel I have failed more than the average person.", + "As I look back on my life, all I can see is a lot of failures.", + "I feel I am a complete failure as a person." 
+ ], + required: true + }, + { + prompt: "Loss of Pleasure", + name: "LossPleasure", + options: ["I get as much satisfaction out of things as I used to.", + "I don't enjoy things the way I used to.", + "I don't get real satisfaction out of anything anymore.", + "I am dissatisfied or bored with everything."], + required: true + }, + { + prompt: "Guilty Feelings", + name: "GuiltyFeelings", + options: ["I don't feel particularly guilty.", + "I feel guilty a good part of the time.", + "I feel quite guilty most of the time.", + "I feel guilty all of the time."], + required: true + }, + { + prompt: "Punishment Feelings", + name: "PunishmentFeelings", + options: ["I don't feel I am being punished.", + "I feel I may be punished.", + "I expect to be punished.", + "I feel I am being punished."], + required: true + }, + { + prompt: "Self-Dislike", + name: "Self-Dislike", + options: ["I don't feel disappointed in myself.", + "I am disappointed in myself.", + "I am disgusted with myself.", + "I hate myself."], + required: true + }, + { + prompt: "Self-Criticalness", + name: "Self-Criticalness", + options: ["I don't feel I am any worse than anybody else.", + "I am critical of myself for my weaknesses or mistakes.", + "I blame myself all the time for my faults.", + "I blame myself for everything bad that happens."], + required: true + }, + { + prompt: "Suicidal Thoughts or Wishes", + name: "SuicidalThoughts", + options: ["I don't have any thoughts of killing myself.", + "I have thoughts of killing myself, but I would not carry them out.", + "I would like to kill myself.", + "I would kill myself if I had the chance."], + required: true + }, + { + prompt: "Crying", + name: "Crying", + options: ["I don't cry any more than usual.", + "I cry more now than I used to.", + "I cry all the time now.", + "I used to be able to cry, but now I can't cry even though I want to."], + required: true + }, + { + prompt: "Agitation", + name: "Agitation", + options: ["I am no more irritated by things 
than I ever was.", + "I am slightly more irritated now than usual.", + "I am quite annoyed or irritated a good deal of the time.", + "I feel irritated all the time."], + required: true + }, + { + prompt: "Loss of Interest", + name: "LossInterest", + options: ["I have not lost interest in other people.", + "I am less interested in other people than I used to be.", + "I have lost most of my interest in other people.", + "I have lost all of my interest in other people."], + required: true + }, + { + prompt: "Indecisiveness", + name: "Indecisiveness", + options: ["I make decisions about as well as I ever could.", + "I put off making decisions more than I used to.", + "I have greater difficulty in making decisions more than I used to.", + "I can't make decisions at all anymore."], + required: true + }, + { + prompt: "Worthlessness", + name: "Worthlessness", + options: ["I don't feel that I look any worse than I used to.", + "I am worried that I am looking old or unattractive.", + "I feel there are permanent changes in my appearance that make me look unattractive.", + "I believe that I look ugly."], + required: true + }, + { + prompt: "Loss of Energy", + name: "LossEnergy", + options:["I can work about as well as before.", + "It takes an extra effort to get started at doing something.", + "I have to push myself very hard to do anything.", + "I can't do any work at all."], + required: true + }, + { + prompt: "Changes in Sleeping Pattern", + name: "Sleep", + options: ["I can sleep as well as usual.", + "I don't sleep as well as I used to.", + "I wake up 1-2 hours earlier than usual and find it hard to get back to sleep.", + "I wake up several hours earlier than I used to and cannot get back to sleep."], + required: true + }, + { + prompt: "Tiredness or Fatigue", + name: "Tired", + options: ["I don't get more tired than usual.", + "I get tired more easily than I used to.", + "I get tired from doing almost anything.", + "I am too tired to do anything."], + required: true + 
}, + { + prompt: "Changes in Appetite", + name: "Appetite", + options: ["My appetite is no worse than usual.", + "My appetite is not as good as it used to be.", + "My appetite is much worse now.", + "I have no appetite at all anymore."], + required: true + }, + { + prompt: "Weight Loss", + name: "WeightLoss", + options: ["I haven't lost much weight, if any, lately.", + "I have lost more than five pounds.", + "I have lost more than ten pounds.", + "I have lost more than fifteen pounds."], + required: true + }, + { + prompt: "Personal Health", + name: "PersonalHealth", + options: ["I am no more worried about my health than usual.", + "I am worried about physical problems like aches, pains, upset stomach, or constipation.", + "I am very worried about physical problems and it's hard to think of much else.", + "I am so worried about my physical problems that I cannot think of anything else."], + required: true + }, + { + prompt: "Loss of Interest in Sex", + name: "InterestSex", + options: ["I have not noticed any recent change in my interest in sex.", + "I am less interested in sex than I used to be.", + "I have almost no interest in sex.", + "I have lost interest in sex completely."], + required: true + } + ] + +// Notes: +// 1-10____________________These ups and downs are considered normal +// 11-16___________________ Mild mood disturbance +// 17-20___________________Borderline clinical depression +// 21-30___________________Moderate depression +// 31-40___________________Severe depression +// over 40__________________Extreme depression + + diff --git a/Questionnaires/assets/cfi_setup_eng.js b/Questionnaires/assets/cfi_setup_eng.js new file mode 100644 index 0000000..2793efc --- /dev/null +++ b/Questionnaires/assets/cfi_setup_eng.js @@ -0,0 +1,41 @@ +var title = "Cognitive Flexibility Index" + +var scale01 = [ + "Strongly disagree", + "Disagree", + "Somewhat agree", + "Neutral", + "Somewhat agree", + "Agree", + "Strongly agree" + ] + +var ReverseScoreDirection = 
[false] + +var items = [ + {prompt: "I am good at ‘‘sizing up’’ situations.", labels: scale01}, + {prompt: "I have a hard time making decisions when faced with difficult situations.", labels: scale01}, + {prompt: "I consider multiple options before making a decision.", labels: scale01}, + {prompt: "When I encounter difficult situations, I feel like I am losing control.", labels: scale01}, + {prompt: "I like to look at difficult situations from many different angles.", labels: scale01}, + {prompt: "I seek additional information not immediately available before attributing causes to behavior.", labels: scale01}, + {prompt: "When encountering difficult situations, I become so stressed that I can not think of a way to resolve the situation.", labels: scale01}, + {prompt: "I try to think about things from another person’s point of view.", labels: scale01}, + {prompt: "I find it troublesome that there are so many different ways to deal with difficult situations.", labels: scale01}, + {prompt: "I am good at putting myself in others’ shoes.", labels: scale01}, + {prompt: "When I encounter difficult situations, I just don’t know what to do.", labels: scale01}, + {prompt: "It is important to look at difficult situations from many angles.", labels: scale01}, + {prompt: "When in difficult situations, I consider multiple options before deciding how to behave.", labels: scale01}, + {prompt: "I often look at a situation from different viewpoints.", labels: scale01}, + {prompt: "I am capable of overcoming the difficulties in life that I face.", labels: scale01}, + {prompt: "I consider all the available facts and information when attributing causes to behavior.", labels: scale01}, + {prompt: "I feel I have no power to change things in difficult situations.", labels: scale01}, + {prompt: "When I encounter difficult situations, I stop and try to think of several ways to resolve it.", labels: scale01}, + {prompt: "I can think of more than one way to resolve a difficult situation I’m 
confronted with.", labels: scale01}, + {prompt: "I consider multiple options before responding to difficult situations.", labels: scale01}, + ] + +var instructions = "Please indicate the extent to which you agree or disagree with the following statements" + +var reference = "Dennis, John & Vander Wal, Jillon. (2010). The Cognitive Flexibility Inventory: Instrument Development and Estimates of Reliability and Validity. Cogn Ther Res. 34. 241-253. 10.1007/s10608-009-9276-4. " + diff --git a/Questionnaires/assets/cfs_setup_eng.js b/Questionnaires/assets/cfs_setup_eng.js new file mode 100644 index 0000000..3374810 --- /dev/null +++ b/Questionnaires/assets/cfs_setup_eng.js @@ -0,0 +1,31 @@ +var title = "Cognitive Flexibility Scale" + +var scale01 = [ + "Strongly Agree", + "Agree", + "Slightly Agree", + "Slightly Disagree", + "Disagree", + "Strongly Disagree" + ] + +var ReverseScoreDirection = [false, true, true, false, true, false, false, false, false, true, false, false] + +var items = [ + {prompt: "I can communicate an idea in many different ways.", labels: scale01}, + {prompt: "I avoid new and unusual situations.", labels: scale01}, + {prompt: "I feel like I never get to make decisions.", labels: scale01}, + {prompt: "I can find workable solutions to seemingly unsolvable problems.", labels: scale01}, + {prompt: "I seldom have choices when deciding how to behave.", labels: scale01}, + {prompt: "I am willing to work at creative solutions to problems.", labels: scale01}, + {prompt: "In any given situation, I am able to act appropriately.", labels: scale01}, + {prompt: "My behavior is a result of conscious decisions that I make.", labels: scale01}, + {prompt: "I have many possible ways of behaving in any given situation.", labels: scale01}, + {prompt: "I have difficulty using my knowledge on a given topic in real life situations.", labels: scale01}, + {prompt: "I am willing to listen and consider alternatives for handling a problem.", labels: scale01}, + {prompt: "I have 
the self-confidence necessary to try different ways of behaving. ", labels: scale01} + ] + +var instructions = "The following statements deal with your beliefs and feelings about your own behavior. Read each statement and respond by circling the number that best represents your agreement with each statement. " + +var reference = "Martin, M. M., & Rubin, R. B. (1995). A New Measure of Cognitive Flexibility. Psychological Reports, 76(2), 623–626. doi:10.2466/pr0.1995.76.2.623 " \ No newline at end of file diff --git a/Questionnaires/assets/panas_setup_eng.js b/Questionnaires/assets/panas_setup_eng.js new file mode 100644 index 0000000..073f79c --- /dev/null +++ b/Questionnaires/assets/panas_setup_eng.js @@ -0,0 +1,40 @@ +var title = "Positive and Negative Affect Schedule (PANAS)" + +var scale01 = [ + "Very slightly or not at all", + "A little", + "Moderately", + "Quite a bit", + "Extremely" + ] + +// All questions use the same direction so enter that single flag as an array of size 1 +var ReverseScoreDirection = [false] + +var items = [ + {prompt: "Interested", labels: scale01}, + {prompt: "Distressed", labels: scale01}, + {prompt: "Excited", labels: scale01}, + {prompt: "Upset", labels: scale01}, + {prompt: "Strong", labels: scale01}, + {prompt: "Guilty", labels: scale01}, + {prompt: "Scared", labels: scale01}, + {prompt: "Hostile", labels: scale01}, + {prompt: "Enthusiastic", labels: scale01}, + {prompt: "Proud", labels: scale01}, + {prompt: "Irritable", labels: scale01}, + {prompt: "Alert", labels: scale01}, + {prompt: "Ashamed", labels: scale01}, + {prompt: "Inspired", labels: scale01}, + {prompt: "Nervous", labels: scale01}, + {prompt: "Determined", labels: scale01}, + {prompt: "Attentive", labels: scale01}, + {prompt: "Jittery", labels: scale01}, + {prompt: "Active", labels: scale01}, + {prompt: "Afraid", labels: scale01}, + ] + +var instructions = 'This scale consists of a number of words that describe different feelings and emotions. 
Indicate to what extent you feel this way right now, that is, at the present moment.' + +var references = "Watson, D., Clark, L. A., & Tellegen, A. (1988). Development and validation of brief measures of positive and negative affect: The PANAS scales. *Journal of Personality and Social Psychology, 54*(6), 1063–1070." + diff --git a/Questionnaires/assets/stai_setup_eng.js b/Questionnaires/assets/stai_setup_eng.js new file mode 100644 index 0000000..fe3f65d --- /dev/null +++ b/Questionnaires/assets/stai_setup_eng.js @@ -0,0 +1,36 @@ +var title = "State-Trait Anxiety Inventory" + +var scale01 = [ + "Almost never", + "Sometimes", + "Often", + "Almost always" + ] + +// var ReverseScoreDirection = [false, true, false, true, true, false, false, false, false, false, true, true, false, false, true, false, true, true, false, true] +var ReverseScoreDirection = [true, false, true, false, false, true, true, false, false, true, false, false, true, true, false, true, false, false, true, false] + +var items = [ + {prompt: "I feel pleasant.", labels: scale01}, + {prompt: "I feel nervous and restless.", labels: scale01}, + {prompt: "I feel satisfied with myself.",labels: scale01}, + {prompt: "I wish I could be as happy as others seem to be.",labels: scale01}, + {prompt: "I feel like a failure.",labels: scale01}, + {prompt: "I feel rested.",labels: scale01}, + {prompt: 'I am "calm, cool, and collected".',labels: scale01}, + {prompt: "I feel that difficulties are piling up so that I cannot overcome them.",labels: scale01}, + {prompt: "I worry too much over something that doesn't really matter.",labels: scale01}, + {prompt: "I am happy.",labels: scale01}, + {prompt: "I have disturbing thoughts.",labels: scale01}, + {prompt: "I lack self-confidence.",labels: scale01}, + {prompt: "I feel secure.",labels: scale01}, + {prompt: "I make decisions easily.",labels: scale01}, + {prompt: "I feel inadequate.",labels: scale01}, + {prompt: "I am content.",labels: scale01}, + {prompt: "Some unimportant 
thought runs through my mind and bothers me.",labels: scale01}, + {prompt: "I take disappointments so keenly that I can't put them out of my mind.",labels: scale01}, + {prompt: "I am a steady person.",labels: scale01}, + {prompt: "I get in a state of tension or turmoil as I think over my recent concerns and interest.", labels: scale01}, + ] + + var instructions = 'Read each statement and then choose the answer to indicate how you generally feel.' diff --git a/Questionnaires/bdi.html b/Questionnaires/bdi.html new file mode 100644 index 0000000..a37a873 --- /dev/null +++ b/Questionnaires/bdi.html @@ -0,0 +1,81 @@ + + + + + + + + + Questionnaire: BDI + + + + + + + + + + + + + + + diff --git a/Questionnaires/cfi.html b/Questionnaires/cfi.html new file mode 100644 index 0000000..8de1488 --- /dev/null +++ b/Questionnaires/cfi.html @@ -0,0 +1,102 @@ + + + + + + + + + Questionnaire: CFI + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/cfs.html b/Questionnaires/cfs.html new file mode 100644 index 0000000..b0138a0 --- /dev/null +++ b/Questionnaires/cfs.html @@ -0,0 +1,102 @@ + + + + + + + + + Questionnaire: CFS + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/jspsych/VERSION_6.3.1 b/Questionnaires/jspsych/VERSION_6.3.1 new file mode 100644 index 0000000..c33117e --- /dev/null +++ b/Questionnaires/jspsych/VERSION_6.3.1 @@ -0,0 +1 @@ +VERSION_6.3.1 diff --git a/Questionnaires/jspsych/css/jspsych.css b/Questionnaires/jspsych/css/jspsych.css new file mode 100644 index 0000000..3b6d177 --- /dev/null +++ b/Questionnaires/jspsych/css/jspsych.css @@ -0,0 +1,206 @@ +/* + * CSS for jsPsych experiments. + * + * This stylesheet provides minimal styling to make jsPsych + * experiments look polished without any additional styles. 
+ */ + + @import url(https://fonts.googleapis.com/css?family=Open+Sans:400italic,700italic,400,700); + +/* Container holding jsPsych content */ + + .jspsych-display-element { + display: flex; + flex-direction: column; + overflow-y: auto; + } + + .jspsych-display-element:focus { + outline: none; + } + + .jspsych-content-wrapper { + display: flex; + margin: auto; + flex: 1 1 100%; + width: 100%; + } + + .jspsych-content { + max-width: 95%; /* this is mainly an IE 10-11 fix */ + text-align: center; + margin: auto; /* this is for overflowing content */ + } + + .jspsych-top { + align-items: flex-start; + } + + .jspsych-middle { + align-items: center; + } + +/* fonts and type */ + +.jspsych-display-element { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 18px; + line-height: 1.6em; +} + +/* Form elements like input fields and buttons */ + +.jspsych-display-element input[type="text"] { + font-family: 'Open Sans', 'Arial', sans-serif; + font-size: 14px; +} + +/* borrowing Bootstrap style for btn elements, but combining styles a bit */ +.jspsych-btn { + display: inline-block; + padding: 6px 12px; + margin: 0px; + font-size: 14px; + font-weight: 400; + font-family: 'Open Sans', 'Arial', sans-serif; + cursor: pointer; + line-height: 1.4; + text-align: center; + white-space: nowrap; + vertical-align: middle; + background-image: none; + border: 1px solid transparent; + border-radius: 4px; + color: #333; + background-color: #fff; + border-color: #ccc; +} + +/* only apply the hover style on devices with a mouse/pointer that can hover - issue #977 */ +@media (hover: hover) { + .jspsych-btn:hover { + background-color: #ddd; + border-color: #aaa; + } +} + +.jspsych-btn:active { + background-color: #ddd; + border-color:#000000; +} + +.jspsych-btn:disabled { + background-color: #eee; + color: #aaa; + border-color: #ccc; + cursor: not-allowed; +} + +/* custom style for input[type="range] (slider) to improve alignment between positions and labels */ + +.jspsych-slider { + 
appearance: none; + -webkit-appearance: none; + -moz-appearance: none; + width: 100%; + background: transparent; +} +.jspsych-slider:focus { + outline: none; +} +/* track */ +.jspsych-slider::-webkit-slider-runnable-track { + appearance: none; + -webkit-appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +.jspsych-slider::-moz-range-track { + appearance: none; + width: 100%; + height: 8px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +.jspsych-slider::-ms-track { + appearance: none; + width: 99%; + height: 14px; + cursor: pointer; + background: #eee; + box-shadow: 0px 0px 0px #000000, 0px 0px 0px #0d0d0d; + border-radius: 2px; + border: 1px solid #aaa; +} +/* thumb */ +.jspsych-slider::-webkit-slider-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; + -webkit-appearance: none; + margin-top: -9px; +} +.jspsych-slider::-moz-range-thumb { + border: 1px solid #666; + height: 24px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; +} +.jspsych-slider::-ms-thumb { + border: 1px solid #666; + height: 20px; + width: 15px; + border-radius: 5px; + background: #ffffff; + cursor: pointer; + margin-top: -2px; +} + +/* jsPsych progress bar */ + +#jspsych-progressbar-container { + color: #555; + border-bottom: 1px solid #dedede; + background-color: #f9f9f9; + margin-bottom: 1em; + text-align: center; + padding: 8px 0px; + width: 100%; + line-height: 1em; +} +#jspsych-progressbar-container span { + font-size: 14px; + padding-right: 14px; +} +#jspsych-progressbar-outer { + background-color: #eee; + width: 50%; + margin: auto; + height: 14px; + display: inline-block; + vertical-align: middle; + box-shadow: inset 0 1px 2px rgba(0,0,0,0.1); 
+} +#jspsych-progressbar-inner { + background-color: #aaa; + width: 0%; + height: 100%; +} + +/* Control appearance of jsPsych.data.displayData() */ +#jspsych-data-display { + text-align: left; +} diff --git a/Questionnaires/jspsych/examples/add-to-end-of-timeline.html b/Questionnaires/jspsych/examples/add-to-end-of-timeline.html new file mode 100644 index 0000000..05dfa2a --- /dev/null +++ b/Questionnaires/jspsych/examples/add-to-end-of-timeline.html @@ -0,0 +1,38 @@ + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/case-sensitive-responses.html b/Questionnaires/jspsych/examples/case-sensitive-responses.html new file mode 100644 index 0000000..6407121 --- /dev/null +++ b/Questionnaires/jspsych/examples/case-sensitive-responses.html @@ -0,0 +1,45 @@ + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/conditional-and-loop-functions.html b/Questionnaires/jspsych/examples/conditional-and-loop-functions.html new file mode 100644 index 0000000..68c7a74 --- /dev/null +++ b/Questionnaires/jspsych/examples/conditional-and-loop-functions.html @@ -0,0 +1,64 @@ + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/css-classes-parameter.html b/Questionnaires/jspsych/examples/css-classes-parameter.html new file mode 100644 index 0000000..a98b321 --- /dev/null +++ b/Questionnaires/jspsych/examples/css-classes-parameter.html @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/css/jquery-ui.css b/Questionnaires/jspsych/examples/css/jquery-ui.css new file mode 100644 index 0000000..a320639 --- /dev/null +++ b/Questionnaires/jspsych/examples/css/jquery-ui.css @@ -0,0 +1,1225 @@ +/*! 
jQuery UI - v1.11.3 - 2015-02-12 +* http://jqueryui.com +* Includes: core.css, accordion.css, autocomplete.css, button.css, datepicker.css, dialog.css, draggable.css, menu.css, progressbar.css, resizable.css, selectable.css, selectmenu.css, slider.css, sortable.css, spinner.css, tabs.css, tooltip.css, theme.css +* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Verdana%2CArial%2Csans-serif&fwDefault=normal&fsDefault=1.1em&cornerRadius=4px&bgColorHeader=cccccc&bgTextureHeader=highlight_soft&bgImgOpacityHeader=75&borderColorHeader=aaaaaa&fcHeader=222222&iconColorHeader=222222&bgColorContent=ffffff&bgTextureContent=flat&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=222222&iconColorContent=222222&bgColorDefault=e6e6e6&bgTextureDefault=glass&bgImgOpacityDefault=75&borderColorDefault=d3d3d3&fcDefault=555555&iconColorDefault=888888&bgColorHover=dadada&bgTextureHover=glass&bgImgOpacityHover=75&borderColorHover=999999&fcHover=212121&iconColorHover=454545&bgColorActive=ffffff&bgTextureActive=glass&bgImgOpacityActive=65&borderColorActive=aaaaaa&fcActive=212121&iconColorActive=454545&bgColorHighlight=fbf9ee&bgTextureHighlight=glass&bgImgOpacityHighlight=55&borderColorHighlight=fcefa1&fcHighlight=363636&iconColorHighlight=2e83ff&bgColorError=fef1ec&bgTextureError=glass&bgImgOpacityError=95&borderColorError=cd0a0a&fcError=cd0a0a&iconColorError=cd0a0a&bgColorOverlay=aaaaaa&bgTextureOverlay=flat&bgImgOpacityOverlay=0&opacityOverlay=30&bgColorShadow=aaaaaa&bgTextureShadow=flat&bgImgOpacityShadow=0&opacityShadow=30&thicknessShadow=8px&offsetTopShadow=-8px&offsetLeftShadow=-8px&cornerRadiusShadow=8px +* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */ + +/* Layout helpers +----------------------------------*/ +.ui-helper-hidden { + display: none; +} +.ui-helper-hidden-accessible { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; 
+} +.ui-helper-reset { + margin: 0; + padding: 0; + border: 0; + outline: 0; + line-height: 1.3; + text-decoration: none; + font-size: 100%; + list-style: none; +} +.ui-helper-clearfix:before, +.ui-helper-clearfix:after { + content: ""; + display: table; + border-collapse: collapse; +} +.ui-helper-clearfix:after { + clear: both; +} +.ui-helper-clearfix { + min-height: 0; /* support: IE7 */ +} +.ui-helper-zfix { + width: 100%; + height: 100%; + top: 0; + left: 0; + position: absolute; + opacity: 0; + filter:Alpha(Opacity=0); /* support: IE8 */ +} + +.ui-front { + z-index: 100; +} + + +/* Interaction Cues +----------------------------------*/ +.ui-state-disabled { + cursor: default !important; +} + + +/* Icons +----------------------------------*/ + +/* states and images */ +.ui-icon { + display: block; + text-indent: -99999px; + overflow: hidden; + background-repeat: no-repeat; +} + + +/* Misc visuals +----------------------------------*/ + +/* Overlays */ +.ui-widget-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; +} +.ui-accordion .ui-accordion-header { + display: block; + cursor: pointer; + position: relative; + margin: 2px 0 0 0; + padding: .5em .5em .5em .7em; + min-height: 0; /* support: IE7 */ + font-size: 100%; +} +.ui-accordion .ui-accordion-icons { + padding-left: 2.2em; +} +.ui-accordion .ui-accordion-icons .ui-accordion-icons { + padding-left: 2.2em; +} +.ui-accordion .ui-accordion-header .ui-accordion-header-icon { + position: absolute; + left: .5em; + top: 50%; + margin-top: -8px; +} +.ui-accordion .ui-accordion-content { + padding: 1em 2.2em; + border-top: 0; + overflow: auto; +} +.ui-autocomplete { + position: absolute; + top: 0; + left: 0; + cursor: default; +} +.ui-button { + display: inline-block; + position: relative; + padding: 0; + line-height: normal; + margin-right: .1em; + cursor: pointer; + vertical-align: middle; + text-align: center; + overflow: visible; /* removes extra width in IE */ +} +.ui-button, 
+.ui-button:link, +.ui-button:visited, +.ui-button:hover, +.ui-button:active { + text-decoration: none; +} +/* to make room for the icon, a width needs to be set here */ +.ui-button-icon-only { + width: 2.2em; +} +/* button elements seem to need a little more width */ +button.ui-button-icon-only { + width: 2.4em; +} +.ui-button-icons-only { + width: 3.4em; +} +button.ui-button-icons-only { + width: 3.7em; +} + +/* button text element */ +.ui-button .ui-button-text { + display: block; + line-height: normal; +} +.ui-button-text-only .ui-button-text { + padding: .4em 1em; +} +.ui-button-icon-only .ui-button-text, +.ui-button-icons-only .ui-button-text { + padding: .4em; + text-indent: -9999999px; +} +.ui-button-text-icon-primary .ui-button-text, +.ui-button-text-icons .ui-button-text { + padding: .4em 1em .4em 2.1em; +} +.ui-button-text-icon-secondary .ui-button-text, +.ui-button-text-icons .ui-button-text { + padding: .4em 2.1em .4em 1em; +} +.ui-button-text-icons .ui-button-text { + padding-left: 2.1em; + padding-right: 2.1em; +} +/* no icon support for input elements, provide padding by default */ +input.ui-button { + padding: .4em 1em; +} + +/* button icon element(s) */ +.ui-button-icon-only .ui-icon, +.ui-button-text-icon-primary .ui-icon, +.ui-button-text-icon-secondary .ui-icon, +.ui-button-text-icons .ui-icon, +.ui-button-icons-only .ui-icon { + position: absolute; + top: 50%; + margin-top: -8px; +} +.ui-button-icon-only .ui-icon { + left: 50%; + margin-left: -8px; +} +.ui-button-text-icon-primary .ui-button-icon-primary, +.ui-button-text-icons .ui-button-icon-primary, +.ui-button-icons-only .ui-button-icon-primary { + left: .5em; +} +.ui-button-text-icon-secondary .ui-button-icon-secondary, +.ui-button-text-icons .ui-button-icon-secondary, +.ui-button-icons-only .ui-button-icon-secondary { + right: .5em; +} + +/* button sets */ +.ui-buttonset { + margin-right: 7px; +} +.ui-buttonset .ui-button { + margin-left: 0; + margin-right: -.3em; +} + +/* workarounds */ 
+/* reset extra padding in Firefox, see h5bp.com/l */ +input.ui-button::-moz-focus-inner, +button.ui-button::-moz-focus-inner { + border: 0; + padding: 0; +} +.ui-datepicker { + width: 17em; + padding: .2em .2em 0; + display: none; +} +.ui-datepicker .ui-datepicker-header { + position: relative; + padding: .2em 0; +} +.ui-datepicker .ui-datepicker-prev, +.ui-datepicker .ui-datepicker-next { + position: absolute; + top: 2px; + width: 1.8em; + height: 1.8em; +} +.ui-datepicker .ui-datepicker-prev-hover, +.ui-datepicker .ui-datepicker-next-hover { + top: 1px; +} +.ui-datepicker .ui-datepicker-prev { + left: 2px; +} +.ui-datepicker .ui-datepicker-next { + right: 2px; +} +.ui-datepicker .ui-datepicker-prev-hover { + left: 1px; +} +.ui-datepicker .ui-datepicker-next-hover { + right: 1px; +} +.ui-datepicker .ui-datepicker-prev span, +.ui-datepicker .ui-datepicker-next span { + display: block; + position: absolute; + left: 50%; + margin-left: -8px; + top: 50%; + margin-top: -8px; +} +.ui-datepicker .ui-datepicker-title { + margin: 0 2.3em; + line-height: 1.8em; + text-align: center; +} +.ui-datepicker .ui-datepicker-title select { + font-size: 1em; + margin: 1px 0; +} +.ui-datepicker select.ui-datepicker-month, +.ui-datepicker select.ui-datepicker-year { + width: 45%; +} +.ui-datepicker table { + width: 100%; + font-size: .9em; + border-collapse: collapse; + margin: 0 0 .4em; +} +.ui-datepicker th { + padding: .7em .3em; + text-align: center; + font-weight: bold; + border: 0; +} +.ui-datepicker td { + border: 0; + padding: 1px; +} +.ui-datepicker td span, +.ui-datepicker td a { + display: block; + padding: .2em; + text-align: right; + text-decoration: none; +} +.ui-datepicker .ui-datepicker-buttonpane { + background-image: none; + margin: .7em 0 0 0; + padding: 0 .2em; + border-left: 0; + border-right: 0; + border-bottom: 0; +} +.ui-datepicker .ui-datepicker-buttonpane button { + float: right; + margin: .5em .2em .4em; + cursor: pointer; + padding: .2em .6em .3em .6em; + 
width: auto; + overflow: visible; +} +.ui-datepicker .ui-datepicker-buttonpane button.ui-datepicker-current { + float: left; +} + +/* with multiple calendars */ +.ui-datepicker.ui-datepicker-multi { + width: auto; +} +.ui-datepicker-multi .ui-datepicker-group { + float: left; +} +.ui-datepicker-multi .ui-datepicker-group table { + width: 95%; + margin: 0 auto .4em; +} +.ui-datepicker-multi-2 .ui-datepicker-group { + width: 50%; +} +.ui-datepicker-multi-3 .ui-datepicker-group { + width: 33.3%; +} +.ui-datepicker-multi-4 .ui-datepicker-group { + width: 25%; +} +.ui-datepicker-multi .ui-datepicker-group-last .ui-datepicker-header, +.ui-datepicker-multi .ui-datepicker-group-middle .ui-datepicker-header { + border-left-width: 0; +} +.ui-datepicker-multi .ui-datepicker-buttonpane { + clear: left; +} +.ui-datepicker-row-break { + clear: both; + width: 100%; + font-size: 0; +} + +/* RTL support */ +.ui-datepicker-rtl { + direction: rtl; +} +.ui-datepicker-rtl .ui-datepicker-prev { + right: 2px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next { + left: 2px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-prev:hover { + right: 1px; + left: auto; +} +.ui-datepicker-rtl .ui-datepicker-next:hover { + left: 1px; + right: auto; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane { + clear: right; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button { + float: left; +} +.ui-datepicker-rtl .ui-datepicker-buttonpane button.ui-datepicker-current, +.ui-datepicker-rtl .ui-datepicker-group { + float: right; +} +.ui-datepicker-rtl .ui-datepicker-group-last .ui-datepicker-header, +.ui-datepicker-rtl .ui-datepicker-group-middle .ui-datepicker-header { + border-right-width: 0; + border-left-width: 1px; +} +.ui-dialog { + overflow: hidden; + position: absolute; + top: 0; + left: 0; + padding: .2em; + outline: 0; +} +.ui-dialog .ui-dialog-titlebar { + padding: .4em 1em; + position: relative; +} +.ui-dialog .ui-dialog-title { + float: left; + margin: .1em 0; + white-space: 
nowrap; + width: 90%; + overflow: hidden; + text-overflow: ellipsis; +} +.ui-dialog .ui-dialog-titlebar-close { + position: absolute; + right: .3em; + top: 50%; + width: 20px; + margin: -10px 0 0 0; + padding: 1px; + height: 20px; +} +.ui-dialog .ui-dialog-content { + position: relative; + border: 0; + padding: .5em 1em; + background: none; + overflow: auto; +} +.ui-dialog .ui-dialog-buttonpane { + text-align: left; + border-width: 1px 0 0 0; + background-image: none; + margin-top: .5em; + padding: .3em 1em .5em .4em; +} +.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset { + float: right; +} +.ui-dialog .ui-dialog-buttonpane button { + margin: .5em .4em .5em 0; + cursor: pointer; +} +.ui-dialog .ui-resizable-se { + width: 12px; + height: 12px; + right: -5px; + bottom: -5px; + background-position: 16px 16px; +} +.ui-draggable .ui-dialog-titlebar { + cursor: move; +} +.ui-draggable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-menu { + list-style: none; + padding: 0; + margin: 0; + display: block; + outline: none; +} +.ui-menu .ui-menu { + position: absolute; +} +.ui-menu .ui-menu-item { + position: relative; + margin: 0; + padding: 3px 1em 3px .4em; + cursor: pointer; + min-height: 0; /* support: IE7 */ + /* support: IE10, see #8844 */ + list-style-image: url("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"); +} +.ui-menu .ui-menu-divider { + margin: 5px 0; + height: 0; + font-size: 0; + line-height: 0; + border-width: 1px 0 0 0; +} +.ui-menu .ui-state-focus, +.ui-menu .ui-state-active { + margin: -1px; +} + +/* icon support */ +.ui-menu-icons { + position: relative; +} +.ui-menu-icons .ui-menu-item { + padding-left: 2em; +} + +/* left-aligned */ +.ui-menu .ui-icon { + position: absolute; + top: 0; + bottom: 0; + left: .2em; + margin: auto 0; +} + +/* right-aligned */ +.ui-menu .ui-menu-icon { + left: auto; + right: 0; +} +.ui-progressbar { + height: 2em; + text-align: left; + overflow: hidden; +} 
+.ui-progressbar .ui-progressbar-value { + margin: -1px; + height: 100%; +} +.ui-progressbar .ui-progressbar-overlay { + background: url("data:image/gif;base64,R0lGODlhKAAoAIABAAAAAP///yH/C05FVFNDQVBFMi4wAwEAAAAh+QQJAQABACwAAAAAKAAoAAACkYwNqXrdC52DS06a7MFZI+4FHBCKoDeWKXqymPqGqxvJrXZbMx7Ttc+w9XgU2FB3lOyQRWET2IFGiU9m1frDVpxZZc6bfHwv4c1YXP6k1Vdy292Fb6UkuvFtXpvWSzA+HycXJHUXiGYIiMg2R6W459gnWGfHNdjIqDWVqemH2ekpObkpOlppWUqZiqr6edqqWQAAIfkECQEAAQAsAAAAACgAKAAAApSMgZnGfaqcg1E2uuzDmmHUBR8Qil95hiPKqWn3aqtLsS18y7G1SzNeowWBENtQd+T1JktP05nzPTdJZlR6vUxNWWjV+vUWhWNkWFwxl9VpZRedYcflIOLafaa28XdsH/ynlcc1uPVDZxQIR0K25+cICCmoqCe5mGhZOfeYSUh5yJcJyrkZWWpaR8doJ2o4NYq62lAAACH5BAkBAAEALAAAAAAoACgAAAKVDI4Yy22ZnINRNqosw0Bv7i1gyHUkFj7oSaWlu3ovC8GxNso5fluz3qLVhBVeT/Lz7ZTHyxL5dDalQWPVOsQWtRnuwXaFTj9jVVh8pma9JjZ4zYSj5ZOyma7uuolffh+IR5aW97cHuBUXKGKXlKjn+DiHWMcYJah4N0lYCMlJOXipGRr5qdgoSTrqWSq6WFl2ypoaUAAAIfkECQEAAQAsAAAAACgAKAAAApaEb6HLgd/iO7FNWtcFWe+ufODGjRfoiJ2akShbueb0wtI50zm02pbvwfWEMWBQ1zKGlLIhskiEPm9R6vRXxV4ZzWT2yHOGpWMyorblKlNp8HmHEb/lCXjcW7bmtXP8Xt229OVWR1fod2eWqNfHuMjXCPkIGNileOiImVmCOEmoSfn3yXlJWmoHGhqp6ilYuWYpmTqKUgAAIfkECQEAAQAsAAAAACgAKAAAApiEH6kb58biQ3FNWtMFWW3eNVcojuFGfqnZqSebuS06w5V80/X02pKe8zFwP6EFWOT1lDFk8rGERh1TTNOocQ61Hm4Xm2VexUHpzjymViHrFbiELsefVrn6XKfnt2Q9G/+Xdie499XHd2g4h7ioOGhXGJboGAnXSBnoBwKYyfioubZJ2Hn0RuRZaflZOil56Zp6iioKSXpUAAAh+QQJAQABACwAAAAAKAAoAAACkoQRqRvnxuI7kU1a1UU5bd5tnSeOZXhmn5lWK3qNTWvRdQxP8qvaC+/yaYQzXO7BMvaUEmJRd3TsiMAgswmNYrSgZdYrTX6tSHGZO73ezuAw2uxuQ+BbeZfMxsexY35+/Qe4J1inV0g4x3WHuMhIl2jXOKT2Q+VU5fgoSUI52VfZyfkJGkha6jmY+aaYdirq+lQAACH5BAkBAAEALAAAAAAoACgAAAKWBIKpYe0L3YNKToqswUlvznigd4wiR4KhZrKt9Upqip61i9E3vMvxRdHlbEFiEXfk9YARYxOZZD6VQ2pUunBmtRXo1Lf8hMVVcNl8JafV38aM2/Fu5V16Bn63r6xt97j09+MXSFi4BniGFae3hzbH9+hYBzkpuUh5aZmHuanZOZgIuvbGiNeomCnaxxap2upaCZsq+1kAACH5BAkBAAEALAAAAAAoACgAAAKXjI8By5zf4kOxTVrXNVlv1X0d8IGZGKLnNpYtm8Lr9cqVeuOSvfOW79D9aDHizNhDJidFZhNydEahOaDH6nomtJjp1tutKoNWkvA6JqfRVLHU/QUfau9l2x7G54d1fl995xcIGAdXqMfBNadoYrhH+Mg2KBlpVpbluCiXmMnZ2Sh4GBqJ+ckI
OqqJ6LmKSllZmsoq6wpQAAAh+QQJAQABACwAAAAAKAAoAAAClYx/oLvoxuJDkU1a1YUZbJ59nSd2ZXhWqbRa2/gF8Gu2DY3iqs7yrq+xBYEkYvFSM8aSSObE+ZgRl1BHFZNr7pRCavZ5BW2142hY3AN/zWtsmf12p9XxxFl2lpLn1rseztfXZjdIWIf2s5dItwjYKBgo9yg5pHgzJXTEeGlZuenpyPmpGQoKOWkYmSpaSnqKileI2FAAACH5BAkBAAEALAAAAAAoACgAAAKVjB+gu+jG4kORTVrVhRlsnn2dJ3ZleFaptFrb+CXmO9OozeL5VfP99HvAWhpiUdcwkpBH3825AwYdU8xTqlLGhtCosArKMpvfa1mMRae9VvWZfeB2XfPkeLmm18lUcBj+p5dnN8jXZ3YIGEhYuOUn45aoCDkp16hl5IjYJvjWKcnoGQpqyPlpOhr3aElaqrq56Bq7VAAAOw=="); + height: 100%; + filter: alpha(opacity=25); /* support: IE8 */ + opacity: 0.25; +} +.ui-progressbar-indeterminate .ui-progressbar-value { + background-image: none; +} +.ui-resizable { + position: relative; +} +.ui-resizable-handle { + position: absolute; + font-size: 0.1px; + display: block; + -ms-touch-action: none; + touch-action: none; +} +.ui-resizable-disabled .ui-resizable-handle, +.ui-resizable-autohide .ui-resizable-handle { + display: none; +} +.ui-resizable-n { + cursor: n-resize; + height: 7px; + width: 100%; + top: -5px; + left: 0; +} +.ui-resizable-s { + cursor: s-resize; + height: 7px; + width: 100%; + bottom: -5px; + left: 0; +} +.ui-resizable-e { + cursor: e-resize; + width: 7px; + right: -5px; + top: 0; + height: 100%; +} +.ui-resizable-w { + cursor: w-resize; + width: 7px; + left: -5px; + top: 0; + height: 100%; +} +.ui-resizable-se { + cursor: se-resize; + width: 12px; + height: 12px; + right: 1px; + bottom: 1px; +} +.ui-resizable-sw { + cursor: sw-resize; + width: 9px; + height: 9px; + left: -5px; + bottom: -5px; +} +.ui-resizable-nw { + cursor: nw-resize; + width: 9px; + height: 9px; + left: -5px; + top: -5px; +} +.ui-resizable-ne { + cursor: ne-resize; + width: 9px; + height: 9px; + right: -5px; + top: -5px; +} +.ui-selectable { + -ms-touch-action: none; + touch-action: none; +} +.ui-selectable-helper { + position: absolute; + z-index: 100; + border: 1px dotted black; +} +.ui-selectmenu-menu { + padding: 0; + margin: 0; + position: absolute; + top: 0; + left: 0; + 
display: none; +} +.ui-selectmenu-menu .ui-menu { + overflow: auto; + /* Support: IE7 */ + overflow-x: hidden; + padding-bottom: 1px; +} +.ui-selectmenu-menu .ui-menu .ui-selectmenu-optgroup { + font-size: 1em; + font-weight: bold; + line-height: 1.5; + padding: 2px 0.4em; + margin: 0.5em 0 0 0; + height: auto; + border: 0; +} +.ui-selectmenu-open { + display: block; +} +.ui-selectmenu-button { + display: inline-block; + overflow: hidden; + position: relative; + text-decoration: none; + cursor: pointer; +} +.ui-selectmenu-button span.ui-icon { + right: 0.5em; + left: auto; + margin-top: -8px; + position: absolute; + top: 50%; +} +.ui-selectmenu-button span.ui-selectmenu-text { + text-align: left; + padding: 0.4em 2.1em 0.4em 1em; + display: block; + line-height: 1.4; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} +.ui-slider { + position: relative; + text-align: left; +} +.ui-slider .ui-slider-handle { + position: absolute; + z-index: 2; + width: 1.2em; + height: 1.2em; + cursor: default; + -ms-touch-action: none; + touch-action: none; +} +.ui-slider .ui-slider-range { + position: absolute; + z-index: 1; + font-size: .7em; + display: block; + border: 0; + background-position: 0 0; +} + +/* support: IE8 - See #6727 */ +.ui-slider.ui-state-disabled .ui-slider-handle, +.ui-slider.ui-state-disabled .ui-slider-range { + filter: inherit; +} + +.ui-slider-horizontal { + height: .8em; +} +.ui-slider-horizontal .ui-slider-handle { + top: -.3em; + margin-left: -.6em; +} +.ui-slider-horizontal .ui-slider-range { + top: 0; + height: 100%; +} +.ui-slider-horizontal .ui-slider-range-min { + left: 0; +} +.ui-slider-horizontal .ui-slider-range-max { + right: 0; +} + +.ui-slider-vertical { + width: .8em; + height: 100px; +} +.ui-slider-vertical .ui-slider-handle { + left: -.3em; + margin-left: 0; + margin-bottom: -.6em; +} +.ui-slider-vertical .ui-slider-range { + left: 0; + width: 100%; +} +.ui-slider-vertical .ui-slider-range-min { + bottom: 0; +} 
+.ui-slider-vertical .ui-slider-range-max { + top: 0; +} +.ui-sortable-handle { + -ms-touch-action: none; + touch-action: none; +} +.ui-spinner { + position: relative; + display: inline-block; + overflow: hidden; + padding: 0; + vertical-align: middle; +} +.ui-spinner-input { + border: none; + background: none; + color: inherit; + padding: 0; + margin: .2em 0; + vertical-align: middle; + margin-left: .4em; + margin-right: 22px; +} +.ui-spinner-button { + width: 16px; + height: 50%; + font-size: .5em; + padding: 0; + margin: 0; + text-align: center; + position: absolute; + cursor: default; + display: block; + overflow: hidden; + right: 0; +} +/* more specificity required here to override default borders */ +.ui-spinner a.ui-spinner-button { + border-top: none; + border-bottom: none; + border-right: none; +} +/* vertically center icon */ +.ui-spinner .ui-icon { + position: absolute; + margin-top: -8px; + top: 50%; + left: 0; +} +.ui-spinner-up { + top: 0; +} +.ui-spinner-down { + bottom: 0; +} + +/* TR overrides */ +.ui-spinner .ui-icon-triangle-1-s { + /* need to fix icons sprite */ + background-position: -65px -16px; +} +.ui-tabs { + position: relative;/* position: relative prevents IE scroll bug (element with position: relative inside container with overflow: auto appear as "fixed") */ + padding: .2em; +} +.ui-tabs .ui-tabs-nav { + margin: 0; + padding: .2em .2em 0; +} +.ui-tabs .ui-tabs-nav li { + list-style: none; + float: left; + position: relative; + top: 0; + margin: 1px .2em 0 0; + border-bottom-width: 0; + padding: 0; + white-space: nowrap; +} +.ui-tabs .ui-tabs-nav .ui-tabs-anchor { + float: left; + padding: .5em 1em; + text-decoration: none; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active { + margin-bottom: -1px; + padding-bottom: 1px; +} +.ui-tabs .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-state-disabled .ui-tabs-anchor, +.ui-tabs .ui-tabs-nav li.ui-tabs-loading .ui-tabs-anchor { + cursor: text; +} +.ui-tabs-collapsible 
.ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor { + cursor: pointer; +} +.ui-tabs .ui-tabs-panel { + display: block; + border-width: 0; + padding: 1em 1.4em; + background: none; +} +.ui-tooltip { + padding: 8px; + position: absolute; + z-index: 9999; + max-width: 300px; + -webkit-box-shadow: 0 0 5px #aaa; + box-shadow: 0 0 5px #aaa; +} +body .ui-tooltip { + border-width: 2px; +} + +/* Component containers +----------------------------------*/ +.ui-widget { + font-family: Verdana,Arial,sans-serif; + font-size: 1.1em; +} +.ui-widget .ui-widget { + font-size: 1em; +} +.ui-widget input, +.ui-widget select, +.ui-widget textarea, +.ui-widget button { + font-family: Verdana,Arial,sans-serif; + font-size: 1em; +} +.ui-widget-content { + border: 1px solid #aaaaaa; + background: #ffffff url("images/ui-bg_flat_75_ffffff_40x100.png") 50% 50% repeat-x; + color: #222222; +} +.ui-widget-content a { + color: #222222; +} +.ui-widget-header { + border: 1px solid #aaaaaa; + background: #cccccc url("images/ui-bg_highlight-soft_75_cccccc_1x100.png") 50% 50% repeat-x; + color: #222222; + font-weight: bold; +} +.ui-widget-header a { + color: #222222; +} + +/* Interaction states +----------------------------------*/ +.ui-state-default, +.ui-widget-content .ui-state-default, +.ui-widget-header .ui-state-default { + border: 1px solid #d3d3d3; + background: #e6e6e6 url("images/ui-bg_glass_75_e6e6e6_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #555555; +} +.ui-state-default a, +.ui-state-default a:link, +.ui-state-default a:visited { + color: #555555; + text-decoration: none; +} +.ui-state-hover, +.ui-widget-content .ui-state-hover, +.ui-widget-header .ui-state-hover, +.ui-state-focus, +.ui-widget-content .ui-state-focus, +.ui-widget-header .ui-state-focus { + border: 1px solid #999999; + background: #dadada url("images/ui-bg_glass_75_dadada_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #212121; +} +.ui-state-hover a, +.ui-state-hover a:hover, +.ui-state-hover 
a:link, +.ui-state-hover a:visited, +.ui-state-focus a, +.ui-state-focus a:hover, +.ui-state-focus a:link, +.ui-state-focus a:visited { + color: #212121; + text-decoration: none; +} +.ui-state-active, +.ui-widget-content .ui-state-active, +.ui-widget-header .ui-state-active { + border: 1px solid #aaaaaa; + background: #ffffff url("images/ui-bg_glass_65_ffffff_1x400.png") 50% 50% repeat-x; + font-weight: normal; + color: #212121; +} +.ui-state-active a, +.ui-state-active a:link, +.ui-state-active a:visited { + color: #212121; + text-decoration: none; +} + +/* Interaction Cues +----------------------------------*/ +.ui-state-highlight, +.ui-widget-content .ui-state-highlight, +.ui-widget-header .ui-state-highlight { + border: 1px solid #fcefa1; + background: #fbf9ee url("images/ui-bg_glass_55_fbf9ee_1x400.png") 50% 50% repeat-x; + color: #363636; +} +.ui-state-highlight a, +.ui-widget-content .ui-state-highlight a, +.ui-widget-header .ui-state-highlight a { + color: #363636; +} +.ui-state-error, +.ui-widget-content .ui-state-error, +.ui-widget-header .ui-state-error { + border: 1px solid #cd0a0a; + background: #fef1ec url("images/ui-bg_glass_95_fef1ec_1x400.png") 50% 50% repeat-x; + color: #cd0a0a; +} +.ui-state-error a, +.ui-widget-content .ui-state-error a, +.ui-widget-header .ui-state-error a { + color: #cd0a0a; +} +.ui-state-error-text, +.ui-widget-content .ui-state-error-text, +.ui-widget-header .ui-state-error-text { + color: #cd0a0a; +} +.ui-priority-primary, +.ui-widget-content .ui-priority-primary, +.ui-widget-header .ui-priority-primary { + font-weight: bold; +} +.ui-priority-secondary, +.ui-widget-content .ui-priority-secondary, +.ui-widget-header .ui-priority-secondary { + opacity: .7; + filter:Alpha(Opacity=70); /* support: IE8 */ + font-weight: normal; +} +.ui-state-disabled, +.ui-widget-content .ui-state-disabled, +.ui-widget-header .ui-state-disabled { + opacity: .35; + filter:Alpha(Opacity=35); /* support: IE8 */ + background-image: none; +} 
+.ui-state-disabled .ui-icon { + filter:Alpha(Opacity=35); /* support: IE8 - See #6059 */ +} + +/* Icons +----------------------------------*/ + +/* states and images */ +.ui-icon { + width: 16px; + height: 16px; +} +.ui-icon, +.ui-widget-content .ui-icon { + background-image: url("images/ui-icons_222222_256x240.png"); +} +.ui-widget-header .ui-icon { + background-image: url("images/ui-icons_222222_256x240.png"); +} +.ui-state-default .ui-icon { + background-image: url("images/ui-icons_888888_256x240.png"); +} +.ui-state-hover .ui-icon, +.ui-state-focus .ui-icon { + background-image: url("images/ui-icons_454545_256x240.png"); +} +.ui-state-active .ui-icon { + background-image: url("images/ui-icons_454545_256x240.png"); +} +.ui-state-highlight .ui-icon { + background-image: url("images/ui-icons_2e83ff_256x240.png"); +} +.ui-state-error .ui-icon, +.ui-state-error-text .ui-icon { + background-image: url("images/ui-icons_cd0a0a_256x240.png"); +} + +/* positioning */ +.ui-icon-blank { background-position: 16px 16px; } +.ui-icon-carat-1-n { background-position: 0 0; } +.ui-icon-carat-1-ne { background-position: -16px 0; } +.ui-icon-carat-1-e { background-position: -32px 0; } +.ui-icon-carat-1-se { background-position: -48px 0; } +.ui-icon-carat-1-s { background-position: -64px 0; } +.ui-icon-carat-1-sw { background-position: -80px 0; } +.ui-icon-carat-1-w { background-position: -96px 0; } +.ui-icon-carat-1-nw { background-position: -112px 0; } +.ui-icon-carat-2-n-s { background-position: -128px 0; } +.ui-icon-carat-2-e-w { background-position: -144px 0; } +.ui-icon-triangle-1-n { background-position: 0 -16px; } +.ui-icon-triangle-1-ne { background-position: -16px -16px; } +.ui-icon-triangle-1-e { background-position: -32px -16px; } +.ui-icon-triangle-1-se { background-position: -48px -16px; } +.ui-icon-triangle-1-s { background-position: -64px -16px; } +.ui-icon-triangle-1-sw { background-position: -80px -16px; } +.ui-icon-triangle-1-w { background-position: -96px -16px; 
} +.ui-icon-triangle-1-nw { background-position: -112px -16px; } +.ui-icon-triangle-2-n-s { background-position: -128px -16px; } +.ui-icon-triangle-2-e-w { background-position: -144px -16px; } +.ui-icon-arrow-1-n { background-position: 0 -32px; } +.ui-icon-arrow-1-ne { background-position: -16px -32px; } +.ui-icon-arrow-1-e { background-position: -32px -32px; } +.ui-icon-arrow-1-se { background-position: -48px -32px; } +.ui-icon-arrow-1-s { background-position: -64px -32px; } +.ui-icon-arrow-1-sw { background-position: -80px -32px; } +.ui-icon-arrow-1-w { background-position: -96px -32px; } +.ui-icon-arrow-1-nw { background-position: -112px -32px; } +.ui-icon-arrow-2-n-s { background-position: -128px -32px; } +.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; } +.ui-icon-arrow-2-e-w { background-position: -160px -32px; } +.ui-icon-arrow-2-se-nw { background-position: -176px -32px; } +.ui-icon-arrowstop-1-n { background-position: -192px -32px; } +.ui-icon-arrowstop-1-e { background-position: -208px -32px; } +.ui-icon-arrowstop-1-s { background-position: -224px -32px; } +.ui-icon-arrowstop-1-w { background-position: -240px -32px; } +.ui-icon-arrowthick-1-n { background-position: 0 -48px; } +.ui-icon-arrowthick-1-ne { background-position: -16px -48px; } +.ui-icon-arrowthick-1-e { background-position: -32px -48px; } +.ui-icon-arrowthick-1-se { background-position: -48px -48px; } +.ui-icon-arrowthick-1-s { background-position: -64px -48px; } +.ui-icon-arrowthick-1-sw { background-position: -80px -48px; } +.ui-icon-arrowthick-1-w { background-position: -96px -48px; } +.ui-icon-arrowthick-1-nw { background-position: -112px -48px; } +.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; } +.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; } +.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; } +.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; } +.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; } 
+.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; } +.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; } +.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; } +.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; } +.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; } +.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; } +.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; } +.ui-icon-arrowreturn-1-w { background-position: -64px -64px; } +.ui-icon-arrowreturn-1-n { background-position: -80px -64px; } +.ui-icon-arrowreturn-1-e { background-position: -96px -64px; } +.ui-icon-arrowreturn-1-s { background-position: -112px -64px; } +.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; } +.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; } +.ui-icon-arrowrefresh-1-e { background-position: -160px -64px; } +.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; } +.ui-icon-arrow-4 { background-position: 0 -80px; } +.ui-icon-arrow-4-diag { background-position: -16px -80px; } +.ui-icon-extlink { background-position: -32px -80px; } +.ui-icon-newwin { background-position: -48px -80px; } +.ui-icon-refresh { background-position: -64px -80px; } +.ui-icon-shuffle { background-position: -80px -80px; } +.ui-icon-transfer-e-w { background-position: -96px -80px; } +.ui-icon-transferthick-e-w { background-position: -112px -80px; } +.ui-icon-folder-collapsed { background-position: 0 -96px; } +.ui-icon-folder-open { background-position: -16px -96px; } +.ui-icon-document { background-position: -32px -96px; } +.ui-icon-document-b { background-position: -48px -96px; } +.ui-icon-note { background-position: -64px -96px; } +.ui-icon-mail-closed { background-position: -80px -96px; } +.ui-icon-mail-open { background-position: -96px -96px; } +.ui-icon-suitcase { background-position: -112px -96px; } +.ui-icon-comment { background-position: -128px -96px; } 
+.ui-icon-person { background-position: -144px -96px; } +.ui-icon-print { background-position: -160px -96px; } +.ui-icon-trash { background-position: -176px -96px; } +.ui-icon-locked { background-position: -192px -96px; } +.ui-icon-unlocked { background-position: -208px -96px; } +.ui-icon-bookmark { background-position: -224px -96px; } +.ui-icon-tag { background-position: -240px -96px; } +.ui-icon-home { background-position: 0 -112px; } +.ui-icon-flag { background-position: -16px -112px; } +.ui-icon-calendar { background-position: -32px -112px; } +.ui-icon-cart { background-position: -48px -112px; } +.ui-icon-pencil { background-position: -64px -112px; } +.ui-icon-clock { background-position: -80px -112px; } +.ui-icon-disk { background-position: -96px -112px; } +.ui-icon-calculator { background-position: -112px -112px; } +.ui-icon-zoomin { background-position: -128px -112px; } +.ui-icon-zoomout { background-position: -144px -112px; } +.ui-icon-search { background-position: -160px -112px; } +.ui-icon-wrench { background-position: -176px -112px; } +.ui-icon-gear { background-position: -192px -112px; } +.ui-icon-heart { background-position: -208px -112px; } +.ui-icon-star { background-position: -224px -112px; } +.ui-icon-link { background-position: -240px -112px; } +.ui-icon-cancel { background-position: 0 -128px; } +.ui-icon-plus { background-position: -16px -128px; } +.ui-icon-plusthick { background-position: -32px -128px; } +.ui-icon-minus { background-position: -48px -128px; } +.ui-icon-minusthick { background-position: -64px -128px; } +.ui-icon-close { background-position: -80px -128px; } +.ui-icon-closethick { background-position: -96px -128px; } +.ui-icon-key { background-position: -112px -128px; } +.ui-icon-lightbulb { background-position: -128px -128px; } +.ui-icon-scissors { background-position: -144px -128px; } +.ui-icon-clipboard { background-position: -160px -128px; } +.ui-icon-copy { background-position: -176px -128px; } +.ui-icon-contact { 
background-position: -192px -128px; } +.ui-icon-image { background-position: -208px -128px; } +.ui-icon-video { background-position: -224px -128px; } +.ui-icon-script { background-position: -240px -128px; } +.ui-icon-alert { background-position: 0 -144px; } +.ui-icon-info { background-position: -16px -144px; } +.ui-icon-notice { background-position: -32px -144px; } +.ui-icon-help { background-position: -48px -144px; } +.ui-icon-check { background-position: -64px -144px; } +.ui-icon-bullet { background-position: -80px -144px; } +.ui-icon-radio-on { background-position: -96px -144px; } +.ui-icon-radio-off { background-position: -112px -144px; } +.ui-icon-pin-w { background-position: -128px -144px; } +.ui-icon-pin-s { background-position: -144px -144px; } +.ui-icon-play { background-position: 0 -160px; } +.ui-icon-pause { background-position: -16px -160px; } +.ui-icon-seek-next { background-position: -32px -160px; } +.ui-icon-seek-prev { background-position: -48px -160px; } +.ui-icon-seek-end { background-position: -64px -160px; } +.ui-icon-seek-start { background-position: -80px -160px; } +/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */ +.ui-icon-seek-first { background-position: -80px -160px; } +.ui-icon-stop { background-position: -96px -160px; } +.ui-icon-eject { background-position: -112px -160px; } +.ui-icon-volume-off { background-position: -128px -160px; } +.ui-icon-volume-on { background-position: -144px -160px; } +.ui-icon-power { background-position: 0 -176px; } +.ui-icon-signal-diag { background-position: -16px -176px; } +.ui-icon-signal { background-position: -32px -176px; } +.ui-icon-battery-0 { background-position: -48px -176px; } +.ui-icon-battery-1 { background-position: -64px -176px; } +.ui-icon-battery-2 { background-position: -80px -176px; } +.ui-icon-battery-3 { background-position: -96px -176px; } +.ui-icon-circle-plus { background-position: 0 -192px; } +.ui-icon-circle-minus { background-position: -16px -192px; } 
+.ui-icon-circle-close { background-position: -32px -192px; } +.ui-icon-circle-triangle-e { background-position: -48px -192px; } +.ui-icon-circle-triangle-s { background-position: -64px -192px; } +.ui-icon-circle-triangle-w { background-position: -80px -192px; } +.ui-icon-circle-triangle-n { background-position: -96px -192px; } +.ui-icon-circle-arrow-e { background-position: -112px -192px; } +.ui-icon-circle-arrow-s { background-position: -128px -192px; } +.ui-icon-circle-arrow-w { background-position: -144px -192px; } +.ui-icon-circle-arrow-n { background-position: -160px -192px; } +.ui-icon-circle-zoomin { background-position: -176px -192px; } +.ui-icon-circle-zoomout { background-position: -192px -192px; } +.ui-icon-circle-check { background-position: -208px -192px; } +.ui-icon-circlesmall-plus { background-position: 0 -208px; } +.ui-icon-circlesmall-minus { background-position: -16px -208px; } +.ui-icon-circlesmall-close { background-position: -32px -208px; } +.ui-icon-squaresmall-plus { background-position: -48px -208px; } +.ui-icon-squaresmall-minus { background-position: -64px -208px; } +.ui-icon-squaresmall-close { background-position: -80px -208px; } +.ui-icon-grip-dotted-vertical { background-position: 0 -224px; } +.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; } +.ui-icon-grip-solid-vertical { background-position: -32px -224px; } +.ui-icon-grip-solid-horizontal { background-position: -48px -224px; } +.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; } +.ui-icon-grip-diagonal-se { background-position: -80px -224px; } + + +/* Misc visuals +----------------------------------*/ + +/* Corner radius */ +.ui-corner-all, +.ui-corner-top, +.ui-corner-left, +.ui-corner-tl { + border-top-left-radius: 4px; +} +.ui-corner-all, +.ui-corner-top, +.ui-corner-right, +.ui-corner-tr { + border-top-right-radius: 4px; +} +.ui-corner-all, +.ui-corner-bottom, +.ui-corner-left, +.ui-corner-bl { + border-bottom-left-radius: 4px; +} 
+.ui-corner-all, +.ui-corner-bottom, +.ui-corner-right, +.ui-corner-br { + border-bottom-right-radius: 4px; +} + +/* Overlays */ +.ui-widget-overlay { + background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + opacity: .3; + filter: Alpha(Opacity=30); /* support: IE8 */ +} +.ui-widget-shadow { + margin: -8px 0 0 -8px; + padding: 8px; + background: #aaaaaa url("images/ui-bg_flat_0_aaaaaa_40x100.png") 50% 50% repeat-x; + opacity: .3; + filter: Alpha(Opacity=30); /* support: IE8 */ + border-radius: 8px; +} diff --git a/Questionnaires/jspsych/examples/data-add-properties.html b/Questionnaires/jspsych/examples/data-add-properties.html new file mode 100644 index 0000000..d58998d --- /dev/null +++ b/Questionnaires/jspsych/examples/data-add-properties.html @@ -0,0 +1,44 @@ + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/data-as-function.html b/Questionnaires/jspsych/examples/data-as-function.html new file mode 100644 index 0000000..a03399c --- /dev/null +++ b/Questionnaires/jspsych/examples/data-as-function.html @@ -0,0 +1,39 @@ + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/data-from-timeline.html b/Questionnaires/jspsych/examples/data-from-timeline.html new file mode 100644 index 0000000..0d9291e --- /dev/null +++ b/Questionnaires/jspsych/examples/data-from-timeline.html @@ -0,0 +1,52 @@ + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/data-from-url.html b/Questionnaires/jspsych/examples/data-from-url.html new file mode 100644 index 0000000..975527e --- /dev/null +++ b/Questionnaires/jspsych/examples/data-from-url.html @@ -0,0 +1,21 @@ + + + + + + + +

The URL variable should be logged to the console

+ + + diff --git a/Questionnaires/jspsych/examples/demo-flanker.html b/Questionnaires/jspsych/examples/demo-flanker.html new file mode 100644 index 0000000..f36f85d --- /dev/null +++ b/Questionnaires/jspsych/examples/demo-flanker.html @@ -0,0 +1,117 @@ + + + + Flanker Task + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/demo-simple-rt-task.html b/Questionnaires/jspsych/examples/demo-simple-rt-task.html new file mode 100644 index 0000000..e1356d2 --- /dev/null +++ b/Questionnaires/jspsych/examples/demo-simple-rt-task.html @@ -0,0 +1,120 @@ + + + + + My experiment + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/jspsych/examples/demos/demo_1.html b/Questionnaires/jspsych/examples/demos/demo_1.html new file mode 100644 index 0000000..c450396 --- /dev/null +++ b/Questionnaires/jspsych/examples/demos/demo_1.html @@ -0,0 +1,35 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/jspsych/examples/demos/demo_2.html b/Questionnaires/jspsych/examples/demos/demo_2.html new file mode 100644 index 0000000..271a746 --- /dev/null +++ b/Questionnaires/jspsych/examples/demos/demo_2.html @@ -0,0 +1,50 @@ + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/demos/demo_3.html b/Questionnaires/jspsych/examples/demos/demo_3.html new file mode 100644 index 0000000..8f2806f --- /dev/null +++ b/Questionnaires/jspsych/examples/demos/demo_3.html @@ -0,0 +1,63 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/Questionnaires/jspsych/examples/display-element-to-embed-experiment.html b/Questionnaires/jspsych/examples/display-element-to-embed-experiment.html new file mode 100644 index 0000000..5cc5a0f --- /dev/null +++ b/Questionnaires/jspsych/examples/display-element-to-embed-experiment.html @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + +
+

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer nec odio. Praesent libero. Sed cursus ante dapibus diam. Sed nisi. Nulla quis sem at nibh elementum imperdiet. Duis sagittis ipsum. Praesent mauris. Fusce nec tellus sed augue semper porta. Mauris massa. Vestibulum lacinia arcu eget nulla.

+ +

Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Curabitur sodales ligula in libero. Sed dignissim lacinia nunc. Curabitur tortor. Pellentesque nibh. Aenean quam. In scelerisque sem at dolor. Maecenas mattis. Sed convallis tristique sem. Proin ut ligula vel nunc egestas porttitor. Morbi lectus risus, iaculis vel, suscipit quis, luctus non, massa.

+ +

Fusce ac turpis quis ligula lacinia aliquet. Mauris ipsum. Nulla metus metus, ullamcorper vel, tincidunt sed, euismod in, nibh. Quisque volutpat condimentum velit. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nam nec ante. Sed lacinia, urna non tincidunt mattis, tortor neque adipiscing diam, a cursus ipsum ante quis turpis. Nulla facilisi. Ut fringilla. Suspendisse potenti. Nunc feugiat mi a tellus consequat imperdiet. Vestibulum sapien. Proin quam. Etiam ultrices.

+ +

Suspendisse in justo eu magna luctus suscipit. Sed lectus. Integer euismod lacus luctus magna. Quisque cursus, metus vitae pharetra auctor, sem massa mattis sem, at interdum magna augue eget diam. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Morbi lacinia molestie dui. Praesent blandit dolor. Sed non quam. In vel mi sit amet augue congue elementum. Morbi in ipsum sit amet pede facilisis laoreet. Donec lacus nunc, viverra nec, blandit vel, egestas et, augue. Vestibulum tincidunt malesuada tellus.

+ +

Ut ultrices ultrices enim. Curabitur sit amet mauris. Morbi in dui quis est pulvinar ullamcorper. Nulla facilisi. Integer lacinia sollicitudin massa. Cras metus. Sed aliquet risus a tortor. Integer id quam. Morbi mi. Quisque nisl felis, venenatis tristique, dignissim in, ultrices sit amet, augue. Proin sodales libero eget ante. Nulla quam.

+ + + + + + diff --git a/Questionnaires/jspsych/examples/end-active-node.html b/Questionnaires/jspsych/examples/end-active-node.html new file mode 100644 index 0000000..7f335cd --- /dev/null +++ b/Questionnaires/jspsych/examples/end-active-node.html @@ -0,0 +1,52 @@ + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/end-experiment.html b/Questionnaires/jspsych/examples/end-experiment.html new file mode 100644 index 0000000..c620aa0 --- /dev/null +++ b/Questionnaires/jspsych/examples/end-experiment.html @@ -0,0 +1,45 @@ + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/exclusions.html b/Questionnaires/jspsych/examples/exclusions.html new file mode 100644 index 0000000..7e5d31b --- /dev/null +++ b/Questionnaires/jspsych/examples/exclusions.html @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + diff --git a/Questionnaires/jspsych/examples/external_html/simple_consent.html b/Questionnaires/jspsych/examples/external_html/simple_consent.html new file mode 100644 index 0000000..212f27d --- /dev/null +++ b/Questionnaires/jspsych/examples/external_html/simple_consent.html @@ -0,0 +1,4 @@ +

This is a demo consent form. Click the checkbox below to indicate the you + would like to participate in the experiment

+

I agree to take part in this study.

+ diff --git a/Questionnaires/jspsych/examples/img/1.gif b/Questionnaires/jspsych/examples/img/1.gif new file mode 100644 index 0000000..f4511d9 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/1.gif differ diff --git a/Questionnaires/jspsych/examples/img/10.gif b/Questionnaires/jspsych/examples/img/10.gif new file mode 100644 index 0000000..1f25f1a Binary files /dev/null and b/Questionnaires/jspsych/examples/img/10.gif differ diff --git a/Questionnaires/jspsych/examples/img/11.gif b/Questionnaires/jspsych/examples/img/11.gif new file mode 100644 index 0000000..f54b3d6 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/11.gif differ diff --git a/Questionnaires/jspsych/examples/img/12.gif b/Questionnaires/jspsych/examples/img/12.gif new file mode 100644 index 0000000..053ecf6 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/12.gif differ diff --git a/Questionnaires/jspsych/examples/img/2.gif b/Questionnaires/jspsych/examples/img/2.gif new file mode 100644 index 0000000..8900090 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/2.gif differ diff --git a/Questionnaires/jspsych/examples/img/3.gif b/Questionnaires/jspsych/examples/img/3.gif new file mode 100644 index 0000000..b6205d1 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/3.gif differ diff --git a/Questionnaires/jspsych/examples/img/4.gif b/Questionnaires/jspsych/examples/img/4.gif new file mode 100644 index 0000000..1d2de35 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/4.gif differ diff --git a/Questionnaires/jspsych/examples/img/5.gif b/Questionnaires/jspsych/examples/img/5.gif new file mode 100644 index 0000000..0c8ce98 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/5.gif differ diff --git a/Questionnaires/jspsych/examples/img/6.gif b/Questionnaires/jspsych/examples/img/6.gif new file mode 100644 index 0000000..59149da Binary files /dev/null and b/Questionnaires/jspsych/examples/img/6.gif differ 
diff --git a/Questionnaires/jspsych/examples/img/7.gif b/Questionnaires/jspsych/examples/img/7.gif new file mode 100644 index 0000000..6b3ea1b Binary files /dev/null and b/Questionnaires/jspsych/examples/img/7.gif differ diff --git a/Questionnaires/jspsych/examples/img/8.gif b/Questionnaires/jspsych/examples/img/8.gif new file mode 100644 index 0000000..723ab75 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/8.gif differ diff --git a/Questionnaires/jspsych/examples/img/9.gif b/Questionnaires/jspsych/examples/img/9.gif new file mode 100644 index 0000000..dabe68d Binary files /dev/null and b/Questionnaires/jspsych/examples/img/9.gif differ diff --git a/Questionnaires/jspsych/examples/img/age/of1.jpg b/Questionnaires/jspsych/examples/img/age/of1.jpg new file mode 100644 index 0000000..13a406d Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/of1.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/of2.jpg b/Questionnaires/jspsych/examples/img/age/of2.jpg new file mode 100644 index 0000000..0c10e81 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/of2.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/of3.jpg b/Questionnaires/jspsych/examples/img/age/of3.jpg new file mode 100644 index 0000000..e20f977 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/of3.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/om1.jpg b/Questionnaires/jspsych/examples/img/age/om1.jpg new file mode 100644 index 0000000..0b4335b Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/om1.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/om2.jpg b/Questionnaires/jspsych/examples/img/age/om2.jpg new file mode 100644 index 0000000..3930e06 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/om2.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/om3.jpg b/Questionnaires/jspsych/examples/img/age/om3.jpg new file mode 100644 index 
0000000..f74917e Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/om3.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/yf1.jpg b/Questionnaires/jspsych/examples/img/age/yf1.jpg new file mode 100644 index 0000000..c13b936 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/yf1.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/yf4.jpg b/Questionnaires/jspsych/examples/img/age/yf4.jpg new file mode 100644 index 0000000..d4d61da Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/yf4.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/yf5.jpg b/Questionnaires/jspsych/examples/img/age/yf5.jpg new file mode 100644 index 0000000..9471606 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/yf5.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/ym2.jpg b/Questionnaires/jspsych/examples/img/age/ym2.jpg new file mode 100644 index 0000000..a56e70e Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/ym2.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/ym3.jpg b/Questionnaires/jspsych/examples/img/age/ym3.jpg new file mode 100644 index 0000000..33847a4 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/ym3.jpg differ diff --git a/Questionnaires/jspsych/examples/img/age/ym5.jpg b/Questionnaires/jspsych/examples/img/age/ym5.jpg new file mode 100644 index 0000000..4e25343 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/age/ym5.jpg differ diff --git a/Questionnaires/jspsych/examples/img/backwardN.gif b/Questionnaires/jspsych/examples/img/backwardN.gif new file mode 100644 index 0000000..38a2164 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/backwardN.gif differ diff --git a/Questionnaires/jspsych/examples/img/blue.png b/Questionnaires/jspsych/examples/img/blue.png new file mode 100644 index 0000000..820bdce Binary files /dev/null and 
b/Questionnaires/jspsych/examples/img/blue.png differ diff --git a/Questionnaires/jspsych/examples/img/card.png b/Questionnaires/jspsych/examples/img/card.png new file mode 100644 index 0000000..ca8de4f Binary files /dev/null and b/Questionnaires/jspsych/examples/img/card.png differ diff --git a/Questionnaires/jspsych/examples/img/con1.png b/Questionnaires/jspsych/examples/img/con1.png new file mode 100644 index 0000000..9b311d2 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/con1.png differ diff --git a/Questionnaires/jspsych/examples/img/con2.png b/Questionnaires/jspsych/examples/img/con2.png new file mode 100644 index 0000000..884bcbb Binary files /dev/null and b/Questionnaires/jspsych/examples/img/con2.png differ diff --git a/Questionnaires/jspsych/examples/img/fixation.gif b/Questionnaires/jspsych/examples/img/fixation.gif new file mode 100644 index 0000000..ff76c92 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/fixation.gif differ diff --git a/Questionnaires/jspsych/examples/img/happy_face_1.jpg b/Questionnaires/jspsych/examples/img/happy_face_1.jpg new file mode 100644 index 0000000..00891ce Binary files /dev/null and b/Questionnaires/jspsych/examples/img/happy_face_1.jpg differ diff --git a/Questionnaires/jspsych/examples/img/happy_face_2.jpg b/Questionnaires/jspsych/examples/img/happy_face_2.jpg new file mode 100644 index 0000000..c895ca2 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/happy_face_2.jpg differ diff --git a/Questionnaires/jspsych/examples/img/happy_face_3.jpg b/Questionnaires/jspsych/examples/img/happy_face_3.jpg new file mode 100644 index 0000000..d00fe2f Binary files /dev/null and b/Questionnaires/jspsych/examples/img/happy_face_3.jpg differ diff --git a/Questionnaires/jspsych/examples/img/happy_face_4.jpg b/Questionnaires/jspsych/examples/img/happy_face_4.jpg new file mode 100644 index 0000000..843cb19 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/happy_face_4.jpg 
differ diff --git a/Questionnaires/jspsych/examples/img/inc1.png b/Questionnaires/jspsych/examples/img/inc1.png new file mode 100644 index 0000000..4710286 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/inc1.png differ diff --git a/Questionnaires/jspsych/examples/img/inc2.png b/Questionnaires/jspsych/examples/img/inc2.png new file mode 100644 index 0000000..cfb43bb Binary files /dev/null and b/Questionnaires/jspsych/examples/img/inc2.png differ diff --git a/Questionnaires/jspsych/examples/img/normalN.gif b/Questionnaires/jspsych/examples/img/normalN.gif new file mode 100644 index 0000000..dc5593d Binary files /dev/null and b/Questionnaires/jspsych/examples/img/normalN.gif differ diff --git a/Questionnaires/jspsych/examples/img/orange.png b/Questionnaires/jspsych/examples/img/orange.png new file mode 100644 index 0000000..108e6e5 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/orange.png differ diff --git a/Questionnaires/jspsych/examples/img/redX.png b/Questionnaires/jspsych/examples/img/redX.png new file mode 100644 index 0000000..55eeb03 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/redX.png differ diff --git a/Questionnaires/jspsych/examples/img/ribbon.jpg b/Questionnaires/jspsych/examples/img/ribbon.jpg new file mode 100644 index 0000000..cf20cc6 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/ribbon.jpg differ diff --git a/Questionnaires/jspsych/examples/img/sad_face_1.jpg b/Questionnaires/jspsych/examples/img/sad_face_1.jpg new file mode 100644 index 0000000..2a12ac2 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/sad_face_1.jpg differ diff --git a/Questionnaires/jspsych/examples/img/sad_face_2.jpg b/Questionnaires/jspsych/examples/img/sad_face_2.jpg new file mode 100644 index 0000000..17ca674 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/sad_face_2.jpg differ diff --git a/Questionnaires/jspsych/examples/img/sad_face_3.jpg 
b/Questionnaires/jspsych/examples/img/sad_face_3.jpg new file mode 100644 index 0000000..3edd256 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/sad_face_3.jpg differ diff --git a/Questionnaires/jspsych/examples/img/sad_face_4.jpg b/Questionnaires/jspsych/examples/img/sad_face_4.jpg new file mode 100644 index 0000000..c8b7a75 Binary files /dev/null and b/Questionnaires/jspsych/examples/img/sad_face_4.jpg differ diff --git a/Questionnaires/jspsych/examples/js/snap.svg-min.js b/Questionnaires/jspsych/examples/js/snap.svg-min.js new file mode 100644 index 0000000..23a7343 --- /dev/null +++ b/Questionnaires/jspsych/examples/js/snap.svg-min.js @@ -0,0 +1,21 @@ +// Snap.svg 0.5.1 +// +// Copyright (c) 2013 – 2017 Adobe Systems Incorporated. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// build: 2017-02-07 + +!function(a){var b,c,d="0.5.0",e="hasOwnProperty",f=/[\.\/]/,g=/\s*,\s*/,h="*",i=function(a,b){return a-b},j={n:{}},k=function(){for(var a=0,b=this.length;b>a;a++)if("undefined"!=typeof this[a])return this[a]},l=function(){for(var a=this.length;--a;)if("undefined"!=typeof this[a])return this[a]},m=Object.prototype.toString,n=String,o=Array.isArray||function(a){return a instanceof Array||"[object Array]"==m.call(a)};eve=function(a,d){var e,f=c,g=Array.prototype.slice.call(arguments,2),h=eve.listeners(a),j=0,m=[],n={},o=[],p=b;o.firstDefined=k,o.lastDefined=l,b=a,c=0;for(var q=0,r=h.length;r>q;q++)"zIndex"in h[q]&&(m.push(h[q].zIndex),h[q].zIndex<0&&(n[h[q].zIndex]=h[q]));for(m.sort(i);m[j]<0;)if(e=n[m[j++]],o.push(e.apply(d,g)),c)return c=f,o;for(q=0;r>q;q++)if(e=h[q],"zIndex"in e)if(e.zIndex==m[j]){if(o.push(e.apply(d,g)),c)break;do if(j++,e=n[m[j]],e&&o.push(e.apply(d,g)),c)break;while(e)}else n[e.zIndex]=e;else if(o.push(e.apply(d,g)),c)break;return c=f,b=p,o},eve._events=j,eve.listeners=function(a){var b,c,d,e,g,i,k,l,m=o(a)?a:a.split(f),n=j,p=[n],q=[];for(e=0,g=m.length;g>e;e++){for(l=[],i=0,k=p.length;k>i;i++)for(n=p[i].n,c=[n[m[e]],n[h]],d=2;d--;)b=c[d],b&&(l.push(b),q=q.concat(b.f||[]));p=l}return q},eve.separator=function(a){a?(a=n(a).replace(/(?=[\.\^\]\[\-])/g,"\\"),a="["+a+"]",f=new RegExp(a)):f=/[\.\/]/},eve.on=function(a,b){if("function"!=typeof b)return function(){};for(var c=o(a)?o(a[0])?a:[a]:n(a).split(g),d=0,e=c.length;e>d;d++)!function(a){for(var c,d=o(a)?a:n(a).split(f),e=j,g=0,h=d.length;h>g;g++)e=e.n,e=e.hasOwnProperty(d[g])&&e[d[g]]||(e[d[g]]={n:{}});for(e.f=e.f||[],g=0,h=e.f.length;h>g;g++)if(e.f[g]==b){c=!0;break}!c&&e.f.push(b)}(c[d]);return function(a){+a==+a&&(b.zIndex=+a)}},eve.f=function(a){var b=[].slice.call(arguments,1);return function(){eve.apply(null,[a,null].concat(b).concat([].slice.call(arguments,0)))}},eve.stop=function(){c=1},eve.nt=function(a){var c=o(b)?b.join("."):b;return a?new 
RegExp("(?:\\.|\\/|^)"+a+"(?:\\.|\\/|$)").test(c):c},eve.nts=function(){return o(b)?b:b.split(f)},eve.off=eve.unbind=function(a,b){if(!a)return void(eve._events=j={n:{}});var c=o(a)?o(a[0])?a:[a]:n(a).split(g);if(c.length>1)for(var d=0,i=c.length;i>d;d++)eve.off(c[d],b);else{c=o(a)?a:n(a).split(f);var k,l,m,d,i,p,q,r=[j],s=[];for(d=0,i=c.length;i>d;d++)for(p=0;pd;d++)for(k=r[d];k.n;){if(b){if(k.f){for(p=0,q=k.f.length;q>p;p++)if(k.f[p]==b){k.f.splice(p,1);break}!k.f.length&&delete k.f}for(l in k.n)if(k.n[e](l)&&k.n[l].f){var t=k.n[l].f;for(p=0,q=t.length;q>p;p++)if(t[p]==b){t.splice(p,1);break}!t.length&&delete k.n[l].f}}else{delete k.f;for(l in k.n)k.n[e](l)&&k.n[l].f&&delete k.n[l].f}k=k.n}a:for(d=0,i=s.length;i>d;d++){k=s[d];for(l in k.n[k.name].f)continue a;for(l in k.n[k.name].n)continue a;delete k.n[k.name]}}},eve.once=function(a,b){var c=function(){return eve.off(a,c),b.apply(this,arguments)};return eve.on(a,c)},eve.version=d,eve.toString=function(){return"You are running Eve "+d},"undefined"!=typeof module&&module.exports?module.exports=eve:"function"==typeof define&&define.amd?define("eve",[],function(){return eve}):a.eve=eve}(this),function(a,b){if("function"==typeof define&&define.amd)define(["eve"],function(c){return b(a,c)});else if("undefined"!=typeof exports){var c=require("eve");module.exports=b(a,c)}else b(a,a.eve)}(window||this,function(a,b){var c=function(b){var c,d={},e=a.requestAnimationFrame||a.webkitRequestAnimationFrame||a.mozRequestAnimationFrame||a.oRequestAnimationFrame||a.msRequestAnimationFrame||function(a){return setTimeout(a,16,(new Date).getTime()),!0},f=Array.isArray||function(a){return a instanceof Array||"[object Array]"==Object.prototype.toString.call(a)},g=0,h="M"+(+new Date).toString(36),i=function(){return h+(g++).toString(36)},j=Date.now||function(){return+new Date},k=function(a){var b=this;if(null==a)return b.s;var c=b.s-a;b.b+=b.dur*c,b.B+=b.dur*c,b.s=a},l=function(a){var b=this;return 
null==a?b.spd:void(b.spd=a)},m=function(a){var b=this;return null==a?b.dur:(b.s=b.s*a/b.dur,void(b.dur=a))},n=function(){var a=this;delete d[a.id],a.update(),b("mina.stop."+a.id,a)},o=function(){var a=this;a.pdif||(delete d[a.id],a.update(),a.pdif=a.get()-a.b)},p=function(){var a=this;a.pdif&&(a.b=a.get()-a.pdif,delete a.pdif,d[a.id]=a,r())},q=function(){var a,b=this;if(f(b.start)){a=[];for(var c=0,d=b.start.length;d>c;c++)a[c]=+b.start[c]+(b.end[c]-b.start[c])*b.easing(b.s)}else a=+b.start+(b.end-b.start)*b.easing(b.s);b.set(a)},r=function(a){if(!a)return void(c||(c=e(r)));var f=0;for(var g in d)if(d.hasOwnProperty(g)){var h=d[g],i=h.get();f++,h.s=(i-h.b)/(h.dur/h.spd),h.s>=1&&(delete d[g],h.s=1,f--,function(a){setTimeout(function(){b("mina.finish."+a.id,a)})}(h)),h.update()}c=f?e(r):!1},s=function(a,b,c,e,f,g,h){var j={id:i(),start:a,end:b,b:c,s:0,dur:e-c,spd:1,get:f,set:g,easing:h||s.linear,status:k,speed:l,duration:m,stop:n,pause:o,resume:p,update:q};d[j.id]=j;var t,u=0;for(t in d)if(d.hasOwnProperty(t)&&(u++,2==u))break;return 1==u&&r(),j};return s.time=j,s.getById=function(a){return d[a]||null},s.linear=function(a){return a},s.easeout=function(a){return Math.pow(a,1.7)},s.easein=function(a){return Math.pow(a,.48)},s.easeinout=function(a){if(1==a)return 1;if(0==a)return 0;var b=.48-a/1.04,c=Math.sqrt(.1734+b*b),d=c-b,e=Math.pow(Math.abs(d),1/3)*(0>d?-1:1),f=-c-b,g=Math.pow(Math.abs(f),1/3)*(0>f?-1:1),h=e+g+.5;return 3*(1-h)*h*h+h*h*h},s.backin=function(a){if(1==a)return 1;var b=1.70158;return a*a*((b+1)*a-b)},s.backout=function(a){if(0==a)return 0;a-=1;var b=1.70158;return a*a*((b+1)*a+b)+1},s.elastic=function(a){return a==!!a?a:Math.pow(2,-10*a)*Math.sin((a-.075)*(2*Math.PI)/.3)+1},s.bounce=function(a){var b,c=7.5625,d=2.75;return 1/d>a?b=c*a*a:2/d>a?(a-=1.5/d,b=c*a*a+.75):2.5/d>a?(a-=2.25/d,b=c*a*a+.9375):(a-=2.625/d,b=c*a*a+.984375),b},a.mina=s,s}("undefined"==typeof b?function(){}:b),d=function(a){function c(a,b){if(a){if(a.nodeType)return 
w(a);if(e(a,"array")&&c.set)return c.set.apply(c,a);if(a instanceof s)return a;if(null==b)try{return a=y.doc.querySelector(String(a)),w(a)}catch(d){return null}}return a=null==a?"100%":a,b=null==b?"100%":b,new v(a,b)}function d(a,b){if(b){if("#text"==a&&(a=y.doc.createTextNode(b.text||b["#text"]||"")),"#comment"==a&&(a=y.doc.createComment(b.text||b["#text"]||"")),"string"==typeof a&&(a=d(a)),"string"==typeof b)return 1==a.nodeType?"xlink:"==b.substring(0,6)?a.getAttributeNS(T,b.substring(6)):"xml:"==b.substring(0,4)?a.getAttributeNS(U,b.substring(4)):a.getAttribute(b):"text"==b?a.nodeValue:null;if(1==a.nodeType){for(var c in b)if(b[z](c)){var e=A(b[c]);e?"xlink:"==c.substring(0,6)?a.setAttributeNS(T,c.substring(6),e):"xml:"==c.substring(0,4)?a.setAttributeNS(U,c.substring(4),e):a.setAttribute(c,e):a.removeAttribute(c)}}else"text"in b&&(a.nodeValue=b.text)}else a=y.doc.createElementNS(U,a);return a}function e(a,b){return b=A.prototype.toLowerCase.call(b),"finite"==b?isFinite(a):"array"==b&&(a instanceof Array||Array.isArray&&Array.isArray(a))?!0:"null"==b&&null===a||b==typeof a&&null!==a||"object"==b&&a===Object(a)||J.call(a).slice(8,-1).toLowerCase()==b}function f(a){if("function"==typeof a||Object(a)!==a)return a;var b=new a.constructor;for(var c in a)a[z](c)&&(b[c]=f(a[c]));return b}function h(a,b){for(var c=0,d=a.length;d>c;c++)if(a[c]===b)return a.push(a.splice(c,1)[0])}function i(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),g=d.cache=d.cache||{},i=d.count=d.count||[];return g[z](f)?(h(i,f),c?c(g[f]):g[f]):(i.length>=1e3&&delete g[i.shift()],i.push(f),g[f]=a.apply(b,e),c?c(g[f]):g[f])}return d}function j(a,b,c,d,e,f){if(null==e){var g=a-c,h=b-d;return g||h?(180+180*D.atan2(-h,-g)/H+360)%360:0}return j(a,b,e,f)-j(c,d,e,f)}function k(a){return a%360*H/180}function l(a){return 180*a/H%360}function m(a){var b=[];return a=a.replace(/(?:^|\s)(\w+)\(([^)]+)\)/g,function(a,c,d){return 
d=d.split(/\s*,\s*|\s+/),"rotate"==c&&1==d.length&&d.push(0,0),"scale"==c&&(d.length>2?d=d.slice(0,2):2==d.length&&d.push(0,0),1==d.length&&d.push(d[0],0,0)),"skewX"==c?b.push(["m",1,0,D.tan(k(d[0])),1,0,0]):"skewY"==c?b.push(["m",1,D.tan(k(d[0])),0,1,0,0]):b.push([c.charAt(0)].concat(d)),a}),b}function n(a,b){var d=aa(a),e=new c.Matrix;if(d)for(var f=0,g=d.length;g>f;f++){var h,i,j,k,l,m=d[f],n=m.length,o=A(m[0]).toLowerCase(),p=m[0]!=o,q=p?e.invert():0;"t"==o&&2==n?e.translate(m[1],0):"t"==o&&3==n?p?(h=q.x(0,0),i=q.y(0,0),j=q.x(m[1],m[2]),k=q.y(m[1],m[2]),e.translate(j-h,k-i)):e.translate(m[1],m[2]):"r"==o?2==n?(l=l||b,e.rotate(m[1],l.x+l.width/2,l.y+l.height/2)):4==n&&(p?(j=q.x(m[2],m[3]),k=q.y(m[2],m[3]),e.rotate(m[1],j,k)):e.rotate(m[1],m[2],m[3])):"s"==o?2==n||3==n?(l=l||b,e.scale(m[1],m[n-1],l.x+l.width/2,l.y+l.height/2)):4==n?p?(j=q.x(m[2],m[3]),k=q.y(m[2],m[3]),e.scale(m[1],m[1],j,k)):e.scale(m[1],m[1],m[2],m[3]):5==n&&(p?(j=q.x(m[3],m[4]),k=q.y(m[3],m[4]),e.scale(m[1],m[2],j,k)):e.scale(m[1],m[2],m[3],m[4])):"m"==o&&7==n&&e.add(m[1],m[2],m[3],m[4],m[5],m[6])}return e}function o(a){var b=a.node.ownerSVGElement&&w(a.node.ownerSVGElement)||a.node.parentNode&&w(a.node.parentNode)||c.select("svg")||c(0,0),d=b.select("defs"),e=null==d?!1:d.node;return e||(e=u("defs",b.node).node),e}function p(a){return a.node.ownerSVGElement&&w(a.node.ownerSVGElement)||c.select("svg")}function q(a,b,c){function e(a){if(null==a)return I;if(a==+a)return a;d(j,{width:a});try{return j.getBBox().width}catch(b){return 0}}function f(a){if(null==a)return I;if(a==+a)return a;d(j,{height:a});try{return j.getBBox().height}catch(b){return 0}}function g(d,e){null==b?i[d]=e(a.attr(d)||0):d==b&&(i=e(null==c?a.attr(d)||0:c))}var 
h=p(a).node,i={},j=h.querySelector(".svg---mgr");switch(j||(j=d("rect"),d(j,{x:-9e9,y:-9e9,width:10,height:10,"class":"svg---mgr",fill:"none"}),h.appendChild(j)),a.type){case"rect":g("rx",e),g("ry",f);case"image":g("width",e),g("height",f);case"text":g("x",e),g("y",f);break;case"circle":g("cx",e),g("cy",f),g("r",e);break;case"ellipse":g("cx",e),g("cy",f),g("rx",e),g("ry",f);break;case"line":g("x1",e),g("x2",e),g("y1",f),g("y2",f);break;case"marker":g("refX",e),g("markerWidth",e),g("refY",f),g("markerHeight",f);break;case"radialGradient":g("fx",e),g("fy",f);break;case"tspan":g("dx",e),g("dy",f);break;default:g(b,e)}return h.removeChild(j),i}function r(a){e(a,"array")||(a=Array.prototype.slice.call(arguments,0));for(var b=0,c=0,d=this.node;this[b];)delete this[b++];for(b=0;bc;c++){var e={type:a[c].type,attr:a[c].attr()},f=a[c].children();b.push(e),f.length&&x(f,e.childNodes=[])}}c.version="0.5.1",c.toString=function(){return"Snap v"+this.version},c._={};var y={win:a.window,doc:a.window.document};c._.glob=y;var z="hasOwnProperty",A=String,B=parseFloat,C=parseInt,D=Math,E=D.max,F=D.min,G=D.abs,H=(D.pow,D.PI),I=(D.round,""),J=Object.prototype.toString,K=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?%?)\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+(?:%?\s*,\s*[\d\.]+)?%?)\s*\))\s*$/i,L=(c._.separator=/[,\s]+/,/[\s]*,[\s]*/),M={hs:1,rg:1},N=/([a-z])[\s,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\s]*,?[\s]*)+)/gi,O=/([rstm])[\s,]*((-?\d*\.?\d*(?:e[\-+]?\d+)?[\s]*,?[\s]*)+)/gi,P=/(-?\d*\.?\d*(?:e[\-+]?\d+)?)[\s]*,?[\s]*/gi,Q=0,R="S"+(+new Date).toString(36),S=function(a){return(a&&a.type?a.type:I)+R+(Q++).toString(36)},T="http://www.w3.org/1999/xlink",U="http://www.w3.org/2000/svg",V={};c.url=function(a){return"url('#"+a+"')"};c._.$=d,c._.id=S,c.format=function(){var 
a=/\{([^\}]+)\}/g,b=/(?:(?:^|\.)(.+?)(?=\[|\.|$|\()|\[('|")(.+?)\2\])(\(\))?/g,c=function(a,c,d){var e=d;return c.replace(b,function(a,b,c,d,f){b=b||d,e&&(b in e&&(e=e[b]),"function"==typeof e&&f&&(e=e()))}),e=(null==e||e==d?a:e)+""};return function(b,d){return A(b).replace(a,function(a,b){return c(a,b,d)})}}(),c._.clone=f,c._.cacher=i,c.rad=k,c.deg=l,c.sin=function(a){return D.sin(c.rad(a))},c.tan=function(a){return D.tan(c.rad(a))},c.cos=function(a){return D.cos(c.rad(a))},c.asin=function(a){return c.deg(D.asin(a))},c.acos=function(a){return c.deg(D.acos(a))},c.atan=function(a){return c.deg(D.atan(a))},c.atan2=function(a){return c.deg(D.atan2(a))},c.angle=j,c.len=function(a,b,d,e){return Math.sqrt(c.len2(a,b,d,e))},c.len2=function(a,b,c,d){return(a-c)*(a-c)+(b-d)*(b-d)},c.closestPoint=function(a,b,c){function d(a){var d=a.x-b,e=a.y-c;return d*d+e*e}for(var e,f,g,h,i=a.node,j=i.getTotalLength(),k=j/i.pathSegList.numberOfItems*.125,l=1/0,m=0;j>=m;m+=k)(h=d(g=i.getPointAtLength(m))).5;){var n,o,p,q,r,s;(p=f-k)>=0&&(r=d(n=i.getPointAtLength(p)))f)return b-f;if(f>a-c)return b-f+a}return b},c.getRGB=i(function(a){if(!a||(a=A(a)).indexOf("-")+1)return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z};if("none"==a)return{r:-1,g:-1,b:-1,hex:"none",toString:Z};if(!(M[z](a.toLowerCase().substring(0,2))||"#"==a.charAt())&&(a=W(a)),!a)return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z};var b,d,f,g,h,i,j=a.match(K);return 
j?(j[2]&&(f=C(j[2].substring(5),16),d=C(j[2].substring(3,5),16),b=C(j[2].substring(1,3),16)),j[3]&&(f=C((h=j[3].charAt(3))+h,16),d=C((h=j[3].charAt(2))+h,16),b=C((h=j[3].charAt(1))+h,16)),j[4]&&(i=j[4].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b*=2.55),d=B(i[1]),"%"==i[1].slice(-1)&&(d*=2.55),f=B(i[2]),"%"==i[2].slice(-1)&&(f*=2.55),"rgba"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100)),j[5]?(i=j[5].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b/=100),d=B(i[1]),"%"==i[1].slice(-1)&&(d/=100),f=B(i[2]),"%"==i[2].slice(-1)&&(f/=100),("deg"==i[0].slice(-3)||"°"==i[0].slice(-1))&&(b/=360),"hsba"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100),c.hsb2rgb(b,d,f,g)):j[6]?(i=j[6].split(L),b=B(i[0]),"%"==i[0].slice(-1)&&(b/=100),d=B(i[1]),"%"==i[1].slice(-1)&&(d/=100),f=B(i[2]),"%"==i[2].slice(-1)&&(f/=100),("deg"==i[0].slice(-3)||"°"==i[0].slice(-1))&&(b/=360),"hsla"==j[1].toLowerCase().slice(0,4)&&(g=B(i[3])),i[3]&&"%"==i[3].slice(-1)&&(g/=100),c.hsl2rgb(b,d,f,g)):(b=F(D.round(b),255),d=F(D.round(d),255),f=F(D.round(f),255),g=F(E(g,0),1),j={r:b,g:d,b:f,toString:Z},j.hex="#"+(16777216|f|d<<8|b<<16).toString(16).slice(1),j.opacity=e(g,"finite")?g:1,j)):{r:-1,g:-1,b:-1,hex:"none",error:1,toString:Z}},c),c.hsb=i(function(a,b,d){return c.hsb2rgb(a,b,d).hex}),c.hsl=i(function(a,b,d){return c.hsl2rgb(a,b,d).hex}),c.rgb=i(function(a,b,c,d){if(e(d,"finite")){var f=D.round;return"rgba("+[f(a),f(b),f(c),+d.toFixed(2)]+")"}return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)});var W=function(a){var b=y.doc.getElementsByTagName("head")[0]||y.doc.getElementsByTagName("svg")[0],c="rgb(255, 0, 0)";return(W=i(function(a){if("red"==a.toLowerCase())return c;b.style.color=c,b.style.color=a;var d=y.doc.defaultView.getComputedStyle(b,I).getPropertyValue("color");return d==c?null:d}))(a)},X=function(){return"hsb("+[this.h,this.s,this.b]+")"},Y=function(){return"hsl("+[this.h,this.s,this.l]+")"},Z=function(){return 
1==this.opacity||null==this.opacity?this.hex:"rgba("+[this.r,this.g,this.b,this.opacity]+")"},$=function(a,b,d){if(null==b&&e(a,"object")&&"r"in a&&"g"in a&&"b"in a&&(d=a.b,b=a.g,a=a.r),null==b&&e(a,string)){var f=c.getRGB(a);a=f.r,b=f.g,d=f.b}return(a>1||b>1||d>1)&&(a/=255,b/=255,d/=255),[a,b,d]},_=function(a,b,d,f){a=D.round(255*a),b=D.round(255*b),d=D.round(255*d);var g={r:a,g:b,b:d,opacity:e(f,"finite")?f:1,hex:c.rgb(a,b,d),toString:Z};return e(f,"finite")&&(g.opacity=f),g};c.color=function(a){var b;return e(a,"object")&&"h"in a&&"s"in a&&"b"in a?(b=c.hsb2rgb(a),a.r=b.r,a.g=b.g,a.b=b.b,a.opacity=1,a.hex=b.hex):e(a,"object")&&"h"in a&&"s"in a&&"l"in a?(b=c.hsl2rgb(a),a.r=b.r,a.g=b.g,a.b=b.b,a.opacity=1,a.hex=b.hex):(e(a,"string")&&(a=c.getRGB(a)),e(a,"object")&&"r"in a&&"g"in a&&"b"in a&&!("error"in a)?(b=c.rgb2hsl(a),a.h=b.h,a.s=b.s,a.l=b.l,b=c.rgb2hsb(a),a.v=b.b):(a={hex:"none"},a.r=a.g=a.b=a.h=a.s=a.v=a.l=-1,a.error=1)),a.toString=Z,a},c.hsb2rgb=function(a,b,c,d){e(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,d=a.o,a=a.h),a*=360;var f,g,h,i,j;return a=a%360/60,j=c*b,i=j*(1-G(a%2-1)),f=g=h=c-j,a=~~a,f+=[j,i,0,0,i,j][a],g+=[i,j,j,i,0,0][a],h+=[0,0,i,j,j,i][a],_(f,g,h,d)},c.hsl2rgb=function(a,b,c,d){e(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h),(a>1||b>1||c>1)&&(a/=360,b/=100,c/=100),a*=360;var f,g,h,i,j;return a=a%360/60,j=2*b*(.5>c?c:1-c),i=j*(1-G(a%2-1)),f=g=h=c-j/2,a=~~a,f+=[j,i,0,0,i,j][a],g+=[i,j,j,i,0,0][a],h+=[0,0,i,j,j,i][a],_(f,g,h,d)},c.rgb2hsb=function(a,b,c){c=$(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;return f=E(a,b,c),g=f-F(a,b,c),d=0==g?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=0==g?0:g/f,{h:d,s:e,b:f,toString:X}},c.rgb2hsl=function(a,b,c){c=$(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g,h,i;return g=E(a,b,c),h=F(a,b,c),i=g-h,d=0==i?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=0==i?0:.5>f?i/(2*f):i/(2-2*f),{h:d,s:e,l:f,toString:Y}},c.parsePathString=function(a){if(!a)return 
null;var b=c.path(a);if(b.arr)return c.path.clone(b.arr);var d={a:7,c:6,o:2,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,u:3,z:0},f=[];return e(a,"array")&&e(a[0],"array")&&(f=c.path.clone(a)),f.length||A(a).replace(N,function(a,b,c){var e=[],g=b.toLowerCase();if(c.replace(P,function(a,b){b&&e.push(+b)}),"m"==g&&e.length>2&&(f.push([b].concat(e.splice(0,2))),g="l",b="m"==b?"l":"L"),"o"==g&&1==e.length&&f.push([b,e[0]]),"r"==g)f.push([b].concat(e));else for(;e.length>=d[g]&&(f.push([b].concat(e.splice(0,d[g]))),d[g]););}),f.toString=c.path.toString,b.arr=c.path.clone(f),f};var aa=c.parseTransformString=function(a){if(!a)return null;var b=[];return e(a,"array")&&e(a[0],"array")&&(b=c.path.clone(a)),b.length||A(a).replace(O,function(a,c,d){var e=[];c.toLowerCase();d.replace(P,function(a,b){b&&e.push(+b)}),b.push([c].concat(e))}),b.toString=c.path.toString,b};c._.svgTransform2string=m,c._.rgTransform=/^[a-z][\s]*-?\.?\d/i,c._.transform2matrix=n,c._unit2px=q;y.doc.contains||y.doc.compareDocumentPosition?function(a,b){var c=9==a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a==d||!(!d||1!=d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)for(;b;)if(b=b.parentNode,b==a)return!0;return!1};c._.getSomeDefs=o,c._.getSomeSVG=p,c.select=function(a){return a=A(a).replace(/([^\\]):/g,"$1\\:"),w(y.doc.querySelector(a))},c.selectAll=function(a){for(var b=y.doc.querySelectorAll(a),d=(c.set||Array)(),e=0;ei;i++)h[g[i].nodeName]=g[i].nodeValue;return h}if(e(a,"string")){if(!(arguments.length>1))return b("snap.util.getattr."+a,d).firstDefined();var k={};k[a]=c,a=k}for(var l in a)a[z](l)&&b("snap.util.attr."+l,d,a[l]);return d},c.parse=function(a){var b=y.doc.createDocumentFragment(),c=!0,d=y.doc.createElement("div");if(a=A(a),a.match(/^\s*<\s*svg(?:\s|>)/)||(a=""+a+"",c=!1),d.innerHTML=a,a=d.getElementsByTagName("svg")[0])if(c)b=a;else for(;a.firstChild;)b.appendChild(a.firstChild);return new 
t(b)},c.fragment=function(){for(var a=Array.prototype.slice.call(arguments,0),b=y.doc.createDocumentFragment(),d=0,e=a.length;e>d;d++){var f=a[d];f.node&&f.node.nodeType&&b.appendChild(f.node),f.nodeType&&b.appendChild(f),"string"==typeof f&&b.appendChild(c.parse(f).node)}return new t(b)},c._.make=u,c._.wrap=w,v.prototype.el=function(a,b){var c=u(a,this.node);return b&&c.attr(b),c},s.prototype.children=function(){for(var a=[],b=this.node.childNodes,d=0,e=b.length;e>d;d++)a[d]=c(b[d]);return a},s.prototype.toJSON=function(){var a=[];return x([this],a),a[0]},b.on("snap.util.getattr",function(){var a=b.nt();a=a.substring(a.lastIndexOf(".")+1);var c=a.replace(/[A-Z]/g,function(a){return"-"+a.toLowerCase()});return ba[z](c)?this.node.ownerDocument.defaultView.getComputedStyle(this.node,null).getPropertyValue(c):d(this.node,a)});var ba={"alignment-baseline":0,"baseline-shift":0,clip:0,"clip-path":0,"clip-rule":0,color:0,"color-interpolation":0,"color-interpolation-filters":0,"color-profile":0,"color-rendering":0,cursor:0,direction:0,display:0,"dominant-baseline":0,"enable-background":0,fill:0,"fill-opacity":0,"fill-rule":0,filter:0,"flood-color":0,"flood-opacity":0,font:0,"font-family":0,"font-size":0,"font-size-adjust":0,"font-stretch":0,"font-style":0,"font-variant":0,"font-weight":0,"glyph-orientation-horizontal":0,"glyph-orientation-vertical":0,"image-rendering":0,kerning:0,"letter-spacing":0,"lighting-color":0,marker:0,"marker-end":0,"marker-mid":0,"marker-start":0,mask:0,opacity:0,overflow:0,"pointer-events":0,"shape-rendering":0,"stop-color":0,"stop-opacity":0,stroke:0,"stroke-dasharray":0,"stroke-dashoffset":0,"stroke-linecap":0,"stroke-linejoin":0,"stroke-miterlimit":0,"stroke-opacity":0,"stroke-width":0,"text-anchor":0,"text-decoration":0,"text-rendering":0,"unicode-bidi":0,visibility:0,"word-spacing":0,"writing-mode":0};b.on("snap.util.attr",function(a){var c=b.nt(),e={};c=c.substring(c.lastIndexOf(".")+1),e[c]=a;var f=c.replace(/-(\w)/gi,function(a,b){return 
b.toUpperCase()}),g=c.replace(/[A-Z]/g,function(a){return"-"+a.toLowerCase()});ba[z](g)?this.node.style[f]=null==a?I:a:d(this.node,e)}),function(a){}(v.prototype),c.ajax=function(a,c,d,f){var g=new XMLHttpRequest,h=S();if(g){if(e(c,"function"))f=d,d=c,c=null;else if(e(c,"object")){var i=[];for(var j in c)c.hasOwnProperty(j)&&i.push(encodeURIComponent(j)+"="+encodeURIComponent(c[j]));c=i.join("&")}return g.open(c?"POST":"GET",a,!0),c&&(g.setRequestHeader("X-Requested-With","XMLHttpRequest"),g.setRequestHeader("Content-type","application/x-www-form-urlencoded")),d&&(b.once("snap.ajax."+h+".0",d),b.once("snap.ajax."+h+".200",d),b.once("snap.ajax."+h+".304",d)),g.onreadystatechange=function(){4==g.readyState&&b("snap.ajax."+h+"."+g.status,f,g)},4==g.readyState?g:(g.send(c),g)}},c.load=function(a,b,d){c.ajax(a,function(a){var e=c.parse(a.responseText);d?b.call(d,e):b(e)})};var ca=function(a){var b=a.getBoundingClientRect(),c=a.ownerDocument,d=c.body,e=c.documentElement,f=e.clientTop||d.clientTop||0,h=e.clientLeft||d.clientLeft||0,i=b.top+(g.win.pageYOffset||e.scrollTop||d.scrollTop)-f,j=b.left+(g.win.pageXOffset||e.scrollLeft||d.scrollLeft)-h;return{y:i,x:j}};return c.getElementByPoint=function(a,b){var c=this,d=(c.canvas,y.doc.elementFromPoint(a,b));if(y.win.opera&&"svg"==d.tagName){var e=ca(d),f=d.createSVGRect();f.x=a-e.x,f.y=b-e.y,f.width=f.height=1;var g=d.getIntersectionList(f,null);g.length&&(d=g[g.length-1])}return d?w(d):null},c.plugin=function(a){a(c,s,v,y,t)},y.win.Snap=c,c}(a||this);return d.plugin(function(c,d,e,f,g){function h(a,b){if(null==b){var d=!0;if(b="linearGradient"==a.type||"radialGradient"==a.type?a.node.getAttribute("gradientTransform"):"pattern"==a.type?a.node.getAttribute("patternTransform"):a.node.getAttribute("transform"),!b)return new c.Matrix;b=c._.svgTransform2string(b)}else 
b=c._.rgTransform.test(b)?m(b).replace(/\.{3}|\u2026/g,a._.transform||""):c._.svgTransform2string(b),l(b,"array")&&(b=c.path?c.path.toString.call(b):m(b)),a._.transform=b;var e=c._.transform2matrix(b,a.getBBox(1));return d?e:void(a.matrix=e)}function i(a){function b(a,b){var d=o(a.node,b);d=d&&d.match(g),d=d&&d[2],d&&"#"==d.charAt()&&(d=d.substring(1),d&&(i[d]=(i[d]||[]).concat(function(d){var e={};e[b]=c.url(d),o(a.node,e)})))}function d(a){var b=o(a.node,"xlink:href");b&&"#"==b.charAt()&&(b=b.substring(1),b&&(i[b]=(i[b]||[]).concat(function(b){a.attr("xlink:href","#"+b)})))}for(var e,f=a.selectAll("*"),g=/^\s*url\(("|'|)(.*)\1\)\s*$/,h=[],i={},j=0,k=f.length;k>j;j++){e=f[j],b(e,"fill"),b(e,"stroke"),b(e,"filter"),b(e,"mask"),b(e,"clip-path"),d(e);var l=o(e.node,"id");l&&(o(e.node,{id:e.id}),h.push({old:l,id:e.id}))}for(j=0,k=h.length;k>j;j++){var m=i[h[j].old];if(m)for(var n=0,p=m.length;p>n;n++)m[n](h[j].id)}}function j(a){return function(){var b=a?"<"+this.type:"",c=this.node.attributes,d=this.node.childNodes;if(a)for(var e=0,f=c.length;f>e;e++)b+=" "+c[e].name+'="'+c[e].value.replace(/"/g,'\\"')+'"';if(d.length){for(a&&(b+=">"),e=0,f=d.length;f>e;e++)3==d[e].nodeType?b+=d[e].nodeValue:1==d[e].nodeType&&(b+=s(d[e]).toString());a&&(b+="")}else a&&(b+="/>");return b}}var k=d.prototype,l=c.is,m=String,n=c._unit2px,o=c._.$,p=c._.make,q=c._.getSomeDefs,r="hasOwnProperty",s=c._.wrap;k.getBBox=function(a){if("tspan"==this.type)return c._.box(this.node.getClientRects().item(0));if(!c.Matrix||!c.path)return this.node.getBBox();var b=this,d=new c.Matrix;if(b.removed)return c._.box();for(;"use"==b.type;)if(a||(d=d.add(b.transform().localMatrix.translate(b.attr("x")||0,b.attr("y")||0))),b.original)b=b.original;else{var e=b.attr("xlink:href");b=b.original=b.node.ownerDocument.getElementById(e.substring(e.indexOf("#")+1))}var f=b._,g=c.path.get[b.type]||c.path.get.deflt;try{return 
a?(f.bboxwt=g?c.path.getBBox(b.realPath=g(b)):c._.box(b.node.getBBox()),c._.box(f.bboxwt)):(b.realPath=g(b),b.matrix=b.transform().localMatrix,f.bbox=c.path.getBBox(c.path.map(b.realPath,d.add(b.matrix))),c._.box(f.bbox))}catch(h){return c._.box()}};var t=function(){return this.string};k.transform=function(a){var b=this._;if(null==a){for(var d,e=this,f=new c.Matrix(this.node.getCTM()),g=h(this),i=[g],j=new c.Matrix,k=g.toTransformString(),l=m(g)==m(this.matrix)?m(b.transform):k;"svg"!=e.type&&(e=e.parent());)i.push(h(e));for(d=i.length;d--;)j.add(i[d]);return{string:l,globalMatrix:f,totalMatrix:j,localMatrix:g,diffMatrix:f.clone().add(g.invert()),global:f.toTransformString(),total:j.toTransformString(),local:k,toString:t}}return a instanceof c.Matrix?(this.matrix=a,this._.transform=a.toTransformString()):h(this,a),this.node&&("linearGradient"==this.type||"radialGradient"==this.type?o(this.node,{gradientTransform:this.matrix}):"pattern"==this.type?o(this.node,{patternTransform:this.matrix}):o(this.node,{transform:this.matrix})),this},k.parent=function(){return s(this.node.parentNode)},k.append=k.add=function(a){if(a){if("set"==a.type){var b=this;return a.forEach(function(a){b.add(a)}),this}a=s(a),this.node.appendChild(a.node),a.paper=this.paper}return this},k.appendTo=function(a){return a&&(a=s(a),a.append(this)),this},k.prepend=function(a){if(a){if("set"==a.type){var b,c=this;return a.forEach(function(a){b?b.after(a):c.prepend(a),b=a}),this}a=s(a);var d=a.parent();this.node.insertBefore(a.node,this.node.firstChild),this.add&&this.add(),a.paper=this.paper,this.parent()&&this.parent().add(),d&&d.add()}return this},k.prependTo=function(a){return a=s(a),a.prepend(this),this},k.before=function(a){if("set"==a.type){var b=this;return a.forEach(function(a){var c=a.parent();b.node.parentNode.insertBefore(a.node,b.node),c&&c.add()}),this.parent().add(),this}a=s(a);var c=a.parent();return 
this.node.parentNode.insertBefore(a.node,this.node),this.parent()&&this.parent().add(),c&&c.add(),a.paper=this.paper,this},k.after=function(a){a=s(a);var b=a.parent();return this.node.nextSibling?this.node.parentNode.insertBefore(a.node,this.node.nextSibling):this.node.parentNode.appendChild(a.node),this.parent()&&this.parent().add(),b&&b.add(),a.paper=this.paper,this},k.insertBefore=function(a){a=s(a);var b=this.parent();return a.node.parentNode.insertBefore(this.node,a.node),this.paper=a.paper,b&&b.add(),a.parent()&&a.parent().add(),this},k.insertAfter=function(a){a=s(a);var b=this.parent();return a.node.parentNode.insertBefore(this.node,a.node.nextSibling),this.paper=a.paper,b&&b.add(),a.parent()&&a.parent().add(),this},k.remove=function(){var a=this.parent();return this.node.parentNode&&this.node.parentNode.removeChild(this.node),delete this.paper,this.removed=!0,a&&a.add(),this},k.select=function(a){return s(this.node.querySelector(a))},k.selectAll=function(a){for(var b=this.node.querySelectorAll(a),d=(c.set||Array)(),e=0;e{contents}',{x:+b.x.toFixed(3),y:+b.y.toFixed(3),width:+b.width.toFixed(3),height:+b.height.toFixed(3), +contents:this.outerSVG()});return"data:image/svg+xml;base64,"+btoa(unescape(encodeURIComponent(d)))}},g.prototype.select=k.select,g.prototype.selectAll=k.selectAll}),d.plugin(function(a,d,e,f,g){function h(a,b,c){return function(d){var e=d.slice(a,b);return 1==e.length&&(e=e[0]),c?c(e):e}}var i=d.prototype,j=a.is,k=String,l="hasOwnProperty",m=function(a,b,d,e){"function"!=typeof d||d.length||(e=d,d=c.linear),this.attr=a,this.dur=b,d&&(this.easing=d),e&&(this.callback=e)};a._.Animation=m,a.animation=function(a,b,c,d){return new m(a,b,c,d)},i.inAnim=function(){var a=this,b=[];for(var c in a.anims)a.anims[l](c)&&!function(a){b.push({anim:new m(a._attrs,a.dur,a.easing,a._callback),mina:a,curStatus:a.status(),status:function(b){return a.status(b)},stop:function(){a.stop()}})}(a.anims[c]);return 
b},a.animate=function(a,d,e,f,g,h){"function"!=typeof g||g.length||(h=g,g=c.linear);var i=c.time(),j=c(a,d,i,i+f,c.time,e,g);return h&&b.once("mina.finish."+j.id,h),j},i.stop=function(){for(var a=this.inAnim(),b=0,c=a.length;c>b;b++)a[b].stop();return this},i.animate=function(a,d,e,f){"function"!=typeof e||e.length||(f=e,e=c.linear),a instanceof m&&(f=a.callback,e=a.easing,d=a.dur,a=a.attr);var g,i,n,o,p=[],q=[],r={},s=this;for(var t in a)if(a[l](t)){s.equal?(o=s.equal(t,k(a[t])),g=o.from,i=o.to,n=o.f):(g=+s.attr(t),i=+a[t]);var u=j(g,"array")?g.length:1;r[t]=h(p.length,p.length+u,n),p=p.concat(g),q=q.concat(i)}var v=c.time(),w=c(p,q,v,v+d,c.time,function(a){var b={};for(var c in r)r[l](c)&&(b[c]=r[c](a));s.attr(b)},e);return s.anims[w.id]=w,w._attrs=a,w._callback=f,b("snap.animcreated."+s.id,w),b.once("mina.finish."+w.id,function(){b.off("mina.*."+w.id),delete s.anims[w.id],f&&f.call(s)}),b.once("mina.stop."+w.id,function(){b.off("mina.*."+w.id),delete s.anims[w.id]}),s}}),d.plugin(function(a,b,c,d,e){function f(a,b,c,d,e,f){return null==b&&"[object SVGMatrix]"==g.call(a)?(this.a=a.a,this.b=a.b,this.c=a.c,this.d=a.d,this.e=a.e,void(this.f=a.f)):void(null!=a?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0))}var g=Object.prototype.toString,h=String,i=Math,j="";!function(b){function c(a){return a[0]*a[0]+a[1]*a[1]}function d(a){var b=i.sqrt(c(a));a[0]&&(a[0]/=b),a[1]&&(a[1]/=b)}b.add=function(a,b,c,d,e,g){if(a&&a instanceof f)return this.add(a.a,a.b,a.c,a.d,a.e,a.f);var h=a*this.a+b*this.c,i=a*this.b+b*this.d;return this.e+=e*this.a+g*this.c,this.f+=e*this.b+g*this.d,this.c=c*this.a+d*this.c,this.d=c*this.b+d*this.d,this.a=h,this.b=i,this},f.prototype.multLeft=function(a,b,c,d,e,g){if(a&&a instanceof f)return this.multLeft(a.a,a.b,a.c,a.d,a.e,a.f);var h=a*this.a+c*this.b,i=a*this.c+c*this.d,j=a*this.e+c*this.f+e;return 
this.b=b*this.a+d*this.b,this.d=b*this.c+d*this.d,this.f=b*this.e+d*this.f+g,this.a=h,this.c=i,this.e=j,this},b.invert=function(){var a=this,b=a.a*a.d-a.b*a.c;return new f(a.d/b,-a.b/b,-a.c/b,a.a/b,(a.c*a.f-a.d*a.e)/b,(a.b*a.e-a.a*a.f)/b)},b.clone=function(){return new f(this.a,this.b,this.c,this.d,this.e,this.f)},b.translate=function(a,b){return this.e+=a*this.a+b*this.c,this.f+=a*this.b+b*this.d,this},b.scale=function(a,b,c,d){return null==b&&(b=a),(c||d)&&this.translate(c,d),this.a*=a,this.b*=a,this.c*=b,this.d*=b,(c||d)&&this.translate(-c,-d),this},b.rotate=function(b,c,d){b=a.rad(b),c=c||0,d=d||0;var e=+i.cos(b).toFixed(9),f=+i.sin(b).toFixed(9);return this.add(e,f,-f,e,c,d),this.add(1,0,0,1,-c,-d)},b.skewX=function(a){return this.skew(a,0)},b.skewY=function(a){return this.skew(0,a)},b.skew=function(b,c){b=b||0,c=c||0,b=a.rad(b),c=a.rad(c);var d=i.tan(b).toFixed(9),e=i.tan(c).toFixed(9);return this.add(1,e,d,1,0,0)},b.x=function(a,b){return a*this.a+b*this.c+this.e},b.y=function(a,b){return a*this.b+b*this.d+this.f},b.get=function(a){return+this[h.fromCharCode(97+a)].toFixed(4)},b.toString=function(){return"matrix("+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)].join()+")"},b.offset=function(){return[this.e.toFixed(4),this.f.toFixed(4)]},b.determinant=function(){return this.a*this.d-this.b*this.c},b.split=function(){var b={};b.dx=this.e,b.dy=this.f;var e=[[this.a,this.b],[this.c,this.d]];b.scalex=i.sqrt(c(e[0])),d(e[0]),b.shear=e[0][0]*e[1][0]+e[0][1]*e[1][1],e[1]=[e[1][0]-e[0][0]*b.shear,e[1][1]-e[0][1]*b.shear],b.scaley=i.sqrt(c(e[1])),d(e[1]),b.shear/=b.scaley,this.determinant()<0&&(b.scalex=-b.scalex);var f=e[0][1],g=e[1][1];return 
0>g?(b.rotate=a.deg(i.acos(g)),0>f&&(b.rotate=360-b.rotate)):b.rotate=a.deg(i.asin(f)),b.isSimple=!(+b.shear.toFixed(9)||b.scalex.toFixed(9)!=b.scaley.toFixed(9)&&b.rotate),b.isSuperSimple=!+b.shear.toFixed(9)&&b.scalex.toFixed(9)==b.scaley.toFixed(9)&&!b.rotate,b.noRotation=!+b.shear.toFixed(9)&&!b.rotate,b},b.toTransformString=function(a){var b=a||this.split();return+b.shear.toFixed(9)?"m"+[this.get(0),this.get(1),this.get(2),this.get(3),this.get(4),this.get(5)]:(b.scalex=+b.scalex.toFixed(4),b.scaley=+b.scaley.toFixed(4),b.rotate=+b.rotate.toFixed(4),(b.dx||b.dy?"t"+[+b.dx.toFixed(4),+b.dy.toFixed(4)]:j)+(b.rotate?"r"+[+b.rotate.toFixed(4),0,0]:j)+(1!=b.scalex||1!=b.scaley?"s"+[b.scalex,b.scaley,0,0]:j))}}(f.prototype),a.Matrix=f,a.matrix=function(a,b,c,d,e,g){return new f(a,b,c,d,e,g)}}),d.plugin(function(a,c,d,e,f){function g(d){return function(e){if(b.stop(),e instanceof f&&1==e.node.childNodes.length&&("radialGradient"==e.node.firstChild.tagName||"linearGradient"==e.node.firstChild.tagName||"pattern"==e.node.firstChild.tagName)&&(e=e.node.firstChild,n(this).appendChild(e),e=l(e)),e instanceof c)if("radialGradient"==e.type||"linearGradient"==e.type||"pattern"==e.type){e.node.id||p(e.node,{id:e.id});var g=q(e.node.id)}else g=e.attr(d);else if(g=a.color(e),g.error){var h=a(n(this).ownerSVGElement).gradient(e);h?(h.node.id||p(h.node,{id:h.id}),g=q(h.node.id)):g=e}else g=r(g);var i={};i[d]=g,p(this.node,i),this.node.style[d]=t}}function h(a){b.stop(),a==+a&&(a+="px"),this.node.style.fontSize=a}function i(a){for(var b=[],c=a.childNodes,d=0,e=c.length;e>d;d++){var f=c[d];3==f.nodeType&&b.push(f.nodeValue),"tspan"==f.tagName&&(1==f.childNodes.length&&3==f.firstChild.nodeType?b.push(f.firstChild.nodeValue):b.push(i(f)))}return b}function j(){return b.stop(),this.node.style.fontSize}var k=a._.make,l=a._.wrap,m=a.is,n=a._.getSomeDefs,o=/^url\((['"]?)([^)]+)\1\)$/,p=a._.$,q=a.url,r=String,s=a._.separator,t="";a.deurl=function(a){var b=String(a).match(o);return 
b?b[2]:a},b.on("snap.util.attr.mask",function(a){if(a instanceof c||a instanceof f){if(b.stop(),a instanceof f&&1==a.node.childNodes.length&&(a=a.node.firstChild,n(this).appendChild(a),a=l(a)),"mask"==a.type)var d=a;else d=k("mask",n(this)),d.node.appendChild(a.node);!d.node.id&&p(d.node,{id:d.id}),p(this.node,{mask:q(d.id)})}}),function(a){b.on("snap.util.attr.clip",a),b.on("snap.util.attr.clip-path",a),b.on("snap.util.attr.clipPath",a)}(function(a){if(a instanceof c||a instanceof f){b.stop();for(var d,e=a.node;e;){if("clipPath"===e.nodeName){d=new c(e);break}if("svg"===e.nodeName){d=void 0;break}e=e.parentNode}d||(d=k("clipPath",n(this)),d.node.appendChild(a.node),!d.node.id&&p(d.node,{id:d.id})),p(this.node,{"clip-path":q(d.node.id||d.id)})}}),b.on("snap.util.attr.fill",g("fill")),b.on("snap.util.attr.stroke",g("stroke"));var u=/^([lr])(?:\(([^)]*)\))?(.*)$/i;b.on("snap.util.grad.parse",function(a){function b(a,b){for(var c=(b-h)/(a-i),d=i;a>d;d++)f[d].offset=+(+h+c*(d-i)).toFixed(2);i=a,h=b}a=r(a);var c=a.match(u);if(!c)return null;var d=c[1],e=c[2],f=c[3];e=e.split(/\s*,\s*/).map(function(a){return+a==a?+a:a}),1==e.length&&0==e[0]&&(e=[]),f=f.split("-"),f=f.map(function(a){a=a.split(":");var b={color:a[0]};return a[1]&&(b.offset=parseFloat(a[1])),b});var g=f.length,h=0,i=0;g--;for(var j=0;g>j;j++)"offset"in f[j]&&b(j,f[j].offset);return f[g].offset=f[g].offset||100,b(g,f[g].offset),{type:d,params:e,stops:f}}),b.on("snap.util.attr.d",function(c){b.stop(),m(c,"array")&&m(c[0],"array")&&(c=a.path.toString.call(c)),c=r(c),c.match(/[ruo]/i)&&(c=a.path.toAbsolute(c)),p(this.node,{d:c})})(-1),b.on("snap.util.attr.#text",function(a){b.stop(),a=r(a);for(var 
c=e.doc.createTextNode(a);this.node.firstChild;)this.node.removeChild(this.node.firstChild);this.node.appendChild(c)})(-1),b.on("snap.util.attr.path",function(a){b.stop(),this.attr({d:a})})(-1),b.on("snap.util.attr.class",function(a){b.stop(),this.node.className.baseVal=a})(-1),b.on("snap.util.attr.viewBox",function(a){var c;c=m(a,"object")&&"x"in a?[a.x,a.y,a.width,a.height].join(" "):m(a,"array")?a.join(" "):a,p(this.node,{viewBox:c}),b.stop()})(-1),b.on("snap.util.attr.transform",function(a){this.transform(a),b.stop()})(-1),b.on("snap.util.attr.r",function(a){"rect"==this.type&&(b.stop(),p(this.node,{rx:a,ry:a}))})(-1),b.on("snap.util.attr.textpath",function(a){if(b.stop(),"text"==this.type){var d,e,f;if(!a&&this.textPath){for(e=this.textPath;e.node.firstChild;)this.node.appendChild(e.node.firstChild);return e.remove(),void delete this.textPath}if(m(a,"string")){var g=n(this),h=l(g.parentNode).path(a);g.appendChild(h.node),d=h.id,h.attr({id:d})}else a=l(a),a instanceof c&&(d=a.attr("id"),d||(d=a.id,a.attr({id:d})));if(d)if(e=this.textPath,f=this.node,e)e.attr({"xlink:href":"#"+d});else{for(e=p("textPath",{"xlink:href":"#"+d});f.firstChild;)e.appendChild(f.firstChild);f.appendChild(e),this.textPath=l(e)}}})(-1),b.on("snap.util.attr.text",function(a){if("text"==this.type){for(var c=this.node,d=function(a){var b=p("tspan");if(m(a,"array"))for(var c=0;c1&&(a=Array.prototype.slice.call(arguments,0));var b={};return i(a,"object")&&!i(a,"array")?b=a:null!=a&&(b={points:a}),this.el("polyline",b)},h.polygon=function(a){arguments.length>1&&(a=Array.prototype.slice.call(arguments,0));var b={};return i(a,"object")&&!i(a,"array")?b=a:null!=a&&(b={points:a}),this.el("polygon",b)},function(){function d(){return this.selectAll("stop")}function e(a,b){var d=l("stop"),e={offset:+b+"%"};a=c.color(a),e["stop-color"]=a.hex,a.opacity<1&&(e["stop-opacity"]=a.opacity),l(d,e);for(var f,g=this.stops(),h=0;hb){this.node.insertBefore(d,g[h].node),f=!0;break}}return 
f||this.node.appendChild(d),this}function f(){if("linearGradient"==this.type){var a=l(this.node,"x1")||0,b=l(this.node,"x2")||1,d=l(this.node,"y1")||0,e=l(this.node,"y2")||0;return c._.box(a,d,math.abs(b-a),math.abs(e-d))}var f=this.node.cx||.5,g=this.node.cy||.5,h=this.node.r||0;return c._.box(f-h,g-h,2*h,2*h)}function g(a){var d=a,e=this.stops();if("string"==typeof a&&(d=b("snap.util.grad.parse",null,"l(0,0,0,1)"+a).firstDefined().stops),c.is(d,"array")){for(var f=0;fh;h++){var i=f[h];d.addStop(i.color,i.offset)}return d}function j(a,b,h,i,j){var k=c._.make("linearGradient",a);return k.stops=d,k.addStop=e,k.getBBox=f,k.setStops=g,null!=b&&l(k.node,{x1:b,y1:h,x2:i,y2:j}),k}function k(a,b,g,h,i,j){var k=c._.make("radialGradient",a);return k.stops=d,k.addStop=e,k.getBBox=f,null!=b&&l(k.node,{cx:b,cy:g,r:h}),null!=i&&null!=j&&l(k.node,{fx:i,fy:j}),k}var l=c._.$;h.gradient=function(a){return i(this.defs,a)},h.gradientLinear=function(a,b,c,d){return j(this.defs,a,b,c,d)},h.gradientRadial=function(a,b,c,d,e){return k(this.defs,a,b,c,d,e)},h.toString=function(){var a,b=this.node.ownerDocument,d=b.createDocumentFragment(),e=b.createElement("div"),f=this.node.cloneNode(!0);return d.appendChild(e),e.appendChild(f),c._.$(f,{xmlns:"http://www.w3.org/2000/svg"}),a=e.innerHTML,d.removeChild(d.firstChild),a},h.toDataURL=function(){return a&&a.btoa?"data:image/svg+xml;base64,"+btoa(unescape(encodeURIComponent(this))):void 0},h.clear=function(){for(var a,b=this.node.firstChild;b;)a=b.nextSibling,"defs"!=b.tagName?b.parentNode.removeChild(b):h.clear.call({node:b}),b=a}}()}),d.plugin(function(a,b,c,d){function e(a){var b=e.ps=e.ps||{};return b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[M](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])}),b[a]}function f(a,b,c,d){return 
null==a&&(a=b=c=d=0),null==b&&(b=a.y,c=a.width,d=a.height,a=a.x),{x:a,y:b,width:c,w:c,height:d,h:d,x2:a+c,y2:b+d,cx:a+c/2,cy:b+d/2,r1:P.min(c,d)/2,r2:P.max(c,d)/2,r0:P.sqrt(c*c+d*d)/2,path:y(a,b,c,d),vb:[a,b,c,d].join(" ")}}function g(){return this.join(",").replace(N,"$1")}function h(a){var b=L(a);return b.toString=g,b}function i(a,b,c,d,e,f,g,h,i){return null==i?p(a,b,c,d,e,f,g,h):k(a,b,c,d,e,f,g,h,q(a,b,c,d,e,f,g,h,i))}function j(c,d){function e(a){return+(+a).toFixed(3)}return a._.cacher(function(a,f,g){a instanceof b&&(a=a.attr("d")),a=G(a);for(var h,j,l,m,n,o="",p={},q=0,r=0,s=a.length;s>r;r++){if(l=a[r],"M"==l[0])h=+l[1],j=+l[2];else{if(m=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6]),q+m>f){if(d&&!p.start){if(n=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6],f-q),o+=["C"+e(n.start.x),e(n.start.y),e(n.m.x),e(n.m.y),e(n.x),e(n.y)],g)return o;p.start=o,o=["M"+e(n.x),e(n.y)+"C"+e(n.n.x),e(n.n.y),e(n.end.x),e(n.end.y),e(l[5]),e(l[6])].join(),q+=m,h=+l[5],j=+l[6];continue}if(!c&&!d)return n=i(h,j,l[1],l[2],l[3],l[4],l[5],l[6],f-q)}q+=m,h=+l[5],j=+l[6]}o+=l.shift()+l}return p.end=o,n=c?q:d?p:k(h,j,l[0],l[1],l[2],l[3],l[4],l[5],1)},null,a._.clone)}function k(a,b,c,d,e,f,g,h,i){var j=1-i,k=T(j,3),l=T(j,2),m=i*i,n=m*i,o=k*a+3*l*i*c+3*j*i*i*e+n*g,p=k*b+3*l*i*d+3*j*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,w=j*e+i*g,x=j*f+i*h,y=90-180*P.atan2(q-s,r-t)/Q;return{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:w,y:x},alpha:y}}function l(b,c,d,e,g,h,i,j){a.is(b,"array")||(b=[b,c,d,e,g,h,i,j]);var k=F.apply(null,b);return f(k.min.x,k.min.y,k.max.x-k.min.x,k.max.y-k.min.y)}function m(a,b,c){return b>=a.x&&b<=a.x+a.width&&c>=a.y&&c<=a.y+a.height}function n(a,b){return a=f(a),b=f(b),m(b,a.x,a.y)||m(b,a.x2,a.y)||m(b,a.x,a.y2)||m(b,a.x2,a.y2)||m(a,b.x,b.y)||m(a,b.x2,b.y)||m(a,b.x,b.y2)||m(a,b.x2,b.y2)||(a.xb.x||b.xa.x)&&(a.yb.y||b.ya.y)}function o(a,b,c,d,e){var 
f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function p(a,b,c,d,e,f,g,h,i){null==i&&(i=1),i=i>1?1:0>i?0:i;for(var j=i/2,k=12,l=[-.1252,.1252,-.3678,.3678,-.5873,.5873,-.7699,.7699,-.9041,.9041,-.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0,p=0;k>p;p++){var q=j*l[p]+j,r=o(q,a,c,e,g),s=o(q,b,d,f,h),t=r*r+s*s;n+=m[p]*P.sqrt(t)}return j*n}function q(a,b,c,d,e,f,g,h,i){if(!(0>i||p(a,b,c,d,e,f,g,h)n;)l/=2,m+=(i>j?1:-1)*l,j=p(a,b,c,d,e,f,g,h,m);return m}}function r(a,b,c,d,e,f,g,h){if(!(S(a,c)S(e,g)||S(b,d)S(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(k){var l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(!(n<+R(a,c).toFixed(2)||n>+S(a,c).toFixed(2)||n<+R(e,g).toFixed(2)||n>+S(e,g).toFixed(2)||o<+R(b,d).toFixed(2)||o>+S(b,d).toFixed(2)||o<+R(f,h).toFixed(2)||o>+S(f,h).toFixed(2)))return{x:l,y:m}}}}function s(a,b,c){var d=l(a),e=l(b);if(!n(d,e))return c?0:[];for(var f=p.apply(0,a),g=p.apply(0,b),h=~~(f/8),i=~~(g/8),j=[],m=[],o={},q=c?0:[],s=0;h+1>s;s++){var t=k.apply(0,a.concat(s/h));j.push({x:t.x,y:t.y,t:s/h})}for(s=0;i+1>s;s++)t=k.apply(0,b.concat(s/i)),m.push({x:t.x,y:t.y,t:s/i});for(s=0;h>s;s++)for(var u=0;i>u;u++){var v=j[s],w=j[s+1],x=m[u],y=m[u+1],z=U(w.x-v.x)<.001?"y":"x",A=U(y.x-x.x)<.001?"y":"x",B=r(v.x,v.y,w.x,w.y,x.x,x.y,y.x,y.y);if(B){if(o[B.x.toFixed(4)]==B.y.toFixed(4))continue;o[B.x.toFixed(4)]=B.y.toFixed(4);var C=v.t+U((B[z]-v[z])/(w[z]-v[z]))*(w.t-v.t),D=x.t+U((B[A]-x[A])/(y[A]-x[A]))*(y.t-x.t);C>=0&&1>=C&&D>=0&&1>=D&&(c?q++:q.push({x:B.x,y:B.y,t1:C,t2:D}))}}return q}function t(a,b){return v(a,b)}function u(a,b){return v(a,b,1)}function v(a,b,c){a=G(a),b=G(b);for(var d,e,f,g,h,i,j,k,l,m,n=c?0:[],o=0,p=a.length;p>o;o++){var q=a[o];if("M"==q[0])d=h=q[1],e=i=q[2];else{"C"==q[0]?(l=[d,e].concat(q.slice(1)),d=l[6],e=l[7]):(l=[d,e,d,e,h,i,h,i],d=h,e=i);for(var r=0,t=b.length;t>r;r++){var 
u=b[r];if("M"==u[0])f=j=u[1],g=k=u[2];else{"C"==u[0]?(m=[f,g].concat(u.slice(1)),f=m[6],g=m[7]):(m=[f,g,f,g,j,k,j,k],f=j,g=k);var v=s(l,m,c);if(c)n+=v;else{for(var w=0,x=v.length;x>w;w++)v[w].segment1=o,v[w].segment2=r,v[w].bez1=l,v[w].bez2=m;n=n.concat(v)}}}}}return n}function w(a,b,c){var d=x(a);return m(d,b,c)&&v(a,[["M",b,c],["H",d.x2+10]],1)%2==1}function x(a){var b=e(a);if(b.bbox)return L(b.bbox);if(!a)return f();a=G(a);for(var c,d=0,g=0,h=[],i=[],j=0,k=a.length;k>j;j++)if(c=a[j],"M"==c[0])d=c[1],g=c[2],h.push(d),i.push(g);else{var l=F(d,g,c[1],c[2],c[3],c[4],c[5],c[6]);h=h.concat(l.min.x,l.max.x),i=i.concat(l.min.y,l.max.y),d=c[5],g=c[6]}var m=R.apply(0,h),n=R.apply(0,i),o=S.apply(0,h),p=S.apply(0,i),q=f(m,n,o-m,p-n);return b.bbox=L(q),q}function y(a,b,c,d,e){if(e)return[["M",+a+ +e,b],["l",c-2*e,0],["a",e,e,0,0,1,e,e],["l",0,d-2*e],["a",e,e,0,0,1,-e,e],["l",2*e-c,0],["a",e,e,0,0,1,-e,-e],["l",0,2*e-d],["a",e,e,0,0,1,e,-e],["z"]];var f=[["M",a,b],["l",c,0],["l",0,d],["l",-c,0],["z"]];return f.toString=g,f}function z(a,b,c,d,e){if(null==e&&null==d&&(d=c),a=+a,b=+b,c=+c,d=+d,null!=e)var f=Math.PI/180,h=a+c*Math.cos(-d*f),i=a+c*Math.cos(-e*f),j=b+c*Math.sin(-d*f),k=b+c*Math.sin(-e*f),l=[["M",h,j],["A",c,c,0,+(e-d>180),0,i,k]];else l=[["M",a,b],["m",0,-d],["a",c,d,0,1,1,0,2*d],["a",c,d,0,1,1,0,-2*d],["z"]];return l.toString=g,l}function A(b){var c=e(b),d=String.prototype.toLowerCase;if(c.rel)return h(c.rel);a.is(b,"array")&&a.is(b&&b[0],"array")||(b=a.parsePathString(b));var f=[],i=0,j=0,k=0,l=0,m=0;"M"==b[0][0]&&(i=b[0][1],j=b[0][2],k=i,l=j,m++,f.push(["M",i,j]));for(var n=m,o=b.length;o>n;n++){var p=f[n]=[],q=b[n];if(q[0]!=d.call(q[0]))switch(p[0]=d.call(q[0]),p[0]){case"a":p[1]=q[1],p[2]=q[2],p[3]=q[3],p[4]=q[4],p[5]=q[5],p[6]=+(q[6]-i).toFixed(3),p[7]=+(q[7]-j).toFixed(3);break;case"v":p[1]=+(q[1]-j).toFixed(3);break;case"m":k=q[1],l=q[2];default:for(var 
r=1,s=q.length;s>r;r++)p[r]=+(q[r]-(r%2?i:j)).toFixed(3)}else{p=f[n]=[],"m"==q[0]&&(k=q[1]+i,l=q[2]+j);for(var t=0,u=q.length;u>t;t++)f[n][t]=q[t]}var v=f[n].length;switch(f[n][0]){case"z":i=k,j=l;break;case"h":i+=+f[n][v-1];break;case"v":j+=+f[n][v-1];break;default:i+=+f[n][v-2],j+=+f[n][v-1]}}return f.toString=g,c.rel=h(f),f}function B(b){var c=e(b);if(c.abs)return h(c.abs);if(K(b,"array")&&K(b&&b[0],"array")||(b=a.parsePathString(b)),!b||!b.length)return[["M",0,0]];var d,f=[],i=0,j=0,k=0,l=0,m=0;"M"==b[0][0]&&(i=+b[0][1],j=+b[0][2],k=i,l=j,m++,f[0]=["M",i,j]);for(var n,o,p=3==b.length&&"M"==b[0][0]&&"R"==b[1][0].toUpperCase()&&"Z"==b[2][0].toUpperCase(),q=m,r=b.length;r>q;q++){if(f.push(n=[]),o=b[q],d=o[0],d!=d.toUpperCase())switch(n[0]=d.toUpperCase(),n[0]){case"A":n[1]=o[1],n[2]=o[2],n[3]=o[3],n[4]=o[4],n[5]=o[5],n[6]=+o[6]+i,n[7]=+o[7]+j;break;case"V":n[1]=+o[1]+j;break;case"H":n[1]=+o[1]+i;break;case"R":for(var s=[i,j].concat(o.slice(1)),t=2,u=s.length;u>t;t++)s[t]=+s[t]+i,s[++t]=+s[t]+j;f.pop(),f=f.concat(I(s,p));break;case"O":f.pop(),s=z(i,j,o[1],o[2]),s.push(s[0]),f=f.concat(s);break;case"U":f.pop(),f=f.concat(z(i,j,o[1],o[2],o[3])),n=["U"].concat(f[f.length-1].slice(-2));break;case"M":k=+o[1]+i,l=+o[2]+j;default:for(t=1,u=o.length;u>t;t++)n[t]=+o[t]+(t%2?i:j)}else if("R"==d)s=[i,j].concat(o.slice(1)),f.pop(),f=f.concat(I(s,p)),n=["R"].concat(o.slice(-2));else if("O"==d)f.pop(),s=z(i,j,o[1],o[2]),s.push(s[0]),f=f.concat(s);else if("U"==d)f.pop(),f=f.concat(z(i,j,o[1],o[2],o[3])),n=["U"].concat(f[f.length-1].slice(-2));else for(var v=0,w=o.length;w>v;v++)n[v]=o[v];if(d=d.toUpperCase(),"O"!=d)switch(n[0]){case"Z":i=+k,j=+l;break;case"H":i=n[1];break;case"V":j=n[1];break;case"M":k=n[n.length-2],l=n[n.length-1];default:i=n[n.length-2],j=n[n.length-1]}}return f.toString=g,c.abs=h(f),f}function C(a,b,c,d){return[a,b,c,d,c,d]}function D(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]}function E(b,c,d,e,f,g,h,i,j,k){var 
l,m=120*Q/180,n=Q/180*(+f||0),o=[],p=a._.cacher(function(a,b,c){var d=a*P.cos(c)-b*P.sin(c),e=a*P.sin(c)+b*P.cos(c);return{x:d,y:e}});if(!d||!e)return[b,c,i,j,i,j];if(k)y=k[0],z=k[1],w=k[2],x=k[3];else{l=p(b,c,-n),b=l.x,c=l.y,l=p(i,j,-n),i=l.x,j=l.y;var q=(P.cos(Q/180*f),P.sin(Q/180*f),(b-i)/2),r=(c-j)/2,s=q*q/(d*d)+r*r/(e*e);s>1&&(s=P.sqrt(s),d=s*d,e=s*e);var t=d*d,u=e*e,v=(g==h?-1:1)*P.sqrt(U((t*u-t*r*r-u*q*q)/(t*r*r+u*q*q))),w=v*d*r/e+(b+i)/2,x=v*-e*q/d+(c+j)/2,y=P.asin(((c-x)/e).toFixed(9)),z=P.asin(((j-x)/e).toFixed(9));y=w>b?Q-y:y,z=w>i?Q-z:z,0>y&&(y=2*Q+y),0>z&&(z=2*Q+z),h&&y>z&&(y-=2*Q),!h&&z>y&&(z-=2*Q)}var A=z-y;if(U(A)>m){var B=z,C=i,D=j;z=y+m*(h&&z>y?1:-1),i=w+d*P.cos(z),j=x+e*P.sin(z),o=E(i,j,d,e,f,0,h,C,D,[z,B,w,x])}A=z-y;var F=P.cos(y),G=P.sin(y),H=P.cos(z),I=P.sin(z),J=P.tan(A/4),K=4/3*d*J,L=4/3*e*J,M=[b,c],N=[b+K*G,c-L*F],O=[i+K*I,j-L*H],R=[i,j];if(N[0]=2*M[0]-N[0],N[1]=2*M[1]-N[1],k)return[N,O,R].concat(o);o=[N,O,R].concat(o).join().split(",");for(var S=[],T=0,V=o.length;V>T;T++)S[T]=T%2?p(o[T-1],o[T],n).y:p(o[T],o[T+1],n).x;return S}function F(a,b,c,d,e,f,g,h){for(var i,j,k,l,m,n,o,p,q=[],r=[[],[]],s=0;2>s;++s)if(0==s?(j=6*a-12*c+6*e,i=-3*a+9*c-9*e+3*g,k=3*c-3*a):(j=6*b-12*d+6*f,i=-3*b+9*d-9*f+3*h,k=3*d-3*b),U(i)<1e-12){if(U(j)<1e-12)continue;l=-k/j,l>0&&1>l&&q.push(l)}else o=j*j-4*k*i,p=P.sqrt(o),0>o||(m=(-j+p)/(2*i),m>0&&1>m&&q.push(m),n=(-j-p)/(2*i),n>0&&1>n&&q.push(n));for(var t,u=q.length,v=u;u--;)l=q[u],t=1-l,r[0][u]=t*t*t*a+3*t*t*l*c+3*t*l*l*e+l*l*l*g,r[1][u]=t*t*t*b+3*t*t*l*d+3*t*l*l*f+l*l*l*h;return r[0][v]=a,r[1][v]=b,r[0][v+1]=g,r[1][v+1]=h,r[0].length=r[1].length=v+2,{min:{x:R.apply(0,r[0]),y:R.apply(0,r[1])},max:{x:S.apply(0,r[0]),y:S.apply(0,r[1])}}}function G(a,b){var c=!b&&e(a);if(!b&&c.curve)return h(c.curve);for(var d=B(a),f=b&&B(b),g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},i={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},j=(function(a,b,c){var 
d,e;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];switch(!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null),a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"].concat(E.apply(0,[b.x,b.y].concat(a.slice(1))));break;case"S":"C"==c||"S"==c?(d=2*b.x-b.bx,e=2*b.y-b.by):(d=b.x,e=b.y),a=["C",d,e].concat(a.slice(1));break;case"T":"Q"==c||"T"==c?(b.qx=2*b.x-b.qx,b.qy=2*b.y-b.qy):(b.qx=b.x,b.qy=b.y),a=["C"].concat(D(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"].concat(D(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"].concat(C(b.x,b.y,a[1],a[2]));break;case"H":a=["C"].concat(C(b.x,b.y,a[1],b.y));break;case"V":a=["C"].concat(C(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"].concat(C(b.x,b.y,b.X,b.Y))}return a}),k=function(a,b){if(a[b].length>7){a[b].shift();for(var c=a[b];c.length;)m[b]="A",f&&(n[b]="A"),a.splice(b++,0,["C"].concat(c.splice(0,6)));a.splice(b,1),r=S(d.length,f&&f.length||0)}},l=function(a,b,c,e,g){a&&b&&"M"==a[g][0]&&"M"!=b[g][0]&&(b.splice(g,0,["M",e.x,e.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],r=S(d.length,f&&f.length||0))},m=[],n=[],o="",p="",q=0,r=S(d.length,f&&f.length||0);r>q;q++){d[q]&&(o=d[q][0]),"C"!=o&&(m[q]=o,q&&(p=m[q-1])),d[q]=j(d[q],g,p),"A"!=m[q]&&"C"==o&&(m[q]="C"),k(d,q),f&&(f[q]&&(o=f[q][0]),"C"!=o&&(n[q]=o,q&&(p=n[q-1])),f[q]=j(f[q],i,p),"A"!=n[q]&&"C"==o&&(n[q]="C"),k(f,q)),l(d,f,g,i,q),l(f,d,i,g,q);var s=d[q],t=f&&f[q],u=s.length,v=f&&t.length;g.x=s[u-2],g.y=s[u-1],g.bx=O(s[u-4])||g.x,g.by=O(s[u-3])||g.y,i.bx=f&&(O(t[v-4])||i.x),i.by=f&&(O(t[v-3])||i.y),i.x=f&&t[v-2],i.y=f&&t[v-1]}return f||(c.curve=h(d)),f?[d,f]:d}function H(a,b){if(!b)return a;var c,d,e,f,g,h,i;for(a=G(a),e=0,g=a.length;g>e;e++)for(i=a[e],f=1,h=i.length;h>f;f+=2)c=b.x(i[f],i[f+1]),d=b.y(i[f],i[f+1]),i[f]=c,i[f+1]=d;return a}function I(a,b){for(var c=[],d=0,e=a.length;e-2*!b>d;d+=2){var 
f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}var J=b.prototype,K=a.is,L=a._.clone,M="hasOwnProperty",N=/,?([a-z]),?/gi,O=parseFloat,P=Math,Q=P.PI,R=P.min,S=P.max,T=P.pow,U=P.abs,V=j(1),W=j(),X=j(0,1),Y=a._unit2px,Z={path:function(a){return a.attr("path")},circle:function(a){var b=Y(a);return z(b.cx,b.cy,b.r)},ellipse:function(a){var b=Y(a); +return z(b.cx||0,b.cy||0,b.rx,b.ry)},rect:function(a){var b=Y(a);return y(b.x||0,b.y||0,b.width,b.height,b.rx,b.ry)},image:function(a){var b=Y(a);return y(b.x||0,b.y||0,b.width,b.height)},line:function(a){return"M"+[a.attr("x1")||0,a.attr("y1")||0,a.attr("x2"),a.attr("y2")]},polyline:function(a){return"M"+a.attr("points")},polygon:function(a){return"M"+a.attr("points")+"z"},deflt:function(a){var b=a.node.getBBox();return y(b.x,b.y,b.width,b.height)}};a.path=e,a.path.getTotalLength=V,a.path.getPointAtLength=W,a.path.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return X(a,b).end;var d=X(a,c,1);return b?X(d,b).end:d},J.getTotalLength=function(){return this.node.getTotalLength?this.node.getTotalLength():void 0},J.getPointAtLength=function(a){return W(this.attr("d"),a)},J.getSubpath=function(b,c){return a.path.getSubpath(this.attr("d"),b,c)},a._.box=f,a.path.findDotsAtSegment=k,a.path.bezierBBox=l,a.path.isPointInsideBBox=m,a.closest=function(b,c,d,e){for(var g=100,h=f(b-g/2,c-g/2,g,g),i=[],j=d[0].hasOwnProperty("x")?function(a){return{x:d[a].x,y:d[a].y}}:function(a){return{x:d[a],y:e[a]}},k=0;1e6>=g&&!k;){for(var l=0,n=d.length;n>l;l++){var o=j(l);if(m(h,o.x,o.y)){k++,i.push(o);break}}k||(g*=2,h=f(b-g/2,c-g/2,g,g))}if(1e6!=g){var 
p,q=1/0;for(l=0,n=i.length;n>l;l++){var r=a.len(b,c,i[l].x,i[l].y);q>r&&(q=r,i[l].len=r,p=i[l])}return p}},a.path.isBBoxIntersect=n,a.path.intersection=t,a.path.intersectionNumber=u,a.path.isPointInside=w,a.path.getBBox=x,a.path.get=Z,a.path.toRelative=A,a.path.toAbsolute=B,a.path.toCubic=G,a.path.map=H,a.path.toString=g,a.path.clone=h}),d.plugin(function(a,d,e,f){var g=Math.max,h=Math.min,i=function(a){if(this.items=[],this.bindings={},this.length=0,this.type="set",a)for(var b=0,c=a.length;c>b;b++)a[b]&&(this[this.items.length]=this.items[this.items.length]=a[b],this.length++)},j=i.prototype;j.push=function(){for(var a,b,c=0,d=arguments.length;d>c;c++)a=arguments[c],a&&(b=this.items.length,this[b]=this.items[b]=a,this.length++);return this},j.pop=function(){return this.length&&delete this[this.length--],this.items.pop()},j.forEach=function(a,b){for(var c=0,d=this.items.length;d>c;c++)if(a.call(b,this.items[c],c)===!1)return this;return this},j.animate=function(d,e,f,g){"function"!=typeof f||f.length||(g=f,f=c.linear),d instanceof a._.Animation&&(g=d.callback,f=d.easing,e=f.dur,d=d.attr);var h=arguments;if(a.is(d,"array")&&a.is(h[h.length-1],"array"))var i=!0;var j,k=function(){j?this.b=j:j=this.b},l=0,m=this,n=g&&function(){++l==m.length&&g.call(this)};return this.forEach(function(a,c){b.once("snap.animcreated."+a.id,k),i?h[c]&&a.animate.apply(a,h[c]):a.animate(d,e,f,n)})},j.remove=function(){for(;this.length;)this.pop().remove();return this},j.bind=function(a,b,c){var d={};if("function"==typeof b)this.bindings[a]=b;else{var e=c||a;this.bindings[a]=function(a){d[e]=a,b.attr(d)}}return this},j.attr=function(a){var b={};for(var c in a)this.bindings[c]?this.bindings[c](a[c]):b[c]=a[c];for(var d=0,e=this.items.length;e>d;d++)this.items[d].attr(b);return this},j.clear=function(){for(;this.length;)this.pop()},j.splice=function(a,b,c){a=0>a?g(this.length+a,0):a,b=g(0,h(this.length-a,b));var 
d,e=[],f=[],j=[];for(d=2;dd;d++)f.push(this[a+d]);for(;dd?j[d]:e[d-k];for(d=this.items.length=this.length-=b-k;this[d];)delete this[d++];return new i(f)},j.exclude=function(a){for(var b=0,c=this.length;c>b;b++)if(this[b]==a)return this.splice(b,1),!0;return!1},j.insertAfter=function(a){for(var b=this.items.length;b--;)this.items[b].insertAfter(a);return this},j.getBBox=function(){for(var a=[],b=[],c=[],d=[],e=this.items.length;e--;)if(!this.items[e].removed){var f=this.items[e].getBBox();a.push(f.x),b.push(f.y),c.push(f.x+f.width),d.push(f.y+f.height)}return a=h.apply(0,a),b=h.apply(0,b),c=g.apply(0,c),d=g.apply(0,d),{x:a,y:b,x2:c,y2:d,width:c-a,height:d-b,cx:a+(c-a)/2,cy:b+(d-b)/2}},j.clone=function(a){a=new i;for(var b=0,c=this.items.length;c>b;b++)a.push(this.items[b].clone());return a},j.toString=function(){return"Snap‘s set"},j.type="set",a.Set=i,a.set=function(){var a=new i;return arguments.length&&a.push.apply(a,Array.prototype.slice.call(arguments,0)),a}}),d.plugin(function(a,c,d,e){function f(a){var b=a[0];switch(b.toLowerCase()){case"t":return[b,0,0];case"m":return[b,1,0,0,1,0,0];case"r":return 4==a.length?[b,0,a[2],a[3]]:[b,0];case"s":return 5==a.length?[b,1,1,a[3],a[4]]:3==a.length?[b,1,1]:[b,1]}}function g(b,c,d){b=b||new a.Matrix,c=c||new a.Matrix,b=a.parseTransformString(b.toTransformString())||[],c=a.parseTransformString(c.toTransformString())||[];for(var e,g,h,i,j=Math.max(b.length,c.length),k=[],n=[],o=0;j>o;o++){if(h=b[o]||f(c[o]),i=c[o]||f(h),h[0]!=i[0]||"r"==h[0].toLowerCase()&&(h[2]!=i[2]||h[3]!=i[3])||"s"==h[0].toLowerCase()&&(h[3]!=i[3]||h[4]!=i[4])){b=a._.transform2matrix(b,d()),c=a._.transform2matrix(c,d()),k=[["m",b.a,b.b,b.c,b.d,b.e,b.f]],n=[["m",c.a,c.b,c.c,c.d,c.e,c.f]];break}for(k[o]=[],n[o]=[],e=0,g=Math.max(h.length,i.length);g>e;e++)e in h&&(k[o][e]=h[e]),e in i&&(n[o][e]=i[e])}return{from:m(k),to:m(n),f:l(k)}}function h(a){return a}function i(a){return function(b){return+b.toFixed(3)+a}}function j(a){return a.join(" ")}function 
k(b){return a.rgb(b[0],b[1],b[2],b[3])}function l(a){var b,c,d,e,f,g,h=0,i=[];for(b=0,c=a.length;c>b;b++){for(f="[",g=['"'+a[b][0]+'"'],d=1,e=a[b].length;e>d;d++)g[d]="val["+h++ +"]";f+=g+"]",i[b]=f}return Function("val","return Snap.path.toString.call(["+i+"])")}function m(a){for(var b=[],c=0,d=a.length;d>c;c++)for(var e=1,f=a[c].length;f>e;e++)b.push(a[c][e]);return b}function n(a){return isFinite(a)}function o(b,c){return a.is(b,"array")&&a.is(c,"array")?b.toString()==c.toString():!1}var p={},q=/[%a-z]+$/i,r=String;p.stroke=p.fill="colour",c.prototype.equal=function(a,c){return b("snap.util.equal",this,a,c).firstDefined()},b.on("snap.util.equal",function(b,c){var d,e,f=r(this.attr(b)||""),s=this;if("colour"==p[b])return d=a.color(f),e=a.color(c),{from:[d.r,d.g,d.b,d.opacity],to:[e.r,e.g,e.b,e.opacity],f:k};if("viewBox"==b)return d=this.attr(b).vb.split(" ").map(Number),e=c.split(" ").map(Number),{from:d,to:e,f:j};if("transform"==b||"gradientTransform"==b||"patternTransform"==b)return"string"==typeof c&&(c=r(c).replace(/\.{3}|\u2026/g,f)),f=this.matrix,c=a._.rgTransform.test(c)?a._.transform2matrix(c,this.getBBox()):a._.transform2matrix(a._.svgTransform2string(c),this.getBBox()),g(f,c,function(){return s.getBBox(1)});if("d"==b||"path"==b)return d=a.path.toCubic(f,c),{from:m(d[0]),to:m(d[1]),f:l(d[0])};if("points"==b)return d=r(f).split(a._.separator),e=r(c).split(a._.separator),{from:d,to:e,f:function(a){return a}};if(n(f)&&n(c))return{from:parseFloat(f),to:parseFloat(c),f:h};var t=f.match(q),u=r(c).match(q);return t&&o(t,u)?{from:parseFloat(f),to:parseFloat(c),f:i(t)}:{from:this.asPX(b),to:this.asPX(b,c),f:h}})}),d.plugin(function(a,c,d,e){for(var f=c.prototype,g="hasOwnProperty",h=("createTouch"in e.doc),i=["click","dblclick","mousedown","mousemove","mouseout","mouseover","mouseup","touchstart","touchmove","touchend","touchcancel"],j={mousedown:"touchstart",mousemove:"touchmove",mouseup:"touchend"},k=(function(a,b){var 
c="y"==a?"scrollTop":"scrollLeft",d=b&&b.node?b.node.ownerDocument:e.doc;return d[c in d.documentElement?"documentElement":"body"][c]}),l=function(){return this.originalEvent.preventDefault()},m=function(){return this.originalEvent.stopPropagation()},n=function(a,b,c,d){var e=h&&j[b]?j[b]:b,f=function(e){var f=k("y",d),i=k("x",d);if(h&&j[g](b))for(var n=0,o=e.targetTouches&&e.targetTouches.length;o>n;n++)if(e.targetTouches[n].target==a||a.contains(e.targetTouches[n].target)){var p=e;e=e.targetTouches[n],e.originalEvent=p,e.preventDefault=l,e.stopPropagation=m;break}var q=e.clientX+i,r=e.clientY+f;return c.call(d,e,q,r)};return b!==e&&a.addEventListener(b,f,!1),a.addEventListener(e,f,!1),function(){return b!==e&&a.removeEventListener(b,f,!1),a.removeEventListener(e,f,!1),!0}},o=[],p=function(a){for(var c,d=a.clientX,e=a.clientY,f=k("y"),g=k("x"),i=o.length;i--;){if(c=o[i],h){for(var j,l=a.touches&&a.touches.length;l--;)if(j=a.touches[l],j.identifier==c.el._drag.id||c.el.node.contains(j.target)){d=j.clientX,e=j.clientY,(a.originalEvent?a.originalEvent:a).preventDefault();break}}else a.preventDefault();var m=c.el.node;m.nextSibling,m.parentNode,m.style.display;d+=g,e+=f,b("snap.drag.move."+c.el.id,c.move_scope||c.el,d-c.el._drag.x,e-c.el._drag.y,d,e,a)}},q=function(c){a.unmousemove(p).unmouseup(q);for(var d,e=o.length;e--;)d=o[e],d.el._drag={},b("snap.drag.end."+d.el.id,d.end_scope||d.start_scope||d.move_scope||d.el,c),b.off("snap.drag.*."+d.el.id);o=[]},r=i.length;r--;)!function(b){a[b]=f[b]=function(c,d){if(a.is(c,"function"))this.events=this.events||[],this.events.push({name:b,f:c,unbind:n(this.node||document,b,c,d||this)});else for(var e=0,f=this.events.length;f>e;e++)if(this.events[e].name==b)try{this.events[e].f.call(this)}catch(g){}return this},a["un"+b]=f["un"+b]=function(a){for(var c=this.events||[],d=c.length;d--;)if(c[d].name==b&&(c[d].f==a||!a))return c[d].unbind(),c.splice(d,1),!c.length&&delete this.events,this;return 
this}}(i[r]);f.hover=function(a,b,c,d){return this.mouseover(a,c).mouseout(b,d||c)},f.unhover=function(a,b){return this.unmouseover(a).unmouseout(b)};var s=[];f.drag=function(c,d,e,f,g,h){function i(i,j,l){(i.originalEvent||i).preventDefault(),k._drag.x=j,k._drag.y=l,k._drag.id=i.identifier,!o.length&&a.mousemove(p).mouseup(q),o.push({el:k,move_scope:f,start_scope:g,end_scope:h}),d&&b.on("snap.drag.start."+k.id,d),c&&b.on("snap.drag.move."+k.id,c),e&&b.on("snap.drag.end."+k.id,e),b("snap.drag.start."+k.id,g||f||k,j,l,i)}function j(a,c,d){b("snap.draginit."+k.id,k,a,c,d)}var k=this;if(!arguments.length){var l;return k.drag(function(a,b){this.attr({transform:l+(l?"T":"t")+[a,b]})},function(){l=this.transform().local})}return b.on("snap.draginit."+k.id,i),k._drag={},s.push({el:k,start:i,init:j}),k.mousedown(j),k},f.undrag=function(){for(var c=s.length;c--;)s[c].el==this&&(this.unmousedown(s[c].init),s.splice(c,1),b.unbind("snap.drag.*."+this.id),b.unbind("snap.draginit."+this.id));return!s.length&&a.unmousemove(p).unmouseup(q),this}}),d.plugin(function(a,c,d,e){var f=(c.prototype,d.prototype),g=/^\s*url\((.+)\)/,h=String,i=a._.$;a.filter={},f.filter=function(b){var d=this;"svg"!=d.type&&(d=d.paper);var e=a.parse(h(b)),f=a._.id(),g=(d.node.offsetWidth,d.node.offsetHeight,i("filter"));return i(g,{id:f,filterUnits:"userSpaceOnUse"}),g.appendChild(e.node),d.defs.appendChild(g),new c(g)},b.on("snap.util.getattr.filter",function(){b.stop();var c=i(this.node,"filter");if(c){var d=h(c).match(g);return d&&a.select(d[1])}}),b.on("snap.util.attr.filter",function(d){if(d instanceof c&&"filter"==d.type){b.stop();var e=d.node.id;e||(i(d.node,{id:d.id}),e=d.id),i(this.node,{filter:a.url(e)})}d&&"none"!=d||(b.stop(),this.node.removeAttribute("filter"))}),a.filter.blur=function(b,c){null==b&&(b=2);var d=null==c?b:[b,c];return a.format('',{def:d})},a.filter.blur.toString=function(){return this()},a.filter.shadow=function(b,c,d,e,f){return 
null==f&&(null==e?(f=d,d=4,e="#000"):(f=e,e=d,d=4)),null==d&&(d=4),null==f&&(f=1),null==b&&(b=0,c=2),null==c&&(c=b),e=a.color(e),a.format('',{color:e,dx:b,dy:c,blur:d,opacity:f})},a.filter.shadow.toString=function(){return this()},a.filter.grayscale=function(b){return null==b&&(b=1),a.format('',{a:.2126+.7874*(1-b),b:.7152-.7152*(1-b),c:.0722-.0722*(1-b),d:.2126-.2126*(1-b),e:.7152+.2848*(1-b),f:.0722-.0722*(1-b),g:.2126-.2126*(1-b),h:.0722+.9278*(1-b)})},a.filter.grayscale.toString=function(){return this()},a.filter.sepia=function(b){return null==b&&(b=1),a.format('',{a:.393+.607*(1-b),b:.769-.769*(1-b),c:.189-.189*(1-b),d:.349-.349*(1-b),e:.686+.314*(1-b),f:.168-.168*(1-b),g:.272-.272*(1-b),h:.534-.534*(1-b),i:.131+.869*(1-b)})},a.filter.sepia.toString=function(){return this()},a.filter.saturate=function(b){return null==b&&(b=1),a.format('',{amount:1-b})},a.filter.saturate.toString=function(){return this()},a.filter.hueRotate=function(b){return b=b||0,a.format('',{angle:b})},a.filter.hueRotate.toString=function(){return this()},a.filter.invert=function(b){return null==b&&(b=1),a.format('',{amount:b,amount2:1-b})},a.filter.invert.toString=function(){return this()},a.filter.brightness=function(b){return null==b&&(b=1),a.format('',{amount:b})},a.filter.brightness.toString=function(){return this()},a.filter.contrast=function(b){return null==b&&(b=1),a.format('',{amount:b,amount2:.5-b/2})},a.filter.contrast.toString=function(){return this()}}),d.plugin(function(a,b,c,d,e){var f=a._.box,g=a.is,h=/^[^a-z]*([tbmlrc])/i,i=function(){return"T"+this.dx+","+this.dy};b.prototype.getAlign=function(a,b){null==b&&g(a,"string")&&(b=a,a=null),a=a||this.paper;var c=a.getBBox?a.getBBox():f(a),d=this.getBBox(),e={};switch(b=b&&b.match(h),b=b?b[1].toLowerCase():"c"){case"t":e.dx=0,e.dy=c.y-d.y;break;case"b":e.dx=0,e.dy=c.y2-d.y2;break;case"m":e.dx=0,e.dy=c.cy-d.cy;break;case"l":e.dx=c.x-d.x,e.dy=0;break;case"r":e.dx=c.x2-d.x2,e.dy=0;break;default:e.dx=c.cx-d.cx,e.dy=0}return 
e.toString=i,e},b.prototype.align=function(a,b){return this.transform("..."+this.getAlign(a,b))}}),d.plugin(function(b,c,d,e){function f(a){a=a.split(/(?=#)/);var b=new String(a[5]);return b[50]=a[0],b[100]=a[1],b[200]=a[2],b[300]=a[3],b[400]=a[4],b[500]=a[5],b[600]=a[6],b[700]=a[7],b[800]=a[8],b[900]=a[9],a[10]&&(b.A100=a[10],b.A200=a[11],b.A400=a[12],b.A700=a[13]),b}var g="#ffebee#ffcdd2#ef9a9a#e57373#ef5350#f44336#e53935#d32f2f#c62828#b71c1c#ff8a80#ff5252#ff1744#d50000",h="#FCE4EC#F8BBD0#F48FB1#F06292#EC407A#E91E63#D81B60#C2185B#AD1457#880E4F#FF80AB#FF4081#F50057#C51162",i="#F3E5F5#E1BEE7#CE93D8#BA68C8#AB47BC#9C27B0#8E24AA#7B1FA2#6A1B9A#4A148C#EA80FC#E040FB#D500F9#AA00FF",j="#EDE7F6#D1C4E9#B39DDB#9575CD#7E57C2#673AB7#5E35B1#512DA8#4527A0#311B92#B388FF#7C4DFF#651FFF#6200EA",k="#E8EAF6#C5CAE9#9FA8DA#7986CB#5C6BC0#3F51B5#3949AB#303F9F#283593#1A237E#8C9EFF#536DFE#3D5AFE#304FFE",l="#E3F2FD#BBDEFB#90CAF9#64B5F6#64B5F6#2196F3#1E88E5#1976D2#1565C0#0D47A1#82B1FF#448AFF#2979FF#2962FF",m="#E1F5FE#B3E5FC#81D4FA#4FC3F7#29B6F6#03A9F4#039BE5#0288D1#0277BD#01579B#80D8FF#40C4FF#00B0FF#0091EA",n="#E0F7FA#B2EBF2#80DEEA#4DD0E1#26C6DA#00BCD4#00ACC1#0097A7#00838F#006064#84FFFF#18FFFF#00E5FF#00B8D4",o="#E0F2F1#B2DFDB#80CBC4#4DB6AC#26A69A#009688#00897B#00796B#00695C#004D40#A7FFEB#64FFDA#1DE9B6#00BFA5",p="#E8F5E9#C8E6C9#A5D6A7#81C784#66BB6A#4CAF50#43A047#388E3C#2E7D32#1B5E20#B9F6CA#69F0AE#00E676#00C853",q="#F1F8E9#DCEDC8#C5E1A5#AED581#9CCC65#8BC34A#7CB342#689F38#558B2F#33691E#CCFF90#B2FF59#76FF03#64DD17",r="#F9FBE7#F0F4C3#E6EE9C#DCE775#D4E157#CDDC39#C0CA33#AFB42B#9E9D24#827717#F4FF81#EEFF41#C6FF00#AEEA00",s="#FFFDE7#FFF9C4#FFF59D#FFF176#FFEE58#FFEB3B#FDD835#FBC02D#F9A825#F57F17#FFFF8D#FFFF00#FFEA00#FFD600",t="#FFF8E1#FFECB3#FFE082#FFD54F#FFCA28#FFC107#FFB300#FFA000#FF8F00#FF6F00#FFE57F#FFD740#FFC400#FFAB00",u="#FFF3E0#FFE0B2#FFCC80#FFB74D#FFA726#FF9800#FB8C00#F57C00#EF6C00#E65100#FFD180#FFAB40#FF9100#FF6D00",v="#FBE9E7#FFCCBC#FFAB91#FF8A65#FF7043#FF5722#F4511E#E64A19#D84315#BF360C#FF9E80
#FF6E40#FF3D00#DD2C00",w="#EFEBE9#D7CCC8#BCAAA4#A1887F#8D6E63#795548#6D4C41#5D4037#4E342E#3E2723",x="#FAFAFA#F5F5F5#EEEEEE#E0E0E0#BDBDBD#9E9E9E#757575#616161#424242#212121",y="#ECEFF1#CFD8DC#B0BEC5#90A4AE#78909C#607D8B#546E7A#455A64#37474F#263238";b.mui={},b.flat={},b.mui.red=f(g),b.mui.pink=f(h),b.mui.purple=f(i),b.mui.deeppurple=f(j),b.mui.indigo=f(k),b.mui.blue=f(l),b.mui.lightblue=f(m),b.mui.cyan=f(n),b.mui.teal=f(o),b.mui.green=f(p),b.mui.lightgreen=f(q),b.mui.lime=f(r),b.mui.yellow=f(s),b.mui.amber=f(t),b.mui.orange=f(u),b.mui.deeporange=f(v),b.mui.brown=f(w),b.mui.grey=f(x),b.mui.bluegrey=f(y),b.flat.turquoise="#1abc9c",b.flat.greensea="#16a085",b.flat.sunflower="#f1c40f",b.flat.orange="#f39c12",b.flat.emerland="#2ecc71",b.flat.nephritis="#27ae60",b.flat.carrot="#e67e22",b.flat.pumpkin="#d35400",b.flat.peterriver="#3498db",b.flat.belizehole="#2980b9",b.flat.alizarin="#e74c3c",b.flat.pomegranate="#c0392b",b.flat.amethyst="#9b59b6",b.flat.wisteria="#8e44ad",b.flat.clouds="#ecf0f1",b.flat.silver="#bdc3c7",b.flat.wetasphalt="#34495e",b.flat.midnightblue="#2c3e50",b.flat.concrete="#95a5a6",b.flat.asbestos="#7f8c8d",b.importMUIColors=function(){for(var c in b.mui)b.mui.hasOwnProperty(c)&&(a[c]=b.mui[c])}}),d}); diff --git a/Questionnaires/jspsych/examples/js/webgazer/ridgeWorker.mjs b/Questionnaires/jspsych/examples/js/webgazer/ridgeWorker.mjs new file mode 100644 index 0000000..effea18 --- /dev/null +++ b/Questionnaires/jspsych/examples/js/webgazer/ridgeWorker.mjs @@ -0,0 +1,135 @@ +'use strict'; + +console.log('thread starting'); + +// Add src/util.mjs and src/mat.mjs to the same directory as your html file +importScripts('./worker_scripts/util.js', './worker_scripts/mat.js'); // [20200708] Figure out how to make all of this wrap up neatly +var ridgeParameter = Math.pow(10,-5); +var resizeWidth = 10; +var resizeHeight = 6; +var dataWindow = 700; +var trailDataWindow = 10; +var trainInterval = 500; + +var screenXClicksArray = new 
self.webgazer.util.DataWindow(dataWindow); +var screenYClicksArray = new self.webgazer.util.DataWindow(dataWindow); +var eyeFeaturesClicks = new self.webgazer.util.DataWindow(dataWindow); +var dataClicks = new self.webgazer.util.DataWindow(dataWindow); + +var screenXTrailArray = new self.webgazer.util.DataWindow(trailDataWindow); +var screenYTrailArray = new self.webgazer.util.DataWindow(trailDataWindow); +var eyeFeaturesTrail = new self.webgazer.util.DataWindow(trailDataWindow); +var dataTrail = new self.webgazer.util.DataWindow(trailDataWindow); + +/** + * Performs ridge regression, according to the Weka code. + * @param {Array} y - corresponds to screen coordinates (either x or y) for each of n click events + * @param {Array.>} X - corresponds to gray pixel features (120 pixels for both eyes) for each of n clicks + * @param {Array} k - ridge parameter + * @return{Array} regression coefficients + */ +function ridge(y, X, k){ + var nc = X[0].length; + var m_Coefficients = new Array(nc); + var xt = self.webgazer.mat.transpose(X); + var solution = new Array(); + var success = true; + do{ + var ss = self.webgazer.mat.mult(xt,X); + // Set ridge regression adjustment + for (var i = 0; i < nc; i++) { + ss[i][i] = ss[i][i] + k; + } + + // Carry out the regression + var bb = self.webgazer.mat.mult(xt,y); + for(var i = 0; i < nc; i++) { + m_Coefficients[i] = bb[i][0]; + } + try{ + var n = (m_Coefficients.length !== 0 ? m_Coefficients.length/m_Coefficients.length: 0); + if (m_Coefficients.length*n !== m_Coefficients.length){ + console.log('Array length must be a multiple of m') + } + solution = (ss.length === ss[0].length ? (self.webgazer.mat.LUDecomposition(ss,bb)) : (self.webgazer.mat.QRDecomposition(ss,bb))); + + for (var i = 0; i < nc; i++){ + m_Coefficients[i] = solution[i][0]; + } + success = true; + } + catch (ex){ + k *= 10; + console.log(ex); + success = false; + } + } while (!success); + return m_Coefficients; +} + +//TODO: still usefull ??? 
+/** + * + * @returns {Number} + */ +function getCurrentFixationIndex() { + var index = 0; + var recentX = this.screenXTrailArray.get(0); + var recentY = this.screenYTrailArray.get(0); + for (var i = this.screenXTrailArray.length - 1; i >= 0; i--) { + var currX = this.screenXTrailArray.get(i); + var currY = this.screenYTrailArray.get(i); + var euclideanDistance = Math.sqrt(Math.pow((currX-recentX),2)+Math.pow((currY-recentY),2)); + if (euclideanDistance > 72){ + return i+1; + } + } + return i; +} + +/** + * Event handler, it store screen position to allow training + * @param {Event} event - the receive event + */ +self.onmessage = function(event) { + var data = event.data; + var screenPos = data['screenPos']; + var eyes = data['eyes']; + var type = data['type']; + if (type === 'click') { + self.screenXClicksArray.push([screenPos[0]]); + self.screenYClicksArray.push([screenPos[1]]); + + self.eyeFeaturesClicks.push(eyes); + } else if (type === 'move') { + self.screenXTrailArray.push([screenPos[0]]); + self.screenYTrailArray.push([screenPos[1]]); + + self.eyeFeaturesTrail.push(eyes); + self.dataTrail.push({'eyes':eyes, 'screenPos':screenPos, 'type':type}); + } + self.needsTraining = true; +}; + +/** + * Compute coefficient from training data + */ +function retrain() { + if (self.screenXClicksArray.length === 0) { + return; + } + if (!self.needsTraining) { + return; + } + var screenXArray = self.screenXClicksArray.data.concat(self.screenXTrailArray.data); + var screenYArray = self.screenYClicksArray.data.concat(self.screenYTrailArray.data); + var eyeFeatures = self.eyeFeaturesClicks.data.concat(self.eyeFeaturesTrail.data); + + var coefficientsX = ridge(screenXArray, eyeFeatures, ridgeParameter); + var coefficientsY = ridge(screenYArray, eyeFeatures, ridgeParameter); + self.postMessage({'X':coefficientsX, 'Y': coefficientsY}); + self.needsTraining = false; +} + +setInterval(retrain, trainInterval); + diff --git a/Questionnaires/jspsych/examples/js/webgazer/webgazer.js 
b/Questionnaires/jspsych/examples/js/webgazer/webgazer.js new file mode 100644 index 0000000..6368f8a --- /dev/null +++ b/Questionnaires/jspsych/examples/js/webgazer/webgazer.js @@ -0,0 +1,88909 @@ +/*! + * + * WebGazer.js: Scalable Webcam EyeTracking Using User Interactions + * Copyright (c) 2016-2020, Brown HCI Group + * Licensed under GPLv3. Companies with a valuation of less than $1M can use WebGazer.js under LGPLv3. + * + */ +var webgazer = +/******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter }); +/******/ } +/******/ }; +/******/ +/******/ // define __esModule on exports +/******/ __webpack_require__.r = function(exports) { +/******/ if(typeof Symbol !== 
'undefined' && Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ +/******/ // create a fake namespace object +/******/ // mode & 1: value is a module id, require it +/******/ // mode & 2: merge all properties of value into the ns +/******/ // mode & 4: return value when already ns object +/******/ // mode & 8|1: behave like require +/******/ __webpack_require__.t = function(value, mode) { +/******/ if(mode & 1) value = __webpack_require__(value); +/******/ if(mode & 8) return value; +/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value; +/******/ var ns = Object.create(null); +/******/ __webpack_require__.r(ns); +/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value }); +/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key)); +/******/ return ns; +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? 
+/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 90); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +// ESM COMPAT FLAG +__webpack_require__.r(__webpack_exports__); + +// EXPORTS +__webpack_require__.d(__webpack_exports__, "AdadeltaOptimizer", function() { return /* reexport */ adadelta_optimizer_AdadeltaOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdagradOptimizer", function() { return /* reexport */ adagrad_optimizer_AdagradOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdamOptimizer", function() { return /* reexport */ adam_optimizer_AdamOptimizer; }); +__webpack_require__.d(__webpack_exports__, "AdamaxOptimizer", function() { return /* reexport */ adamax_optimizer_AdamaxOptimizer; }); +__webpack_require__.d(__webpack_exports__, "MomentumOptimizer", function() { return /* reexport */ momentum_optimizer_MomentumOptimizer; }); +__webpack_require__.d(__webpack_exports__, "Optimizer", function() { return /* reexport */ optimizer_Optimizer; }); +__webpack_require__.d(__webpack_exports__, "RMSPropOptimizer", function() { return /* reexport */ rmsprop_optimizer_RMSPropOptimizer; }); +__webpack_require__.d(__webpack_exports__, "SGDOptimizer", function() { return /* reexport */ sgd_optimizer_SGDOptimizer; }); 
+__webpack_require__.d(__webpack_exports__, "Tensor", function() { return /* reexport */ dist_tensor["a" /* Tensor */]; }); +__webpack_require__.d(__webpack_exports__, "TensorBuffer", function() { return /* reexport */ dist_tensor["b" /* TensorBuffer */]; }); +__webpack_require__.d(__webpack_exports__, "Variable", function() { return /* reexport */ dist_tensor["c" /* Variable */]; }); +__webpack_require__.d(__webpack_exports__, "Rank", function() { return /* reexport */ dist_types["a" /* Rank */]; }); +__webpack_require__.d(__webpack_exports__, "sumOutType", function() { return /* reexport */ dist_types["b" /* sumOutType */]; }); +__webpack_require__.d(__webpack_exports__, "upcastType", function() { return /* reexport */ dist_types["c" /* upcastType */]; }); +__webpack_require__.d(__webpack_exports__, "add", function() { return /* reexport */ add; }); +__webpack_require__.d(__webpack_exports__, "addN", function() { return /* reexport */ addN; }); +__webpack_require__.d(__webpack_exports__, "atan2", function() { return /* reexport */ atan2; }); +__webpack_require__.d(__webpack_exports__, "avgPool", function() { return /* reexport */ avgPool; }); +__webpack_require__.d(__webpack_exports__, "avgPool3d", function() { return /* reexport */ avgPool3d; }); +__webpack_require__.d(__webpack_exports__, "batchToSpaceND", function() { return /* reexport */ batchToSpaceND; }); +__webpack_require__.d(__webpack_exports__, "batchNorm", function() { return /* reexport */ batchNorm; }); +__webpack_require__.d(__webpack_exports__, "batchNorm2d", function() { return /* reexport */ batchNorm2d; }); +__webpack_require__.d(__webpack_exports__, "batchNorm3d", function() { return /* reexport */ batchNorm3d; }); +__webpack_require__.d(__webpack_exports__, "batchNorm4d", function() { return /* reexport */ batchNorm4d; }); +__webpack_require__.d(__webpack_exports__, "broadcastTo", function() { return /* reexport */ broadcastTo; }); +__webpack_require__.d(__webpack_exports__, "clone", 
function() { return /* reexport */ clone; }); +__webpack_require__.d(__webpack_exports__, "complex", function() { return /* reexport */ complex["a" /* complex */]; }); +__webpack_require__.d(__webpack_exports__, "concat", function() { return /* reexport */ concat; }); +__webpack_require__.d(__webpack_exports__, "concat1d", function() { return /* reexport */ concat1d; }); +__webpack_require__.d(__webpack_exports__, "concat2d", function() { return /* reexport */ concat2d; }); +__webpack_require__.d(__webpack_exports__, "concat3d", function() { return /* reexport */ concat3d; }); +__webpack_require__.d(__webpack_exports__, "concat4d", function() { return /* reexport */ concat4d; }); +__webpack_require__.d(__webpack_exports__, "conv1d", function() { return /* reexport */ conv1d; }); +__webpack_require__.d(__webpack_exports__, "conv2d", function() { return /* reexport */ conv2d; }); +__webpack_require__.d(__webpack_exports__, "conv2dTranspose", function() { return /* reexport */ conv2dTranspose; }); +__webpack_require__.d(__webpack_exports__, "conv3d", function() { return /* reexport */ conv3d; }); +__webpack_require__.d(__webpack_exports__, "conv3dTranspose", function() { return /* reexport */ conv3dTranspose; }); +__webpack_require__.d(__webpack_exports__, "cumsum", function() { return /* reexport */ cumsum; }); +__webpack_require__.d(__webpack_exports__, "depthToSpace", function() { return /* reexport */ depthToSpace; }); +__webpack_require__.d(__webpack_exports__, "depthwiseConv2d", function() { return /* reexport */ depthwiseConv2d; }); +__webpack_require__.d(__webpack_exports__, "diag", function() { return /* reexport */ diag; }); +__webpack_require__.d(__webpack_exports__, "div", function() { return /* reexport */ div; }); +__webpack_require__.d(__webpack_exports__, "divNoNan", function() { return /* reexport */ divNoNan; }); +__webpack_require__.d(__webpack_exports__, "dot", function() { return /* reexport */ dot; }); +__webpack_require__.d(__webpack_exports__, 
"elu", function() { return /* reexport */ elu; }); +__webpack_require__.d(__webpack_exports__, "equal", function() { return /* reexport */ equal; }); +__webpack_require__.d(__webpack_exports__, "eye", function() { return /* reexport */ eye; }); +__webpack_require__.d(__webpack_exports__, "fill", function() { return /* reexport */ fill; }); +__webpack_require__.d(__webpack_exports__, "floorDiv", function() { return /* reexport */ floorDiv; }); +__webpack_require__.d(__webpack_exports__, "greater", function() { return /* reexport */ greater; }); +__webpack_require__.d(__webpack_exports__, "greaterEqual", function() { return /* reexport */ greaterEqual; }); +__webpack_require__.d(__webpack_exports__, "imag", function() { return /* reexport */ imag["a" /* imag */]; }); +__webpack_require__.d(__webpack_exports__, "leakyRelu", function() { return /* reexport */ leakyRelu; }); +__webpack_require__.d(__webpack_exports__, "less", function() { return /* reexport */ less; }); +__webpack_require__.d(__webpack_exports__, "lessEqual", function() { return /* reexport */ lessEqual; }); +__webpack_require__.d(__webpack_exports__, "localResponseNormalization", function() { return /* reexport */ localResponseNormalization; }); +__webpack_require__.d(__webpack_exports__, "matMul", function() { return /* reexport */ matMul; }); +__webpack_require__.d(__webpack_exports__, "max", function() { return /* reexport */ max_max; }); +__webpack_require__.d(__webpack_exports__, "maxPool", function() { return /* reexport */ maxPool; }); +__webpack_require__.d(__webpack_exports__, "maxPool3d", function() { return /* reexport */ maxPool3d; }); +__webpack_require__.d(__webpack_exports__, "maxPoolWithArgmax", function() { return /* reexport */ maxPoolWithArgmax; }); +__webpack_require__.d(__webpack_exports__, "maximum", function() { return /* reexport */ maximum; }); +__webpack_require__.d(__webpack_exports__, "minimum", function() { return /* reexport */ minimum; }); 
+__webpack_require__.d(__webpack_exports__, "mod", function() { return /* reexport */ mod; }); +__webpack_require__.d(__webpack_exports__, "mul", function() { return /* reexport */ mul; }); +__webpack_require__.d(__webpack_exports__, "multinomial", function() { return /* reexport */ multinomial; }); +__webpack_require__.d(__webpack_exports__, "notEqual", function() { return /* reexport */ notEqual; }); +__webpack_require__.d(__webpack_exports__, "oneHot", function() { return /* reexport */ oneHot; }); +__webpack_require__.d(__webpack_exports__, "outerProduct", function() { return /* reexport */ outerProduct; }); +__webpack_require__.d(__webpack_exports__, "pad", function() { return /* reexport */ pad_pad; }); +__webpack_require__.d(__webpack_exports__, "pad1d", function() { return /* reexport */ pad1d; }); +__webpack_require__.d(__webpack_exports__, "pad2d", function() { return /* reexport */ pad2d; }); +__webpack_require__.d(__webpack_exports__, "pad3d", function() { return /* reexport */ pad3d; }); +__webpack_require__.d(__webpack_exports__, "pad4d", function() { return /* reexport */ pad4d; }); +__webpack_require__.d(__webpack_exports__, "pool", function() { return /* reexport */ pool; }); +__webpack_require__.d(__webpack_exports__, "pow", function() { return /* reexport */ pow; }); +__webpack_require__.d(__webpack_exports__, "prelu", function() { return /* reexport */ prelu; }); +__webpack_require__.d(__webpack_exports__, "rand", function() { return /* reexport */ rand; }); +__webpack_require__.d(__webpack_exports__, "randomGamma", function() { return /* reexport */ randomGamma; }); +__webpack_require__.d(__webpack_exports__, "randomNormal", function() { return /* reexport */ randomNormal; }); +__webpack_require__.d(__webpack_exports__, "randomUniform", function() { return /* reexport */ randomUniform; }); +__webpack_require__.d(__webpack_exports__, "real", function() { return /* reexport */ real["a" /* real */]; }); +__webpack_require__.d(__webpack_exports__, 
"relu", function() { return /* reexport */ relu; }); +__webpack_require__.d(__webpack_exports__, "relu6", function() { return /* reexport */ relu6; }); +__webpack_require__.d(__webpack_exports__, "selu", function() { return /* reexport */ selu; }); +__webpack_require__.d(__webpack_exports__, "separableConv2d", function() { return /* reexport */ separableConv2d; }); +__webpack_require__.d(__webpack_exports__, "spaceToBatchND", function() { return /* reexport */ spaceToBatchND; }); +__webpack_require__.d(__webpack_exports__, "split", function() { return /* reexport */ split; }); +__webpack_require__.d(__webpack_exports__, "square", function() { return /* reexport */ square; }); +__webpack_require__.d(__webpack_exports__, "squaredDifference", function() { return /* reexport */ squaredDifference; }); +__webpack_require__.d(__webpack_exports__, "sub", function() { return /* reexport */ sub; }); +__webpack_require__.d(__webpack_exports__, "tile", function() { return /* reexport */ tile; }); +__webpack_require__.d(__webpack_exports__, "truncatedNormal", function() { return /* reexport */ truncatedNormal; }); +__webpack_require__.d(__webpack_exports__, "booleanMaskAsync", function() { return /* reexport */ booleanMaskAsync; }); +__webpack_require__.d(__webpack_exports__, "reverse", function() { return /* reexport */ reverse_reverse; }); +__webpack_require__.d(__webpack_exports__, "reverse1d", function() { return /* reexport */ reverse1d; }); +__webpack_require__.d(__webpack_exports__, "reverse2d", function() { return /* reexport */ reverse2d; }); +__webpack_require__.d(__webpack_exports__, "reverse3d", function() { return /* reexport */ reverse3d; }); +__webpack_require__.d(__webpack_exports__, "reverse4d", function() { return /* reexport */ reverse4d; }); +__webpack_require__.d(__webpack_exports__, "slice", function() { return /* reexport */ slice; }); +__webpack_require__.d(__webpack_exports__, "slice1d", function() { return /* reexport */ slice1d; }); 
+__webpack_require__.d(__webpack_exports__, "slice2d", function() { return /* reexport */ slice2d; }); +__webpack_require__.d(__webpack_exports__, "slice3d", function() { return /* reexport */ slice3d; }); +__webpack_require__.d(__webpack_exports__, "slice4d", function() { return /* reexport */ slice4d; }); +__webpack_require__.d(__webpack_exports__, "abs", function() { return /* reexport */ abs; }); +__webpack_require__.d(__webpack_exports__, "acos", function() { return /* reexport */ acos; }); +__webpack_require__.d(__webpack_exports__, "acosh", function() { return /* reexport */ acosh; }); +__webpack_require__.d(__webpack_exports__, "asin", function() { return /* reexport */ asin; }); +__webpack_require__.d(__webpack_exports__, "asinh", function() { return /* reexport */ asinh; }); +__webpack_require__.d(__webpack_exports__, "atan", function() { return /* reexport */ atan; }); +__webpack_require__.d(__webpack_exports__, "atanh", function() { return /* reexport */ atanh; }); +__webpack_require__.d(__webpack_exports__, "ceil", function() { return /* reexport */ ceil; }); +__webpack_require__.d(__webpack_exports__, "clipByValue", function() { return /* reexport */ clipByValue; }); +__webpack_require__.d(__webpack_exports__, "cos", function() { return /* reexport */ cos; }); +__webpack_require__.d(__webpack_exports__, "cosh", function() { return /* reexport */ cosh; }); +__webpack_require__.d(__webpack_exports__, "erf", function() { return /* reexport */ erf; }); +__webpack_require__.d(__webpack_exports__, "exp", function() { return /* reexport */ unary_ops_exp; }); +__webpack_require__.d(__webpack_exports__, "expm1", function() { return /* reexport */ expm1; }); +__webpack_require__.d(__webpack_exports__, "floor", function() { return /* reexport */ floor; }); +__webpack_require__.d(__webpack_exports__, "log", function() { return /* reexport */ log; }); +__webpack_require__.d(__webpack_exports__, "log1p", function() { return /* reexport */ log1p; }); 
+__webpack_require__.d(__webpack_exports__, "logSigmoid", function() { return /* reexport */ logSigmoid; }); +__webpack_require__.d(__webpack_exports__, "neg", function() { return /* reexport */ neg; }); +__webpack_require__.d(__webpack_exports__, "reciprocal", function() { return /* reexport */ reciprocal; }); +__webpack_require__.d(__webpack_exports__, "round", function() { return /* reexport */ round; }); +__webpack_require__.d(__webpack_exports__, "rsqrt", function() { return /* reexport */ rsqrt; }); +__webpack_require__.d(__webpack_exports__, "sigmoid", function() { return /* reexport */ sigmoid; }); +__webpack_require__.d(__webpack_exports__, "sign", function() { return /* reexport */ sign; }); +__webpack_require__.d(__webpack_exports__, "isNaN", function() { return /* reexport */ unary_ops_isNaN; }); +__webpack_require__.d(__webpack_exports__, "isInf", function() { return /* reexport */ isInf; }); +__webpack_require__.d(__webpack_exports__, "isFinite", function() { return /* reexport */ unary_ops_isFinite; }); +__webpack_require__.d(__webpack_exports__, "sin", function() { return /* reexport */ sin; }); +__webpack_require__.d(__webpack_exports__, "sinh", function() { return /* reexport */ sinh; }); +__webpack_require__.d(__webpack_exports__, "softplus", function() { return /* reexport */ softplus; }); +__webpack_require__.d(__webpack_exports__, "sqrt", function() { return /* reexport */ sqrt; }); +__webpack_require__.d(__webpack_exports__, "step", function() { return /* reexport */ unary_ops_step; }); +__webpack_require__.d(__webpack_exports__, "tan", function() { return /* reexport */ tan; }); +__webpack_require__.d(__webpack_exports__, "tanh", function() { return /* reexport */ tanh; }); +__webpack_require__.d(__webpack_exports__, "all", function() { return /* reexport */ reduction_ops_all; }); +__webpack_require__.d(__webpack_exports__, "any", function() { return /* reexport */ any; }); +__webpack_require__.d(__webpack_exports__, "argMax", function() { 
return /* reexport */ argMax; }); +__webpack_require__.d(__webpack_exports__, "argMin", function() { return /* reexport */ argMin; }); +__webpack_require__.d(__webpack_exports__, "logSumExp", function() { return /* reexport */ logSumExp; }); +__webpack_require__.d(__webpack_exports__, "mean", function() { return /* reexport */ reduction_ops_mean; }); +__webpack_require__.d(__webpack_exports__, "min", function() { return /* reexport */ reduction_ops_min; }); +__webpack_require__.d(__webpack_exports__, "moments", function() { return /* reexport */ moments; }); +__webpack_require__.d(__webpack_exports__, "sum", function() { return /* reexport */ sum; }); +__webpack_require__.d(__webpack_exports__, "prod", function() { return /* reexport */ reduction_ops_prod; }); +__webpack_require__.d(__webpack_exports__, "equalStrict", function() { return /* reexport */ equalStrict; }); +__webpack_require__.d(__webpack_exports__, "greaterEqualStrict", function() { return /* reexport */ greaterEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "greaterStrict", function() { return /* reexport */ greaterStrict; }); +__webpack_require__.d(__webpack_exports__, "lessEqualStrict", function() { return /* reexport */ lessEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "lessStrict", function() { return /* reexport */ lessStrict; }); +__webpack_require__.d(__webpack_exports__, "notEqualStrict", function() { return /* reexport */ notEqualStrict; }); +__webpack_require__.d(__webpack_exports__, "addStrict", function() { return /* reexport */ addStrict; }); +__webpack_require__.d(__webpack_exports__, "divStrict", function() { return /* reexport */ divStrict; }); +__webpack_require__.d(__webpack_exports__, "maximumStrict", function() { return /* reexport */ maximumStrict; }); +__webpack_require__.d(__webpack_exports__, "minimumStrict", function() { return /* reexport */ minimumStrict; }); +__webpack_require__.d(__webpack_exports__, "modStrict", function() { return /* reexport 
*/ modStrict; }); +__webpack_require__.d(__webpack_exports__, "mulStrict", function() { return /* reexport */ mulStrict; }); +__webpack_require__.d(__webpack_exports__, "powStrict", function() { return /* reexport */ powStrict; }); +__webpack_require__.d(__webpack_exports__, "squaredDifferenceStrict", function() { return /* reexport */ squaredDifferenceStrict; }); +__webpack_require__.d(__webpack_exports__, "subStrict", function() { return /* reexport */ subStrict; }); +__webpack_require__.d(__webpack_exports__, "logicalAnd", function() { return /* reexport */ logicalAnd; }); +__webpack_require__.d(__webpack_exports__, "logicalNot", function() { return /* reexport */ logicalNot; }); +__webpack_require__.d(__webpack_exports__, "logicalOr", function() { return /* reexport */ logicalOr; }); +__webpack_require__.d(__webpack_exports__, "logicalXor", function() { return /* reexport */ logicalXor; }); +__webpack_require__.d(__webpack_exports__, "where", function() { return /* reexport */ where; }); +__webpack_require__.d(__webpack_exports__, "whereAsync", function() { return /* reexport */ whereAsync; }); +__webpack_require__.d(__webpack_exports__, "buffer", function() { return /* reexport */ array_ops_buffer; }); +__webpack_require__.d(__webpack_exports__, "print", function() { return /* reexport */ print; }); +__webpack_require__.d(__webpack_exports__, "cast", function() { return /* reexport */ cast; }); +__webpack_require__.d(__webpack_exports__, "expandDims", function() { return /* reexport */ expandDims; }); +__webpack_require__.d(__webpack_exports__, "reshape", function() { return /* reexport */ reshape; }); +__webpack_require__.d(__webpack_exports__, "squeeze", function() { return /* reexport */ squeeze; }); +__webpack_require__.d(__webpack_exports__, "stack", function() { return /* reexport */ stack; }); +__webpack_require__.d(__webpack_exports__, "unstack", function() { return /* reexport */ unstack; }); +__webpack_require__.d(__webpack_exports__, 
"setdiff1dAsync", function() { return /* reexport */ setdiff1dAsync; }); +__webpack_require__.d(__webpack_exports__, "linspace", function() { return /* reexport */ tensor_ops["a" /* linspace */]; }); +__webpack_require__.d(__webpack_exports__, "ones", function() { return /* reexport */ tensor_ops["b" /* ones */]; }); +__webpack_require__.d(__webpack_exports__, "range", function() { return /* reexport */ tensor_ops["d" /* range */]; }); +__webpack_require__.d(__webpack_exports__, "scalar", function() { return /* reexport */ tensor_ops["e" /* scalar */]; }); +__webpack_require__.d(__webpack_exports__, "tensor", function() { return /* reexport */ tensor_ops["f" /* tensor */]; }); +__webpack_require__.d(__webpack_exports__, "tensor1d", function() { return /* reexport */ tensor_ops["g" /* tensor1d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor2d", function() { return /* reexport */ tensor_ops["h" /* tensor2d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor3d", function() { return /* reexport */ tensor_ops["i" /* tensor3d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor4d", function() { return /* reexport */ tensor_ops["j" /* tensor4d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor5d", function() { return /* reexport */ tensor_ops["k" /* tensor5d */]; }); +__webpack_require__.d(__webpack_exports__, "tensor6d", function() { return /* reexport */ tensor_ops["l" /* tensor6d */]; }); +__webpack_require__.d(__webpack_exports__, "variable", function() { return /* reexport */ tensor_ops["m" /* variable */]; }); +__webpack_require__.d(__webpack_exports__, "zeros", function() { return /* reexport */ tensor_ops["n" /* zeros */]; }); +__webpack_require__.d(__webpack_exports__, "onesLike", function() { return /* reexport */ tensor_ops["c" /* onesLike */]; }); +__webpack_require__.d(__webpack_exports__, "zerosLike", function() { return /* reexport */ tensor_ops["o" /* zerosLike */]; }); +__webpack_require__.d(__webpack_exports__, 
"transpose", function() { return /* reexport */ transpose; }); +__webpack_require__.d(__webpack_exports__, "softmax", function() { return /* reexport */ softmax; }); +__webpack_require__.d(__webpack_exports__, "logSoftmax", function() { return /* reexport */ logSoftmax; }); +__webpack_require__.d(__webpack_exports__, "norm", function() { return /* reexport */ norm_norm; }); +__webpack_require__.d(__webpack_exports__, "gather", function() { return /* reexport */ gather; }); +__webpack_require__.d(__webpack_exports__, "unsortedSegmentSum", function() { return /* reexport */ unsortedSegmentSum; }); +__webpack_require__.d(__webpack_exports__, "basicLSTMCell", function() { return /* reexport */ basicLSTMCell; }); +__webpack_require__.d(__webpack_exports__, "multiRNNCell", function() { return /* reexport */ multiRNNCell; }); +__webpack_require__.d(__webpack_exports__, "movingAverage", function() { return /* reexport */ movingAverage; }); +__webpack_require__.d(__webpack_exports__, "stridedSlice", function() { return /* reexport */ stridedSlice; }); +__webpack_require__.d(__webpack_exports__, "topk", function() { return /* reexport */ topk; }); +__webpack_require__.d(__webpack_exports__, "scatterND", function() { return /* reexport */ scatterND; }); +__webpack_require__.d(__webpack_exports__, "fft", function() { return /* reexport */ fft; }); +__webpack_require__.d(__webpack_exports__, "ifft", function() { return /* reexport */ ifft; }); +__webpack_require__.d(__webpack_exports__, "rfft", function() { return /* reexport */ rfft; }); +__webpack_require__.d(__webpack_exports__, "irfft", function() { return /* reexport */ irfft; }); +__webpack_require__.d(__webpack_exports__, "sparseToDense", function() { return /* reexport */ sparseToDense; }); +__webpack_require__.d(__webpack_exports__, "gatherND", function() { return /* reexport */ gatherND; }); +__webpack_require__.d(__webpack_exports__, "dropout", function() { return /* reexport */ dropout; }); 
+__webpack_require__.d(__webpack_exports__, "hannWindow", function() { return /* reexport */ hannWindow; }); +__webpack_require__.d(__webpack_exports__, "hammingWindow", function() { return /* reexport */ hammingWindow; }); +__webpack_require__.d(__webpack_exports__, "frame", function() { return /* reexport */ signal_ops_frame; }); +__webpack_require__.d(__webpack_exports__, "stft", function() { return /* reexport */ stft; }); +__webpack_require__.d(__webpack_exports__, "inTopKAsync", function() { return /* reexport */ inTopKAsync; }); +__webpack_require__.d(__webpack_exports__, "op", function() { return /* reexport */ operation["a" /* op */]; }); +__webpack_require__.d(__webpack_exports__, "image", function() { return /* reexport */ image_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "linalg", function() { return /* reexport */ linalg_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "losses", function() { return /* reexport */ loss_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "spectral", function() { return /* reexport */ spectral_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "fused", function() { return /* reexport */ fused_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "signal", function() { return /* reexport */ signal_ops_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "Reduction", function() { return /* reexport */ Reduction; }); +__webpack_require__.d(__webpack_exports__, "train", function() { return /* reexport */ train; }); +__webpack_require__.d(__webpack_exports__, "enableProdMode", function() { return /* reexport */ enableProdMode; }); +__webpack_require__.d(__webpack_exports__, "enableDebugMode", function() { return /* reexport */ enableDebugMode; }); +__webpack_require__.d(__webpack_exports__, "disableDeprecationWarnings", function() { return /* reexport */ disableDeprecationWarnings; }); 
+__webpack_require__.d(__webpack_exports__, "deprecationWarn", function() { return /* reexport */ deprecationWarn; }); +__webpack_require__.d(__webpack_exports__, "disposeVariables", function() { return /* reexport */ disposeVariables; }); +__webpack_require__.d(__webpack_exports__, "engine", function() { return /* reexport */ globals_engine; }); +__webpack_require__.d(__webpack_exports__, "memory", function() { return /* reexport */ memory; }); +__webpack_require__.d(__webpack_exports__, "profile", function() { return /* reexport */ profile; }); +__webpack_require__.d(__webpack_exports__, "tidy", function() { return /* reexport */ tidy; }); +__webpack_require__.d(__webpack_exports__, "dispose", function() { return /* reexport */ dispose; }); +__webpack_require__.d(__webpack_exports__, "keep", function() { return /* reexport */ keep; }); +__webpack_require__.d(__webpack_exports__, "time", function() { return /* reexport */ time; }); +__webpack_require__.d(__webpack_exports__, "setBackend", function() { return /* reexport */ setBackend; }); +__webpack_require__.d(__webpack_exports__, "ready", function() { return /* reexport */ ready; }); +__webpack_require__.d(__webpack_exports__, "getBackend", function() { return /* reexport */ getBackend; }); +__webpack_require__.d(__webpack_exports__, "removeBackend", function() { return /* reexport */ removeBackend; }); +__webpack_require__.d(__webpack_exports__, "findBackend", function() { return /* reexport */ findBackend; }); +__webpack_require__.d(__webpack_exports__, "findBackendFactory", function() { return /* reexport */ findBackendFactory; }); +__webpack_require__.d(__webpack_exports__, "registerBackend", function() { return /* reexport */ registerBackend; }); +__webpack_require__.d(__webpack_exports__, "backend", function() { return /* reexport */ globals_backend; }); +__webpack_require__.d(__webpack_exports__, "setPlatform", function() { return /* reexport */ setPlatform; }); +__webpack_require__.d(__webpack_exports__, 
"getKernel", function() { return /* reexport */ kernel_registry["b" /* getKernel */]; }); +__webpack_require__.d(__webpack_exports__, "getGradient", function() { return /* reexport */ kernel_registry["a" /* getGradient */]; }); +__webpack_require__.d(__webpack_exports__, "getKernelsForBackend", function() { return /* reexport */ kernel_registry["c" /* getKernelsForBackend */]; }); +__webpack_require__.d(__webpack_exports__, "registerKernel", function() { return /* reexport */ kernel_registry["e" /* registerKernel */]; }); +__webpack_require__.d(__webpack_exports__, "registerGradient", function() { return /* reexport */ kernel_registry["d" /* registerGradient */]; }); +__webpack_require__.d(__webpack_exports__, "unregisterKernel", function() { return /* reexport */ kernel_registry["g" /* unregisterKernel */]; }); +__webpack_require__.d(__webpack_exports__, "unregisterGradient", function() { return /* reexport */ kernel_registry["f" /* unregisterGradient */]; }); +__webpack_require__.d(__webpack_exports__, "customGrad", function() { return /* reexport */ customGrad; }); +__webpack_require__.d(__webpack_exports__, "grad", function() { return /* reexport */ gradients_grad; }); +__webpack_require__.d(__webpack_exports__, "grads", function() { return /* reexport */ gradients_grads; }); +__webpack_require__.d(__webpack_exports__, "valueAndGrad", function() { return /* reexport */ valueAndGrad; }); +__webpack_require__.d(__webpack_exports__, "valueAndGrads", function() { return /* reexport */ valueAndGrads; }); +__webpack_require__.d(__webpack_exports__, "variableGrads", function() { return /* reexport */ variableGrads; }); +__webpack_require__.d(__webpack_exports__, "Environment", function() { return /* reexport */ environment["b" /* Environment */]; }); +__webpack_require__.d(__webpack_exports__, "env", function() { return /* reexport */ environment["c" /* env */]; }); +__webpack_require__.d(__webpack_exports__, "ENV", function() { return /* reexport */ environment["a" 
/* ENV */]; }); +__webpack_require__.d(__webpack_exports__, "version_core", function() { return /* reexport */ version; }); +__webpack_require__.d(__webpack_exports__, "nextFrame", function() { return /* reexport */ browser_util["a" /* nextFrame */]; }); +__webpack_require__.d(__webpack_exports__, "browser", function() { return /* reexport */ browser_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "io", function() { return /* reexport */ io_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "math", function() { return /* reexport */ math_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "serialization", function() { return /* reexport */ serialization_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "test_util", function() { return /* reexport */ test_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "util", function() { return /* reexport */ util; }); +__webpack_require__.d(__webpack_exports__, "backend_util", function() { return /* reexport */ backend_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "tensor_util", function() { return /* reexport */ tensor_util; }); +__webpack_require__.d(__webpack_exports__, "slice_util", function() { return /* reexport */ slice_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "gather_util", function() { return /* reexport */ gather_nd_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "scatter_util", function() { return /* reexport */ scatter_nd_util_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "device_util", function() { return /* reexport */ device_util; }); +__webpack_require__.d(__webpack_exports__, "kernel_impls", function() { return /* reexport */ kernel_impls_namespaceObject; }); +__webpack_require__.d(__webpack_exports__, "KernelBackend", function() { return /* reexport */ KernelBackend; }); +__webpack_require__.d(__webpack_exports__, "DataStorage", function() 
{ return /* reexport */ DataStorage; }); +__webpack_require__.d(__webpack_exports__, "Add", function() { return /* reexport */ kernel_names["a" /* Add */]; }); +__webpack_require__.d(__webpack_exports__, "AddN", function() { return /* reexport */ kernel_names["b" /* AddN */]; }); +__webpack_require__.d(__webpack_exports__, "Atan2", function() { return /* reexport */ kernel_names["c" /* Atan2 */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool", function() { return /* reexport */ kernel_names["d" /* AvgPool */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPoolBackprop", function() { return /* reexport */ kernel_names["g" /* AvgPoolBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool3D", function() { return /* reexport */ kernel_names["e" /* AvgPool3D */]; }); +__webpack_require__.d(__webpack_exports__, "AvgPool3DBackprop", function() { return /* reexport */ kernel_names["f" /* AvgPool3DBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "BatchMatMul", function() { return /* reexport */ kernel_names["h" /* BatchMatMul */]; }); +__webpack_require__.d(__webpack_exports__, "BatchToSpaceND", function() { return /* reexport */ kernel_names["i" /* BatchToSpaceND */]; }); +__webpack_require__.d(__webpack_exports__, "BroadcastTo", function() { return /* reexport */ kernel_names["j" /* BroadcastTo */]; }); +__webpack_require__.d(__webpack_exports__, "Complex", function() { return /* reexport */ kernel_names["k" /* Complex */]; }); +__webpack_require__.d(__webpack_exports__, "Concat", function() { return /* reexport */ kernel_names["l" /* Concat */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2D", function() { return /* reexport */ kernel_names["m" /* Conv2D */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2DBackpropFilter", function() { return /* reexport */ kernel_names["n" /* Conv2DBackpropFilter */]; }); +__webpack_require__.d(__webpack_exports__, "Conv2DBackpropInput", function() { return /* reexport */ 
kernel_names["o" /* Conv2DBackpropInput */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3D", function() { return /* reexport */ kernel_names["p" /* Conv3D */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3DBackpropFilterV2", function() { return /* reexport */ kernel_names["q" /* Conv3DBackpropFilterV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Conv3DBackpropInputV2", function() { return /* reexport */ kernel_names["r" /* Conv3DBackpropInputV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Cumsum", function() { return /* reexport */ kernel_names["s" /* Cumsum */]; }); +__webpack_require__.d(__webpack_exports__, "DepthToSpace", function() { return /* reexport */ kernel_names["t" /* DepthToSpace */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNative", function() { return /* reexport */ kernel_names["u" /* DepthwiseConv2dNative */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNativeBackpropFilter", function() { return /* reexport */ kernel_names["v" /* DepthwiseConv2dNativeBackpropFilter */]; }); +__webpack_require__.d(__webpack_exports__, "DepthwiseConv2dNativeBackpropInput", function() { return /* reexport */ kernel_names["w" /* DepthwiseConv2dNativeBackpropInput */]; }); +__webpack_require__.d(__webpack_exports__, "Diag", function() { return /* reexport */ kernel_names["x" /* Diag */]; }); +__webpack_require__.d(__webpack_exports__, "Div", function() { return /* reexport */ kernel_names["y" /* Div */]; }); +__webpack_require__.d(__webpack_exports__, "Elu", function() { return /* reexport */ kernel_names["z" /* Elu */]; }); +__webpack_require__.d(__webpack_exports__, "EluGrad", function() { return /* reexport */ kernel_names["A" /* EluGrad */]; }); +__webpack_require__.d(__webpack_exports__, "Equal", function() { return /* reexport */ kernel_names["B" /* Equal */]; }); +__webpack_require__.d(__webpack_exports__, "FloorDiv", function() { return /* reexport */ kernel_names["D" /* FloorDiv 
*/]; }); +__webpack_require__.d(__webpack_exports__, "Fill", function() { return /* reexport */ kernel_names["C" /* Fill */]; }); +__webpack_require__.d(__webpack_exports__, "FusedBatchNorm", function() { return /* reexport */ kernel_names["F" /* FusedBatchNorm */]; }); +__webpack_require__.d(__webpack_exports__, "GatherNd", function() { return /* reexport */ kernel_names["G" /* GatherNd */]; }); +__webpack_require__.d(__webpack_exports__, "Greater", function() { return /* reexport */ kernel_names["H" /* Greater */]; }); +__webpack_require__.d(__webpack_exports__, "GreaterEqual", function() { return /* reexport */ kernel_names["I" /* GreaterEqual */]; }); +__webpack_require__.d(__webpack_exports__, "Identity", function() { return /* reexport */ kernel_names["J" /* Identity */]; }); +__webpack_require__.d(__webpack_exports__, "Imag", function() { return /* reexport */ kernel_names["K" /* Imag */]; }); +__webpack_require__.d(__webpack_exports__, "Less", function() { return /* reexport */ kernel_names["N" /* Less */]; }); +__webpack_require__.d(__webpack_exports__, "LessEqual", function() { return /* reexport */ kernel_names["O" /* LessEqual */]; }); +__webpack_require__.d(__webpack_exports__, "LRN", function() { return /* reexport */ kernel_names["L" /* LRN */]; }); +__webpack_require__.d(__webpack_exports__, "LRNBackprop", function() { return /* reexport */ kernel_names["M" /* LRNBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "Max", function() { return /* reexport */ kernel_names["P" /* Max */]; }); +__webpack_require__.d(__webpack_exports__, "Maximum", function() { return /* reexport */ kernel_names["V" /* Maximum */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool", function() { return /* reexport */ kernel_names["Q" /* MaxPool */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPoolBackprop", function() { return /* reexport */ kernel_names["T" /* MaxPoolBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool3D", 
function() { return /* reexport */ kernel_names["R" /* MaxPool3D */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPool3DBackprop", function() { return /* reexport */ kernel_names["S" /* MaxPool3DBackprop */]; }); +__webpack_require__.d(__webpack_exports__, "MaxPoolWithArgmax", function() { return /* reexport */ kernel_names["U" /* MaxPoolWithArgmax */]; }); +__webpack_require__.d(__webpack_exports__, "Minimum", function() { return /* reexport */ kernel_names["W" /* Minimum */]; }); +__webpack_require__.d(__webpack_exports__, "Mod", function() { return /* reexport */ kernel_names["X" /* Mod */]; }); +__webpack_require__.d(__webpack_exports__, "Multiply", function() { return /* reexport */ kernel_names["Y" /* Multiply */]; }); +__webpack_require__.d(__webpack_exports__, "NotEqual", function() { return /* reexport */ kernel_names["bb" /* NotEqual */]; }); +__webpack_require__.d(__webpack_exports__, "NonMaxSuppressionV3", function() { return /* reexport */ kernel_names["Z" /* NonMaxSuppressionV3 */]; }); +__webpack_require__.d(__webpack_exports__, "NonMaxSuppressionV5", function() { return /* reexport */ kernel_names["ab" /* NonMaxSuppressionV5 */]; }); +__webpack_require__.d(__webpack_exports__, "OneHot", function() { return /* reexport */ kernel_names["cb" /* OneHot */]; }); +__webpack_require__.d(__webpack_exports__, "PadV2", function() { return /* reexport */ kernel_names["db" /* PadV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Pool", function() { return /* reexport */ kernel_names["eb" /* Pool */]; }); +__webpack_require__.d(__webpack_exports__, "Pow", function() { return /* reexport */ kernel_names["fb" /* Pow */]; }); +__webpack_require__.d(__webpack_exports__, "Prelu", function() { return /* reexport */ kernel_names["gb" /* Prelu */]; }); +__webpack_require__.d(__webpack_exports__, "Real", function() { return /* reexport */ kernel_names["hb" /* Real */]; }); +__webpack_require__.d(__webpack_exports__, "Relu", function() { return /* reexport 
*/ kernel_names["ib" /* Relu */]; }); +__webpack_require__.d(__webpack_exports__, "Relu6", function() { return /* reexport */ kernel_names["jb" /* Relu6 */]; }); +__webpack_require__.d(__webpack_exports__, "SelectV2", function() { return /* reexport */ kernel_names["kb" /* SelectV2 */]; }); +__webpack_require__.d(__webpack_exports__, "Selu", function() { return /* reexport */ kernel_names["lb" /* Selu */]; }); +__webpack_require__.d(__webpack_exports__, "SpaceToBatchND", function() { return /* reexport */ kernel_names["mb" /* SpaceToBatchND */]; }); +__webpack_require__.d(__webpack_exports__, "SplitV", function() { return /* reexport */ kernel_names["nb" /* SplitV */]; }); +__webpack_require__.d(__webpack_exports__, "SquaredDifference", function() { return /* reexport */ kernel_names["pb" /* SquaredDifference */]; }); +__webpack_require__.d(__webpack_exports__, "Square", function() { return /* reexport */ kernel_names["ob" /* Square */]; }); +__webpack_require__.d(__webpack_exports__, "Sub", function() { return /* reexport */ kernel_names["qb" /* Sub */]; }); +__webpack_require__.d(__webpack_exports__, "Tile", function() { return /* reexport */ kernel_names["rb" /* Tile */]; }); +__webpack_require__.d(__webpack_exports__, "Transpose", function() { return /* reexport */ kernel_names["sb" /* Transpose */]; }); +__webpack_require__.d(__webpack_exports__, "FromPixels", function() { return /* reexport */ kernel_names["E" /* FromPixels */]; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice_util.js +var slice_util_namespaceObject = {}; +__webpack_require__.r(slice_util_namespaceObject); +__webpack_require__.d(slice_util_namespaceObject, "assertParamsValid", function() { return assertParamsValid; }); +__webpack_require__.d(slice_util_namespaceObject, "maskToAxes", function() { return maskToAxes; }); +__webpack_require__.d(slice_util_namespaceObject, "computeOutShape", function() { return slice_util_computeOutShape; }); 
+__webpack_require__.d(slice_util_namespaceObject, "stridesWithElidedDims", function() { return stridesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "startIndicesWithElidedDims", function() { return startIndicesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "stopIndicesWithElidedDims", function() { return stopIndicesWithElidedDims; }); +__webpack_require__.d(slice_util_namespaceObject, "stridesForAxis", function() { return stridesForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "startForAxis", function() { return startForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "stopForAxis", function() { return stopForAxis; }); +__webpack_require__.d(slice_util_namespaceObject, "isSliceContinous", function() { return isSliceContinous; }); +__webpack_require__.d(slice_util_namespaceObject, "computeFlatOffset", function() { return computeFlatOffset; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/io/io.js +var io_namespaceObject = {}; +__webpack_require__.r(io_namespaceObject); +__webpack_require__.d(io_namespaceObject, "copyModel", function() { return copyModel; }); +__webpack_require__.d(io_namespaceObject, "listModels", function() { return listModels; }); +__webpack_require__.d(io_namespaceObject, "moveModel", function() { return moveModel; }); +__webpack_require__.d(io_namespaceObject, "removeModel", function() { return removeModel; }); +__webpack_require__.d(io_namespaceObject, "browserFiles", function() { return browserFiles; }); +__webpack_require__.d(io_namespaceObject, "browserHTTPRequest", function() { return browserHTTPRequest; }); +__webpack_require__.d(io_namespaceObject, "concatenateArrayBuffers", function() { return io_utils["d" /* concatenateArrayBuffers */]; }); +__webpack_require__.d(io_namespaceObject, "decodeWeights", function() { return io_utils["e" /* decodeWeights */]; }); +__webpack_require__.d(io_namespaceObject, "encodeWeights", function() { return 
io_utils["f" /* encodeWeights */]; }); +__webpack_require__.d(io_namespaceObject, "fromMemory", function() { return fromMemory; }); +__webpack_require__.d(io_namespaceObject, "getLoadHandlers", function() { return getLoadHandlers; }); +__webpack_require__.d(io_namespaceObject, "getModelArtifactsInfoForJSON", function() { return io_utils["g" /* getModelArtifactsInfoForJSON */]; }); +__webpack_require__.d(io_namespaceObject, "getSaveHandlers", function() { return getSaveHandlers; }); +__webpack_require__.d(io_namespaceObject, "http", function() { return http; }); +__webpack_require__.d(io_namespaceObject, "isHTTPScheme", function() { return isHTTPScheme; }); +__webpack_require__.d(io_namespaceObject, "loadWeights", function() { return loadWeights; }); +__webpack_require__.d(io_namespaceObject, "registerLoadRouter", function() { return registerLoadRouter; }); +__webpack_require__.d(io_namespaceObject, "registerSaveRouter", function() { return registerSaveRouter; }); +__webpack_require__.d(io_namespaceObject, "weightsLoaderFactory", function() { return weightsLoaderFactory; }); +__webpack_require__.d(io_namespaceObject, "withSaveHandler", function() { return withSaveHandler; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/math.js +var math_namespaceObject = {}; +__webpack_require__.r(math_namespaceObject); +__webpack_require__.d(math_namespaceObject, "confusionMatrix", function() { return confusionMatrix; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/browser.js +var browser_namespaceObject = {}; +__webpack_require__.r(browser_namespaceObject); +__webpack_require__.d(browser_namespaceObject, "toPixels", function() { return toPixels; }); +__webpack_require__.d(browser_namespaceObject, "fromPixels", function() { return fromPixels; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/gather_nd_util.js +var gather_nd_util_namespaceObject = {}; +__webpack_require__.r(gather_nd_util_namespaceObject); 
+__webpack_require__.d(gather_nd_util_namespaceObject, "prepareAndValidate", function() { return prepareAndValidate; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/scatter_nd_util.js +var scatter_nd_util_namespaceObject = {}; +__webpack_require__.r(scatter_nd_util_namespaceObject); +__webpack_require__.d(scatter_nd_util_namespaceObject, "validateUpdateShape", function() { return validateUpdateShape; }); +__webpack_require__.d(scatter_nd_util_namespaceObject, "validateInput", function() { return validateInput; }); +__webpack_require__.d(scatter_nd_util_namespaceObject, "calculateShapes", function() { return calculateShapes; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/serialization.js +var serialization_namespaceObject = {}; +__webpack_require__.r(serialization_namespaceObject); +__webpack_require__.d(serialization_namespaceObject, "Serializable", function() { return Serializable; }); +__webpack_require__.d(serialization_namespaceObject, "SerializationMap", function() { return SerializationMap; }); +__webpack_require__.d(serialization_namespaceObject, "registerClass", function() { return registerClass; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/test_util.js +var test_util_namespaceObject = {}; +__webpack_require__.r(test_util_namespaceObject); +__webpack_require__.d(test_util_namespaceObject, "TEST_EPSILON_FLOAT16", function() { return TEST_EPSILON_FLOAT16; }); +__webpack_require__.d(test_util_namespaceObject, "expectArraysClose", function() { return expectArraysClose; }); +__webpack_require__.d(test_util_namespaceObject, "testEpsilon", function() { return testEpsilon; }); +__webpack_require__.d(test_util_namespaceObject, "expectPromiseToFail", function() { return expectPromiseToFail; }); +__webpack_require__.d(test_util_namespaceObject, "expectArraysEqual", function() { return expectArraysEqual; }); +__webpack_require__.d(test_util_namespaceObject, "expectNumbersClose", function() { 
return expectNumbersClose; }); +__webpack_require__.d(test_util_namespaceObject, "expectValuesInRange", function() { return expectValuesInRange; }); +__webpack_require__.d(test_util_namespaceObject, "expectArrayBuffersEqual", function() { return expectArrayBuffersEqual; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/segment_util.js +var segment_util_namespaceObject = {}; +__webpack_require__.r(segment_util_namespaceObject); +__webpack_require__.d(segment_util_namespaceObject, "segOpComputeOptimalWindowSize", function() { return segOpComputeOptimalWindowSize; }); +__webpack_require__.d(segment_util_namespaceObject, "computeOutShape", function() { return segment_util_computeOutShape; }); +__webpack_require__.d(segment_util_namespaceObject, "collectGatherOpShapeInfo", function() { return collectGatherOpShapeInfo; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/spectral_ops.js +var spectral_ops_namespaceObject = {}; +__webpack_require__.r(spectral_ops_namespaceObject); +__webpack_require__.d(spectral_ops_namespaceObject, "fft", function() { return fft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "ifft", function() { return ifft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "rfft", function() { return rfft; }); +__webpack_require__.d(spectral_ops_namespaceObject, "irfft", function() { return irfft; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/signal_ops.js +var signal_ops_namespaceObject = {}; +__webpack_require__.r(signal_ops_namespaceObject); +__webpack_require__.d(signal_ops_namespaceObject, "hannWindow", function() { return hannWindow; }); +__webpack_require__.d(signal_ops_namespaceObject, "hammingWindow", function() { return hammingWindow; }); +__webpack_require__.d(signal_ops_namespaceObject, "frame", function() { return signal_ops_frame; }); +__webpack_require__.d(signal_ops_namespaceObject, "stft", function() { return stft; }); + +// NAMESPACE OBJECT: 
./node_modules/@tensorflow/tfjs-core/dist/ops/loss_ops.js +var loss_ops_namespaceObject = {}; +__webpack_require__.r(loss_ops_namespaceObject); +__webpack_require__.d(loss_ops_namespaceObject, "Reduction", function() { return Reduction; }); +__webpack_require__.d(loss_ops_namespaceObject, "absoluteDifference", function() { return absoluteDifference; }); +__webpack_require__.d(loss_ops_namespaceObject, "computeWeightedLoss", function() { return computeWeightedLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "cosineDistance", function() { return cosineDistance; }); +__webpack_require__.d(loss_ops_namespaceObject, "hingeLoss", function() { return hingeLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "huberLoss", function() { return huberLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "logLoss", function() { return logLoss; }); +__webpack_require__.d(loss_ops_namespaceObject, "meanSquaredError", function() { return meanSquaredError; }); +__webpack_require__.d(loss_ops_namespaceObject, "sigmoidCrossEntropy", function() { return sigmoidCrossEntropy; }); +__webpack_require__.d(loss_ops_namespaceObject, "softmaxCrossEntropy", function() { return softmaxCrossEntropy; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/linalg_ops.js +var linalg_ops_namespaceObject = {}; +__webpack_require__.r(linalg_ops_namespaceObject); +__webpack_require__.d(linalg_ops_namespaceObject, "bandPart", function() { return bandPart; }); +__webpack_require__.d(linalg_ops_namespaceObject, "gramSchmidt", function() { return gramSchmidt; }); +__webpack_require__.d(linalg_ops_namespaceObject, "qr", function() { return qr; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/image_ops.js +var image_ops_namespaceObject = {}; +__webpack_require__.r(image_ops_namespaceObject); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppression", function() { return nonMaxSuppression; }); 
+__webpack_require__.d(image_ops_namespaceObject, "resizeBilinear", function() { return resizeBilinear; }); +__webpack_require__.d(image_ops_namespaceObject, "resizeNearestNeighbor", function() { return resizeNearestNeighbor; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionAsync", function() { return nonMaxSuppressionAsync; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionWithScore", function() { return nonMaxSuppressionWithScore; }); +__webpack_require__.d(image_ops_namespaceObject, "nonMaxSuppressionWithScoreAsync", function() { return nonMaxSuppressionWithScoreAsync; }); +__webpack_require__.d(image_ops_namespaceObject, "cropAndResize", function() { return cropAndResize; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/fused_ops.js +var fused_ops_namespaceObject = {}; +__webpack_require__.r(fused_ops_namespaceObject); +__webpack_require__.d(fused_ops_namespaceObject, "matMul", function() { return fused_ops_matMul; }); +__webpack_require__.d(fused_ops_namespaceObject, "conv2d", function() { return fused_ops_conv2d; }); +__webpack_require__.d(fused_ops_namespaceObject, "depthwiseConv2d", function() { return fused_ops_depthwiseConv2d; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/ops/ops.js +var ops_namespaceObject = {}; +__webpack_require__.r(ops_namespaceObject); +__webpack_require__.d(ops_namespaceObject, "add", function() { return add; }); +__webpack_require__.d(ops_namespaceObject, "addN", function() { return addN; }); +__webpack_require__.d(ops_namespaceObject, "atan2", function() { return atan2; }); +__webpack_require__.d(ops_namespaceObject, "avgPool", function() { return avgPool; }); +__webpack_require__.d(ops_namespaceObject, "avgPool3d", function() { return avgPool3d; }); +__webpack_require__.d(ops_namespaceObject, "batchToSpaceND", function() { return batchToSpaceND; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm", function() { return batchNorm; 
}); +__webpack_require__.d(ops_namespaceObject, "batchNorm2d", function() { return batchNorm2d; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm3d", function() { return batchNorm3d; }); +__webpack_require__.d(ops_namespaceObject, "batchNorm4d", function() { return batchNorm4d; }); +__webpack_require__.d(ops_namespaceObject, "broadcastTo", function() { return broadcastTo; }); +__webpack_require__.d(ops_namespaceObject, "clone", function() { return clone; }); +__webpack_require__.d(ops_namespaceObject, "complex", function() { return complex["a" /* complex */]; }); +__webpack_require__.d(ops_namespaceObject, "concat", function() { return concat; }); +__webpack_require__.d(ops_namespaceObject, "concat1d", function() { return concat1d; }); +__webpack_require__.d(ops_namespaceObject, "concat2d", function() { return concat2d; }); +__webpack_require__.d(ops_namespaceObject, "concat3d", function() { return concat3d; }); +__webpack_require__.d(ops_namespaceObject, "concat4d", function() { return concat4d; }); +__webpack_require__.d(ops_namespaceObject, "conv1d", function() { return conv1d; }); +__webpack_require__.d(ops_namespaceObject, "conv2d", function() { return conv2d; }); +__webpack_require__.d(ops_namespaceObject, "conv2dTranspose", function() { return conv2dTranspose; }); +__webpack_require__.d(ops_namespaceObject, "conv3d", function() { return conv3d; }); +__webpack_require__.d(ops_namespaceObject, "conv3dTranspose", function() { return conv3dTranspose; }); +__webpack_require__.d(ops_namespaceObject, "cumsum", function() { return cumsum; }); +__webpack_require__.d(ops_namespaceObject, "depthToSpace", function() { return depthToSpace; }); +__webpack_require__.d(ops_namespaceObject, "depthwiseConv2d", function() { return depthwiseConv2d; }); +__webpack_require__.d(ops_namespaceObject, "diag", function() { return diag; }); +__webpack_require__.d(ops_namespaceObject, "div", function() { return div; }); +__webpack_require__.d(ops_namespaceObject, "divNoNan", 
function() { return divNoNan; }); +__webpack_require__.d(ops_namespaceObject, "dot", function() { return dot; }); +__webpack_require__.d(ops_namespaceObject, "elu", function() { return elu; }); +__webpack_require__.d(ops_namespaceObject, "equal", function() { return equal; }); +__webpack_require__.d(ops_namespaceObject, "eye", function() { return eye; }); +__webpack_require__.d(ops_namespaceObject, "fill", function() { return fill; }); +__webpack_require__.d(ops_namespaceObject, "floorDiv", function() { return floorDiv; }); +__webpack_require__.d(ops_namespaceObject, "greater", function() { return greater; }); +__webpack_require__.d(ops_namespaceObject, "greaterEqual", function() { return greaterEqual; }); +__webpack_require__.d(ops_namespaceObject, "imag", function() { return imag["a" /* imag */]; }); +__webpack_require__.d(ops_namespaceObject, "leakyRelu", function() { return leakyRelu; }); +__webpack_require__.d(ops_namespaceObject, "less", function() { return less; }); +__webpack_require__.d(ops_namespaceObject, "lessEqual", function() { return lessEqual; }); +__webpack_require__.d(ops_namespaceObject, "localResponseNormalization", function() { return localResponseNormalization; }); +__webpack_require__.d(ops_namespaceObject, "matMul", function() { return matMul; }); +__webpack_require__.d(ops_namespaceObject, "max", function() { return max_max; }); +__webpack_require__.d(ops_namespaceObject, "maxPool", function() { return maxPool; }); +__webpack_require__.d(ops_namespaceObject, "maxPool3d", function() { return maxPool3d; }); +__webpack_require__.d(ops_namespaceObject, "maxPoolWithArgmax", function() { return maxPoolWithArgmax; }); +__webpack_require__.d(ops_namespaceObject, "maximum", function() { return maximum; }); +__webpack_require__.d(ops_namespaceObject, "minimum", function() { return minimum; }); +__webpack_require__.d(ops_namespaceObject, "mod", function() { return mod; }); +__webpack_require__.d(ops_namespaceObject, "mul", function() { return mul; }); 
+__webpack_require__.d(ops_namespaceObject, "multinomial", function() { return multinomial; }); +__webpack_require__.d(ops_namespaceObject, "notEqual", function() { return notEqual; }); +__webpack_require__.d(ops_namespaceObject, "oneHot", function() { return oneHot; }); +__webpack_require__.d(ops_namespaceObject, "outerProduct", function() { return outerProduct; }); +__webpack_require__.d(ops_namespaceObject, "pad", function() { return pad_pad; }); +__webpack_require__.d(ops_namespaceObject, "pad1d", function() { return pad1d; }); +__webpack_require__.d(ops_namespaceObject, "pad2d", function() { return pad2d; }); +__webpack_require__.d(ops_namespaceObject, "pad3d", function() { return pad3d; }); +__webpack_require__.d(ops_namespaceObject, "pad4d", function() { return pad4d; }); +__webpack_require__.d(ops_namespaceObject, "pool", function() { return pool; }); +__webpack_require__.d(ops_namespaceObject, "pow", function() { return pow; }); +__webpack_require__.d(ops_namespaceObject, "prelu", function() { return prelu; }); +__webpack_require__.d(ops_namespaceObject, "rand", function() { return rand; }); +__webpack_require__.d(ops_namespaceObject, "randomGamma", function() { return randomGamma; }); +__webpack_require__.d(ops_namespaceObject, "randomNormal", function() { return randomNormal; }); +__webpack_require__.d(ops_namespaceObject, "randomUniform", function() { return randomUniform; }); +__webpack_require__.d(ops_namespaceObject, "real", function() { return real["a" /* real */]; }); +__webpack_require__.d(ops_namespaceObject, "relu", function() { return relu; }); +__webpack_require__.d(ops_namespaceObject, "relu6", function() { return relu6; }); +__webpack_require__.d(ops_namespaceObject, "selu", function() { return selu; }); +__webpack_require__.d(ops_namespaceObject, "separableConv2d", function() { return separableConv2d; }); +__webpack_require__.d(ops_namespaceObject, "spaceToBatchND", function() { return spaceToBatchND; }); 
+__webpack_require__.d(ops_namespaceObject, "split", function() { return split; }); +__webpack_require__.d(ops_namespaceObject, "square", function() { return square; }); +__webpack_require__.d(ops_namespaceObject, "squaredDifference", function() { return squaredDifference; }); +__webpack_require__.d(ops_namespaceObject, "sub", function() { return sub; }); +__webpack_require__.d(ops_namespaceObject, "tile", function() { return tile; }); +__webpack_require__.d(ops_namespaceObject, "truncatedNormal", function() { return truncatedNormal; }); +__webpack_require__.d(ops_namespaceObject, "booleanMaskAsync", function() { return booleanMaskAsync; }); +__webpack_require__.d(ops_namespaceObject, "reverse", function() { return reverse_reverse; }); +__webpack_require__.d(ops_namespaceObject, "reverse1d", function() { return reverse1d; }); +__webpack_require__.d(ops_namespaceObject, "reverse2d", function() { return reverse2d; }); +__webpack_require__.d(ops_namespaceObject, "reverse3d", function() { return reverse3d; }); +__webpack_require__.d(ops_namespaceObject, "reverse4d", function() { return reverse4d; }); +__webpack_require__.d(ops_namespaceObject, "slice", function() { return slice; }); +__webpack_require__.d(ops_namespaceObject, "slice1d", function() { return slice1d; }); +__webpack_require__.d(ops_namespaceObject, "slice2d", function() { return slice2d; }); +__webpack_require__.d(ops_namespaceObject, "slice3d", function() { return slice3d; }); +__webpack_require__.d(ops_namespaceObject, "slice4d", function() { return slice4d; }); +__webpack_require__.d(ops_namespaceObject, "abs", function() { return abs; }); +__webpack_require__.d(ops_namespaceObject, "acos", function() { return acos; }); +__webpack_require__.d(ops_namespaceObject, "acosh", function() { return acosh; }); +__webpack_require__.d(ops_namespaceObject, "asin", function() { return asin; }); +__webpack_require__.d(ops_namespaceObject, "asinh", function() { return asinh; }); 
+__webpack_require__.d(ops_namespaceObject, "atan", function() { return atan; }); +__webpack_require__.d(ops_namespaceObject, "atanh", function() { return atanh; }); +__webpack_require__.d(ops_namespaceObject, "ceil", function() { return ceil; }); +__webpack_require__.d(ops_namespaceObject, "clipByValue", function() { return clipByValue; }); +__webpack_require__.d(ops_namespaceObject, "cos", function() { return cos; }); +__webpack_require__.d(ops_namespaceObject, "cosh", function() { return cosh; }); +__webpack_require__.d(ops_namespaceObject, "erf", function() { return erf; }); +__webpack_require__.d(ops_namespaceObject, "exp", function() { return unary_ops_exp; }); +__webpack_require__.d(ops_namespaceObject, "expm1", function() { return expm1; }); +__webpack_require__.d(ops_namespaceObject, "floor", function() { return floor; }); +__webpack_require__.d(ops_namespaceObject, "log", function() { return log; }); +__webpack_require__.d(ops_namespaceObject, "log1p", function() { return log1p; }); +__webpack_require__.d(ops_namespaceObject, "logSigmoid", function() { return logSigmoid; }); +__webpack_require__.d(ops_namespaceObject, "neg", function() { return neg; }); +__webpack_require__.d(ops_namespaceObject, "reciprocal", function() { return reciprocal; }); +__webpack_require__.d(ops_namespaceObject, "round", function() { return round; }); +__webpack_require__.d(ops_namespaceObject, "rsqrt", function() { return rsqrt; }); +__webpack_require__.d(ops_namespaceObject, "sigmoid", function() { return sigmoid; }); +__webpack_require__.d(ops_namespaceObject, "sign", function() { return sign; }); +__webpack_require__.d(ops_namespaceObject, "isNaN", function() { return unary_ops_isNaN; }); +__webpack_require__.d(ops_namespaceObject, "isInf", function() { return isInf; }); +__webpack_require__.d(ops_namespaceObject, "isFinite", function() { return unary_ops_isFinite; }); +__webpack_require__.d(ops_namespaceObject, "sin", function() { return sin; }); 
+__webpack_require__.d(ops_namespaceObject, "sinh", function() { return sinh; }); +__webpack_require__.d(ops_namespaceObject, "softplus", function() { return softplus; }); +__webpack_require__.d(ops_namespaceObject, "sqrt", function() { return sqrt; }); +__webpack_require__.d(ops_namespaceObject, "step", function() { return unary_ops_step; }); +__webpack_require__.d(ops_namespaceObject, "tan", function() { return tan; }); +__webpack_require__.d(ops_namespaceObject, "tanh", function() { return tanh; }); +__webpack_require__.d(ops_namespaceObject, "all", function() { return reduction_ops_all; }); +__webpack_require__.d(ops_namespaceObject, "any", function() { return any; }); +__webpack_require__.d(ops_namespaceObject, "argMax", function() { return argMax; }); +__webpack_require__.d(ops_namespaceObject, "argMin", function() { return argMin; }); +__webpack_require__.d(ops_namespaceObject, "logSumExp", function() { return logSumExp; }); +__webpack_require__.d(ops_namespaceObject, "mean", function() { return reduction_ops_mean; }); +__webpack_require__.d(ops_namespaceObject, "min", function() { return reduction_ops_min; }); +__webpack_require__.d(ops_namespaceObject, "moments", function() { return moments; }); +__webpack_require__.d(ops_namespaceObject, "sum", function() { return sum; }); +__webpack_require__.d(ops_namespaceObject, "prod", function() { return reduction_ops_prod; }); +__webpack_require__.d(ops_namespaceObject, "equalStrict", function() { return equalStrict; }); +__webpack_require__.d(ops_namespaceObject, "greaterEqualStrict", function() { return greaterEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "greaterStrict", function() { return greaterStrict; }); +__webpack_require__.d(ops_namespaceObject, "lessEqualStrict", function() { return lessEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "lessStrict", function() { return lessStrict; }); +__webpack_require__.d(ops_namespaceObject, "notEqualStrict", function() { return 
notEqualStrict; }); +__webpack_require__.d(ops_namespaceObject, "addStrict", function() { return addStrict; }); +__webpack_require__.d(ops_namespaceObject, "divStrict", function() { return divStrict; }); +__webpack_require__.d(ops_namespaceObject, "maximumStrict", function() { return maximumStrict; }); +__webpack_require__.d(ops_namespaceObject, "minimumStrict", function() { return minimumStrict; }); +__webpack_require__.d(ops_namespaceObject, "modStrict", function() { return modStrict; }); +__webpack_require__.d(ops_namespaceObject, "mulStrict", function() { return mulStrict; }); +__webpack_require__.d(ops_namespaceObject, "powStrict", function() { return powStrict; }); +__webpack_require__.d(ops_namespaceObject, "squaredDifferenceStrict", function() { return squaredDifferenceStrict; }); +__webpack_require__.d(ops_namespaceObject, "subStrict", function() { return subStrict; }); +__webpack_require__.d(ops_namespaceObject, "logicalAnd", function() { return logicalAnd; }); +__webpack_require__.d(ops_namespaceObject, "logicalNot", function() { return logicalNot; }); +__webpack_require__.d(ops_namespaceObject, "logicalOr", function() { return logicalOr; }); +__webpack_require__.d(ops_namespaceObject, "logicalXor", function() { return logicalXor; }); +__webpack_require__.d(ops_namespaceObject, "where", function() { return where; }); +__webpack_require__.d(ops_namespaceObject, "whereAsync", function() { return whereAsync; }); +__webpack_require__.d(ops_namespaceObject, "buffer", function() { return array_ops_buffer; }); +__webpack_require__.d(ops_namespaceObject, "print", function() { return print; }); +__webpack_require__.d(ops_namespaceObject, "cast", function() { return cast; }); +__webpack_require__.d(ops_namespaceObject, "expandDims", function() { return expandDims; }); +__webpack_require__.d(ops_namespaceObject, "reshape", function() { return reshape; }); +__webpack_require__.d(ops_namespaceObject, "squeeze", function() { return squeeze; }); 
+__webpack_require__.d(ops_namespaceObject, "stack", function() { return stack; }); +__webpack_require__.d(ops_namespaceObject, "unstack", function() { return unstack; }); +__webpack_require__.d(ops_namespaceObject, "setdiff1dAsync", function() { return setdiff1dAsync; }); +__webpack_require__.d(ops_namespaceObject, "linspace", function() { return tensor_ops["a" /* linspace */]; }); +__webpack_require__.d(ops_namespaceObject, "ones", function() { return tensor_ops["b" /* ones */]; }); +__webpack_require__.d(ops_namespaceObject, "range", function() { return tensor_ops["d" /* range */]; }); +__webpack_require__.d(ops_namespaceObject, "scalar", function() { return tensor_ops["e" /* scalar */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor", function() { return tensor_ops["f" /* tensor */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor1d", function() { return tensor_ops["g" /* tensor1d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor2d", function() { return tensor_ops["h" /* tensor2d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor3d", function() { return tensor_ops["i" /* tensor3d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor4d", function() { return tensor_ops["j" /* tensor4d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor5d", function() { return tensor_ops["k" /* tensor5d */]; }); +__webpack_require__.d(ops_namespaceObject, "tensor6d", function() { return tensor_ops["l" /* tensor6d */]; }); +__webpack_require__.d(ops_namespaceObject, "variable", function() { return tensor_ops["m" /* variable */]; }); +__webpack_require__.d(ops_namespaceObject, "zeros", function() { return tensor_ops["n" /* zeros */]; }); +__webpack_require__.d(ops_namespaceObject, "onesLike", function() { return tensor_ops["c" /* onesLike */]; }); +__webpack_require__.d(ops_namespaceObject, "zerosLike", function() { return tensor_ops["o" /* zerosLike */]; }); +__webpack_require__.d(ops_namespaceObject, "transpose", function() { 
return transpose; }); +__webpack_require__.d(ops_namespaceObject, "softmax", function() { return softmax; }); +__webpack_require__.d(ops_namespaceObject, "logSoftmax", function() { return logSoftmax; }); +__webpack_require__.d(ops_namespaceObject, "norm", function() { return norm_norm; }); +__webpack_require__.d(ops_namespaceObject, "gather", function() { return gather; }); +__webpack_require__.d(ops_namespaceObject, "unsortedSegmentSum", function() { return unsortedSegmentSum; }); +__webpack_require__.d(ops_namespaceObject, "basicLSTMCell", function() { return basicLSTMCell; }); +__webpack_require__.d(ops_namespaceObject, "multiRNNCell", function() { return multiRNNCell; }); +__webpack_require__.d(ops_namespaceObject, "movingAverage", function() { return movingAverage; }); +__webpack_require__.d(ops_namespaceObject, "stridedSlice", function() { return stridedSlice; }); +__webpack_require__.d(ops_namespaceObject, "topk", function() { return topk; }); +__webpack_require__.d(ops_namespaceObject, "scatterND", function() { return scatterND; }); +__webpack_require__.d(ops_namespaceObject, "fft", function() { return fft; }); +__webpack_require__.d(ops_namespaceObject, "ifft", function() { return ifft; }); +__webpack_require__.d(ops_namespaceObject, "rfft", function() { return rfft; }); +__webpack_require__.d(ops_namespaceObject, "irfft", function() { return irfft; }); +__webpack_require__.d(ops_namespaceObject, "sparseToDense", function() { return sparseToDense; }); +__webpack_require__.d(ops_namespaceObject, "gatherND", function() { return gatherND; }); +__webpack_require__.d(ops_namespaceObject, "dropout", function() { return dropout; }); +__webpack_require__.d(ops_namespaceObject, "hannWindow", function() { return hannWindow; }); +__webpack_require__.d(ops_namespaceObject, "hammingWindow", function() { return hammingWindow; }); +__webpack_require__.d(ops_namespaceObject, "frame", function() { return signal_ops_frame; }); +__webpack_require__.d(ops_namespaceObject, 
"stft", function() { return stft; }); +__webpack_require__.d(ops_namespaceObject, "inTopKAsync", function() { return inTopKAsync; }); +__webpack_require__.d(ops_namespaceObject, "op", function() { return operation["a" /* op */]; }); +__webpack_require__.d(ops_namespaceObject, "image", function() { return image_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "linalg", function() { return linalg_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "losses", function() { return loss_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "spectral", function() { return spectral_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "fused", function() { return fused_ops_namespaceObject; }); +__webpack_require__.d(ops_namespaceObject, "signal", function() { return signal_ops_namespaceObject; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/backends/backend_util.js +var backend_util_namespaceObject = {}; +__webpack_require__.r(backend_util_namespaceObject); +__webpack_require__.d(backend_util_namespaceObject, "axesAreInnerMostDims", function() { return axesAreInnerMostDims; }); +__webpack_require__.d(backend_util_namespaceObject, "combineLocations", function() { return combineLocations; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOutAndReduceShapes", function() { return computeOutAndReduceShapes; }); +__webpack_require__.d(backend_util_namespaceObject, "expandShapeToKeepDim", function() { return expandShapeToKeepDim; }); +__webpack_require__.d(backend_util_namespaceObject, "assertAxesAreInnerMostDims", function() { return assertAxesAreInnerMostDims; }); +__webpack_require__.d(backend_util_namespaceObject, "getAxesPermutation", function() { return getAxesPermutation; }); +__webpack_require__.d(backend_util_namespaceObject, "getUndoAxesPermutation", function() { return getUndoAxesPermutation; }); +__webpack_require__.d(backend_util_namespaceObject, "getInnerMostAxes", 
function() { return getInnerMostAxes; }); +__webpack_require__.d(backend_util_namespaceObject, "getBroadcastDims", function() { return getBroadcastDims; }); +__webpack_require__.d(backend_util_namespaceObject, "getReductionAxes", function() { return getReductionAxes; }); +__webpack_require__.d(backend_util_namespaceObject, "assertAndGetBroadcastShape", function() { return assertAndGetBroadcastShape; }); +__webpack_require__.d(backend_util_namespaceObject, "assertParamsConsistent", function() { return assertParamsConsistent; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOutShape", function() { return computeOutShape; }); +__webpack_require__.d(backend_util_namespaceObject, "computePool2DInfo", function() { return computePool2DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computePool3DInfo", function() { return computePool3DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeConv2DInfo", function() { return computeConv2DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeConv3DInfo", function() { return computeConv3DInfo; }); +__webpack_require__.d(backend_util_namespaceObject, "computeDefaultPad", function() { return computeDefaultPad; }); +__webpack_require__.d(backend_util_namespaceObject, "tupleValuesAreOne", function() { return tupleValuesAreOne; }); +__webpack_require__.d(backend_util_namespaceObject, "eitherStridesOrDilationsAreOne", function() { return eitherStridesOrDilationsAreOne; }); +__webpack_require__.d(backend_util_namespaceObject, "convertConv2DDataFormat", function() { return convertConv2DDataFormat; }); +__webpack_require__.d(backend_util_namespaceObject, "PARALLELIZE_THRESHOLD", function() { return PARALLELIZE_THRESHOLD; }); +__webpack_require__.d(backend_util_namespaceObject, "computeOptimalWindowSize", function() { return computeOptimalWindowSize; }); +__webpack_require__.d(backend_util_namespaceObject, "nonMaxSuppressionV3", function() { return nonMaxSuppressionV3; }); 
+__webpack_require__.d(backend_util_namespaceObject, "nonMaxSuppressionV5", function() { return nonMaxSuppressionV5; }); +__webpack_require__.d(backend_util_namespaceObject, "upcastType", function() { return dist_types["c" /* upcastType */]; }); +__webpack_require__.d(backend_util_namespaceObject, "getReshaped", function() { return getReshaped; }); +__webpack_require__.d(backend_util_namespaceObject, "getPermuted", function() { return getPermuted; }); +__webpack_require__.d(backend_util_namespaceObject, "getReshapedPermuted", function() { return getReshapedPermuted; }); +__webpack_require__.d(backend_util_namespaceObject, "getSliceBeginCoords", function() { return getSliceBeginCoords; }); +__webpack_require__.d(backend_util_namespaceObject, "getSliceSize", function() { return getSliceSize; }); +__webpack_require__.d(backend_util_namespaceObject, "prepareAndValidate", function() { return prepareAndValidate; }); +__webpack_require__.d(backend_util_namespaceObject, "validateUpdateShape", function() { return validateUpdateShape; }); +__webpack_require__.d(backend_util_namespaceObject, "validateInput", function() { return validateInput; }); +__webpack_require__.d(backend_util_namespaceObject, "calculateShapes", function() { return calculateShapes; }); +__webpack_require__.d(backend_util_namespaceObject, "SELU_SCALEALPHA", function() { return SELU_SCALEALPHA; }); +__webpack_require__.d(backend_util_namespaceObject, "SELU_SCALE", function() { return SELU_SCALE; }); +__webpack_require__.d(backend_util_namespaceObject, "shouldFuse", function() { return shouldFuse; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_P", function() { return ERF_P; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A1", function() { return ERF_A1; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A2", function() { return ERF_A2; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A3", function() { return ERF_A3; }); 
+__webpack_require__.d(backend_util_namespaceObject, "ERF_A4", function() { return ERF_A4; }); +__webpack_require__.d(backend_util_namespaceObject, "ERF_A5", function() { return ERF_A5; }); +__webpack_require__.d(backend_util_namespaceObject, "warn", function() { return warn; }); +__webpack_require__.d(backend_util_namespaceObject, "log", function() { return log_log; }); +__webpack_require__.d(backend_util_namespaceObject, "mergeRealAndImagArrays", function() { return mergeRealAndImagArrays; }); +__webpack_require__.d(backend_util_namespaceObject, "splitRealAndImagArrays", function() { return splitRealAndImagArrays; }); +__webpack_require__.d(backend_util_namespaceObject, "complexWithEvenIndex", function() { return complexWithEvenIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "complexWithOddIndex", function() { return complexWithOddIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "getComplexWithIndex", function() { return getComplexWithIndex; }); +__webpack_require__.d(backend_util_namespaceObject, "assignToTypedArray", function() { return assignToTypedArray; }); +__webpack_require__.d(backend_util_namespaceObject, "exponents", function() { return exponents; }); +__webpack_require__.d(backend_util_namespaceObject, "exponent", function() { return exponent; }); +__webpack_require__.d(backend_util_namespaceObject, "segment_util", function() { return segment_util_namespaceObject; }); +__webpack_require__.d(backend_util_namespaceObject, "castTensor", function() { return castTensor; }); +__webpack_require__.d(backend_util_namespaceObject, "reshapeTensor", function() { return reshapeTensor; }); +__webpack_require__.d(backend_util_namespaceObject, "linspaceImpl", function() { return linspaceImpl; }); + +// NAMESPACE OBJECT: ./node_modules/@tensorflow/tfjs-core/dist/backends/kernel_impls.js +var kernel_impls_namespaceObject = {}; +__webpack_require__.r(kernel_impls_namespaceObject); +__webpack_require__.d(kernel_impls_namespaceObject, 
"nonMaxSuppressionV3", function() { return nonMaxSuppressionV3; }); +__webpack_require__.d(kernel_impls_namespaceObject, "nonMaxSuppressionV5", function() { return nonMaxSuppressionV5; }); +__webpack_require__.d(kernel_impls_namespaceObject, "split", function() { return split_shared_split; }); +__webpack_require__.d(kernel_impls_namespaceObject, "tile", function() { return tile_impl_tile; }); +__webpack_require__.d(kernel_impls_namespaceObject, "topkImpl", function() { return topkImpl; }); +__webpack_require__.d(kernel_impls_namespaceObject, "whereImpl", function() { return whereImpl; }); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/engine.js + 2 modules +var engine = __webpack_require__(5); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/flags.js +var flags = __webpack_require__(61); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/kernel_names.js +var kernel_names = __webpack_require__(6); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/broadcast_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Returns the dimensions in the input shape that are broadcasted to + * produce the provided output shape. + * + * The returned dimensions are 0-indexed and sorted. 
An example: + * inShape = [4, 1, 3] + * outShape = [5, 4, 3, 3] + * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3. + */ +function getBroadcastDims(inShape, outShape) { + const inRank = inShape.length; + const dims = []; + for (let i = 0; i < inRank; i++) { + const dim = inRank - 1 - i; + const a = inShape[dim] || 1; + const b = outShape[outShape.length - 1 - i] || 1; + if (b > 1 && a === 1) { + dims.unshift(dim); + } + } + return dims; +} +/** + * Returns the axes in the output space that should be reduced to produce + * the input space. + */ +function getReductionAxes(inShape, outShape) { + const result = []; + for (let i = 0; i < outShape.length; i++) { + const inDim = inShape[inShape.length - i - 1]; + const outAxis = outShape.length - i - 1; + const outDim = outShape[outAxis]; + if (inDim == null || (inDim === 1 && outDim > 1)) { + result.unshift(outAxis); + } + } + return result; +} +function assertAndGetBroadcastShape(shapeA, shapeB) { + const result = []; + const l = Math.max(shapeA.length, shapeB.length); + for (let i = 0; i < l; i++) { + let a = shapeA[shapeA.length - i - 1]; + if (a == null) { + a = 1; + } + let b = shapeB[shapeB.length - i - 1]; + if (b == null) { + b = 1; + } + if (a === 1) { + result.unshift(b); + } + else if (b === 1) { + result.unshift(a); + } + else if (a !== b) { + const errMsg = `Operands could not be broadcast together with shapes ` + + `${shapeA} and ${shapeB}.`; + throw Error(errMsg); + } + else { + result.unshift(a); + } + } + return result; +} +//# sourceMappingURL=broadcast_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Add_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const addGradConfig = { + kernelName: kernel_names["a" /* Add */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + let res = dy; + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes); + } + return res.reshape(a.shape); + }; + const derB = () => { + let res = dy; + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes); + } + return res.reshape(b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Add_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AddN_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +const addNGradConfig = { + kernelName: kernel_names["b" /* AddN */], + saveAllInputs: true, + gradFunc: (dy, saved) => { + const ders = {}; + saved.forEach((_, i) => { + ders[i] = () => dy.clone(); + }); + return ders; + } +}; +//# sourceMappingURL=AddN_grad.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor_util.js +var tensor_util = __webpack_require__(11); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor_util_env.js +var tensor_util_env = __webpack_require__(3); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/operation.js +var operation = __webpack_require__(4); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/add.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Adds two `tf.Tensor`s element-wise, A + B. Supports broadcasting. + * + * + * ```js + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.tensor1d([10, 20, 30, 40]); + * + * a.add(b).print(); // or tf.add(a, b) + * ``` + * + * ```js + * // Broadcast add a with b. + * const a = tf.scalar(5); + * const b = tf.tensor1d([10, 20, 30, 40]); + * + * a.add(b).print(); // or tf.add(a, b) + * ``` + * @param a The first `tf.Tensor` to add. 
+ * @param b The second `tf.Tensor` to add. Must have the same type as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function add_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'add'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'add'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.add($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["a" /* Add */]); +} +const add = Object(operation["a" /* op */])({ add_ }); +//# sourceMappingURL=add.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/tensor.js + 1 modules +var dist_tensor = __webpack_require__(7); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/util.js +var util = __webpack_require__(1); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/concat_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function assertParamsConsistent(shapes, axis) { + const rank = shapes[0].length; + shapes.forEach((shape, i) => { + util["assert"](shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` + + `as the rank of the rest (${rank})`); + }); + util["assert"](axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`); + const firstShape = shapes[0]; + shapes.forEach((shape, i) => { + for (let r = 0; r < rank; r++) { + util["assert"]((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` + + `does not match the shape of the rest (${firstShape}) ` + + `along the non-concatenated axis ${i}.`); + } + }); +} +function computeOutShape(shapes, axis) { + const outputShape = shapes[0].slice(); + for (let i = 1; i < shapes.length; i++) { + outputShape[axis] += shapes[i][axis]; + } + return outputShape; +} +//# sourceMappingURL=concat_util.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/tensor_ops.js +var tensor_ops = __webpack_require__(8); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/concat.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + +/** + * Concatenates a list of `tf.Tensor`s along a given axis. + * + * The tensors ranks and types must match, and their sizes must match in all + * dimensions except `axis`. + * + * Also available are stricter rank-specific methods that assert that + * `tensors` are of the given rank: + * - `tf.concat1d` + * - `tf.concat2d` + * - `tf.concat3d` + * - `tf.concat4d` + * + * Except `tf.concat1d` (which does not have axis param), all methods have + * same signature as this method. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * a.concat(b).print(); // or a.concat(b) + * ``` + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * const c = tf.tensor1d([5, 6]); + * tf.concat([a, b, c]).print(); + * ``` + * + * ```js + * const a = tf.tensor2d([[1, 2], [10, 20]]); + * const b = tf.tensor2d([[3, 4], [30, 40]]); + * const axis = 1; + * tf.concat([a, b], axis).print(); + * ``` + * @param tensors A list of tensors to concatenate. + * @param axis The axis to concate along. Defaults to 0 (the first dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function concat_(tensors, axis = 0) { + Object(util["assert"])(tensors.length >= 1, () => 'Pass at least one tensor to concat'); + let $tensors = Object(tensor_util_env["b" /* convertToTensorArray */])(tensors, 'tensors', 'concat'); + if ($tensors[0].dtype === 'complex64') { + $tensors.forEach(tensor => { + if (tensor.dtype !== 'complex64') { + throw new Error(`Cannot concatenate complex64 tensors with a tensor + with dtype ${tensor.dtype}. 
`); + } + }); + } + const $axis = Object(util["parseAxisParam"])(axis, $tensors[0].shape)[0]; + const outShape = computeOutShape($tensors.map(t => t.shape), $axis); + if (Object(util["sizeFromShape"])(outShape) === 0) { + return Object(tensor_ops["f" /* tensor */])([], outShape); + } + // Keep only non-empty tensors (ignore tensors with 0 in their shape). + $tensors = $tensors.filter(t => t.size > 0); + if ($tensors.length === 1) { + return $tensors[0]; + } + const shapes = $tensors.map(t => t.shape); + assertParamsConsistent(shapes, $axis); + const forward = (backend, save) => { + const res = backend.concat($tensors, $axis); + save($tensors); + return res; + }; + const inputs = $tensors; + const attr = { axis }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["l" /* Concat */], attr); +} +const concat = Object(operation["a" /* op */])({ concat_ }); +//# sourceMappingURL=concat.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/array_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Reshapes a `tf.Tensor` to a given shape. + * + * Given an input tensor, returns a new tensor with the same values as the + * input tensor with shape `shape`. 
+ * + * If one component of shape is the special value -1, the size of that + * dimension is computed so that the total size remains constant. In + * particular, a shape of [-1] flattens into 1-D. At most one component of + * shape can be -1. + * + * If shape is 1-D or higher, then the operation returns a tensor with shape + * shape filled with the values of tensor. In this case, the number of + * elements implied by shape must be the same as the number of elements in + * tensor. + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * x.reshape([2, 2]).print(); + * ``` + * + * @param x The input tensor to be reshaped. + * @param shape An array of integers defining the output tensor shape. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function reshape_(x, shape) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'reshape', null); + shape = util["inferFromImplicitShape"](shape, $x.size); + util["assert"]($x.size === util["sizeFromShape"](shape), () => 'new shape and old shape must have the same number of elements.'); + const grad = (dy) => { + return { x: () => dy.reshape($x.shape) }; + }; + const attrs = { shape }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.reshape($x, shape), { x: $x }, grad, 'Reshape', attrs); +} +/** + * Removes dimensions of size 1 from the shape of a `tf.Tensor`. + * + * ```js + * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]); + * x.squeeze().print(); + * ``` + * + * @param x The input tensor to be squeezed. + * @param axis An optional list of numbers. If specified, only + * squeezes the dimensions listed. The dimension index starts at 0. It + * is an error to squeeze a dimension that is not 1. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function squeeze_(x, axis) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'squeeze'); + return reshape($x, util["squeezeShape"]($x.shape, axis).newShape); +} +/** + * Casts a `tf.Tensor` to a new dtype. + * + * ```js + * const x = tf.tensor1d([1.5, 2.5, 3]); + * tf.cast(x, 'int32').print(); + * ``` + * @param x The input tensor to be casted. + * @param dtype The dtype to cast the input tensor to. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function cast_(x, dtype) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cast'); + // Sanity checks. + if (!util["isValidDtype"](dtype)) { + throw new Error(`Failed to cast to unknown dtype ${dtype}`); + } + if (dtype === 'string' && $x.dtype !== 'string' || + dtype !== 'string' && $x.dtype === 'string') { + throw new Error('Only strings can be casted to strings'); + } + const grad = (dy) => { + return { x: () => dy.clone() }; + }; + const attrs = { dtype }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.cast($x, dtype), { x: $x }, grad, 'Cast', attrs); +} +/** + * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * const b = tf.tensor1d([3, 4]); + * const c = tf.tensor1d([5, 6]); + * tf.stack([a, b, c]).print(); + * ``` + * + * @param tensors A list of tensor objects with the same shape and dtype. + * @param axis The axis to stack along. Defaults to 0 (the first dim). 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function stack_(tensors, axis = 0) { + const $tensors = Object(tensor_util_env["b" /* convertToTensorArray */])(tensors, 'tensors', 'stack'); + util["assert"]($tensors.length >= 1, () => 'Pass at least one tensor to tf.stack'); + if ($tensors.length === 1) { + return $tensors[0].expandDims(axis); + } + const rank = $tensors[0].rank; + const shape = $tensors[0].shape; + const dtype = $tensors[0].dtype; + util["assert"](axis <= rank, () => 'Axis must be <= rank of the tensor'); + $tensors.forEach(t => { + util["assertShapesMatch"](shape, t.shape, 'All tensors passed to stack must have matching shapes'); + }); + $tensors.forEach(t => { + util["assert"](dtype === t.dtype, () => 'All tensors passed to stack must have matching dtypes'); + }); + const expandedTensors = $tensors.map(t => t.expandDims(axis)); + return concat(expandedTensors, axis); +} +/** + * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s. + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * tf.unstack(a).forEach(tensor => tensor.print()); + * ``` + * + * @param x A tensor object. + * @param axis The axis to unstack along. Defaults to 0 (the first dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function unstack_(x, axis = 0) { + axis = axis || 0; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'unstack'); + util["assert"](axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`); + if (axis < 0) { + axis += $x.shape.length; + } + const grad = (dy) => { + return { x: () => stack(dy, axis) }; + }; + const attrs = { axis }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.unstack($x, axis), { x: $x }, grad, 'Unpack', attrs); +} +/** + * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension + * into the tensor's shape. 
+ * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * const axis = 1; + * x.expandDims(axis).print(); + * ``` + * + * @param x The input tensor whose dimensions to be expanded. + * @param axis The dimension index at which to insert shape of `1`. Defaults + * to 0 (the first dimension). + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function expandDims_(x, axis = 0) { + const parseAs = null; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'expandDims', parseAs); + util["assert"](axis <= $x.rank, () => 'Axis must be <= rank of the tensor'); + const newShape = $x.shape.slice(); + if (axis < 0) { + // Negative value is counted from the tail of rank. + util["assert"](-($x.rank + 1) <= axis, () => `Axis must be in the interval [${-($x.rank + 1)}, ${$x.rank}]`); + axis = $x.rank + axis + 1; + } + newShape.splice(axis, 0, 1); + return reshape($x, newShape); +} +/** + * Computes the difference between two lists of numbers. + * + * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out` + * that represents all values that are in `x` but not in `y`. The returned + * Tensor `out` is sorted in the same order that the numbers appear in `x` + * (duplicates are preserved). This operation also returns a Tensor indices that + * represents the position of each out element in `x`. In other words: + * + * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]` + * + * ```js + * const x = [1, 2, 3, 4, 5, 6]; + * const y = [1, 3, 5]; + * + * const [out, indices] = await tf.setdiff1dAsync(x, y); + * out.print(); // [2, 4, 6] + * indices.print(); // [1, 3, 5] + * ``` + * + * @param x 1-D Tensor. Values to keep. + * @param y 1-D Tensor. Must have the same type as x. Values to exclude in the + * output. + * @returns Promise of Tensor tuple [out, indices]. + * out: Tensor with the same type as x. + * indices: A Tensor of type int32. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +async function setdiff1dAsync_(x, y) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'setdiff1d'); + const $y = Object(tensor_util_env["a" /* convertToTensor */])(y, 'y', 'setdiff1d'); + util["assert"]($x.dtype === $y.dtype, () => `x and y should have the same dtype, but got x (${$x.dtype}) and y (${$y.dtype}).`); + util["assert"]($x.rank === 1, () => `x should be 1D tensor, but got x (${$x.shape}).`); + util["assert"]($y.rank === 1, () => `y should be 1D tensor, but got y (${$y.shape}).`); + const xVals = await $x.data(); + const yVals = await $y.data(); + const ySet = new Set(yVals); + let outputSize = 0; + for (let i = 0; i < xVals.length; i++) { + if (!ySet.has(xVals[i])) { + outputSize++; + } + } + const buffer = new dist_tensor["b" /* TensorBuffer */]([outputSize], $x.dtype); + const indices = new dist_tensor["b" /* TensorBuffer */]([outputSize], 'int32'); + for (let i = 0, p = 0; i < xVals.length; i++) { + if (!ySet.has(xVals[i])) { + buffer.values[p] = xVals[i]; + indices.values[p] = i; + p++; + } + } + return [buffer.toTensor(), indices.toTensor()]; +} +/** + * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`. + * + * The values are stored in CPU as `TypedArray`. Fill the buffer using + * `buffer.set()`, or by modifying directly `buffer.values`. + * + * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with + * those values. + * + * ```js + * // Create a buffer and set values at particular indices. + * const buffer = tf.buffer([2, 2]); + * buffer.set(3, 0, 0); + * buffer.set(5, 1, 0); + * + * // Convert the buffer back to a tensor. + * buffer.toTensor().print(); + * ``` + * + * @param shape An array of integers defining the output tensor shape. + * @param dtype The dtype of the buffer. Defaults to 'float32'. + * @param values The values of the buffer as `TypedArray`. Defaults to + * zeros. 
+ */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function array_ops_buffer(shape, dtype = 'float32', values) { + dtype = dtype || 'float32'; + util["assertNonNegativeIntegerDimensions"](shape); + return new dist_tensor["b" /* TensorBuffer */](shape, dtype, values); +} +/** + * Prints information about the `tf.Tensor` including its data. + * + * ```js + * const verbose = true; + * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose); + * ``` + * @param x The tensor to be printed. + * @param verbose Whether to print verbose information about the ` Tensor`, + * including dtype and size. + */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function print(x, verbose = false) { + console.log(x.toString(verbose)); +} + +const cast = Object(operation["a" /* op */])({ cast_ }); +const expandDims = Object(operation["a" /* op */])({ expandDims_ }); +const reshape = Object(operation["a" /* op */])({ reshape_ }); +const squeeze = Object(operation["a" /* op */])({ squeeze_ }); +const stack = Object(operation["a" /* op */])({ stack_ }); +const unstack = Object(operation["a" /* op */])({ unstack_ }); +const setdiff1dAsync = setdiff1dAsync_; +//# sourceMappingURL=array_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/floorDiv.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +/** + * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. + * The result is rounded with floor function. + * + * + * ```js + * const a = tf.tensor1d([1, 4, 9, 16]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.floorDiv(b).print(); // or tf.div(a, b) + * ``` + * + * ```js + * // Broadcast div a with b. + * const a = tf.tensor1d([2, 4, 6, 8]); + * const b = tf.scalar(2); + * + * a.floorDiv(b).print(); // or tf.floorDiv(a, b) + * ``` + * + * @param a The first tensor as the numerator. + * @param b The second tensor as the denominator. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function floorDiv_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'floorDiv'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'floorDiv'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.floorDiv($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["D" /* FloorDiv */]); +} +const floorDiv = Object(operation["a" /* op */])({ floorDiv_ }); +//# sourceMappingURL=floorDiv.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/div.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 4, 9, 16]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.div(b).print(); // or tf.div(a, b) + * ``` + * + * ```js + * // Broadcast div a with b. + * const a = tf.tensor1d([2, 4, 6, 8]); + * const b = tf.scalar(2); + * + * a.div(b).print(); // or tf.div(a, b) + * ``` + * + * @param a The first tensor as the numerator. + * @param b The second tensor as the denominator. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function div_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'div'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'div'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + if ($a.dtype === 'int32' && $b.dtype === 'int32') { + return floorDiv($a, $b); + } + const forward = (backend, save) => { + const res = backend.realDivide($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + const attrs = {}; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["y" /* Div */], attrs); +} +const div = Object(operation["a" /* op */])({ div_ }); +//# sourceMappingURL=div.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/mul.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Multiplies two `tf.Tensor`s element-wise, A * B. Supports broadcasting. + * + * We also expose `tf.mulStrict` which has the same signature as this op and + * asserts that `a` and `b` are the same shape (does not broadcast). + * + * ```js + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.tensor1d([2, 3, 4, 5]); + * + * a.mul(b).print(); // or tf.mul(a, b) + * ``` + * + * ```js + * // Broadcast mul a with b. + * const a = tf.tensor1d([1, 2, 3, 4]); + * const b = tf.scalar(5); + * + * a.mul(b).print(); // or tf.mul(a, b) + * ``` + * @param a The first tensor to multiply. + * @param b The second tensor to multiply. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function mul_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'mul'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'mul'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.multiply($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["Y" /* Multiply */]); +} +const mul = Object(operation["a" /* op */])({ mul_ }); +//# sourceMappingURL=mul.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the + * gradient of `f(x)` with respect to `x`. + * + * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to + * `x` is computed instead. `f(x)` must take a single tensor `x` and return a + * single tensor `y`. If `f()` takes multiple inputs, use `tf.grads` instead. 
+ * + * ```js + * // f(x) = x ^ 2 + * const f = x => x.square(); + * // f'(x) = 2x + * const g = tf.grad(f); + * + * const x = tf.tensor1d([2, 3]); + * g(x).print(); + * ``` + * + * ```js + * // f(x) = x ^ 3 + * const f = x => x.pow(tf.scalar(3, 'int32')); + * // f'(x) = 3x ^ 2 + * const g = tf.grad(f); + * // f''(x) = 6x + * const gg = tf.grad(g); + * + * const x = tf.tensor1d([2, 3]); + * gg(x).print(); + * ``` + * + * @param f The function f(x), to compute gradient for. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function gradients_grad(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in grad(f) must be a function'); + return (x, dy) => { + // x can be of any dtype, thus null as the last argument. + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tf.grad', null); + const $dy = (dy != null) ? Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'tf.grad') : null; + return engine["a" /* ENGINE */].tidy(() => { + const { value, grads } = engine["a" /* ENGINE */].gradients(() => f($x), [$x], $dy); + if ($dy != null) { + util["assertShapesMatch"](value.shape, $dy.shape, 'The shape of dy passed in grad(f)(x, dy) must match the shape ' + + 'returned by f(x)'); + } + checkGrads(grads); + return grads[0]; + }); + }; +} +/** + * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`, + * which gives an array of gradients of `f()` with respect to each input + * [`x1`,`x2`,...]. + * + * If `dy` is passed when calling `g()`, the gradient of + * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead. + * The provided `f` must take one or more tensors and return a single tensor + * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead. 
+ * + * ```js + * // f(a, b) = a * b + * const f = (a, b) => a.mul(b); + * // df / da = b, df / db = a + * const g = tf.grads(f); + * + * const a = tf.tensor1d([2, 3]); + * const b = tf.tensor1d([-2, -3]); + * const [da, db] = g([a, b]); + * console.log('da'); + * da.print(); + * console.log('db'); + * db.print(); + * ``` + * + * @param f The function `f(x1, x2,...)` to compute gradients for. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function gradients_grads(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in grads(f) must be a function'); + return (args, dy) => { + util["assert"](Array.isArray(args), () => 'The args passed in grads(f)(args) must be an array ' + + 'of `Tensor`s or `TensorLike`s'); + // args can be of any dtype, thus null as the last argument. + const $args = Object(tensor_util_env["b" /* convertToTensorArray */])(args, 'args', 'tf.grads', null); + const $dy = (dy != null) ? Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'tf.grads') : null; + return engine["a" /* ENGINE */].tidy(() => { + const { value, grads } = engine["a" /* ENGINE */].gradients(() => f(...$args), $args, $dy); + if ($dy != null) { + util["assertShapesMatch"](value.shape, $dy.shape, 'The shape of dy passed in grads(f)([x1,...], dy) must ' + + 'match the shape returned by f([x1,...])'); + } + checkGrads(grads); + return grads; + }); + }; +} +/** + * Like `tf.grad`, but also returns the value of `f()`. Useful when `f()` + * returns a metric you want to show. + * + * The result is a rich object with the following properties: + * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`). + * - value: The value returned by `f(x)`. 
+ * + * ```js + * // f(x) = x ^ 2 + * const f = x => x.square(); + * // f'(x) = 2x + * const g = tf.valueAndGrad(f); + * + * const x = tf.tensor1d([2, 3]); + * const {value, grad} = g(x); + * + * console.log('value'); + * value.print(); + * console.log('grad'); + * grad.print(); + * ``` + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function valueAndGrad(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in valueAndGrad(f) must be a function'); + return (x, dy) => { + util["assert"](x instanceof dist_tensor["a" /* Tensor */], () => 'The x passed in valueAndGrad(f)(x) must be a tensor'); + util["assert"](dy == null || dy instanceof dist_tensor["a" /* Tensor */], () => 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor'); + const { grads, value } = engine["a" /* ENGINE */].gradients(() => f(x), [x], dy); + checkGrads(grads); + return { grad: grads[0], value }; + }; +} +/** + * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()` + * returns a metric you want to show. + * + * The result is a rich object with the following properties: + * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`). + * - value: The value returned by `f(x)`. 
+ * + * ```js + * // f(a, b) = a * b + * const f = (a, b) => a.mul(b); + * // df/da = b, df/db = a + * const g = tf.valueAndGrads(f); + * + * const a = tf.tensor1d([2, 3]); + * const b = tf.tensor1d([-2, -3]); + * const {value, grads} = g([a, b]); + * + * const [da, db] = grads; + * + * console.log('value'); + * value.print(); + * + * console.log('da'); + * da.print(); + * console.log('db'); + * db.print(); + * ``` + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function valueAndGrads(f) { + util["assert"](util["isFunction"](f), () => 'The f passed in valueAndGrads(f) must be a function'); + return (args, dy) => { + util["assert"](Array.isArray(args) && args.every(arg => arg instanceof dist_tensor["a" /* Tensor */]), () => 'The args passed in valueAndGrads(f)(args) must be array of ' + + 'tensors'); + util["assert"](dy == null || dy instanceof dist_tensor["a" /* Tensor */], () => 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor'); + const res = engine["a" /* ENGINE */].gradients(() => f(...args), args, dy); + if (dy != null) { + util["assertShapesMatch"](res.value.shape, dy.shape, 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' + + 'match the shape returned by f([x1,...])'); + } + checkGrads(res.grads); + return res; + }; +} +/** + * Computes and returns the gradient of f(x) with respect to the list of + * trainable variables provided by `varList`. If no list is provided, it + * defaults to all trainable variables. + * + * ```js + * const a = tf.variable(tf.tensor1d([3, 4])); + * const b = tf.variable(tf.tensor1d([5, 6])); + * const x = tf.tensor1d([1, 2]); + * + * // f(a, b) = a * x ^ 2 + b * x + * const f = () => a.mul(x.square()).add(b.mul(x)).sum(); + * // df/da = x ^ 2, df/db = x + * const {value, grads} = tf.variableGrads(f); + * + * Object.keys(grads).forEach(varName => grads[varName].print()); + * ``` + * + * @param f The function to execute. f() should return a scalar. 
+ * @param varList The list of variables to compute the gradients with respect + * to. Defaults to all trainable variables. + * @returns An object with the following keys and values: + * - `value`: The value of the function `f`. + * - `grads`: A map from the names of the variables to the gradients. + * If the `varList` argument is provided explicitly and contains a subset of + * non-trainable variables, this map in the return value will contain keys + * that map the names of the non-trainable variables to `null`. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function variableGrads(f, varList) { + util["assert"](util["isFunction"](f), () => 'The f passed in variableGrads(f) must be a function'); + util["assert"](varList == null || + Array.isArray(varList) && varList.every(v => v instanceof dist_tensor["c" /* Variable */]), () => 'The varList passed in variableGrads(f, varList) must be an array ' + + 'of variables'); + const specifiedVarList = varList != null; + if (!specifiedVarList) { + // Get all of the trainable variables. + varList = []; + for (const varName in engine["a" /* ENGINE */].registeredVariables) { + varList.push(engine["a" /* ENGINE */].registeredVariables[varName]); + } + } + const specifiedNonTrainable = specifiedVarList ? varList.filter(variable => !variable.trainable) : null; + // Prune non-trainable variables. + const originalVarCount = varList.length; + varList = varList.filter(variable => variable.trainable); + util["assert"](varList.length > 0, () => `variableGrads() expects at least one of the input variables to ` + + `be trainable, but none of the ${originalVarCount} variables is ` + + `trainable.`); + const allowNoGradients = true; + const { value, grads } = engine["a" /* ENGINE */].gradients(f, varList, null, allowNoGradients); + util["assert"](grads.some(g => g != null), () => 'Cannot find a connection between any variable and the result of ' + + 'the loss function y=f(x). 
Please make sure the operations that ' + + 'use variables are inside the function f passed to minimize().'); + util["assert"](value.rank === 0, () => `The f passed in variableGrads(f) must return a scalar, but it ` + + `returned a rank-${value.rank} tensor`); + const namedGrads = {}; + varList.forEach((v, i) => { + if (grads[i] != null) { + namedGrads[v.name] = grads[i]; + } + }); + if (specifiedNonTrainable != null) { + // If varList is explicitly provided and contains non-trainable values, + // add them to the returned gradients with `null` values. + specifiedNonTrainable.forEach(v => namedGrads[v.name] = null); + } + return { value, grads: namedGrads }; +} +/** + * Overrides the gradient computation of a function `f`. + * + * Takes a function + * `f(...inputs, save) => {value: Tensor, gradFunc: (dy, saved) => Tensor[]}` + * and returns another function `g(...inputs)` which takes the same inputs as + * `f`. When called, `g` returns `f().value`. In backward mode, custom gradients + * with respect to each input of `f` are computed using `f().gradFunc`. + * + * The `save` function passsed to `f` should be used for saving tensors needed + * in the gradient. And the `saved` passed to the `gradFunc` is a + * `NamedTensorMap`, which contains those saved tensor. + * + * ```js + * const customOp = tf.customGrad((x, save) => { + * // Save x to make sure it's available later for the gradient. + * save([x]); + * // Override gradient of our custom x ^ 2 op to be dy * abs(x); + * return { + * value: x.square(), + * // Note `saved.x` which points to the `x` we saved earlier. 
+ * gradFunc: (dy, saved) => [dy.mul(saved[0].abs())] + * }; + * }); + * + * const x = tf.tensor1d([-1, -2, 3]); + * const dx = tf.grad(x => customOp(x)); + * + * console.log(`f(x):`); + * customOp(x).print(); + * console.log(`f'(x):`); + * dx(x).print(); + * ``` + * + * @param f The function to evaluate in forward mode, which should return + * `{value: Tensor, gradFunc: (dy, saved) => Tensor[]}`, where `gradFunc` + * returns the custom gradients of `f` with respect to its inputs. + */ +/** @doc {heading: 'Training', subheading: 'Gradients'} */ +function customGrad(f) { + return engine["a" /* ENGINE */].customGrad(f); +} +function checkGrads(grads) { + const numNullGradients = grads.filter(g => g == null).length; + if (numNullGradients > 0) { + throw new Error(`Cannot compute gradient of y=f(x) with respect to x. Make sure that + the f you passed encloses all operations that lead from x to y.`); + } +} + +//# sourceMappingURL=gradients.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/axis_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Returns true if the axis specifies the inner most dimensions of the + * array. 
+ */ +function axesAreInnerMostDims(axes, rank) { + for (let i = 0; i < axes.length; ++i) { + if (axes[axes.length - i - 1] !== rank - 1 - i) { + return false; + } + } + return true; +} +function combineLocations(outputLoc, reduceLoc, axes) { + const rank = outputLoc.length + reduceLoc.length; + const loc = []; + let outIdx = 0; + let reduceIdx = 0; + for (let dim = 0; dim < rank; dim++) { + if (axes.indexOf(dim) === -1) { + loc.push(outputLoc[outIdx++]); + } + else { + loc.push(reduceLoc[reduceIdx++]); + } + } + return loc; +} +function computeOutAndReduceShapes(aShape, axes) { + const outShape = []; + const rank = aShape.length; + for (let dim = 0; dim < rank; dim++) { + if (axes.indexOf(dim) === -1) { + outShape.push(aShape[dim]); + } + } + const reduceShape = axes.map(dim => aShape[dim]); + return [outShape, reduceShape]; +} +function expandShapeToKeepDim(shape, axes) { + const reduceSubShape = axes.map(x => 1); + return combineLocations(shape, reduceSubShape, axes); +} +function assertAxesAreInnerMostDims(msg, axes, rank) { + util["assert"](axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` + + `Got axes ${axes} and rank-${rank} input.`); +} +/** + * Returns the axes permutation to be used with `tf.transpose`, if such + * permutation is necessary. Otherwise it returns null. This method is used by + * operations that operate only on inner-most axes. + */ +function getAxesPermutation(axes, rank) { + if (axesAreInnerMostDims(axes, rank)) { + return null; + } + const result = []; + for (let i = 0; i < rank; ++i) { + if (axes.indexOf(i) === -1) { + result.push(i); + } + } + axes.forEach(axis => result.push(axis)); + return result; +} +/** Returns the axes permutation that undoes the original permutation. 
*/ +function getUndoAxesPermutation(axes) { + return axes.map((axis, i) => [i, axis]) + .sort((a, b) => a[1] - b[1]) + .map(x => x[0]); +} +function getInnerMostAxes(numAxes, rank) { + const res = []; + for (let i = rank - numAxes; i < rank; ++i) { + res.push(i); + } + return res; +} +//# sourceMappingURL=axis_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/reduction_ops_util.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Gradient helper function for the min and max operations. + */ +function gradForMinAndMax(dy, y, xOrig, origAxes, permutedAxes) { + if (y.rank < xOrig.rank) { + y = y.reshape(expandShapeToKeepDim(y.shape, origAxes)); + } + if (dy.rank < xOrig.rank) { + dy = dy.reshape(expandShapeToKeepDim(dy.shape, origAxes)); + } + return { + x: () => { + const dx = dy.mul(xOrig.equal(y).cast(dy.dtype)); + return permutedAxes == null ? dx : dx.transpose(permutedAxes); + } + }; +} +//# sourceMappingURL=reduction_ops_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/reduction_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +/** + * Computes the log(sum(exp(elements across the reduction dimensions)). + * + * Reduces the input along the dimensions given in `axis`. Unless `keepDims` + * is true, the rank of the array is reduced by 1 for each entry in `axis`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axis` has no entries, all dimensions are reduced, and an array with a + * single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.logSumExp().print(); // or tf.logSumExp(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis) + * ``` + * @param x The input tensor. + * @param axis The dimension(s) to reduce. If null (the default), + * reduces all dimensions. + * @param keepDims If true, retains reduced dimensions with length + * of 1. Defaults to false. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function logSumExp_(x, axis = null, keepDims = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logSumExp'); + const axes = util["parseAxisParam"](axis, $x.shape); + const xMax = $x.max(axes, true /* keepDims */); + const a = $x.sub(xMax); + const b = a.exp(); + const c = b.sum(axes); + const d = c.log(); + const res = xMax.reshape(d.shape).add(d); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, axes); + return res.reshape(newShape); + } + return res; +} +/** + * Computes the sum of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If axes has no entries, all dimensions are reduced, and a + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.sum().print(); // or tf.sum(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.sum(axis).print(); // or tf.sum(x, axis) + * ``` + * + * @param x The input tensor to compute the sum over. If the dtype is `bool` + * it will be converted to `int32` and the output dtype will be `int32`. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function sum_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sum'); + if ($x.dtype === 'bool') { + $x = $x.toInt(); + } + const axes = util["parseAxisParam"](axis, $x.shape); + // Use a custom gradient to bypass 2 gradient backprops since sum is used + // extremely often. 
+ const customOp = customGrad((x) => { + const permutation = getAxesPermutation(axes, x.rank); + let reductionAxes = axes; + let permutedX = x; + if (permutation != null) { + permutedX = x.transpose(permutation); + reductionAxes = getInnerMostAxes(reductionAxes.length, x.rank); + } + const gradFunc = (dy) => { + const expandedDyShape = x.shape.slice(); + axes.forEach(axis => { + expandedDyShape[axis] = 1; + }); + const expandedDy = dy.reshape(expandedDyShape); + const derX = expandedDy.mul(Object(tensor_ops["b" /* ones */])(x.shape, 'float32')); + return derX; + }; + const gradInputs = (dy) => { + return { x: () => gradFunc(dy) }; + }; + const attrs = { axes: reductionAxes }; + let value = engine["a" /* ENGINE */].runKernelFunc(backend => backend.sum(permutedX, reductionAxes), { x: permutedX }, gradInputs, 'Sum', attrs); + if (keepDims) { + const newShape = expandShapeToKeepDim(value.shape, axes); + value = value.reshape(newShape); + } + return { value, gradFunc }; + }); + return customOp($x); +} +/** + * Computes the product of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and a + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.prod().print(); // or tf.prod(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.prod(axis).print(); // or tf.prod(x, axis) + * ``` + * + * @param x The input tensor to compute the product over. If the dtype is `bool` + * it will be converted to `int32` and the output dtype will be `int32`. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. 
+ * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function prod_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'prod'); + if ($x.dtype === 'bool') { + $x = $x.toInt(); + } + const axes = util["parseAxisParam"](axis, $x.shape); + const permutation = getAxesPermutation(axes, $x.rank); + let reductionAxes = axes; + let permutedX = $x; + if (permutation != null) { + permutedX = $x.transpose(permutation); + reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank); + } + let value = engine["a" /* ENGINE */].runKernelFunc(backend => backend.prod(permutedX, reductionAxes), { permutedX }); + if (keepDims) { + const newShape = expandShapeToKeepDim(value.shape, axes); + value = value.reshape(newShape); + } + return value; +} +/** + * Computes the mean of elements across dimensions of a `tf.Tensor`. + * + * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is + * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with + * a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.mean().print(); // or tf.mean(a) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.mean(axis).print(); // or tf.mean(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function mean_(x, axis = null, keepDims = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'mean'); + const axes = util["parseAxisParam"](axis, $x.shape); + const shapes = computeOutAndReduceShapes($x.shape, axes); + const reduceShape = shapes[1]; + const reduceSize = util["sizeFromShape"](reduceShape); + // Use a custom gradient to bypass 2 gradient backprops since mean is used + // extremely often. + const customOp = customGrad((x) => { + const reduceSizeScalar = Object(tensor_ops["e" /* scalar */])(reduceSize); + // Cast if needed. + const xReduce = reduceSizeScalar.dtype === x.dtype ? x : x.cast(reduceSizeScalar.dtype); + const res = xReduce.div(reduceSizeScalar); + const value = res.sum(axis, keepDims); + const gradFunc = (dy) => { + const expandedDyShape = x.shape.slice(); + axes.forEach(axis => { + expandedDyShape[axis] = 1; + }); + const expandedDy = dy.reshape(expandedDyShape); + const derX = expandedDy.mul(Object(tensor_ops["b" /* ones */])(x.shape, 'float32')).div(reduceSize); + return derX; + }; + return { value, gradFunc }; + }); + return customOp($x); +} +/** + * Computes the minimum value from the input. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the array is reduced by 1 for each entry in `axes`. + * If `keepDims` is true, the reduced dimensions are retained with length 1. + * If `axes` has no entries, all dimensions are reduced, and an array with a + * single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.min().print(); // or tf.min(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * const axis = 1; + * x.min(axis).print(); // or tf.min(x, axis) + * ``` + * + * @param x The input Tensor. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. 
+ * @param keepDims If true, retains reduced dimensions with size 1. + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function min_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'min'); + const xOrig = $x; + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => gradForMinAndMax(dy, saved[1], saved[0], origAxes, permutedAxes); + const inputsToSave = [$x]; + const outputsToSave = [true]; + let res = engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.min($x, axes); + save([xOrig, y]); + return y; + }, { x: $x }, grad, 'Min', { axes }, inputsToSave, outputsToSave); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + res = res.reshape(newShape); + } + return res; +} +/** + * Returns the indices of the minimum values along an `axis`. + * + * The result has the same shape as `input` with the dimension along `axis` + * removed. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.argMin().print(); // or tf.argMin(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]); + * + * const axis = 1; + * x.argMin(axis).print(); // or tf.argMin(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension). 
+ * + */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function argMin_(x, axis = 0) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'argMin'); + if (axis == null) { + axis = 0; + } + let axes = util["parseAxisParam"](axis, $x.shape); + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => Object(tensor_ops["o" /* zerosLike */])($x) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.argMin($x, axes[0]); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Returns the indices of the maximum values along an `axis`. + * + * The result has the same shape as `input` with the dimension along `axis` + * removed. + * + * ```js + * const x = tf.tensor1d([1, 2, 3]); + * + * x.argMax().print(); // or tf.argMax(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]); + * + * const axis = 1; + * x.argMax(axis).print(); // or tf.argMax(x, axis) + * ``` + * + * @param x The input tensor. + * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension). 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function argMax_(x, axis = 0) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'argMax'); + if (axis == null) { + axis = 0; + } + let axes = util["parseAxisParam"](axis, $x.shape); + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => Object(tensor_ops["o" /* zerosLike */])($x) }; + }; + const attrs = { axis: axes[0] }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.argMax($x, axes[0]); + save([$x]); + return res; + }, { x: $x }, grad, 'ArgMax', attrs, inputsToSave); +} +/** + * Computes the logical and of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and an + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 1, 1], 'bool'); + * + * x.all().print(); // or tf.all(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool'); + * + * const axis = 1; + * x.all(axis).print(); // or tf.all(x, axis) + * ``` + * + * @param x The input tensor. Must be of dtype bool. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function all_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'all', 'bool'); + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const res = engine["a" /* ENGINE */].runKernelFunc(backend => backend.all($x, axes), { $x }); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + return res.reshape(newShape); + } + return res; +} +/** + * Computes the logical or of elements across dimensions of a `tf.Tensor`. + * + * Reduces the input along the dimensions given in `axes`. Unless `keepDims` + * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in + * `axes`. If `keepDims` is true, the reduced dimensions are retained with + * length 1. If `axes` has no entries, all dimensions are reduced, and an + * `tf.Tensor` with a single element is returned. + * + * ```js + * const x = tf.tensor1d([1, 1, 1], 'bool'); + * + * x.any().print(); // or tf.any(x) + * ``` + * + * ```js + * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool'); + * + * const axis = 1; + * x.any(axis).print(); // or tf.any(x, axis) + * ``` + * + * @param x The input tensor. Must be of dtype bool. + * @param axis The dimension(s) to reduce. By default it reduces + * all dimensions. + * @param keepDims If true, retains reduced dimensions with size 1. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Reduction'} */ +function any_(x, axis = null, keepDims = false) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'any', 'bool'); + const origAxes = util["parseAxisParam"](axis, $x.shape); + let axes = origAxes; + const permutedAxes = getAxesPermutation(axes, $x.rank); + if (permutedAxes != null) { + $x = $x.transpose(permutedAxes); + axes = getInnerMostAxes(axes.length, $x.rank); + } + const res = engine["a" /* ENGINE */].runKernelFunc(backend => backend.any($x, axes), { $x }); + if (keepDims) { + const newShape = expandShapeToKeepDim(res.shape, origAxes); + return res.reshape(newShape); + } + return res; +} +/** + * Calculates the mean and variance of `x`. The mean and variance are + * calculated by aggregating the contents of `x` across `axes`. If `x` is + * 1-D and `axes = [0]` this is just the mean and variance of a vector. + * + * @param x The input tensor. + * @param axis The dimension(s) along with to compute mean and + * variance. By default it reduces all dimensions. + * @param keepDims If true, the moments have the same dimensionality as the + * input. + * @return An object with two keys: `mean` and `variance`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Normalization'} */ +function moments_(x, axis = null, keepDims = false) { + x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'moments'); + const axes = util["parseAxisParam"](axis, x.shape); + const mean = x.mean(axes, keepDims); + let keepDimsShape = mean.shape; + if (!keepDims) { + keepDimsShape = expandShapeToKeepDim(mean.shape, axes); + } + const devSquared = x.toFloat().sub(mean.reshape(keepDimsShape)).square(); + const variance = devSquared.mean(axes, keepDims); + return { mean, variance }; +} +const reduction_ops_all = Object(operation["a" /* op */])({ all_ }); +// tslint:disable-next-line:variable-name +const any = Object(operation["a" /* op */])({ any_ }); +const argMax = Object(operation["a" /* op */])({ argMax_ }); +const argMin = Object(operation["a" /* op */])({ argMin_ }); +const logSumExp = Object(operation["a" /* op */])({ logSumExp_ }); +const reduction_ops_mean = Object(operation["a" /* op */])({ mean_ }); +const reduction_ops_min = Object(operation["a" /* op */])({ min_ }); +const moments = Object(operation["a" /* op */])({ moments_ }); +const sum = Object(operation["a" /* op */])({ sum_ }); +const reduction_ops_prod = Object(operation["a" /* op */])({ prod_ }); +//# sourceMappingURL=reduction_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/square.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + +/** + * Computes square of `x` element-wise: `x ^ 2` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]); + * + * x.square().print(); // or tf.square(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function square_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'square'); + const attrs = {}; + const inputsToSave = [$x]; + const outputsToSave = []; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + save([$x]); + return backend.square($x); + }, { x: $x }, null /* grad */, 'Square', attrs, inputsToSave, outputsToSave); +} +const square = Object(operation["a" /* op */])({ square_ }); +//# sourceMappingURL=square.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/unary_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Computes `-1 * x` element-wise. + * + * ```js + * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]); + * + * x.neg().print(); // or tf.neg(x) + * ``` + * + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function neg_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'neg'); + const grad = (dy) => { + return { x: () => dy.neg() }; + }; + const attrs = {}; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.neg($x), { x: $x }, grad, 'Neg', attrs, inputsToSave); +} +/** + * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)` + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.ceil().print(); // or tf.ceil(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function ceil_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'ceil'); + // TODO(manrajgrover): Return null for gradients when backprop supports it. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.ceil($x), { $x }, grad); +} +/** + * Computes floor of input `tf.Tensor` element-wise: `floor(x)`. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.floor().print(); // or tf.floor(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function floor_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'floor'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.floor($x), { $x }, grad); +} +/** + * Returns an element-wise indication of the sign of a number. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]); + * + * x.sign().print(); // or tf.sign(x) + * ``` + * @param x The input Tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sign_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sign'); + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.sign($x), { $x }, grad); +} +/** + * RReturns which elements of x are NaN. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isNaN().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isNaN_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isNaN'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isNaN($x), { $x }, grad); +} +/** + * Returns which elements of x are Infinity or -Infinity. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isInf().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isInf_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isInf'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isInf($x), { $x }, grad); +} +/** + * Returns which elements of x are finite. + * + * ```js + * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]); + * + * x.isFinite().print(); // or tf.isNaN(x) + * ``` + * @param x The input Tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function isFinite_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'isFinite'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.isFinite($x), { $x }, grad); +} +/** + * Computes round of input `tf.Tensor` element-wise: `round(x)`. + * It implements banker's rounding. + * + * ```js + * const x = tf.tensor1d([.6, 1.1, -3.3]); + * + * x.round().print(); // or tf.round(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function round_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'round'); + // TODO(nsthorat): Let gradients be null for cases where we want to stop + // backpropgation. + const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.round($x), { $x }, grad); +} +/** + * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x` + * + * ```js + * const x = tf.tensor1d([1, 2, -3]); + * + * x.exp().print(); // or tf.exp(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function exp_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'exp'); + const bck = (dy, saved) => { + // tslint:disable-next-line: no-unnecessary-type-assertion + return { x: () => dy.mul(saved[0]) }; + }; + const attrs = {}; + const inputsToSave = []; + const outputsToSave = [true]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.exp($x); + save([y]); + return y; + }, { x: $x }, bck, 'Exp', attrs, inputsToSave, outputsToSave); +} +/** + * Computes exponential of the input `tf.Tensor` minus one element-wise. + * `e ^ x - 1` + * + * ```js + * const x = tf.tensor1d([1, 2, -3]); + * + * x.expm1().print(); // or tf.expm1(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function expm1_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'expm1'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.exp()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.expm1($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.E]); + * + * x.log().print(); // or tf.log(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function log_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'log'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.toFloat()) }; + }; + const attrs = {}; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.log($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Log', attrs, inputsToSave); +} +/** + * Computes natural logarithm of the input `tf.Tensor` plus one + * element-wise: `ln(1 + x)` + * + * ```js + * const x = tf.tensor1d([1, 2, Math.E - 1]); + * + * x.log1p().print(); // or tf.log1p(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function log1p_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'log1p'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.add(1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.log1p($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, 4, -1]); + * + * x.sqrt().print(); // or tf.sqrt(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sqrt_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sqrt'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.toFloat().sqrt().mul(2)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sqrt($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Sqrt', {}); +} +/** + * Computes reciprocal of square root of the input `tf.Tensor` element-wise: + * `y = 1 / sqrt(x)` + * + * ```js + * const x = tf.tensor1d([1, 2, 4, -1]); + * + * x.rsqrt().print(); // or tf.rsqrt(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function rsqrt_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'rsqrt'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.div($x.pow(1.5).mul(2)).neg() }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.rsqrt($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Rsqrt', {} /* attrs */, inputsToSave); +} +/** + * Computes reciprocal of x element-wise: `1 / x` + * + * ```js + * const x = tf.tensor1d([0, 1, 2]); + * + * x.reciprocal().print(); // or tf.reciprocal(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function reciprocal_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'reciprocal'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.square().neg()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.reciprocal($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes absolute value element-wise: `abs(x)` + * + * ```js + * const x = tf.tensor1d([-1, 2, -3, 4]); + * + * x.abs().print(); // or tf.abs(x) + * ``` + * @param x The input `tf.Tensor`. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function abs_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'abs'); + if ($x.dtype === 'complex64') { + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.complexAbs($x), { $x }); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => dy.mul($x.toFloat().step(-1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.abs($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Abs'); +} +/** + * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)` + * + * ```js + * const x = tf.tensor1d([-1, 2, -3, 4]); + * + * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3) + * ``` + * @param x The input tensor. + * @param clipValueMin Lower-bound of range to be clipped to. + * @param clipValueMax Upper-bound of range to be clipped to. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function clipByValue_(x, clipValueMin, clipValueMax) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'clipByValue'); + util["assert"]((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` + + `less than or equal to max (${clipValueMax}).`); + const grad = (dy, saved) => { + const [$x] = saved; + return { + x: () => dy.where($x.greaterEqual(clipValueMin) + .logicalAnd($x.lessEqual(clipValueMax)), Object(tensor_ops["o" /* zerosLike */])(dy)), + }; + }; + const inputsToSave = [$x]; + const attr = { min: clipValueMin, max: clipValueMax }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.clip($x, clipValueMin, clipValueMax); + save([$x]); + return res; + }, { x: $x }, grad, 'ClipByValue', attr, inputsToSave); +} +/** + * Computes sigmoid element-wise, `1 / (1 + exp(-x))` + * + * ```js + * const x = tf.tensor1d([0, -1, 2, -3]); + * + * x.sigmoid().print(); // or tf.sigmoid(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sigmoid_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sigmoid'); + const grad = (dy, saved) => { + const [y] = saved; + return { x: () => dy.mul(y.mul(Object(tensor_ops["e" /* scalar */])(1).sub(y))) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.sigmoid($x); + save([y]); + return y; + }, { x: $x }, grad, 'Sigmoid'); +} +/** + * Computes log sigmoid of the input `tf.Tensor` element-wise: + * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`. + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.logSigmoid().print(); // or tf.logSigmoid(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function logSigmoid_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logSigmoid'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.neg().sigmoid()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.softplus($x.neg()).neg(); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.softplus().print(); // or tf.softplus(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function softplus_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'softplus'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.mul($x.sigmoid()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.softplus($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes sin of the input Tensor element-wise: `sin(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.sin().print(); // or tf.sin(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sin_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sin'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => $x.toFloat().cos().mul(dy) }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sin($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Sin', {} /* attrs */, inputsToSave); +} +/** + * Computes cos of the input `tf.Tensor` element-wise: `cos(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.cos().print(); // or tf.cos(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function cos_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cos'); + const grad = (dy, saved) => { + const [$x] = saved; + return { x: () => $x.toFloat().sin().neg().mul(dy) }; + }; + const inputsToSave = [$x]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.cos($x); + save([$x]); + return res; + }, { x: $x }, grad, 'Cos', {} /* attrs */, inputsToSave); +} +/** + * Computes tan of the input `tf.Tensor` element-wise, `tan(x)` + * + * ```js + * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]); + * + * x.tan().print(); // or tf.tan(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function tan_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tan'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.cos().square()) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.tan($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes asin of the input `tf.Tensor` element-wise: `asin(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.asin().print(); // or tf.asin(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function asin_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'asin'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + // tslint:disable-next-line: no-unnecessary-type-assertion + $x: () => dy.div(Object(tensor_ops["e" /* scalar */])(1).sub($x.toFloat().square()).sqrt()) + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.asin($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes acos of the input `tf.Tensor` element-wise: `acos(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.acos().print(); // or tf.acos(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function acos_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'acos'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = $x.toFloat().square(); + const b = Object(tensor_ops["e" /* scalar */])(1).sub(a).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(b).neg(); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.acos($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes atan of the input `tf.Tensor` element-wise: `atan(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.atan().print(); // or tf.atan(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function atan_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'atan'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div($x.toFloat().square().add(1)) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.atan($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.sinh().print(); // or tf.sinh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function sinh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'sinh'); + const grad = (dy, saved) => { + const [$x] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { $x: () => $x.toFloat().cosh().mul(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.sinh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.cosh().print(); // or tf.cosh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function cosh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cosh'); + const grad = (dy, saved) => { + const [$x] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { $x: () => $x.toFloat().sinh().mul(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.cosh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, 70]); + * + * x.tanh().print(); // or tf.tanh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function tanh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tanh'); + const grad = (dy, saved) => { + const [y] = saved; + // tslint:disable-next-line: no-unnecessary-type-assertion + return { x: () => Object(tensor_ops["e" /* scalar */])(1).sub(y.square()).mul(dy) }; + }; + const outputsToSave = [true]; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const y = backend.tanh($x); + save([y]); + return y; + }, { x: $x }, grad, 'Tanh', {} /* attrs */, null /* inputsToSave */, outputsToSave); +} +/** + * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise: + * `asinh(x)` + * + * ```js + * const x = tf.tensor1d([0, 1, -1, .7]); + * + * x.asinh().print(); // or tf.asinh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function asinh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'asinh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = Object(tensor_ops["e" /* scalar */])(1).add($x.toFloat().square()).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(a); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.asinh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise: + * `acosh(x)` + * + * ```js + * const x = tf.tensor1d([10, 1, 3, 5.7]); + * + * x.acosh().print(); // or tf.acosh(x) + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function acosh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'acosh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => { + const a = $x.toFloat().square().sub(1).sqrt(); + // tslint:disable-next-line: no-unnecessary-type-assertion + return dy.div(a); + } + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.acosh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise: + * `atanh(x)` + * + * ```js + * const x = tf.tensor1d([0, .1, -.1, .7]); + * + * x.atanh().print(); // or tf.atanh(x) + * ``` + * @param x The input tensor. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function atanh_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'atanh'); + const grad = (dy, saved) => { + const [$x] = saved; + return { $x: () => dy.div(Object(tensor_ops["e" /* scalar */])(1).sub($x.toFloat().square())) }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.atanh($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes gause error function of the input `tf.Tensor` element-wise: + * `erf(x)` + * + * ```js + * const x = tf.tensor1d([0, .1, -.1, .7]); + * + * x.erf().print(); // or tf.erf(x); + * ``` + * @param x The input tensor. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function erf_(x) { + let $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'erf'); + util["assert"]($x.dtype === 'int32' || $x.dtype === 'float32', () => 'Input dtype must be `int32` or `float32`.'); + if ($x.dtype === 'int32') { + $x = $x.toFloat(); + } + const grad = (dy, saved) => { + const [$x] = saved; + return { + $x: () => dy.mul($x.square().neg().exp().mul(2 / Math.sqrt(Math.PI))) + }; + }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.erf($x); + save([$x]); + return res; + }, { $x }, grad); +} +/** + * Computes step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x` + * + * ```js + * const x = tf.tensor1d([0, 2, -1, -3]); + * + * x.step(.5).print(); // or tf.step(x, .5) + * ``` + * @param x The input tensor. + * @param alpha The gradient when input is negative. + */ +/** @doc {heading: 'Operations', subheading: 'Basic math'} */ +function step_(x, alpha = 0.0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'step'); + // TODO(manrajgrover): Return null for gradients when backprop supports + // it. 
+ const grad = (dy) => { + return { $x: () => Object(tensor_ops["o" /* zerosLike */])(dy) }; + }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.step($x, alpha), { $x }, grad); +} +const abs = Object(operation["a" /* op */])({ abs_ }); +const acos = Object(operation["a" /* op */])({ acos_ }); +const acosh = Object(operation["a" /* op */])({ acosh_ }); +const asin = Object(operation["a" /* op */])({ asin_ }); +const asinh = Object(operation["a" /* op */])({ asinh_ }); +const atan = Object(operation["a" /* op */])({ atan_ }); +const atanh = Object(operation["a" /* op */])({ atanh_ }); +const ceil = Object(operation["a" /* op */])({ ceil_ }); +const clipByValue = Object(operation["a" /* op */])({ clipByValue_ }); +const cos = Object(operation["a" /* op */])({ cos_ }); +const cosh = Object(operation["a" /* op */])({ cosh_ }); +const erf = Object(operation["a" /* op */])({ erf_ }); +const unary_ops_exp = Object(operation["a" /* op */])({ exp_ }); +const expm1 = Object(operation["a" /* op */])({ expm1_ }); +const floor = Object(operation["a" /* op */])({ floor_ }); +const log = Object(operation["a" /* op */])({ log_ }); +const log1p = Object(operation["a" /* op */])({ log1p_ }); +const logSigmoid = Object(operation["a" /* op */])({ logSigmoid_ }); +const neg = Object(operation["a" /* op */])({ neg_ }); +const reciprocal = Object(operation["a" /* op */])({ reciprocal_ }); +const round = Object(operation["a" /* op */])({ round_ }); +const rsqrt = Object(operation["a" /* op */])({ rsqrt_ }); +const sigmoid = Object(operation["a" /* op */])({ sigmoid_ }); +const sign = Object(operation["a" /* op */])({ sign_ }); +const unary_ops_isNaN = Object(operation["a" /* op */])({ isNaN_ }); +const isInf = Object(operation["a" /* op */])({ isInf_ }); +const unary_ops_isFinite = Object(operation["a" /* op */])({ isFinite_ }); +const sin = Object(operation["a" /* op */])({ sin_ }); +const sinh = Object(operation["a" /* op */])({ sinh_ }); +const softplus = 
Object(operation["a" /* op */])({ softplus_ }); +const sqrt = Object(operation["a" /* op */])({ sqrt_ }); +const unary_ops_step = Object(operation["a" /* op */])({ step_ }); +const tan = Object(operation["a" /* op */])({ tan_ }); +const tanh = Object(operation["a" /* op */])({ tanh_ }); +//# sourceMappingURL=unary_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Atan2_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + + +const atan2GradConfig = { + kernelName: kernel_names["c" /* Atan2 */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const d = add(square(a), square(b)); + let res = mul(dy, div(b, d)); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, a.shape); + }; + const derB = () => { + const d = add(square(a), square(b)); + let res = neg(mul(dy, div(a, d))); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Atan2_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') { + const [filterHeight, filterWidth] = parseTupleParam(filterSize); + let filterShape; + if (dataFormat === 'channelsLast') { + filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]]; + } + else if (dataFormat === 'channelsFirst') { + filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]]; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat); +} +/** + * Computes the information for a forward pass of a pooling3D operation. + */ +function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') { + const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize); + let filterShape; + let $dataFormat; + if (dataFormat === 'NDHWC') { + $dataFormat = 'channelsLast'; + filterShape = + [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]]; + } + else if (dataFormat === 'NCDHW') { + $dataFormat = 'channelsFirst'; + filterShape = + [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]]; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode); +} +/** + * Computes the information for a forward pass of a convolution/pooling + * operation. 
+ */ +function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') { + let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1]; + if (dataFormat === 'channelsLast') { + [batchSize, inHeight, inWidth, inChannels] = inShape; + } + else if (dataFormat === 'channelsFirst') { + [batchSize, inChannels, inHeight, inWidth] = inShape; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + const [filterHeight, filterWidth, , filterChannels] = filterShape; + const [strideHeight, strideWidth] = parseTupleParam(strides); + const [dilationHeight, dilationWidth] = parseTupleParam(dilations); + const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight); + const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth); + const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat); + const outChannels = depthwise ? filterChannels * inChannels : filterChannels; + let outShape; + if (dataFormat === 'channelsFirst') { + outShape = [batchSize, outChannels, outHeight, outWidth]; + } + else if (dataFormat === 'channelsLast') { + outShape = [batchSize, outHeight, outWidth, outChannels]; + } + return { + batchSize, + dataFormat, + inHeight, + inWidth, + inChannels, + outHeight, + outWidth, + outChannels, + padInfo, + strideHeight, + strideWidth, + filterHeight, + filterWidth, + effectiveFilterHeight, + effectiveFilterWidth, + dilationHeight, + dilationWidth, + inShape, + outShape, + filterShape + }; +} +/** + * Computes the information for a forward pass of a 3D convolution/pooling + * operation. 
+ */ +function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) { + let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1]; + if (dataFormat === 'channelsLast') { + [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape; + } + else if (dataFormat === 'channelsFirst') { + [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } + const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape; + const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides); + const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations); + const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth); + const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight); + const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth); + const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode); + const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels; + let outShape; + if (dataFormat === 'channelsFirst') { + outShape = [batchSize, outChannels, outDepth, outHeight, outWidth]; + } + else if (dataFormat === 'channelsLast') { + outShape = [batchSize, outDepth, outHeight, outWidth, outChannels]; + } + return { + batchSize, + dataFormat, + inDepth, + inHeight, + inWidth, + inChannels, + outDepth, + outHeight, + outWidth, + outChannels, + padInfo, + strideDepth, + strideHeight, + strideWidth, + filterDepth, + filterHeight, + filterWidth, + effectiveFilterDepth, + effectiveFilterHeight, + effectiveFilterWidth, + dilationDepth, + dilationHeight, + dilationWidth, + inShape, + outShape, + filterShape + }; +} +function computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) { + if (zeroPad == null) { + zeroPad = computeDefaultPad(inShape, fieldSize, stride); + } + const inputRows = inShape[0]; + const inputCols = inShape[1]; + const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` + + `Change the stride and/or zero pad parameters`); + return [outputRows, outputCols]; +} +function computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) { + if (zeroPad == null) { + zeroPad = computeDefaultPad(inShape, fieldSize, stride); + } + const inputDepth = inShape[0]; + const inputRows = inShape[1]; + const inputCols = inShape[2]; + const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode); + util["assert"](util["isInt"](outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` + + `Change the stride and/or zero pad parameters`); + return [outputDepths, outputRows, outputCols, outChannels]; +} +function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) { + const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation); + return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2); +} +function parseTupleParam(param) { + if (typeof param === 'number') { + return [param, param, param]; + } + if (param.length === 2) { + return [param[0], param[1], 1]; + } + return param; +} +function parse3TupleParam(param) { + return typeof param === 'number' ? 
[param, param, param] : param; +} +/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d + * Atrous convolution is equivalent to standard convolution with upsampled + * filters with effective_filter_height = + * filter_height + (filter_height - 1) * (dilation - 1) + * and effective_filter_width = + * filter_width + (filter_width - 1) * (dilation - 1), + * produced by inserting dilation - 1 zeros along consecutive elements across + * the filters' spatial dimensions. + * When there is a dilation, this converts a filter dimension to the + * effective filter dimension, so it can be used in a standard convolution. + */ +function getEffectiveFilterSize(filterSize, dilation) { + if (dilation <= 1) { + return filterSize; + } + return filterSize + (filterSize - 1) * (dilation - 1); +} +function getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) { + let padInfo; + let outHeight; + let outWidth; + if (typeof pad === 'number') { + const padType = (pad === 0) ? 
'VALID' : 'NUMBER'; + padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType }; + const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode); + outHeight = outShape[0]; + outWidth = outShape[1]; + } + else if (pad === 'same') { + outHeight = Math.ceil(inHeight / strideHeight); + outWidth = Math.ceil(inWidth / strideWidth); + const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight); + const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth); + const top = Math.floor(padAlongHeight / 2); + const bottom = padAlongHeight - top; + const left = Math.floor(padAlongWidth / 2); + const right = padAlongWidth - left; + padInfo = { top, bottom, left, right, type: 'SAME' }; + } + else if (pad === 'valid') { + padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' }; + outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight); + outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth); + } + else if (typeof pad === 'object') { + const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0]; + const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1]; + const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0]; + const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1]; + const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ? 
+ 'VALID' : + 'EXPLICIT'; + padInfo = { top, bottom, left, right, type: padType }; + outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode); + outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode); + } + else { + throw Error(`Unknown padding parameter: ${pad}`); + } + return { padInfo, outHeight, outWidth }; +} +function get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) { + let padInfo; + let outDepth; + let outHeight; + let outWidth; + if (typeof pad === 'number') { + const padType = (pad === 0) ? 'VALID' : 'NUMBER'; + padInfo = { + top: pad, + bottom: pad, + left: pad, + right: pad, + front: pad, + back: pad, + type: padType + }; + const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode); + outDepth = outShape[0]; + outHeight = outShape[1]; + outWidth = outShape[2]; + } + else if (pad === 'same') { + outDepth = Math.ceil(inDepth / strideDepth); + outHeight = Math.ceil(inHeight / strideHeight); + outWidth = Math.ceil(inWidth / strideWidth); + const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth; + const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight; + const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth; + const front = Math.floor(padAlongDepth / 2); + const back = padAlongDepth - front; + const top = Math.floor(padAlongHeight / 2); + const bottom = padAlongHeight - top; + const left = Math.floor(padAlongWidth / 2); + const right = padAlongWidth - left; + padInfo = { top, bottom, left, right, front, back, type: 'SAME' }; + } + else if (pad === 'valid') { + padInfo = { + top: 0, + bottom: 0, + left: 0, + right: 0, + front: 0, + back: 0, + type: 'VALID' + }; + outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth); + outHeight = 
Math.ceil((inHeight - filterHeight + 1) / strideHeight); + outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth); + } + else { + throw Error(`Unknown padding parameter: ${pad}`); + } + return { padInfo, outDepth, outHeight, outWidth }; +} +/** + * Rounds a value depending on the rounding mode + * @param value + * @param roundingMode + */ +function conditionalRound(value, roundingMode) { + if (!roundingMode) { + return value; + } + switch (roundingMode) { + case 'round': + // used for Caffe Conv + return Math.round(value); + case 'ceil': + // used for Caffe Pool + return Math.ceil(value); + case 'floor': + return Math.floor(value); + default: + throw new Error(`Unknown roundingMode ${roundingMode}`); + } +} +function tupleValuesAreOne(param) { + const [dimA, dimB, dimC] = parseTupleParam(param); + return dimA === 1 && dimB === 1 && dimC === 1; +} +function eitherStridesOrDilationsAreOne(strides, dilations) { + return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations); +} +/** + * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to + * 'channelsLast'|'channelsFirst' + * @param dataFormat in 'NHWC'|'NCHW' mode + * @return dataFormat in 'channelsLast'|'channelsFirst' mode + * @throws unknown dataFormat + */ +function convertConv2DDataFormat(dataFormat) { + if (dataFormat === 'NHWC') { + return 'channelsLast'; + } + else if (dataFormat === 'NCHW') { + return 'channelsFirst'; + } + else { + throw new Error(`Unknown dataFormat ${dataFormat}`); + } +} +//# sourceMappingURL=conv_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_3d_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of a 3d avg pool. + * + * @param dy The dy error, of rank 5 of shape + * [batchSize, depth, height, width, channels]. + * assumed. + * @param input The original input image, of rank 5 or rank4 of shape + * [batchSize, depth, height, width, channels]. + * @param filterSize The filter size: + * `[filterDepth, filterHeight, filterWidth]`. + * `filterSize` is a single number, + * then `filterDepth == filterHeight == filterWidth`. + * @param strides The strides of the pooling: + * `[strideDepth, strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param dilations Deprecated, this field will be gone in v3.0.0. The dilation + * rates: `[dilationDepth, dilationHeight, dilationWidth]` + * in which we sample input values across the depth, height and width + * dimensions in dilated pooling. + * Defaults to `[1, 1, 1]`. If `dilations` is a single number, + * then `dilationDepth == dilationHeight == dilationWidth`. + * If it is greater than 1, then all values of `strides` must be 1. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. 
+ */ +function avgPool3dBackprop_(dy, input, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'avgPool3dBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'avgPool3dBackprop'); + let dy5D = $dy; + let input5D = $input; + let reshapedTo5D = false; + if ($input.rank === 4) { + reshapedTo5D = true; + dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]); + input5D = reshape($input, [ + 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3] + ]); + } + util["assert"](dy5D.rank === 5, () => `Error in avgPool3dBackprop: dy must be rank 5 but got rank ` + + `${dy5D.rank}.`); + util["assert"](input5D.rank === 5, () => `Error in avgPool3dBackprop: input must be rank 5 but got rank ` + + `${input5D.rank}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3dBackprop: Either strides or dilations ' + + `must be 1. 
Got strides ${strides} and dilations '${dilations}'`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` + + `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode); + return backend.avgPool3dBackprop(dy5D, input5D, convInfo); + }; + const inputs = { dy: dy5D, input: input5D }; + const attrs = { filterSize, strides, dilations, pad, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["f" /* AvgPool3DBackprop */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const avgPool3dBackprop = Object(operation["a" /* op */])({ avgPool3dBackprop_ }); +//# sourceMappingURL=avg_pool_3d_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const avgPool3DGradConfig = { + kernelName: kernel_names["e" /* AvgPool3D */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? [1, 1, 1] : dilations; + return { + x: () => avgPool3dBackprop(dy, x, filterSize, strides, $dilations, pad, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=AvgPool3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of an 2D avg pool. + * + * @param dy The dy error, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param input The input image, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param filterSize The filter size: `[filterHeight, filterWidth]`. If + * `filterSize` is a single number, then `filterHeight == filterWidth`. + * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. 
If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + */ +function avgPoolBackprop_(dy, input, filterSize, strides, pad) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'avgPoolBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'avgPoolBackprop'); + util["assert"]($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`); + let input4D = $input; + let dy4D = $dy; + let reshapedTo4D = false; + if ($input.rank === 3) { + reshapedTo4D = true; + input4D = + reshape($input, [1, $input.shape[0], $input.shape[1], $input.shape[2]]); + dy4D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2]]); + } + util["assert"](dy4D.rank === 4, () => `Error in avgPoolBackprop: dy must be rank 4 but got rank ` + + `${dy4D.rank}.`); + util["assert"](input4D.rank === 4, () => `Error in avgPoolBackprop: input must be rank 4 but got rank ` + + `${input4D.rank}.`); + const forward = backend => { + const convInfo = computePool2DInfo(input4D.shape, filterSize, strides, 1 /* dilations */, pad); + return backend.avgPoolBackprop(dy4D, input4D, convInfo); + }; + const inputs = { dy: dy4D, input: input4D }; + const attrs = { filterSize, strides, pad }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["g" /* AvgPoolBackprop */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const avgPoolBackprop = Object(operation["a" /* op */])({ avgPoolBackprop_ }); +//# sourceMappingURL=avg_pool_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const avgPoolGradConfig = { + kernelName: kernel_names["d" /* AvgPool */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { filterSize, strides, pad } = attrs; + return { + x: () => avgPoolBackprop(dy, x, filterSize, strides, pad) + }; + } +}; +//# sourceMappingURL=AvgPool_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/mat_mul.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the dot product of two matrices, A * B. These must be matrices. 
+ * + * ```js + * const a = tf.tensor2d([1, 2], [1, 2]); + * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * a.matMul(b).print(); // or tf.matMul(a, b) + * ``` + * @param a First matrix in dot product operation. + * @param b Second matrix in dot product operation. + * @param transposeA If true, `a` is transposed before multiplication. + * @param transposeB If true, `b` is transposed before multiplication. + */ +/** @doc {heading: 'Operations', subheading: 'Matrices'} */ +function matMul_(a, b, transposeA = false, transposeB = false) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'matMul'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'matMul'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + util["assert"]($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in matMul: inputs must have the same rank of at least 2, ` + + `got ranks ${$a.rank} and ${$b.rank}.`); + const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1]; + const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2]; + const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2]; + const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1]; + const outerDimsA = $a.shape.slice(0, -2); + const outerDimsB = $b.shape.slice(0, -2); + const batchDimA = util["sizeFromShape"](outerDimsA); + const batchDimB = util["sizeFromShape"](outerDimsB); + util["assert"](util["arraysEqual"](outerDimsA, outerDimsB), () => `Error in matMul: outer dimensions (${outerDimsA}) and (` + + `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` + + `${$b.shape} must match.`); + util["assert"](innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` + + `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` + + `${$b.shape} and transposeA=${transposeA}` + + ` and transposeB=${transposeB} must match.`); + const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]); + const a3D = transposeA ? reshape($a, [batchDimA, innerShapeA, outerShapeA]) : + reshape($a, [batchDimA, outerShapeA, innerShapeA]); + const b3D = transposeB ? reshape($b, [batchDimB, outerShapeB, innerShapeB]) : + reshape($b, [batchDimB, innerShapeB, outerShapeB]); + const forward = (backend, save) => { + save([a3D, b3D]); + return backend.batchMatMul(a3D, b3D, transposeA, transposeB); + }; + const inputs = { a: a3D, b: b3D }; + const attrs = { transposeA, transposeB }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["h" /* BatchMatMul */], attrs); + return reshape(res, outShape); +} +const matMul = Object(operation["a" /* op */])({ matMul_ }); +//# sourceMappingURL=mat_mul.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BatchMatMul_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const batchMatMulGradConfig = { + kernelName: kernel_names["h" /* BatchMatMul */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved, attrs) => { + const [a, b] = saved; + const { transposeA, transposeB } = attrs; + if (!transposeA && !transposeB) { + return { + a: () => matMul(dy, b, false, true), + b: () => matMul(a, dy, true, false) + }; + } + else if (!transposeA && transposeB) { + return { + a: () => matMul(dy, b, false, false), + b: () => matMul(dy, a, true, false) + }; + } + else if (transposeA && !transposeB) { + return { + a: () => matMul(b, dy, false, true), + b: () => matMul(a, dy, false, false) + }; + } + else { + return { + a: () => matMul(b, dy, true, true), + b: () => matMul(dy, a, true, true) + }; + } + } +}; +//# sourceMappingURL=BatchMatMul_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/space_to_batch_nd.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * This operation divides "spatial" dimensions `[1, ..., M]` of the input into + * a grid of blocks of shape `blockShape`, and interleaves these blocks with + * the "batch" dimension (0) such that in the output, the spatial + * dimensions `[1, ..., M]` correspond to the position within the grid, + * and the batch dimension combines both the position within a spatial block + * and the original batch position. Prior to division into blocks, + * the spatial dimensions of the input are optionally zero padded + * according to `paddings`. See below for a precise description. + * + * ```js + * const x = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]); + * const blockShape = [2, 2]; + * const paddings = [[0, 0], [0, 0]]; + * + * x.spaceToBatchND(blockShape, paddings).print(); + * ``` + * + * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape + + * remainingShape`, where spatialShape has `M` dimensions. + * @param blockShape A 1-D array. Must have shape `[M]`, all values must + * be >= 1. + * @param paddings A 2-D array. Must have shape `[M, 2]`, all values must be >= + * 0. `paddings[i] = [padStart, padEnd]` specifies the amount to zero-pad + * from input dimension `i + 1`, which corresponds to spatial dimension `i`. It + * is required that + * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0` + * + * This operation is equivalent to the following steps: + * + * 1. Zero-pad the start and end of dimensions `[1, ..., M]` of the input + * according to `paddings` to produce `padded` of shape paddedShape. + * + * 2. Reshape `padded` to `reshapedPadded` of shape: + * `[batch] + [paddedShape[1] / blockShape[0], blockShape[0], ..., + * paddedShape[M] / blockShape[M-1], blockShape[M-1]] + remainingShape` + * + * 3. 
Permute dimensions of `reshapedPadded` to produce `permutedReshapedPadded` + * of shape: `blockShape + [batch] + [paddedShape[1] / blockShape[0], ..., + * paddedShape[M] / blockShape[M-1]] + remainingShape` + * + * 4. Reshape `permutedReshapedPadded` to flatten `blockShape` into the + * batch dimension, producing an output tensor of shape: + * `[batch * prod(blockShape)] + [paddedShape[1] / blockShape[0], ..., + * paddedShape[M] / blockShape[M-1]] + remainingShape` + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function spaceToBatchND_(x, blockShape, paddings) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'spaceToBatchND'); + util["assert"]($x.rank >= 1 + blockShape.length, () => `input rank ${$x.rank} should be > than [blockShape] ${blockShape.length}`); + util["assert"](paddings.length === blockShape.length, () => `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${blockShape.length}`); + util["assert"]($x.shape.reduce((a, b, i) => { + if (i > 0 && i <= blockShape.length) { + return a && + ((b + paddings[i - 1][0] + paddings[i - 1][1]) % + blockShape[i - 1] === + 0); + } + return a; + }, true), () => `input spatial dimensions ${$x.shape.slice(1)} with paddings ${paddings.toString()} must be divisible by blockShapes ${blockShape.toString()}`); + const forward = backend => backend.spaceToBatchND($x, blockShape, paddings); + const inputs = { x: $x }; + const attrs = { blockShape, paddings }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["mb" /* SpaceToBatchND */], attrs); +} +const spaceToBatchND = Object(operation["a" /* op */])({ spaceToBatchND_ }); +//# sourceMappingURL=space_to_batch_nd.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BatchToSpaceND_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const batchToSpaceNDGradConfig = { + kernelName: kernel_names["i" /* BatchToSpaceND */], + gradFunc: (dy, saved, attrs) => { + const { blockShape, crops } = attrs; + return { x: () => spaceToBatchND(dy, blockShape, crops) }; + } +}; +//# sourceMappingURL=BatchToSpaceND_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/BroadcastTo_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const broadcastToGradConfig = { + kernelName: kernel_names["j" /* BroadcastTo */], + gradFunc: (dy, saved, attrs) => { + const broadCastToAttrs = attrs; + const inputShape = broadCastToAttrs.inputShape; + const outputShape = broadCastToAttrs.shape; + const reps = Array.from(outputShape); + for (let i = inputShape.length - 1; i >= 0; i--) { + if (inputShape[i] === outputShape[i]) { + reps[i] = 1; + } + else if (inputShape[i] !== 1) { + throw new Error(`broadcastTo(): [${inputShape}] cannot be broadcast to [${outputShape}].`); + } + } + const axes = []; + for (let i = 0; i < reps.length; i++) { + if (reps[i] > 1) { + axes.push(i); + } + } + return { x: () => sum(dy, axes, true /* keepDims */) }; + } +}; +//# sourceMappingURL=BroadcastTo_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/split.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Splits a `tf.Tensor` into sub tensors. + * + * If `numOrSizeSplits` is a number, splits `x` along dimension `axis` + * into `numOrSizeSplits` smaller tensors. + * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`. + * + * If `numOrSizeSplits` is a number array, splits `x` into + * `numOrSizeSplits.length` pieces. 
The shape of the `i`-th piece has the + * same size as `x` except along dimension `axis` where the size is + * `numOrSizeSplits[i]`. + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]); + * const [a, b] = tf.split(x, 2, 1); + * a.print(); + * b.print(); + * + * const [c, d, e] = tf.split(x, [1, 2, 1], 1); + * c.print(); + * d.print(); + * e.print(); + * ``` + * + * @param x The input tensor to split. + * @param numOrSizeSplits Either an integer indicating the number of + * splits along the axis or an array of integers containing the sizes of + * each output tensor along the axis. If a number then it must evenly divide + * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`. + * @param axis The dimension along which to split. Defaults to 0 (the first + * dim). + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function split_(x, numOrSizeSplits, axis = 0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'split'); + const $axis = Object(util["parseAxisParam"])(axis, $x.shape)[0]; + let splitSizes; + if (typeof (numOrSizeSplits) === 'number') { + Object(util["assert"])($x.shape[$axis] % numOrSizeSplits === 0, () => 'Number of splits must evenly divide the axis.'); + splitSizes = + new Array(numOrSizeSplits).fill($x.shape[$axis] / numOrSizeSplits); + } + else { + Object(util["assert"])($x.shape[$axis] === numOrSizeSplits.reduce((a, b) => a + b), () => 'The sum of sizes must match the size of the axis dimension.'); + splitSizes = numOrSizeSplits; + } + const forward = (backend, _) => { + return backend.split($x, splitSizes, $axis); + }; + const inputs = { x: $x }; + const attr = { numOrSizeSplits, axis }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["nb" /* SplitV */], attr); +} +const split = Object(operation["a" /* op */])({ split_ }); +//# sourceMappingURL=split.js.map +// CONCATENATED MODULE: 
./node_modules/@tensorflow/tfjs-core/dist/gradients/Concat_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + +const concatGradConfig = { + kernelName: kernel_names["l" /* Concat */], + saveAllInputs: true, + gradFunc: (dy, saved, attrs) => { + const shapes = saved.map(t => t.shape); + const { axis } = attrs; + const $axis = Object(util["parseAxisParam"])(axis, saved[0].shape)[0]; + const sizeSplits = shapes.map(s => s[$axis]); + const derTensors = split(dy, sizeSplits, $axis); + return derTensors.map(t => () => t); + } +}; +//# sourceMappingURL=Concat_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the filter of a 2D convolution. + * + * @param x The input tensor, of rank 4 or rank 3 of shape + * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed. + * @param dy The dy image, of rank 4 or rank 3, of shape + * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed. + * @param filterShape The shape of the filter, length 4, + * [filterHeight, filterWidth, inDepth, outDepth]. + * @param strides The strides of the convolution: [strideHeight, + * strideWidth]. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) { + let x4D = x; + if (x.rank === 3) { + x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]); + } + let dy4D = dy; + if (dy4D.rank === 3) { + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + util["assert"](x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` + + `${x4D.shape}.`); + util["assert"](dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` + + `${dy4D.shape}.`); + util["assert"](filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` + + `${filterShape}.`); + const inDepth = dataFormat === 'NHWC' ? 
x4D.shape[3] : x4D.shape[1]; + const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1]; + util["assert"](inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` + + `match input depth in filter (${filterShape[2]}.`); + util["assert"](outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` + + `match output depth for filter (${filterShape[3]}).`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const dilations = 1; + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + return backend.conv2dDerFilter(x4D, dy4D, convInfo); + }; + const inputs = { x: x4D, dy: dy4D }; + const attrs = { strides, pad, dataFormat, dimRoundingMode }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["n" /* Conv2DBackpropFilter */], attrs); +} +const conv2DBackpropFilter = Object(operation["a" /* op */])({ conv2DBackpropFilter_ }); +//# sourceMappingURL=conv2d_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the input of a 2D convolution. + * + * @param xShape The shape of the input: [batch, height, width, inDepth]. + * If length of 3, batch of 1 is assumed. + * @param dy The derivative of the output, of rank 4 or rank 3 of shape + * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is + * assumed. + * @param filter The filter, rank 4, of shape + * `[filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm used: + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dimRoundingMode The rounding mode used when computing output + * dimensions if pad is a number. If none is provided, it will not round + * and error if the output is of fractional size. 
+ */ +function conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) { + util["assert"](xShape.length === dy.rank, () => `Length of inShape ` + + `(${xShape.length}) and rank of dy (${dy.rank}) must match`); + let xShape4D = xShape; + let dy4D = dy; + let reshapedTo4D = false; + if (dy.rank === 3) { + reshapedTo4D = true; + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + xShape4D = [1, xShape[0], xShape[1], xShape[2]]; + } + util["assert"](xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` + + `${xShape4D.length}.`); + util["assert"](dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` + + `rank ${dy4D.rank}`); + util["assert"](filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` + + `rank ${filter.rank}`); + const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1]; + const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1]; + util["assert"](inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` + + `match input depth for filter ${filter.shape[2]}.`); + util["assert"](outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` + + `match output depth for filter ${filter.shape[3]}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = (backend, save) => { + const dilations = 1; + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + const res = backend.conv2dDerInput(dy4D, filter, convInfo); + save([dy4D, filter]); + return res; + }; + const inputs = { dy: dy4D, filter }; + const attrs = { strides, pad, 
dataFormat, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["o" /* Conv2DBackpropInput */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const conv2DBackpropInput = Object(operation["a" /* op */])({ conv2DBackpropInput_ }); +//# sourceMappingURL=conv2d_backprop_input.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const conv2DGradConfig = { + kernelName: kernel_names["m" /* Conv2D */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const [x4D, $filter] = saved; + const { dilations, strides, pad, dataFormat } = attrs; + util["assert"](tupleValuesAreOne(dilations), () => 'Error in gradient of conv2D: dilation rates greater than 1 ' + + `are not yet supported in gradients. 
Got dilations '${dilations}'`); + return { + x: () => conv2DBackpropInput(x4D.shape, dy, $filter, strides, pad, dataFormat), + filter: () => conv2DBackpropFilter(x4D, dy, $filter.shape, strides, pad, dataFormat) + }; + } +}; +//# sourceMappingURL=Conv2D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv2d.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes a 2D convolution over the input x. + * + * @param x The input tensor, of rank 4 or rank 3, of shape + * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is + * assumed. + * @param filter The filter, rank 4, of shape + * `[filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm. + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. + * - For more info, see this guide: + * [https://www.tensorflow.org/api_guides/python/nn#Convolution]( + * https://www.tensorflow.org/api_guides/python/nn#Convolution) + * @param dataFormat: An optional string from: "NHWC", "NCHW". Defaults to + * "NHWC". 
Specify the data format of the input and output data. With the + * default format "NHWC", the data is stored in the order of: [batch, + * height, width, channels]. + * @param dilations The dilation rates: `[dilationHeight, dilationWidth]` + * in which we sample input values across the height and width dimensions + * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single + * number, then `dilationHeight == dilationWidth`. If it is greater than + * 1, then all values of `strides` must be 1. + * @param dimRoundingMode The rounding mode used when computing output + * dimensions if pad is a number. If none is provided, it will not round + * and error if the output is of fractional size. + */ +/** @doc {heading: 'Operations', subheading: 'Convolution'} */ +function conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'conv2d'); + const $filter = Object(tensor_util_env["a" /* convertToTensor */])(filter, 'filter', 'conv2d'); + let x4D = $x; + let reshapedTo4D = false; + if ($x.rank === 3) { + reshapedTo4D = true; + x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]); + } + util["assert"](x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`); + util["assert"]($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` + + `${$filter.rank}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in conv2d: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const inDepth = dataFormat === 'NHWC' ? 
x4D.shape[3] : x4D.shape[1]; + util["assert"](inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` + + `input depth for filter ${$filter.shape[2]}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' + + `Got strides ${strides} and dilations '${dilations}'`); + const forward = (backend, save) => { + const $dataFormat = convertConv2DDataFormat(dataFormat); + const convInfo = computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat); + const res = backend.conv2d(x4D, $filter, convInfo); + save([x4D, $filter]); + return res; + }; + const inputs = { x: x4D, filter: $filter }; + const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["m" /* Conv2D */], attrs); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const conv2d = Object(operation["a" /* op */])({ conv2d_ }); +//# sourceMappingURL=conv2d.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2DBackpropInput_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +const conv2DBackpropInputGradConfig = { + kernelName: kernel_names["o" /* Conv2DBackpropInput */], + inputsToSave: ['dy', 'filter'], + gradFunc: (ddx, saved, attrs) => { + const [dy, filter] = saved; + const { strides, pad, dataFormat, dimRoundingMode } = attrs; + return { + dy: () => conv2d(ddx, filter, strides, pad, dataFormat, 1 /* dilations */, dimRoundingMode), + filter: () => conv2DBackpropFilter(ddx, dy, filter.shape, strides, pad, dataFormat, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=Conv2DBackpropInput_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the filter of a 3D convolution. + * + * @param x The input tensor, of rank 5 or rank 4 of shape + * [batch, depth, height, width, inChannels]. If rank 4, batch of 1 is + * assumed. + * @param dy The dy image, of rank 5 or rank 4, of shape + * [batch, depth, height, width, outDepth]. If rank 4, batch of 1 is + * assumed. + * @param filterShape The shape of the filter, length 5, + * [filterDepth, filterHeight, filterWidth, inDepth, outDepth]. 
+ * @param strides The strides of the convolution: [strideDepth, strideHeight, + * strideWidth]. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + */ +function conv3DBackpropFilter_(x, dy, filterShape, strides, pad) { + let x5D = x; + if (x.rank === 4) { + x5D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2], x.shape[3]]); + } + let dy5D = dy; + if (dy5D.rank === 4) { + dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]); + } + util["assert"](x5D.rank === 5, () => `Error in conv3dDerFilter: input must be rank 5, but got shape ` + + `${x5D.shape}.`); + util["assert"](dy5D.rank === 5, () => `Error in conv3dDerFilter: dy must be rank 5, but got shape ` + + `${dy5D.shape}.`); + util["assert"](filterShape.length === 5, () => `Error in conv3dDerFilter: filterShape must be length 5, but got ` + + `${filterShape}.`); + util["assert"](x5D.shape[4] === filterShape[3], () => `Error in conv3dDerFilter: depth of input ${x5D.shape[4]}) must ` + + `match input depth in filter (${filterShape[3]}.`); + util["assert"](dy5D.shape[4] === filterShape[4], () => `Error in conv3dDerFilter: depth of dy (${dy5D.shape[4]}) must ` + + `match output depth for filter (${filterShape[4]}).`); + const forward = backend => { + const dilations = 1; + const convInfo = computeConv3DInfo(x5D.shape, filterShape, strides, dilations, pad); + return backend.conv3dDerFilter(x5D, dy5D, convInfo); + }; + const inputs = { x: x5D, y: dy5D }; + const attrs = { strides, pad }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["q" /* Conv3DBackpropFilterV2 */], attrs); +} +const conv3DBackpropFilter = Object(operation["a" /* op */])({ conv3DBackpropFilter_ }); +//# sourceMappingURL=conv3d_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the derivative of the input of a 3D convolution. + * + * @param xShape The shape of the input: [batch, depth, height, width, + * in_channels]. If length of 4, batch of 1 is assumed. + * @param dy The derivative of the output, of rank 5 or rank 4 of shape + * `[batch, outDepth, outHeight, outWidth, in_channels]`. + * If rank 4, batch of 1 is assumed. + * @param filter The filter, rank 5, of shape + * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`. + * @param strides The strides of the convolution: `[strideDepth, strideHeight, + * strideWidth]`. + * @param pad The type of padding algorithm used: + * - `same` and stride 1: output will be of same size as input, + * regardless of filter size. + * - `valid`: output will be smaller than input if filter is larger + * than 1x1. 
+ */ +function conv3DBackpropInput_(xShape, dy, filter, strides, pad) { + util["assert"](xShape.length === dy.rank, () => `Length of inShape ` + + `(${xShape.length}) and rank of dy (${dy.rank}) must match`); + let xShape5D = xShape; + let dy5D = dy; + let reshapedTo5D = false; + if (dy.rank === 4) { + reshapedTo5D = true; + dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]); + xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]]; + } + const inDepth = xShape5D[4]; + const outDepth = dy5D.shape[4]; + util["assert"](xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` + + `${xShape5D.length}.`); + util["assert"](dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` + + `rank ${dy5D.rank}`); + util["assert"](filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` + + `rank ${filter.rank}`); + util["assert"](inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` + + `match input depth for filter ${filter.shape[3]}.`); + util["assert"](outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` + + `match output depth for filter ${filter.shape[4]}.`); + const forward = backend => { + const dilations = 1; + const convInfo = computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad); + return backend.conv3dDerInput(dy5D, filter, convInfo); + }; + const inputs = { dy: dy5D }; + const attrs = { pad }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["r" /* Conv3DBackpropInputV2 */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const conv3DBackpropInput = Object(operation["a" /* op */])({ conv3DBackpropInput_ }); +//# sourceMappingURL=conv3d_backprop_input.js.map +// CONCATENATED MODULE: 
./node_modules/@tensorflow/tfjs-core/dist/gradients/Conv3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const conv3DGradConfig = { + kernelName: kernel_names["p" /* Conv3D */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const { dilations, strides, pad } = attrs; + util["assert"](tupleValuesAreOne(dilations), () => 'Error in gradient of conv3D: dilation rates greater than 1 are ' + + `not yet supported in gradients. Got dilations '${dilations}'`); + const [x5D, $filter] = saved; + return { + x: () => conv3DBackpropInput(x5D.shape, dy, $filter, strides, pad), + filter: () => conv3DBackpropFilter(x5D, dy, $filter.shape, strides, pad) + }; + } +}; +//# sourceMappingURL=Conv3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/transpose.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Transposes the `tf.Tensor`. Permutes the dimensions according to `perm`. + * + * The returned `tf.Tensor`'s dimension `i` will correspond to the input + * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`, + * where `n` is the rank of the input `tf.Tensor`. Hence by default, this + * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s. + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]); + * + * a.transpose().print(); // or tf.transpose(a) + * ``` + * + * @param x The tensor to transpose. + * @param perm The permutation of the dimensions of a. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Matrices'} */ +function transpose_(x, perm) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'transpose'); + if (perm == null) { + perm = $x.shape.map((s, i) => i).reverse(); + } + util["assert"]($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} ` + + `must match length of perm ${perm}.`); + perm.forEach(axis => { + util["assert"](axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1}` + + ` but got ${perm}`); + }); + if ($x.rank <= 1) { + return $x.clone(); + } + const attrs = { perm }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.transpose($x, perm), { x: $x }, null /* gradient */, 'Transpose', attrs); +} +const transpose = Object(operation["a" /* op */])({ transpose_ }); +//# sourceMappingURL=transpose.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/cumsum.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the cumulative sum of a `tf.Tensor` along `axis`. + * + * ```js + * const x = tf.tensor([1, 2, 3, 4]); + * x.cumsum().print(); + * ``` + * ```js + * const x = tf.tensor([[1, 2], [3, 4]]); + * x.cumsum().print(); + * ``` + * + * @param x The input tensor to be summed. 
+ * @param axis The axis along which to sum. Optional. Defaults to 0. + * @param exclusive Whether to perform exclusive cumulative sum. Optional. + * Defaults to false. If set to true then the sum of each tensor entry + * does not include its own value, but only the values previous to it + * along the specified axis. + * @param reverse Whether to sum in the opposite direction. Optional. + * Defaults to false. + */ +/** @doc {heading: 'Operations', subheading: 'Scan'} */ +function cumsum_(x, axis = 0, exclusive = false, reverse = false) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'cumsum'); + const forward = (backend, save) => { + const permutation = getAxesPermutation([axis], $x.rank); + let permutedX = $x; + if (permutation != null) { + permutedX = transpose($x, permutation); + } + const permutedAxis = getInnerMostAxes(1, $x.rank)[0]; + let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse); + save([$x]); + if (permutation != null) { + value = transpose(value, permutation); + } + return value; + }; + const inputs = { x: $x }; + const attrs = { axis, exclusive, reverse }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["s" /* Cumsum */], attrs); +} +const cumsum = Object(operation["a" /* op */])({ cumsum_ }); +//# sourceMappingURL=cumsum.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Cumsum_grad.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const cumsumGradConfig = { + kernelName: kernel_names["s" /* Cumsum */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { axis, exclusive, reverse } = attrs; + return { + x: () => { + const permutation = getAxesPermutation([axis], x.rank); + let out = cumsum(dy, axis, exclusive, !reverse); + if (permutation != null) { + out = transpose(out, permutation); + } + return out; + } + }; + } +}; +//# sourceMappingURL=Cumsum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_filter.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +function depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, convInfo) { + let x4D = x; + if (x.rank === 3) { + x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]); + } + let dy4D = dy; + if (dy4D.rank === 3) { + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + const forward = backend => backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo); + const inputs = { x: x4D, dy: dy4D }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["v" /* DepthwiseConv2dNativeBackpropFilter */]); +} +const depthwiseConv2dNativeBackpropFilter = Object(operation["a" /* op */])({ depthwiseConv2dNativeBackpropFilter_ }); +//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_input.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +function depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, convInfo) { + let dy4D = dy; + let reshapedTo4D = false; + if (dy.rank === 3) { + reshapedTo4D = true; + dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]); + } + const forward = backend => backend.depthwiseConv2DDerInput(dy4D, filter, convInfo); + const inputs = { dy: dy4D }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["w" /* DepthwiseConv2dNativeBackpropInput */]); + if (reshapedTo4D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]); + } + return res; +} +const depthwiseConv2dNativeBackpropInput = Object(operation["a" /* op */])({ depthwiseConv2dNativeBackpropInput_ }); +//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/DepthwiseConv2dNative_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const depthwiseConv2dNativeGradConfig = { + kernelName: kernel_names["u" /* DepthwiseConv2dNative */], + inputsToSave: ['x', 'filter'], + gradFunc: (dy, saved, attrs) => { + const { dilations, strides, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? 
[1, 1] : dilations; + util["assert"](tupleValuesAreOne($dilations), () => 'Error in gradient of depthwiseConv2dNative: dilation rates ' + + `greater than 1 are not yet supported. Got dilations ` + + `'${$dilations}'`); + const [x, filter] = saved; + util["assert"](x.rank === 4, () => `Error in gradient of depthwiseConv2dNative: input must be ` + + `rank 4, but got rank ${x.rank}.`); + util["assert"](filter.rank === 4, () => `Error in gradient of depthwiseConv2dNative: filter must be ` + + `rank 4, but got rank ${filter.rank}.`); + util["assert"](x.shape[3] === filter.shape[2], () => `Error in gradient of depthwiseConv2d: number of input ` + + `channels (${x.shape[3]}) must match the inChannels dimension ` + + `in filter ${filter.shape[2]}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in gradient of depthwiseConv2d: Either strides or ' + + `dilations must be 1. Got strides ${strides} and dilations ` + + `'${$dilations}'.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const convInfo = computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */); + return { + x: () => depthwiseConv2dNativeBackpropInput(x.shape, dy, filter, convInfo), + filter: () => depthwiseConv2dNativeBackpropFilter(x, dy, filter.shape, convInfo), + }; + } +}; +//# sourceMappingURL=DepthwiseConv2dNative_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Div_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +const divGradConfig = { + kernelName: kernel_names["y" /* Div */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = div(dy, b.toFloat()); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return sum(res, reduceAxes).reshape(a.shape); + } + return res; + }; + const derB = () => { + let res = mul(dy, a.toFloat()); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = reshape(sum(res, reduceAxes), b.shape); + } + const tmp = square(b); + return neg(div(res, tmp.toFloat())); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Div_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Elu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const eluGradConfig = { + kernelName: kernel_names["z" /* Elu */], + outputsToSave: [true], + gradFunc: (dy, saved) => { + const [y] = saved; + const backPropKernelFunc = (backend) => { + return backend.eluDer(dy, y); + }; + const inputs = { dy, y }; + return { + x: () => engine["a" /* ENGINE */].runKernelFunc(backPropKernelFunc, inputs, null /* grad */, kernel_names["A" /* EluGrad */]) + }; + } +}; +//# sourceMappingURL=Elu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/FloorDiv_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const floorDivGradConfig = { + kernelName: kernel_names["D" /* FloorDiv */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = dy.div(b.toFloat()); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return res.sum(reduceAxes).reshape(a.shape); + } + return res; + }; + const derB = () => { + let res = dy.mul(a.toFloat()); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = res.sum(reduceAxes).reshape(b.shape); + } + const tmp = b.square(); + return res.div(tmp.toFloat()).neg(); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=FloorDiv_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/sub.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([10, 20, 30, 40]); + * const b = tf.tensor1d([1, 2, 3, 4]); + * + * a.sub(b).print(); // or tf.sub(a, b) + * ``` + * + * ```js + * // Broadcast subtract a with b. 
+ * const a = tf.tensor1d([10, 20, 30, 40]); + * const b = tf.scalar(5); + * + * a.sub(b).print(); // or tf.sub(a, b) + * ``` + * @param a The first `tf.Tensor` to subtract from. + * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as + * `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function sub_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'sub'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'sub'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + const forward = (backend, save) => { + const res = backend.subtract($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["qb" /* Sub */]); +} +const sub = Object(operation["a" /* op */])({ sub_ }); +//# sourceMappingURL=sub.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/tile.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Construct a tensor by repeating it the number of times given by reps. + * + * This operation creates a new tensor by replicating `input` `reps` + * times. 
The output tensor's i'th dimension has `input.shape[i] * + * reps[i]` elements, and the values of `input` are replicated + * `reps[i]` times along the i'th dimension. For example, tiling + * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`. + * + * ```js + * const a = tf.tensor1d([1, 2]); + * + * a.tile([2]).print(); // or a.tile([2]) + * ``` + * + * ```js + * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * a.tile([1, 2]).print(); // or a.tile([1, 2]) + * ``` + * @param x The tensor to tile. + * @param reps Determines the number of replications per dimension. + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function tile_(x, reps) { + const parseAs = null; + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'tile', parseAs); + util["assert"]($x.rank === reps.length, () => `Error in transpose: rank of input ${$x.rank} ` + + `must match length of reps ${reps}.`); + const forward = (backend, save) => { + const res = backend.tile($x, reps); + save([$x]); + return res; + }; + const inputsToSave = [$x]; + const inputs = { x: $x }; + const attrs = { reps }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["rb" /* Tile */], attrs, inputsToSave); +} +const tile = Object(operation["a" /* op */])({ tile_ }); +//# sourceMappingURL=tile.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/FusedBatchNorm_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + + + +const fusedBatchNormGradConfig = { + kernelName: kernel_names["F" /* FusedBatchNorm */], + inputsToSave: ['x', 'mean', 'variance', 'scale'], + gradFunc: (dy, saved, attrs) => { + const { varianceEpsilon } = attrs; + const [x, mean, variance, scale] = saved; + const scaleValue = scale == null ? Object(tensor_ops["e" /* scalar */])(1) : scale; + const reductionAxes = getReductionAxes(mean.shape, x.shape); + const tileShape = []; + if (mean.rank === 1) { + for (let i = 0; i < x.shape.length - 1; ++i) { + tileShape.push(x.shape[i]); + } + tileShape.push(1); + } + const xMinusMean = sub(x, mean); + const dyTimesScaleValue = mul(dy, scaleValue); + const oneOverSqrtVariance = rsqrt(add(variance, Object(tensor_ops["e" /* scalar */])(varianceEpsilon))); + const minusHalfRCube = mul(mul(mul(oneOverSqrtVariance, oneOverSqrtVariance), oneOverSqrtVariance), Object(tensor_ops["e" /* scalar */])(-0.5)); + const derX = () => { + if (mean.rank === 1) { + return reshape(mul(mul(dy, tile(oneOverSqrtVariance.as4D(1, 1, 1, mean.shape[0]), tileShape)), scaleValue), x.shape); + } + else { + return reshape(mul(mul(dy, oneOverSqrtVariance), scaleValue), x.shape); + } + }; + const derMean = () => { + let meanDer = mul(mul(oneOverSqrtVariance, Object(tensor_ops["e" /* scalar */])(-1)), dyTimesScaleValue); + if (mean.rank === 1) { + meanDer = sum(meanDer, reductionAxes); + } + return reshape(meanDer, mean.shape); + }; + const derVariance = () => { + let varianceDer = mul(mul(minusHalfRCube, xMinusMean), dyTimesScaleValue); + if (mean.rank === 1) { + varianceDer = sum(varianceDer, reductionAxes); + } + return reshape(varianceDer, mean.shape); + }; + const derScale = () => { + const xMinusMean2TimesRsqrt = mul(xMinusMean, oneOverSqrtVariance); + let scaleDer = mul(dy, 
xMinusMean2TimesRsqrt); + if (mean.rank === 1) { + scaleDer = sum(scaleDer, reductionAxes); + } + return reshape(scaleDer, mean.shape); + }; + const derOffset = () => { + let offsetDer = dy; + if (mean.rank === 1) { + offsetDer = sum(offsetDer, reductionAxes); + } + return reshape(offsetDer, mean.shape); + }; + return { + x: derX, + mean: derMean, + variance: derVariance, + scale: derScale, + offset: derOffset + }; + } +}; +//# sourceMappingURL=FusedBatchNorm_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/GreaterEqual_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const greaterEqualGradConfig = { + kernelName: kernel_names["I" /* GreaterEqual */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + return { a: () => Object(tensor_ops["o" /* zerosLike */])(a), b: () => Object(tensor_ops["o" /* zerosLike */])(b) }; + } +}; +//# sourceMappingURL=GreaterEqual_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Identity_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +const identityGradConfig = { + kernelName: kernel_names["J" /* Identity */], + gradFunc: (dy) => { + return { x: () => dy.toFloat() }; + } +}; +//# sourceMappingURL=Identity_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/local_response_normalization_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +function localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) { + const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta); + const inputs = { x, y, dy }; + const attrs = { depthRadius, bias, alpha, beta }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["M" /* LRNBackprop */], attrs); +} +const localResponseNormalizationBackprop = Object(operation["a" /* op */])({ localResponseNormalizationBackprop_ }); +//# sourceMappingURL=local_response_normalization_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/LRN_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const lrnGradConfig = { + kernelName: kernel_names["L" /* LRN */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { depthRadius, bias, alpha, beta } = attrs; + return { + x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta) + }; + } +}; +//# sourceMappingURL=LRN_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Max_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const maxGradConfig = { + kernelName: kernel_names["P" /* Max */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const maxAttrs = attrs; + const { reductionIndices } = maxAttrs; + const [x, y] = saved; + const origAxes = util["parseAxisParam"](reductionIndices, x.shape); + const permutedAxes = getAxesPermutation(origAxes, x.rank); + const maxGrad = gradForMinAndMax(dy, y, x, origAxes, permutedAxes); + return { + x: () => { + let out = maxGrad['x'](); + if (permutedAxes != null) { + out = transpose(out); + } + return out; + } + }; + } +}; +//# sourceMappingURL=Max_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/greater_equal.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a >= b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.greaterEqual(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function greaterEqual_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'greaterEqual'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'greaterEqual'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = (backend, save) => { + const res = backend.greaterEqual($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["I" /* GreaterEqual */]); +} +const greaterEqual = Object(operation["a" /* op */])({ greaterEqual_ }); +//# sourceMappingURL=greater_equal.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/less.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a < b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.less(b).print(); + * ``` + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function less_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'less'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'less'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = backend => backend.less($a, $b); + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["N" /* Less */]); +} +const less = Object(operation["a" /* op */])({ less_ }); +//# sourceMappingURL=less.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Maximum_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const maximumGradConfig = { + kernelName: kernel_names["V" /* Maximum */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32')); + const derB = () => mul(dy, cast(less(a, b), 'float32')); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Maximum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_3d_backprop.js +/** + * @license + * Copyright 2020 Google Inc. 
All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +/** + * Computes the backprop of a 3d max pool. + * + * @param dy The dy error, of rank 5 of shape + * [batchSize, depth, height, width, channels]. + * assumed. + * @param input The original input image, of rank 5 or rank 4 of shape + * [batchSize, depth, height, width, channels]. + * @param output The original output image, of rank 5 of shape + * [batchSize, outDepth, outHeight, outWidth, channels]. + * @param filterSize The filter size: + * `[filterDepth, filterHeight, filterWidth]`. + * `filterSize` is a single number, + * then `filterDepth == filterHeight == filterWidth`. + * @param strides The strides of the pooling: + * `[strideDepth, strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param dilations Deprecated, this field will be gone in v3.0.0. + * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]` + * in which we sample input values across the depth, height and width + * dimensions in dilated pooling. + * Defaults to `[1, 1, 1]`. If `dilations` is a single number, + * then `dilationDepth == dilationHeight == dilationWidth`. + * If it is greater than 1, then all values of `strides` must be 1. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. 
+ * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'maxPool3dBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'maxPool3dBackprop'); + const $output = Object(tensor_util_env["a" /* convertToTensor */])(output, 'output', 'maxPool3dBackprop'); + let dy5D = $dy; + let input5D = $input; + let output5D = $output; + let reshapedTo5D = false; + if ($input.rank === 4) { + reshapedTo5D = true; + dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]); + input5D = reshape($input, [ + 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3] + ]); + output5D = reshape($output, [ + 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3] + ]); + } + util["assert"](dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` + + `${dy5D.rank}.`); + util["assert"](input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` + + `${input5D.rank}.`); + util["assert"](output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` + + `${output5D.rank}.`); + util["assert"](eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' + + `must be 1. 
Got strides ${strides} and dilations '${dilations}'`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` + + `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode); + return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo); + }; + const inputs = { dy: dy5D, input: input5D, output: output5D }; + const attrs = { filterSize, strides, dilations, pad, dimRoundingMode }; + const res = engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["S" /* MaxPool3DBackprop */], attrs); + if (reshapedTo5D) { + return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]); + } + return res; +} +const maxPool3dBackprop = Object(operation["a" /* op */])({ maxPool3dBackprop_ }); +//# sourceMappingURL=max_pool_3d_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool3D_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const maxPool3DGradConfig = { + kernelName: kernel_names["R" /* MaxPool3D */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs; + const $dilations = dilations == null ? [1, 1, 1] : dilations; + return { + x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode) + }; + } +}; +//# sourceMappingURL=MaxPool3D_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_backprop.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Computes the backprop of a 2D max pool. + * + * @param dy The dy error, of rank 4 or rank 3 of shape + * [batchSize, height, width, channels]. If rank 3, batch of 1 is + * assumed. + * @param input The original input image, of rank 4, of shape + * [batchSize, height, width, channels]. + * @param output The original output image, of rank 4, of shape + * [batchSize, outHeight, outWidth, channels]. + * @param filterSize The filter size: `[filterHeight, filterWidth]`. If + * `filterSize` is a single number, then `filterHeight == filterWidth`. 
+ * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If + * `strides` is a single number, then `strideHeight == strideWidth`. + * @param pad A string from: 'same', 'valid'. The type of padding algorithm + * used in the forward prop of the op. + * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The + * rounding mode used when computing output dimensions if pad is a + * number. If none is provided, it will not round and error if the output + * is of fractional size. + */ +function maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) { + const $dy = Object(tensor_util_env["a" /* convertToTensor */])(dy, 'dy', 'maxPoolBackprop'); + const $input = Object(tensor_util_env["a" /* convertToTensor */])(input, 'input', 'maxPoolBackprop'); + const $output = Object(tensor_util_env["a" /* convertToTensor */])(output, 'output', 'maxPoolBackprop'); + util["assert"]($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` + + `(${$dy.rank})`); + util["assert"]($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` + + `${$dy.rank}.`); + util["assert"]($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` + + `${$input.rank}.`); + if (dimRoundingMode != null) { + util["assert"](util["isInt"](pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` + + `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`); + } + const forward = backend => { + const convInfo = computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode); + return backend.maxPoolBackprop($dy, $input, $output, convInfo); + }; + const inputs = { dy: $dy, input: $input, output: $output }; + const attrs = { filterSize, strides, pad, dimRoundingMode }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null, kernel_names["T" /* MaxPoolBackprop */], attrs); +} +const maxPoolBackprop = 
Object(operation["a" /* op */])({ maxPoolBackprop_ }); +//# sourceMappingURL=max_pool_backprop.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const maxPoolGradConfig = { + kernelName: kernel_names["Q" /* MaxPool */], + inputsToSave: ['x'], + outputsToSave: [true], + gradFunc: (dy, saved, attrs) => { + const [x, y] = saved; + const { filterSize, strides, pad } = attrs; + return { + x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad) + }; + } +}; +//# sourceMappingURL=MaxPool_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/greater.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a > b) element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.greater(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function greater_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'greater'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'greater'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = backend => backend.greater($a, $b); + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["H" /* Greater */]); +} +const greater = Object(operation["a" /* op */])({ greater_ }); +//# sourceMappingURL=greater.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/less_equal.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Returns the truth value of (a <= b) element-wise. Supports broadcasting. 
+ * + * ```js + * const a = tf.tensor1d([1, 2, 3]); + * const b = tf.tensor1d([2, 2, 2]); + * + * a.lessEqual(b).print(); + * ``` + * + * @param a The first input tensor. + * @param b The second input tensor. Must have the same dtype as `a`. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function lessEqual_(a, b) { + let $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'lessEqual'); + let $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'lessEqual'); + [$a, $b] = Object(tensor_util["makeTypesMatch"])($a, $b); + assertAndGetBroadcastShape($a.shape, $b.shape); + const forward = (backend, save) => { + const res = backend.lessEqual($a, $b); + save([$a, $b]); + return res; + }; + const inputs = { a: $a, b: $b }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["O" /* LessEqual */]); +} +const lessEqual = Object(operation["a" /* op */])({ lessEqual_ }); +//# sourceMappingURL=less_equal.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Minimum_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const minimumGradConfig = { + kernelName: kernel_names["W" /* Minimum */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const derA = () => mul(dy, cast(lessEqual(a, b), 'float32')); + const derB = () => mul(dy, cast(greater(a, b), 'float32')); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Minimum_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Mod_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + +const modGradConfig = { + kernelName: kernel_names["X" /* Mod */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(dy, reduceAxes), a.shape); + } + return dy; + }; + const derB = () => { + const res = mul(dy, neg(floor(div(a, b)))); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), b.shape); + } + return res; + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Mod_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Multiply_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const multiplyGradConfig = { + kernelName: kernel_names["Y" /* Multiply */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + const res = mul(dy, cast(b, 'float32')); + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), a.shape); + } + return res; + }; + const derB = () => { + const res = mul(dy, cast(a, 'float32')); + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + return reshape(sum(res, reduceAxes), b.shape); + } + return res; + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Multiply_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/OneHot_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + +const oneHotGradConfig = { + kernelName: kernel_names["cb" /* OneHot */], + inputsToSave: ['indices'], + gradFunc: (dy, saved) => { + const indices = saved[0]; + return { indices: () => Object(tensor_ops["n" /* zeros */])(indices.shape, 'float32') }; + } +}; +//# sourceMappingURL=OneHot_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/PadV2_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +const padV2GradConfig = { + kernelName: kernel_names["db" /* PadV2 */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + // Pad introduces values around the original tensor, so the gradient + // slices the original shape out of the gradient. + const x = saved[0]; + const { paddings } = attrs; + const begin = paddings.map(p => p[0]); + return { x: () => dy.slice(begin, x.shape) }; + } +}; +//# sourceMappingURL=PadV2_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/backends/where_impl.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** An implementation of the Where kernel shared between cpu and webgl */ + +function whereImpl(condShape, condVals) { + const indices = []; + for (let i = 0; i < condVals.length; i++) { + if (condVals[i]) { + indices.push(i); + } + } + const inBuffer = array_ops_buffer(condShape, 'int32'); + const out = array_ops_buffer([indices.length, condShape.length], 'int32'); + for (let i = 0; i < indices.length; i++) { + const loc = inBuffer.indexToLoc(indices[i]); + const offset = i * condShape.length; + out.values.set(loc, offset); + } + return out.toTensor(); +} +//# sourceMappingURL=where_impl.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/logical_ops.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +/** + * Returns the truth value of `NOT x` element-wise. 
+ * + * ```js + * const a = tf.tensor1d([false, true], 'bool'); + * + * a.logicalNot().print(); + * ``` + * + * @param x The input tensor. Must be of dtype 'bool'. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalNot_(x) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'logicalNot', 'bool'); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalNot($x), { $x }); +} +/** + * Returns the truth value of `a AND b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalAnd(b).print(); + * ``` + * + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalAnd_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalAnd', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalAnd', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalAnd($a, $b), { a: $a, b: $b }, null /* grad */, 'LogicalAnd'); +} +/** + * Returns the truth value of `a OR b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalOr(b).print(); + * ``` + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalOr_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalOr', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalOr', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.logicalOr($a, $b), { $a, $b }); +} +/** + * Returns the truth value of `a XOR b` element-wise. Supports broadcasting. + * + * ```js + * const a = tf.tensor1d([false, false, true, true], 'bool'); + * const b = tf.tensor1d([false, true, false, true], 'bool'); + * + * a.logicalXor(b).print(); + * ``` + * + * @param a The first input tensor. Must be of dtype bool. + * @param b The second input tensor. Must be of dtype bool. + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function logicalXor_(a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'logicalXor', 'bool'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'logicalXor', 'bool'); + assertAndGetBroadcastShape($a.shape, $b.shape); + // x ^ y = (x | y) & ~(x & y) + return logicalOr(a, b).logicalAnd(logicalAnd(a, b).logicalNot()); +} +/** + * Returns the elements, either `a` or `b` depending on the `condition`. + * + * If the condition is true, select from `a`, otherwise select from `b`. + * + * ```js + * const cond = tf.tensor1d([false, false, true], 'bool'); + * const a = tf.tensor1d([1 , 2, 3]); + * const b = tf.tensor1d([-1, -2, -3]); + * + * a.where(cond, b).print(); + * ``` + * + * @param condition The input condition. Must be of dtype bool. + * @param a If `condition` is rank 1, `a` may have a higher rank but + * its first dimension must match the size of `condition`. + * @param b A tensor with the same shape and type as `a`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +function where_(condition, a, b) { + const $a = Object(tensor_util_env["a" /* convertToTensor */])(a, 'a', 'where'); + const $b = Object(tensor_util_env["a" /* convertToTensor */])(b, 'b', 'where'); + const $condition = Object(tensor_util_env["a" /* convertToTensor */])(condition, 'condition', 'where', 'bool'); + Object(util["assertShapesMatch"])($a.shape, $b.shape, 'Error in where: '); + if ($condition.rank === 1) { + // If condition rank is 1, then the first dimension must match the size of + // condition. + Object(util["assert"])($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.'); + } + else { + // A must have the same shape as condition. + Object(util["assertShapesMatch"])($condition.shape, $b.shape, 'Error in where: '); + } + // TODO(julianoks): Return null for condition gradient + // when backprop supports it. + const grad = (dy, saved) => { + const [$condition] = saved; + return { + condition: () => Object(tensor_ops["o" /* zerosLike */])($condition).toFloat(), + t: () => dy.mul($condition.cast(dy.dtype)), + e: () => dy.mul($condition.logicalNot().cast(dy.dtype)) + }; + }; + const inputs = { condition: $condition, t: $a, e: $b }; + return engine["a" /* ENGINE */].runKernelFunc((backend, save) => { + const res = backend.select($condition, $a, $b); + save([$condition]); + return res; + }, inputs, grad, kernel_names["kb" /* SelectV2 */]); +} +/** + * Returns the coordinates of true elements of condition. + * + * The coordinates are returned in a 2-D tensor where the first dimension (rows) + * represents the number of true elements, and the second dimension (columns) + * represents the coordinates of the true elements. Keep in mind, the shape of + * the output tensor can vary depending on how many true values there are in + * input. Indices are output in row-major order. 
The resulting tensor has the + * shape `[numTrueElems, condition.rank]`. + * + * This is analogous to calling the python `tf.where(cond)` without an x or y. + * + * ```js + * const cond = tf.tensor1d([false, false, true], 'bool'); + * const result = await tf.whereAsync(cond); + * result.print(); + * ``` + */ +/** @doc {heading: 'Operations', subheading: 'Logical'} */ +async function whereAsync_(condition) { + const $condition = Object(tensor_util_env["a" /* convertToTensor */])(condition, 'condition', 'whereAsync', 'bool'); + const vals = await $condition.data(); + const res = whereImpl($condition.shape, vals); + if (condition !== $condition) { + $condition.dispose(); + } + return res; +} +const logicalAnd = Object(operation["a" /* op */])({ logicalAnd_ }); +const logicalNot = Object(operation["a" /* op */])({ logicalNot_ }); +const logicalOr = Object(operation["a" /* op */])({ logicalOr_ }); +const logicalXor = Object(operation["a" /* op */])({ logicalXor_ }); +const where = Object(operation["a" /* op */])({ where_ }); +const whereAsync = whereAsync_; +//# sourceMappingURL=logical_ops.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/pow.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Computes the power of one `tf.Tensor` to another. Supports broadcasting. 
+ * + * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for + * corresponding elements in x and y. The result's dtype will be the upcasted + * type of the `base` and `exp` dtypes. + * + * ```js + * const a = tf.tensor([[2, 3], [4, 5]]) + * const b = tf.tensor([[1, 2], [3, 0]]).toInt(); + * + * a.pow(b).print(); // or tf.pow(a, b) + * ``` + * + * ```js + * const a = tf.tensor([[1, 2], [3, 4]]) + * const b = tf.tensor(2).toInt(); + * + * a.pow(b).print(); // or tf.pow(a, b) + * ``` + * We also expose `powStrict` which has the same signature as this op and + * asserts that `base` and `exp` are the same shape (does not broadcast). + * + * @param base The base `tf.Tensor` to pow element-wise. + * @param exp The exponent `tf.Tensor` to pow element-wise. + */ +/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */ +function pow_(base, exp) { + let $base = Object(tensor_util_env["a" /* convertToTensor */])(base, 'base', 'pow'); + let $exp = Object(tensor_util_env["a" /* convertToTensor */])(exp, 'exp', 'pow'); + [$base, $exp] = Object(tensor_util["makeTypesMatch"])($base, $exp); + const inputs = { a: $base, b: $exp }; + const forward = (backend, save) => { + const y = backend.pow($base, $exp); + save([$base, $exp, y]); + return y; + }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["fb" /* Pow */]); +} +const pow = Object(operation["a" /* op */])({ pow_ }); +//# sourceMappingURL=pow.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Pow_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + + + + +const powGradConfig = { + kernelName: kernel_names["fb" /* Pow */], + inputsToSave: ['a', 'b'], + outputsToSave: [true], + gradFunc: (dy, saved) => { + const [a, b, y] = saved; + const base = a; + const exp = b; + const outShape = assertAndGetBroadcastShape(base.shape, exp.shape); + const derBase = () => { + const expFloat = cast(exp, 'float32'); + let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, Object(tensor_ops["e" /* scalar */])(1))))); + const reduceAxes = getReductionAxes(base.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, base.shape); + }; + const derExp = () => { + const condition = greater(base, 0); + const logBase = where(condition, log(base), Object(tensor_ops["o" /* zerosLike */])(base)); + let res = mul(dy, mul(y, logBase)); + const reduceAxes = getReductionAxes(exp.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, exp.shape); + }; + return { a: derBase, b: derExp }; + } +}; +//# sourceMappingURL=Pow_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Prelu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + + +const preluGradConfig = { + kernelName: kernel_names["gb" /* Prelu */], + inputsToSave: ['x', 'alpha'], + gradFunc: (dy, saved) => { + const [x, alpha] = saved; + const mask = greater(x, 0); + return { + x: () => where(mask, dy, mul(dy, alpha)), + alpha: () => { + let res = where(mask, Object(tensor_ops["o" /* zerosLike */])(dy), mul(dy, x)); + const reduceAxes = getReductionAxes(alpha.shape, dy.shape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, alpha.shape); + } + }; + } +}; +//# sourceMappingURL=Prelu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Relu6_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const relu6GradConfig = { + kernelName: kernel_names["jb" /* Relu6 */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + const mask = mul(lessEqual(x, 6), unary_ops_step(x)); + return { x: () => mul(dy, cast(mask, 'float32')) }; + } +}; +//# sourceMappingURL=Relu6_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Relu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const reluGradConfig = { + kernelName: kernel_names["ib" /* Relu */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { x: () => mul(dy, cast(unary_ops_step(x), 'float32')) }; + } +}; +//# sourceMappingURL=Relu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/selu_util.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +const SELU_SCALEALPHA = 1.7580993408473768599402175208123; +const SELU_SCALE = 1.0507009873554804934193349852946; +//# sourceMappingURL=selu_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Selu_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + +const seluGradConfig = { + kernelName: kernel_names["lb" /* Selu */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { + x: () => { + const mask = greater(x, Object(tensor_ops["e" /* scalar */])(0)); + const scaleAlpha = Object(tensor_ops["e" /* scalar */])(SELU_SCALEALPHA); + const scale = Object(tensor_ops["e" /* scalar */])(SELU_SCALE); + const greaterThanZeroDer = mul(dy, scale); + const lessEqualZeroDer = mul(mul(dy, scaleAlpha), unary_ops_exp(cast(x, 'float32'))); + return where(mask, greaterThanZeroDer, lessEqualZeroDer); + } + }; + } +}; +//# sourceMappingURL=Selu_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/batch_to_space_nd.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * This operation reshapes the "batch" dimension 0 into `M + 1` dimensions of + * shape `blockShape + [batch]`, interleaves these blocks back into the grid + * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with + * the same rank as the input. The spatial dimensions of this intermediate + * result are then optionally cropped according to `crops` to produce the + * output. This is the reverse of `tf.spaceToBatchND`. 
See below for a precise + * description. + * + * ```js + * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]); + * const blockShape = [2, 2]; + * const crops = [[0, 0], [0, 0]]; + * + * x.batchToSpaceND(blockShape, crops).print(); + * ``` + * + * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape + + * remainingShape`, where spatialShape has `M` dimensions. + * @param blockShape A 1-D array. Must have shape `[M]`, all values must + * be >= 1. + * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0. + * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input + * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required + * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]` + * + * This operation is equivalent to the following steps: + * + * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ..., + * blockShape[M-1], batch / prod(blockShape), x.shape[1], ..., + * x.shape[N-1]]` + * + * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch / + * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M], + * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]` + * + * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch / + * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] * + * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]` + * + * 4. 
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted` + * according to `crops` to produce the output of shape: `[batch / + * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1], + * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] - + * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]` + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function batchToSpaceND_(x, blockShape, crops) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'batchToSpaceND'); + const prod = blockShape.reduce((a, b) => a * b); + util["assert"]($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`); + util["assert"](crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`); + util["assert"]($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` + + `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`); + const forward = backend => { + return backend.batchToSpaceND($x, blockShape, crops); + }; + const inputs = { x: $x }; + const attrs = { blockShape, crops }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* gradient */, kernel_names["i" /* BatchToSpaceND */], attrs); +} +const batchToSpaceND = Object(operation["a" /* op */])({ batchToSpaceND_ }); +//# sourceMappingURL=batch_to_space_nd.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SpaceToBatchND_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const spaceToBatchNDGradConfig = { + kernelName: kernel_names["mb" /* SpaceToBatchND */], + gradFunc: (dy, saved, attrs) => { + const { blockShape, paddings } = attrs; + return { x: () => batchToSpaceND(dy, blockShape, paddings) }; + } +}; +//# sourceMappingURL=SpaceToBatchND_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SplitV_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const splitVGradConfig = { + kernelName: kernel_names["nb" /* SplitV */], + gradFunc: (dy, saved, attrs) => { + const { axis } = attrs; + return { x: () => concat(dy, axis) }; + } +}; +//# sourceMappingURL=SplitV_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Square_grad.js +/** + * @license + * Copyright 2019 Google Inc. 
All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + +const squareGradConfig = { + kernelName: kernel_names["ob" /* Square */], + inputsToSave: ['x'], + gradFunc: (dy, saved) => { + const [x] = saved; + return { x: () => mul(dy, mul(x.toFloat(), 2)) }; + } +}; +//# sourceMappingURL=Square_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/SquaredDifference_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + +const squaredDifferenceGradConfig = { + kernelName: kernel_names["pb" /* SquaredDifference */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const two = Object(tensor_ops["e" /* scalar */])(2); + const derA = () => mul(dy, mul(two, sub(a, b))); + const derB = () => mul(dy, mul(two, sub(b, a))); + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=SquaredDifference_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Sub_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + +const subGradConfig = { + kernelName: kernel_names["qb" /* Sub */], + inputsToSave: ['a', 'b'], + gradFunc: (dy, saved) => { + const [a, b] = saved; + const outShape = assertAndGetBroadcastShape(a.shape, b.shape); + const derA = () => { + let res = dy; + const reduceAxes = getReductionAxes(a.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(res, a.shape); + }; + const derB = () => { + let res = dy; + const reduceAxes = getReductionAxes(b.shape, outShape); + if (reduceAxes.length > 0) { + res = sum(res, reduceAxes); + } + return reshape(neg(res), b.shape); + }; + return { a: derA, b: derB }; + } +}; +//# sourceMappingURL=Sub_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/pad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Pads a `tf.Tensor` with a given value and paddings. + * + * This operation currently only implements the `CONSTANT` mode. + * + * Also available are stricter rank-specific methods with the same signature + * as this method that assert that `paddings` is of given length. 
+ * - `tf.pad1d` + * - `tf.pad2d` + * - `tf.pad3d` + * - `tf.pad4d` + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * x.pad([[1, 2]]).print(); + * ``` + * @param x The tensor to pad. + * @param paddings An array of length `R` (the rank of the tensor), where + * each element is a length-2 tuple of ints `[padBefore, padAfter]`, + * specifying how much to pad along each dimension of the tensor. + * @param constantValue The pad value to use. Defaults to 0. + */ +/** @doc {heading: 'Tensors', subheading: 'Transformations'} */ +function pad_(x, paddings, constantValue = 0) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'pad'); + if ($x.rank === 0) { + throw new Error('pad(scalar) is not defined. Pass non-scalar to pad'); + } + const forward = (backend, save) => { + save([$x]); + return backend.pad($x, paddings, constantValue); + }; + const attrs = { paddings, constantValue }; + const inputs = { x: $x }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["db" /* PadV2 */], attrs); +} +const pad_pad = Object(operation["a" /* op */])({ pad_ }); +//# sourceMappingURL=pad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice_util.js +/** + * @license + * Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + +function assertParamsValid(input, begin, size) { + util["assert"](input.rank === begin.length, () => `Error in slice${input.rank}D: Length of begin ${begin} must ` + + `match the rank of the array (${input.rank}).`); + util["assert"](input.rank === size.length, () => `Error in slice${input.rank}D: Length of size ${size} must ` + + `match the rank of the array (${input.rank}).`); + for (let i = 0; i < input.rank; ++i) { + util["assert"](begin[i] + size[i] <= input.shape[i], () => `Error in slice${input.rank}D: begin[${i}] + size[${i}] ` + + `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`); + } +} +/** Converts a binary mask to an array of axes. Used in stridedSlice(). */ +function maskToAxes(mask) { + const axes = []; + let axis = 0; + while (mask > 0) { + if (mask & 1) { + axes.push(axis); + } + mask /= 2; + axis++; + } + return axes; +} +/** Computes the output shape given the strided slice params. */ +function slice_util_computeOutShape(begin, end, strides) { + const size = []; + for (let axis = 0; axis < begin.length; axis++) { + size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]); + } + return size; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current stride value. Otherwise, insert. +function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes) { + const newStrides = [...strides]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newStrides[ellipsisInsertionIndex] = 1; + } + else { + newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */); + newStrides.pop(); + } + } + return newStrides; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current start value. Otherwise, insert. 
+function startIndicesWithElidedDims(startIndices, ellipsisInsertionIndex, numElidedAxes) { + const newIndices = [...startIndices]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newIndices[ellipsisInsertionIndex] = 0; + } + else { + newIndices.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 0 /* element to add */); + newIndices.pop(); + } + } + return newIndices; +} +// Creates full selection at the elided dimensions. If the dimension matches +// the ellipsis mask, override the current stop value. Otherwise, insert. +function stopIndicesWithElidedDims(stopIndices, ellipsisInsertionIndex, numElidedAxes, inputShape) { + const newIndices = [...stopIndices]; + for (let i = 0; i < numElidedAxes; i++) { + if (i === 0) { + newIndices[ellipsisInsertionIndex] = Number.MAX_SAFE_INTEGER; + } + else { + newIndices.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, Number.MAX_SAFE_INTEGER /* element to add */); + newIndices.pop(); + } + } + for (let i = 0; i < newIndices.length; i++) { + newIndices[i] = util["clamp"](0, newIndices[i], inputShape[i]); + } + return newIndices; +} +function stridesForAxis(strides, axis, ellipsisMask) { + let stride = strides[axis]; + if (ellipsisMask & (1 << axis) || stride == null) { + stride = 1; + } + return stride; +} +function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) { + // Begin with the specified index + let start = startIndices[axis]; + const stride = strides[axis] || 1; + // Check the axis bit from right of masked axes, or the begin index is not set + // for the axis. + if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) { + if (stride > 0) { + // Forward iteration - use the first element. 
These values will get + // clamped below (Note: We could have set them to 0 and axis_size-1, but + // use lowest() and max() to maintain symmetry with StopForAxis()) + start = Number.MIN_SAFE_INTEGER; + } + else { + // Backward iteration - use the last element. + start = Number.MAX_SAFE_INTEGER; + } + } + // Handle negative indices + const axisSize = inputShape[axis]; + if (start < 0) { + start += axisSize; + } + // Clamping + start = util["clamp"](0, start, axisSize - 1); + return start; +} +function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) { + // Begin with the specified index + let stop = stopIndices[axis]; + const stride = strides[axis] || 1; + // Check the axis bit from right of masked axes, or if the stop index is not + // set for this axis. + if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) { + if (stride > 0) { + // Forward iteration - use the last element. These values will get + // clamped below + stop = Number.MAX_SAFE_INTEGER; + } + else { + // Backward iteration - use the first element. + stop = Number.MIN_SAFE_INTEGER; + } + } + // Handle negative indices + const axisSize = inputShape[axis]; + if (stop < 0) { + stop += axisSize; + } + // Clamping + // Because the end index points one past the last element, we need slightly + // different clamping ranges depending on the direction. + if (stride > 0) { + // Forward iteration + stop = util["clamp"](0, stop, axisSize); + } + else { + // Backward iteration + stop = util["clamp"](-1, stop, axisSize - 1); + } + return stop; +} +/** + * Returns true if the slice occupies a continous set of elements in the + * 'flat' space. + */ +function isSliceContinous(shape, begin, size) { + // Index of the first axis that has size > 1. 
+ let firstNonOneAxis = size.length; + for (let i = 0; i < size.length; i++) { + if (size[i] > 1) { + firstNonOneAxis = i; + break; + } + } + for (let i = firstNonOneAxis + 1; i < size.length; i++) { + if (begin[i] > 0 || size[i] !== shape[i]) { + return false; + } + } + return true; +} +function computeFlatOffset(begin, strides) { + let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1; + for (let i = 0; i < begin.length - 1; i++) { + flatOffset += begin[i] * strides[i]; + } + return flatOffset; +} +//# sourceMappingURL=slice_util.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/slice.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + +/** + * Extracts a 1D slice from 1D array starting at coordinates `begin` and is + * of length `size`. See `slice` for details. + */ +function slice1d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice1d'); + util["assert"]($x.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, [begin], [size]); +} +/** + * Extracts a 2D slice from a 2D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. 
+ */ +function slice2d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice2d'); + util["assert"]($x.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a 3D slice from a 3D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. + */ +function slice3d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice3d'); + util["assert"]($x.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a 4D slice from a 4D array starting at coordinates `begin` and + * is of size `size`. See `slice` for details. + */ +function slice4d_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice4d'); + util["assert"]($x.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`); + return slice($x, begin, size); +} +/** + * Extracts a slice from a `tf.Tensor` starting at coordinates `begin` + * and is of size `size`. + * + * Also available are stricter rank-specific methods with the same signature + * as this method that assert that `x` is of the given rank: + * - `tf.slice1d` + * - `tf.slice2d` + * - `tf.slice3d` + * - `tf.slice4d` + * + * ```js + * const x = tf.tensor1d([1, 2, 3, 4]); + * + * x.slice([1], [2]).print(); + * ``` + * + * ```js + * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]); + * + * x.slice([1, 0], [1, 2]).print(); + * ``` + * @param x The input `tf.Tensor` to slice from. + * @param begin The coordinates to start the slice from. The length can be + * less than the rank of x - the rest of the axes will have implicit 0 as + * start. Can also be a single number, in which case it specifies the + * first axis. + * @param size The size of the slice. 
The length can be less than the rank of + * x - the rest of the axes will have implicit -1. A value of -1 requests + * the rest of the dimensions in the axis. Can also be a single number, + * in which case it specifies the size of the first axis. + */ +/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */ +function slice_(x, begin, size) { + const $x = Object(tensor_util_env["a" /* convertToTensor */])(x, 'x', 'slice'); + if ($x.rank === 0) { + throw new Error('Slicing scalar is not possible'); + } + // The following logic allows for more ergonomic calls. + let begin_; + if (typeof begin === 'number') { + begin_ = [begin, ...new Array($x.rank - 1).fill(0)]; + } + else if (begin.length < $x.rank) { + begin_ = begin.concat(new Array($x.rank - begin.length).fill(0)); + } + else { + begin_ = begin.slice(); + } + begin_.forEach(d => { + util["assert"](d !== -1, () => 'slice() does not support negative begin indexing.'); + }); + let size_; + if (size == null) { + size_ = new Array($x.rank).fill(-1); + } + else if (typeof size === 'number') { + size_ = [size, ...new Array($x.rank - 1).fill(-1)]; + } + else if (size.length < $x.rank) { + size_ = size.concat(new Array($x.rank - size.length).fill(-1)); + } + else { + size_ = size; + } + size_ = size_.map((d, i) => { + if (d >= 0) { + return d; + } + else { + util["assert"](d === -1, () => `Negative size values should be exactly -1 but got ` + + `${d} for the slice() size at index ${i}.`); + return $x.shape[i] - begin_[i]; + } + }); + assertParamsValid($x, begin_, size_); + const inputShape = $x.shape; + const grad = (dy) => { + // Create an Nx2 padding where the first column represents how many + // zeros are prepended (at start) for each dimension, and the second + // column indicates how many zeros are appended (at end). + // The number of zeros to append is the shape of the input + // elementwise-subtracted by both the begin vector and sizes vector. 
+ const paddings = []; + for (let i = 0; i < dy.rank; i++) { + paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]); + } + return { x: () => pad_pad(dy, paddings) }; + }; + const attrs = { begin: begin_, size: size_ }; + return engine["a" /* ENGINE */].runKernelFunc(backend => backend.slice($x, begin_, size_), { x: $x }, grad, 'Slice', attrs); +} +const slice = Object(operation["a" /* op */])({ slice_ }); +const slice1d = Object(operation["a" /* op */])({ slice1d_ }); +const slice2d = Object(operation["a" /* op */])({ slice2d_ }); +const slice3d = Object(operation["a" /* op */])({ slice3d_ }); +const slice4d = Object(operation["a" /* op */])({ slice4d_ }); +//# sourceMappingURL=slice.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Tile_grad.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const tileGradConfig = { + kernelName: kernel_names["rb" /* Tile */], + inputsToSave: ['x'], + gradFunc: (dy, saved, attrs) => { + const [x] = saved; + const { reps } = attrs; + const derX = () => { + let xGrad = Object(tensor_ops["o" /* zerosLike */])(x); + // TODO(cais): Maybe reduce memory footprint by avoiding repeated + // slicing. 
+ if (x.rank === 1) { + for (let i = 0; i < reps[0]; ++i) { + xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]])); + } + } + else if (x.rank === 2) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [ + x.shape[0], x.shape[1] + ])); + } + } + } + else if (x.rank === 3) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + for (let k = 0; k < reps[2]; ++k) { + xGrad = + add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]])); + } + } + } + } + else if (x.rank === 4) { + for (let i = 0; i < reps[0]; ++i) { + for (let j = 0; j < reps[1]; ++j) { + for (let k = 0; k < reps[2]; ++k) { + for (let l = 0; l < reps[3]; ++l) { + xGrad = + add(xGrad, slice(dy, [ + i * x.shape[0], j * x.shape[1], k * x.shape[2], + l * x.shape[3] + ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]])); + } + } + } + } + } + else { + throw new Error(`Gradient for tile operation is not implemented for rank-` + + `${x.rank} tensors yet.`); + } + return xGrad; + }; + return { x: derX }; + }, +}; +//# sourceMappingURL=Tile_grad.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/gradients/Transpose_grad.js +/** + * @license + * Copyright 2020 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + +const transposeGradConfig = { + kernelName: kernel_names["sb" /* Transpose */], + gradFunc: (dy, saved, attrs) => { + const transposeAttrs = attrs; + const { perm } = transposeAttrs; + const undoPerm = getUndoAxesPermutation(perm); + return { x: () => transpose(dy, undoPerm) }; + } +}; +//# sourceMappingURL=Transpose_grad.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/kernel_registry.js +var kernel_registry = __webpack_require__(17); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/register_all_gradients.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============================================================================= + */ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +// Export all kernel configs here so that the package can auto register them +const gradConfigs = [ + addGradConfig, + addNGradConfig, + atan2GradConfig, + avgPoolGradConfig, + avgPool3DGradConfig, + batchMatMulGradConfig, + batchToSpaceNDGradConfig, + broadcastToGradConfig, + concatGradConfig, + conv2DGradConfig, + conv2DBackpropInputGradConfig, + conv3DGradConfig, + cumsumGradConfig, + depthwiseConv2dNativeGradConfig, + divGradConfig, + eluGradConfig, + floorDivGradConfig, + fusedBatchNormGradConfig, + greaterEqualGradConfig, + identityGradConfig, + lrnGradConfig, + oneHotGradConfig, + padV2GradConfig, + splitVGradConfig, + maxGradConfig, + spaceToBatchNDGradConfig, + maxGradConfig, + maximumGradConfig, + maxPoolGradConfig, + maxPool3DGradConfig, + minimumGradConfig, + modGradConfig, + multiplyGradConfig, + oneHotGradConfig, + padV2GradConfig, + powGradConfig, + preluGradConfig, + reluGradConfig, + relu6GradConfig, + seluGradConfig, + spaceToBatchNDGradConfig, + splitVGradConfig, + squareGradConfig, + squaredDifferenceGradConfig, + tileGradConfig, + transposeGradConfig, + subGradConfig +]; +for (const gradientConfig of gradConfigs) { + Object(kernel_registry["d" /* registerGradient */])(gradientConfig); +} +//# sourceMappingURL=register_all_gradients.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/environment.js +var environment = __webpack_require__(10); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/platforms/platform_browser.js +/** + * @license + * Copyright 2019 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +class PlatformBrowser { + fetch(path, init) { + return fetch(path, init); + } + now() { + return performance.now(); + } + encode(text, encoding) { + if (encoding !== 'utf-8' && encoding !== 'utf8') { + throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`); + } + if (this.textEncoder == null) { + this.textEncoder = new TextEncoder(); + } + return this.textEncoder.encode(text); + } + decode(bytes, encoding) { + return new TextDecoder(encoding).decode(bytes); + } +} +if (Object(environment["c" /* env */])().get('IS_BROWSER')) { + Object(environment["c" /* env */])().setPlatform('browser', new PlatformBrowser()); +} +//# sourceMappingURL=platform_browser.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/platforms/platform_node.js +var platform_node = __webpack_require__(62); + +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/io_utils.js +var io_utils = __webpack_require__(13); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/router_registry.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +class IORouterRegistry { + constructor() { + this.saveRouters = []; + this.loadRouters = []; + } + static getInstance() { + if (IORouterRegistry.instance == null) { + IORouterRegistry.instance = new IORouterRegistry(); + } + return IORouterRegistry.instance; + } + /** + * Register a save-handler router. + * + * @param saveRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `save` method defined or `null`. + */ + static registerSaveRouter(saveRouter) { + IORouterRegistry.getInstance().saveRouters.push(saveRouter); + } + /** + * Register a load-handler router. + * + * @param loadRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `load` method defined or `null`. + */ + static registerLoadRouter(loadRouter) { + IORouterRegistry.getInstance().loadRouters.push(loadRouter); + } + /** + * Look up IOHandler for saving, given a URL-like string. + * + * @param url + * @returns If only one match is found, an instance of IOHandler with the + * `save` method defined. If no match is found, `null`. + * @throws Error, if more than one match is found. + */ + static getSaveHandlers(url) { + return IORouterRegistry.getHandlers(url, 'save'); + } + /** + * Look up IOHandler for loading, given a URL-like string. + * + * @param url + * @param loadOptions Optional, custom load options. + * @returns All valid handlers for `url`, given the currently registered + * handler routers. 
+ */ + static getLoadHandlers(url, loadOptions) { + return IORouterRegistry.getHandlers(url, 'load', loadOptions); + } + static getHandlers(url, handlerType, loadOptions) { + const validHandlers = []; + const routers = handlerType === 'load' ? + IORouterRegistry.getInstance().loadRouters : + IORouterRegistry.getInstance().saveRouters; + routers.forEach(router => { + const handler = router(url, loadOptions); + if (handler !== null) { + validHandlers.push(handler); + } + }); + return validHandlers; + } +} +const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter); +const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter); +const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url); +const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions); +//# sourceMappingURL=router_registry.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/model_management.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Classes and functions for model management across multiple storage mediums. + * + * Supported client actions: + * - Listing models on all registered storage mediums. + * - Remove model by URL from any registered storage mediums, by using URL + * string. 
+ * - Moving or copying model from one path to another in the same medium or from + * one medium to another, by using URL strings. + */ + + +const URL_SCHEME_SUFFIX = '://'; +class model_management_ModelStoreManagerRegistry { + constructor() { + this.managers = {}; + } + static getInstance() { + if (model_management_ModelStoreManagerRegistry.instance == null) { + model_management_ModelStoreManagerRegistry.instance = new model_management_ModelStoreManagerRegistry(); + } + return model_management_ModelStoreManagerRegistry.instance; + } + /** + * Register a save-handler router. + * + * @param saveRouter A function that maps a URL-like string onto an instance + * of `IOHandler` with the `save` method defined or `null`. + */ + static registerManager(scheme, manager) { + Object(util["assert"])(scheme != null, () => 'scheme must not be undefined or null.'); + if (scheme.endsWith(URL_SCHEME_SUFFIX)) { + scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX)); + } + Object(util["assert"])(scheme.length > 0, () => 'scheme must not be an empty string.'); + const registry = model_management_ModelStoreManagerRegistry.getInstance(); + Object(util["assert"])(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`); + registry.managers[scheme] = manager; + } + static getManager(scheme) { + const manager = this.getInstance().managers[scheme]; + if (manager == null) { + throw new Error(`Cannot find model manager for scheme '${scheme}'`); + } + return manager; + } + static getSchemes() { + return Object.keys(this.getInstance().managers); + } +} +/** + * Helper method for parsing a URL string into a scheme and a path. + * + * @param url E.g., 'localstorage://my-model' + * @returns A dictionary with two fields: scheme and path. + * Scheme: e.g., 'localstorage' in the example above. + * Path: e.g., 'my-model' in the example above. 
+ */ +function parseURL(url) { + if (url.indexOf(URL_SCHEME_SUFFIX) === -1) { + throw new Error(`The url string provided does not contain a scheme. ` + + `Supported schemes are: ` + + `${model_management_ModelStoreManagerRegistry.getSchemes().join(',')}`); + } + return { + scheme: url.split(URL_SCHEME_SUFFIX)[0], + path: url.split(URL_SCHEME_SUFFIX)[1], + }; +} +async function cloneModelInternal(sourceURL, destURL, deleteSource = false) { + Object(util["assert"])(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`); + const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL); + Object(util["assert"])(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`); + Object(util["assert"])(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` + + `load handlers for source URL ${sourceURL}.`); + const loadHandler = loadHandlers[0]; + const saveHandlers = IORouterRegistry.getSaveHandlers(destURL); + Object(util["assert"])(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` + + `URL ${destURL}.`); + Object(util["assert"])(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` + + `save handlers for destination URL ${destURL}.`); + const saveHandler = saveHandlers[0]; + const sourceScheme = parseURL(sourceURL).scheme; + const sourcePath = parseURL(sourceURL).path; + const sameMedium = sourceScheme === parseURL(sourceURL).scheme; + const modelArtifacts = await loadHandler.load(); + // If moving within the same storage medium, remove the old model as soon as + // the loading is done. Without doing this, it is possible that the combined + // size of the two models will cause the cloning to fail. 
+ if (deleteSource && sameMedium) { + await model_management_ModelStoreManagerRegistry.getManager(sourceScheme) + .removeModel(sourcePath); + } + const saveResult = await saveHandler.save(modelArtifacts); + // If moving between mediums, the deletion is done after the save succeeds. + // This guards against the case in which saving to the destination medium + // fails. + if (deleteSource && !sameMedium) { + await model_management_ModelStoreManagerRegistry.getManager(sourceScheme) + .removeModel(sourcePath); + } + return saveResult.modelArtifactsInfo; +} +/** + * List all models stored in registered storage mediums. + * + * For a web browser environment, the registered mediums are Local Storage and + * IndexedDB. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @returns A `Promise` of a dictionary mapping URLs of existing models to + * their model artifacts info. URLs include medium-specific schemes, e.g., + * 'indexeddb://my/model/1'. Model artifacts info include type of the + * model's topology, byte sizes of the topology, weights, etc. 
+ */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function listModels() { + const schemes = model_management_ModelStoreManagerRegistry.getSchemes(); + const out = {}; + for (const scheme of schemes) { + const schemeOut = await model_management_ModelStoreManagerRegistry.getManager(scheme).listModels(); + for (const path in schemeOut) { + const url = scheme + URL_SCHEME_SUFFIX + path; + out[url] = schemeOut[path]; + } + } + return out; +} +/** + * Remove a model specified by URL from a reigstered storage medium. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @param url A URL to a stored model, with a scheme prefix, e.g., + * 'localstorage://my-model-1', 'indexeddb://my/model/2'. + * @returns ModelArtifactsInfo of the deleted model (if and only if deletion + * is successful). + * @throws Error if deletion fails, e.g., if no model exists at `path`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function removeModel(url) { + const schemeAndPath = parseURL(url); + const manager = model_management_ModelStoreManagerRegistry.getManager(schemeAndPath.scheme); + return manager.removeModel(schemeAndPath.path); +} +/** + * Copy a model from one URL to another. + * + * This function supports: + * + * 1. Copying within a storage medium, e.g., + * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')` + * 2. 
Copying between two storage mediums, e.g., + * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Copy the model, from Local Storage to IndexedDB. + * await tf.io.copyModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove both models. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of copying. + * @param destURL Destination URL of copying. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function copyModel(sourceURL, destURL) { + const deleteSource = false; + return cloneModelInternal(sourceURL, destURL, deleteSource); +} +/** + * Move a model from one URL to another. + * + * This function supports: + * + * 1. Moving within a storage medium, e.g., + * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Moving between two storage mediums, e.g., + * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. 
+ * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Move the model, from Local Storage to IndexedDB. + * await tf.io.moveModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove the moved model. + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of moving. + * @param destURL Destination URL of moving. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +async function moveModel(sourceURL, destURL) { + const deleteSource = true; + return cloneModelInternal(sourceURL, destURL, deleteSource); +} + +//# sourceMappingURL=model_management.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/indexed_db.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +const DATABASE_NAME = 'tensorflowjs'; +const DATABASE_VERSION = 1; +// Model data and ModelArtifactsInfo (metadata) are stored in two separate +// stores for efficient access of the list of stored models and their metadata. +// 1. The object store for model data: topology, weights and weight manifests. +const MODEL_STORE_NAME = 'models_store'; +// 2. The object store for ModelArtifactsInfo, including meta-information such +// as the type of topology (JSON vs binary), byte size of the topology, byte +// size of the weights, etc. +const INFO_STORE_NAME = 'model_info_store'; +/** + * Delete the entire database for tensorflow.js, including the models store. + */ +async function deleteDatabase() { + const idbFactory = getIndexedDBFactory(); + return new Promise((resolve, reject) => { + const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME); + deleteRequest.onsuccess = () => resolve(); + deleteRequest.onerror = error => reject(error); + }); +} +function getIndexedDBFactory() { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // TODO(cais): Add more info about what IOHandler subtypes are available. + // Maybe point to a doc page on the web and/or automatically determine + // the available IOHandlers and print them in the error message. + throw new Error('Failed to obtain IndexedDB factory because the current environment' + + 'is not a web browser.'); + } + // tslint:disable-next-line:no-any + const theWindow = typeof window === 'undefined' ? 
self : window; + const factory = theWindow.indexedDB || theWindow.mozIndexedDB || + theWindow.webkitIndexedDB || theWindow.msIndexedDB || + theWindow.shimIndexedDB; + if (factory == null) { + throw new Error('The current browser does not appear to support IndexedDB.'); + } + return factory; +} +function setUpDatabase(openRequest) { + const db = openRequest.result; + db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' }); + db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' }); +} +/** + * IOHandler subclass: Browser IndexedDB. + * + * See the doc string of `browserIndexedDB` for more details. + */ +class indexed_db_BrowserIndexedDB { + constructor(modelPath) { + this.indexedDB = getIndexedDBFactory(); + if (modelPath == null || !modelPath) { + throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.'); + } + this.modelPath = modelPath; + } + async save(modelArtifacts) { + // TODO(cais): Support saving GraphDef models. + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserLocalStorage.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + return this.databaseAction(this.modelPath, modelArtifacts); + } + async load() { + return this.databaseAction(this.modelPath); + } + /** + * Perform database action to put model artifacts into or read model artifacts + * from IndexedDB object store. + * + * Whether the action is put or get depends on whether `modelArtifacts` is + * specified. If it is specified, the action will be put; otherwise the action + * will be get. + * + * @param modelPath A unique string path for the model. + * @param modelArtifacts If specified, it will be the model artifacts to be + * stored in IndexedDB. + * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise` + * of `ModelArtifacts`, if the action is get. 
+ */ + databaseAction(modelPath, modelArtifacts) { + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + if (modelArtifacts == null) { + // Read model out from object store. + const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const getRequest = modelStore.get(this.modelPath); + getRequest.onsuccess = () => { + if (getRequest.result == null) { + db.close(); + return reject(new Error(`Cannot find model with path '${this.modelPath}' ` + + `in IndexedDB.`)); + } + else { + resolve(getRequest.result.modelArtifacts); + } + }; + getRequest.onerror = error => { + db.close(); + return reject(getRequest.error); + }; + modelTx.oncomplete = () => db.close(); + } + else { + // Put model into object store. + const modelArtifactsInfo = Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts); + // First, put ModelArtifactsInfo into info store. + const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite'); + let infoStore = infoTx.objectStore(INFO_STORE_NAME); + const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo }); + let modelTx; + putInfoRequest.onsuccess = () => { + // Second, put model data into model store. + modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const putModelRequest = modelStore.put({ + modelPath: this.modelPath, + modelArtifacts, + modelArtifactsInfo + }); + putModelRequest.onsuccess = () => resolve({ modelArtifactsInfo }); + putModelRequest.onerror = error => { + // If the put-model request fails, roll back the info entry as + // well. 
+ infoStore = infoTx.objectStore(INFO_STORE_NAME); + const deleteInfoRequest = infoStore.delete(this.modelPath); + deleteInfoRequest.onsuccess = () => { + db.close(); + return reject(putModelRequest.error); + }; + deleteInfoRequest.onerror = error => { + db.close(); + return reject(putModelRequest.error); + }; + }; + }; + putInfoRequest.onerror = error => { + db.close(); + return reject(putInfoRequest.error); + }; + infoTx.oncomplete = () => { + if (modelTx == null) { + db.close(); + } + else { + modelTx.oncomplete = () => db.close(); + } + }; + } + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } +} +indexed_db_BrowserIndexedDB.URL_SCHEME = 'indexeddb://'; +const indexedDBRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(indexed_db_BrowserIndexedDB.URL_SCHEME)) { + return browserIndexedDB(url.slice(indexed_db_BrowserIndexedDB.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(indexedDBRouter); +IORouterRegistry.registerLoadRouter(indexedDBRouter); +/** + * Creates a browser IndexedDB IOHandler for saving and loading models. + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save('indexeddb://MyModel')); + * console.log(saveResult); + * ``` + * + * @param modelPath A unique identifier for the model to be saved. Must be a + * non-empty string. + * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`), + * which can be used with, e.g., `tf.Model.save`. + */ +function browserIndexedDB(modelPath) { + return new indexed_db_BrowserIndexedDB(modelPath); +} +function maybeStripScheme(key) { + return key.startsWith(indexed_db_BrowserIndexedDB.URL_SCHEME) ? 
+ key.slice(indexed_db_BrowserIndexedDB.URL_SCHEME.length) : + key; +} +class BrowserIndexedDBManager { + constructor() { + this.indexedDB = getIndexedDBFactory(); + } + async listModels() { + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + const tx = db.transaction(INFO_STORE_NAME, 'readonly'); + const store = tx.objectStore(INFO_STORE_NAME); + // tslint:disable:max-line-length + // Need to cast `store` as `any` here because TypeScript's DOM + // library does not have the `getAll()` method even though the + // method is supported in the latest version of most mainstream + // browsers: + // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll + // tslint:enable:max-line-length + // tslint:disable-next-line:no-any + const getAllInfoRequest = store.getAll(); + getAllInfoRequest.onsuccess = () => { + const out = {}; + for (const item of getAllInfoRequest.result) { + out[item.modelPath] = item.modelArtifactsInfo; + } + resolve(out); + }; + getAllInfoRequest.onerror = error => { + db.close(); + return reject(getAllInfoRequest.error); + }; + tx.oncomplete = () => db.close(); + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } + async removeModel(path) { + path = maybeStripScheme(path); + return new Promise((resolve, reject) => { + const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION); + openRequest.onupgradeneeded = () => setUpDatabase(openRequest); + openRequest.onsuccess = () => { + const db = openRequest.result; + const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite'); + const infoStore = infoTx.objectStore(INFO_STORE_NAME); + const getInfoRequest = infoStore.get(path); + let modelTx; + getInfoRequest.onsuccess = () => { + if (getInfoRequest.result == null) { + db.close(); + return reject(new 
Error(`Cannot find model with path '${path}' ` + + `in IndexedDB.`)); + } + else { + // First, delete the entry in the info store. + const deleteInfoRequest = infoStore.delete(path); + const deleteModelData = () => { + // Second, delete the entry in the model store. + modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite'); + const modelStore = modelTx.objectStore(MODEL_STORE_NAME); + const deleteModelRequest = modelStore.delete(path); + deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo); + deleteModelRequest.onerror = error => reject(getInfoRequest.error); + }; + // Proceed with deleting model data regardless of whether deletion + // of info data succeeds or not. + deleteInfoRequest.onsuccess = deleteModelData; + deleteInfoRequest.onerror = error => { + deleteModelData(); + db.close(); + return reject(getInfoRequest.error); + }; + } + }; + getInfoRequest.onerror = error => { + db.close(); + return reject(getInfoRequest.error); + }; + infoTx.oncomplete = () => { + if (modelTx == null) { + db.close(); + } + else { + modelTx.oncomplete = () => db.close(); + } + }; + }; + openRequest.onerror = error => reject(openRequest.error); + }); + } +} +if (Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // Wrap the construction and registration, to guard against browsers that + // don't support Local Storage. + try { + model_management_ModelStoreManagerRegistry.registerManager(indexed_db_BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager()); + } + catch (err) { + } +} +//# sourceMappingURL=indexed_db.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/local_storage.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +const PATH_SEPARATOR = '/'; +const PATH_PREFIX = 'tensorflowjs_models'; +const INFO_SUFFIX = 'info'; +const MODEL_TOPOLOGY_SUFFIX = 'model_topology'; +const WEIGHT_SPECS_SUFFIX = 'weight_specs'; +const WEIGHT_DATA_SUFFIX = 'weight_data'; +const MODEL_METADATA_SUFFIX = 'model_metadata'; +/** + * Purge all tensorflow.js-saved model artifacts from local storage. + * + * @returns Paths of the models purged. + */ +function purgeLocalStorageArtifacts() { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER') || + typeof window === 'undefined' || + typeof window.localStorage === 'undefined') { + throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' + + 'unavailable in the current environment.'); + } + const LS = window.localStorage; + const purgedModelPaths = []; + for (let i = 0; i < LS.length; ++i) { + const key = LS.key(i); + const prefix = PATH_PREFIX + PATH_SEPARATOR; + if (key.startsWith(prefix) && key.length > prefix.length) { + LS.removeItem(key); + const modelName = getModelPathFromKey(key); + if (purgedModelPaths.indexOf(modelName) === -1) { + purgedModelPaths.push(modelName); + } + } + } + return purgedModelPaths; +} +function getModelKeys(path) { + return { + info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR), + topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR), + weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR), + weightData: [PATH_PREFIX, path, 
WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR), + modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR) + }; +} +/** + * Get model path from a local-storage key. + * + * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1' + * + * @param key + */ +function getModelPathFromKey(key) { + const items = key.split(PATH_SEPARATOR); + if (items.length < 3) { + throw new Error(`Invalid key format: ${key}`); + } + return items.slice(1, items.length - 1).join(PATH_SEPARATOR); +} +function local_storage_maybeStripScheme(key) { + return key.startsWith(local_storage_BrowserLocalStorage.URL_SCHEME) ? + key.slice(local_storage_BrowserLocalStorage.URL_SCHEME.length) : + key; +} +/** + * IOHandler subclass: Browser Local Storage. + * + * See the doc string to `browserLocalStorage` for more details. + */ +class local_storage_BrowserLocalStorage { + constructor(modelPath) { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER') || + typeof window === 'undefined' || + typeof window.localStorage === 'undefined') { + // TODO(cais): Add more info about what IOHandler subtypes are + // available. + // Maybe point to a doc page on the web and/or automatically determine + // the available IOHandlers and print them in the error message. + throw new Error('The current environment does not support local storage.'); + } + this.LS = window.localStorage; + if (modelPath == null || !modelPath) { + throw new Error('For local storage, modelPath must not be null, undefined or empty.'); + } + this.modelPath = modelPath; + this.keys = getModelKeys(this.modelPath); + } + /** + * Save model artifacts to browser local storage. + * + * See the documentation to `browserLocalStorage` for details on the saved + * artifacts. + * + * @param modelArtifacts The model artifacts to be stored. + * @returns An instance of SaveResult. 
+ */ + async save(modelArtifacts) { + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserLocalStorage.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + else { + const topology = JSON.stringify(modelArtifacts.modelTopology); + const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs); + const modelArtifactsInfo = Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts); + try { + this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo)); + this.LS.setItem(this.keys.topology, topology); + this.LS.setItem(this.keys.weightSpecs, weightSpecs); + this.LS.setItem(this.keys.weightData, Object(io_utils["a" /* arrayBufferToBase64String */])(modelArtifacts.weightData)); + this.LS.setItem(this.keys.modelMetadata, JSON.stringify({ + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + userDefinedMetadata: modelArtifacts.userDefinedMetadata + })); + return { modelArtifactsInfo }; + } + catch (err) { + // If saving failed, clean up all items saved so far. + this.LS.removeItem(this.keys.info); + this.LS.removeItem(this.keys.topology); + this.LS.removeItem(this.keys.weightSpecs); + this.LS.removeItem(this.keys.weightData); + this.LS.removeItem(this.keys.modelMetadata); + throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` + + `size quota being exceeded is a possible cause of this failure: ` + + `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` + + `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` + + `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`); + } + } + } + /** + * Load a model from local storage. + * + * See the documentation to `browserLocalStorage` for details on the saved + * artifacts. + * + * @returns The loaded model (if loading succeeds). 
+ */ + async load() { + const info = JSON.parse(this.LS.getItem(this.keys.info)); + if (info == null) { + throw new Error(`In local storage, there is no model with name '${this.modelPath}'`); + } + if (info.modelTopologyType !== 'JSON') { + throw new Error('BrowserLocalStorage does not support loading non-JSON model ' + + 'topology yet.'); + } + const out = {}; + // Load topology. + const topology = JSON.parse(this.LS.getItem(this.keys.topology)); + if (topology == null) { + throw new Error(`In local storage, the topology of model '${this.modelPath}' ` + + `is missing.`); + } + out.modelTopology = topology; + // Load weight specs. + const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs)); + if (weightSpecs == null) { + throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` + + `are missing.`); + } + out.weightSpecs = weightSpecs; + // Load meta-data fields. + const metadataString = this.LS.getItem(this.keys.modelMetadata); + if (metadataString != null) { + const metadata = JSON.parse(metadataString); + out.format = metadata['format']; + out.generatedBy = metadata['generatedBy']; + out.convertedBy = metadata['convertedBy']; + out.userDefinedMetadata = metadata['userDefinedMetadata']; + } + // Load weight data. 
+ const weightDataBase64 = this.LS.getItem(this.keys.weightData); + if (weightDataBase64 == null) { + throw new Error(`In local storage, the binary weight values of model ` + + `'${this.modelPath}' are missing.`); + } + out.weightData = Object(io_utils["b" /* base64StringToArrayBuffer */])(weightDataBase64); + return out; + } +} +local_storage_BrowserLocalStorage.URL_SCHEME = 'localstorage://'; +const localStorageRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(local_storage_BrowserLocalStorage.URL_SCHEME)) { + return browserLocalStorage(url.slice(local_storage_BrowserLocalStorage.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(localStorageRouter); +IORouterRegistry.registerLoadRouter(localStorageRouter); +/** + * Factory function for local storage IOHandler. + * + * This `IOHandler` supports both `save` and `load`. + * + * For each model's saved artifacts, four items are saved to local storage. + * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the + * model, such as date saved, type of the topology, size in bytes, etc. + * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. For Keras- + * style models, this is a stringized JSON. + * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the + * model, can be used to decode the saved binary weight values (see + * item below). + * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary + * weight values, stored as a base64-encoded string. + * + * Saving may throw an `Error` if the total size of the artifacts exceed the + * browser-specific quota. + * + * @param modelPath A unique identifier for the model to be saved. Must be a + * non-empty string. + * @returns An instance of `IOHandler`, which can be used with, e.g., + * `tf.Model.save`. 
+ */ +function browserLocalStorage(modelPath) { + return new local_storage_BrowserLocalStorage(modelPath); +} +class local_storage_BrowserLocalStorageManager { + constructor() { + Object(util["assert"])(Object(environment["c" /* env */])().getBool('IS_BROWSER'), () => 'Current environment is not a web browser'); + Object(util["assert"])(typeof window === 'undefined' || + typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage'); + this.LS = window.localStorage; + } + async listModels() { + const out = {}; + const prefix = PATH_PREFIX + PATH_SEPARATOR; + const suffix = PATH_SEPARATOR + INFO_SUFFIX; + for (let i = 0; i < this.LS.length; ++i) { + const key = this.LS.key(i); + if (key.startsWith(prefix) && key.endsWith(suffix)) { + const modelPath = getModelPathFromKey(key); + out[modelPath] = JSON.parse(this.LS.getItem(key)); + } + } + return out; + } + async removeModel(path) { + path = local_storage_maybeStripScheme(path); + const keys = getModelKeys(path); + if (this.LS.getItem(keys.info) == null) { + throw new Error(`Cannot find model at path '${path}'`); + } + const info = JSON.parse(this.LS.getItem(keys.info)); + this.LS.removeItem(keys.info); + this.LS.removeItem(keys.topology); + this.LS.removeItem(keys.weightSpecs); + this.LS.removeItem(keys.weightData); + return info; + } +} +if (Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // Wrap the construction and registration, to guard against browsers that + // don't support Local Storage. + try { + model_management_ModelStoreManagerRegistry.registerManager(local_storage_BrowserLocalStorage.URL_SCHEME, new local_storage_BrowserLocalStorageManager()); + } + catch (err) { + } +} +//# sourceMappingURL=local_storage.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/browser_files.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * IOHandlers related to files, such as browser-triggered file downloads, + * user-selected files in browser. + */ + + + +const DEFAULT_FILE_NAME_PREFIX = 'model'; +const DEFAULT_JSON_EXTENSION_NAME = '.json'; +const DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin'; +function defer(f) { + return new Promise(resolve => setTimeout(resolve)).then(f); +} +class browser_files_BrowserDownloads { + constructor(fileNamePrefix) { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + // TODO(cais): Provide info on what IOHandlers are available under the + // current environment. 
+ throw new Error('browserDownloads() cannot proceed because the current environment ' + + 'is not a browser.'); + } + if (fileNamePrefix.startsWith(browser_files_BrowserDownloads.URL_SCHEME)) { + fileNamePrefix = fileNamePrefix.slice(browser_files_BrowserDownloads.URL_SCHEME.length); + } + if (fileNamePrefix == null || fileNamePrefix.length === 0) { + fileNamePrefix = DEFAULT_FILE_NAME_PREFIX; + } + this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME; + this.weightDataFileName = + fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME; + } + async save(modelArtifacts) { + if (typeof (document) === 'undefined') { + throw new Error('Browser downloads are not supported in ' + + 'this environment since `document` is not present'); + } + const weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' })); + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserDownloads.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + else { + const weightsManifest = [{ + paths: ['./' + this.weightDataFileName], + weights: modelArtifacts.weightSpecs + }]; + const modelTopologyAndWeightManifest = { + modelTopology: modelArtifacts.modelTopology, + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + weightsManifest + }; + const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' })); + // If anchor elements are not provided, create them without attaching them + // to parents, so that the downloaded file names can be controlled. + const jsonAnchor = this.jsonAnchor == null ? 
document.createElement('a') : + this.jsonAnchor; + jsonAnchor.download = this.modelTopologyFileName; + jsonAnchor.href = modelTopologyAndWeightManifestURL; + // Trigger downloads by evoking a click event on the download anchors. + // When multiple downloads are started synchronously, Firefox will only + // save the last one. + await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click'))); + if (modelArtifacts.weightData != null) { + const weightDataAnchor = this.weightDataAnchor == null ? + document.createElement('a') : + this.weightDataAnchor; + weightDataAnchor.download = this.weightDataFileName; + weightDataAnchor.href = weightsURL; + await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click'))); + } + return { modelArtifactsInfo: Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts) }; + } + } +} +browser_files_BrowserDownloads.URL_SCHEME = 'downloads://'; +class browser_files_BrowserFiles { + constructor(files) { + if (files == null || files.length < 1) { + throw new Error(`When calling browserFiles, at least 1 file is required, ` + + `but received ${files}`); + } + this.files = files; + } + async load() { + const jsonFile = this.files[0]; + const weightFiles = this.files.slice(1); + return new Promise((resolve, reject) => { + const jsonReader = new FileReader(); + jsonReader.onload = (event) => { + // tslint:disable-next-line:no-any + const modelJSON = JSON.parse(event.target.result); + const modelTopology = modelJSON.modelTopology; + if (modelTopology == null) { + reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`)); + return; + } + if (weightFiles.length === 0) { + resolve({ modelTopology }); + } + const weightsManifest = modelJSON.weightsManifest; + if (weightsManifest == null) { + reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`)); + return; + } + let pathToFile; + try { + pathToFile = + this.checkManifestAndWeightFiles(weightsManifest, weightFiles); + } + catch 
(err) { + reject(err); + return; + } + const weightSpecs = []; + const paths = []; + const perFileBuffers = []; + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + paths.push(path); + perFileBuffers.push(null); + }); + weightSpecs.push(...weightsGroup.weights); + }); + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + const weightFileReader = new FileReader(); + weightFileReader.onload = (event) => { + // tslint:disable-next-line:no-any + const weightData = event.target.result; + const index = paths.indexOf(path); + perFileBuffers[index] = weightData; + if (perFileBuffers.indexOf(null) === -1) { + resolve({ + modelTopology, + weightSpecs, + weightData: Object(io_utils["d" /* concatenateArrayBuffers */])(perFileBuffers), + format: modelJSON.format, + generatedBy: modelJSON.generatedBy, + convertedBy: modelJSON.convertedBy, + userDefinedMetadata: modelJSON.userDefinedMetadata + }); + } + }; + weightFileReader.onerror = error => reject(`Failed to weights data from file of path '${path}'.`); + weightFileReader.readAsArrayBuffer(pathToFile[path]); + }); + }); + }; + jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` + + `from file '${jsonFile.name}'. BrowserFiles supports loading ` + + `Keras-style tf.Model artifacts only.`); + jsonReader.readAsText(jsonFile); + }); + } + /** + * Check the compatibility between weights manifest and weight files. 
+ */ + checkManifestAndWeightFiles(manifest, files) { + const basenames = []; + const fileNames = files.map(file => Object(io_utils["c" /* basename */])(file.name)); + const pathToFile = {}; + for (const group of manifest) { + group.paths.forEach(path => { + const pathBasename = Object(io_utils["c" /* basename */])(path); + if (basenames.indexOf(pathBasename) !== -1) { + throw new Error(`Duplicate file basename found in weights manifest: ` + + `'${pathBasename}'`); + } + basenames.push(pathBasename); + if (fileNames.indexOf(pathBasename) === -1) { + throw new Error(`Weight file with basename '${pathBasename}' is not provided.`); + } + else { + pathToFile[path] = files[fileNames.indexOf(pathBasename)]; + } + }); + } + if (basenames.length !== files.length) { + throw new Error(`Mismatch in the number of files in weights manifest ` + + `(${basenames.length}) and the number of weight files provided ` + + `(${files.length}).`); + } + return pathToFile; + } +} +const browserDownloadsRouter = (url) => { + if (!Object(environment["c" /* env */])().getBool('IS_BROWSER')) { + return null; + } + else { + if (!Array.isArray(url) && url.startsWith(browser_files_BrowserDownloads.URL_SCHEME)) { + return browserDownloads(url.slice(browser_files_BrowserDownloads.URL_SCHEME.length)); + } + else { + return null; + } + } +}; +IORouterRegistry.registerSaveRouter(browserDownloadsRouter); +/** + * Creates an IOHandler that triggers file downloads from the browser. + * + * The returned `IOHandler` instance can be used as model exporting methods such + * as `tf.Model.save` and supports only saving. + * + * ```js + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * const saveResult = await model.save('downloads://mymodel'); + * // This will trigger downloading of two files: + * // 'mymodel.json' and 'mymodel.weights.bin'. 
+ * console.log(saveResult); + * ``` + * + * @param fileNamePrefix Prefix name of the files to be downloaded. For use with + * `tf.Model`, `fileNamePrefix` should follow either of the following two + * formats: + * 1. `null` or `undefined`, in which case the default file + * names will be used: + * - 'model.json' for the JSON file containing the model topology and + * weights manifest. + * - 'model.weights.bin' for the binary file containing the binary weight + * values. + * 2. A single string or an Array of a single string, as the file name prefix. + * For example, if `'foo'` is provided, the downloaded JSON + * file and binary weights file will be named 'foo.json' and + * 'foo.weights.bin', respectively. + * @param config Additional configuration for triggering downloads. + * @returns An instance of `BrowserDownloads` `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function browserDownloads(fileNamePrefix = 'model') { + return new browser_files_BrowserDownloads(fileNamePrefix); +} +/** + * Creates an IOHandler that loads model artifacts from user-selected files. + * + * This method can be used for loading from files such as user-selected files + * in the browser. + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * // Note: This code snippet won't run properly without the actual file input + * // elements in the HTML DOM. + * + * // Suppose there are two HTML file input (``) + * // elements. + * const uploadJSONInput = document.getElementById('upload-json'); + * const uploadWeightsInput = document.getElementById('upload-weights'); + * const model = await tf.loadLayersModel(tf.io.browserFiles( + * [uploadJSONInput.files[0], uploadWeightsInput.files[0]])); + * ``` + * + * @param files `File`s to load from. 
Currently, this function supports only + * loading from files that contain Keras-style models (i.e., `tf.Model`s), for + * which an `Array` of `File`s is expected (in that order): + * - A JSON file containing the model topology and weight manifest. + * - Optionally, One or more binary files containing the binary weights. + * These files must have names that match the paths in the `weightsManifest` + * contained by the aforementioned JSON file, or errors will be thrown + * during loading. These weights files have the same format as the ones + * generated by `tensorflowjs_converter` that comes with the `tensorflowjs` + * Python PIP package. If no weights files are provided, only the model + * topology will be loaded from the JSON file above. + * @returns An instance of `Files` `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function browserFiles(files) { + return new browser_files_BrowserFiles(files); +} +//# sourceMappingURL=browser_files.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/progress.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + +/** + * Monitor Promise.all progress, fire onProgress callback function. 
+ * + * @param promises Promise list going to be monitored + * @param onProgress Callback function. Fired when a promise resolved. + * @param startFraction Optional fraction start. Default to 0. + * @param endFraction Optional fraction end. Default to 1. + */ +function monitorPromisesProgress(promises, onProgress, startFraction, endFraction) { + checkPromises(promises); + startFraction = startFraction == null ? 0 : startFraction; + endFraction = endFraction == null ? 1 : endFraction; + checkFraction(startFraction, endFraction); + let resolvedPromise = 0; + const registerMonitor = (promise) => { + promise.then(value => { + const fraction = startFraction + + ++resolvedPromise / promises.length * (endFraction - startFraction); + // pass fraction as parameter to callback function. + onProgress(fraction); + return value; + }); + return promise; + }; + function checkPromises(promises) { + Object(util["assert"])(promises != null && Array.isArray(promises) && promises.length > 0, () => 'promises must be a none empty array'); + } + function checkFraction(startFraction, endFraction) { + Object(util["assert"])(startFraction >= 0 && startFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` + + `got startFraction ${startFraction}`); + Object(util["assert"])(endFraction >= 0 && endFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` + + `got endFraction ${endFraction}`); + Object(util["assert"])(endFraction >= startFraction, () => `startFraction must be no more than endFraction, but ` + + `got startFraction ${startFraction} and endFraction ` + + `${endFraction}`); + } + return Promise.all(promises.map(registerMonitor)); +} +//# sourceMappingURL=progress.js.map +// EXTERNAL MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/types.js +var types = __webpack_require__(34); + +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/weights_loader.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Reads binary weights data from a number of URLs. + * + * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls. + * @param requestOptions RequestInit (options) for the HTTP requests. + * @param fetchFunc Optional overriding value for the `window.fetch` function. + * @param onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same + * length as `fetchURLs`. + */ +async function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) { + if (loadOptions == null) { + loadOptions = {}; + } + const fetchFunc = loadOptions.fetchFunc == null ? Object(environment["c" /* env */])().platform.fetch : + loadOptions.fetchFunc; + // Create the requests for all of the weights in parallel. + const requests = fetchURLs.map(fetchURL => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true })); + const fetchStartFraction = 0; + const fetchEndFraction = 0.5; + const responses = loadOptions.onProgress == null ? 
+ await Promise.all(requests) : + await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction); + const bufferPromises = responses.map(response => response.arrayBuffer()); + const bufferStartFraction = 0.5; + const bufferEndFraction = 1; + const buffers = loadOptions.onProgress == null ? + await Promise.all(bufferPromises) : + await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction); + return buffers; +} +/** + * Reads a weights manifest JSON configuration, fetches the weights and + * returns them as `Tensor`s. + * + * @param manifest The weights manifest JSON. + * @param filePathPrefix The path prefix for filenames given in the manifest. + * Defaults to the empty string. + * @param weightNames The names of the weights to be fetched. + */ +async function loadWeights(manifest, filePathPrefix = '', weightNames, requestInit) { + // TODO(nsthorat): Groups are currently fetched atomically. If you need a + // single weight from a group, the whole group will be fetched. At a future + // date, we should support fetching only the individual shards within a + // group that are needed to reconstruct the requested weight. + // TODO(cais): Use `decodeWeights` for implementation. + const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit }); + const loadWeights = weightsLoaderFactory(fetchWeights); + return loadWeights(manifest, filePathPrefix, weightNames); +} +/** + * Creates a function, which reads a weights manifest JSON configuration, + * fetches the weight files using the specified function and returns them as + * `Tensor`s. 
+ * + * ```js + * // example for creating a nodejs weight loader, which reads the weight files + * // from disk using fs.readFileSync + * + * import * as fs from 'fs' + * + * const fetchWeightsFromDisk = (filePaths: string[]) => + * filePaths.map(filePath => fs.readFileSync(filePath).buffer) + * + * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk) + * + * const manifest = JSON.parse( + * fs.readFileSync('./my_model-weights_manifest').toString() + * ) + * const weightMap = await loadWeights(manifest, './') + * ``` + * @param fetchWeightsFunction The function used for fetching the weight files. + * @returns Weight loading function. + */ +function weightsLoaderFactory(fetchWeightsFunction) { + return async (manifest, filePathPrefix = '', weightNames) => { + // Collect all the groups, weights, and their relative offsets to be + // fetched. + const groupIndicesToFetchMap = manifest.map(() => false); + const groupWeightsToFetch = {}; + const weightsFound = weightNames != null ? weightNames.map(() => false) : []; + const allManifestWeightNames = []; + manifest.forEach((manifestGroupConfig, groupIndex) => { + let groupOffset = 0; + manifestGroupConfig.weights.forEach(weightsEntry => { + const rawDtype = ('quantization' in weightsEntry) ? 
+ weightsEntry.quantization.dtype : + weightsEntry.dtype; + const weightsBytes = types["a" /* DTYPE_VALUE_SIZE_MAP */][rawDtype] * + util["sizeFromShape"](weightsEntry.shape); + const enqueueWeightsForFetchingFn = () => { + groupIndicesToFetchMap[groupIndex] = true; + if (groupWeightsToFetch[groupIndex] == null) { + groupWeightsToFetch[groupIndex] = []; + } + groupWeightsToFetch[groupIndex].push({ + manifestEntry: weightsEntry, + groupOffset, + sizeBytes: weightsBytes + }); + }; + if (weightNames != null) { + weightNames.forEach((weightName, weightIndex) => { + if (weightName === weightsEntry.name) { + enqueueWeightsForFetchingFn(); + weightsFound[weightIndex] = true; + } + }); + } + else { + enqueueWeightsForFetchingFn(); + } + allManifestWeightNames.push(weightsEntry.name); + groupOffset += weightsBytes; + }); + }); + if (!weightsFound.every(found => found)) { + const weightsNotFound = weightNames.filter((_, i) => !weightsFound[i]); + throw new Error(`Could not find weights in manifest with names: ` + + `${weightsNotFound.join(', ')}. \n` + + `Manifest JSON has weights with names: ` + + `${allManifestWeightNames.join(', ')}.`); + } + // Convert the one-hot boolean groupId => shouldFetch map to a list of group + // IDs. + const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => { + if (shouldFetch) { + accumulator.push(i); + } + return accumulator; + }, []); + const fetchUrls = []; + groupIndicesToFetch.forEach(i => { + manifest[i].paths.forEach(filepath => { + const fetchUrl = filePathPrefix + + (!filePathPrefix.endsWith('/') ? 
'/' : '') + filepath; + fetchUrls.push(fetchUrl); + }); + }); + const buffers = await fetchWeightsFunction(fetchUrls); + const weightsTensorMap = {}; + let bufferIndexOffset = 0; + groupIndicesToFetch.forEach(i => { + const numBuffers = manifest[i].paths.length; + let groupBytes = 0; + for (let i = 0; i < numBuffers; i++) { + groupBytes += buffers[bufferIndexOffset + i].byteLength; + } + // Create a buffer for the whole group. + const groupBuffer = new ArrayBuffer(groupBytes); + const groupByteBuffer = new Uint8Array(groupBuffer); + let groupBufferOffset = 0; + for (let i = 0; i < numBuffers; i++) { + const buffer = new Uint8Array(buffers[bufferIndexOffset + i]); + groupByteBuffer.set(buffer, groupBufferOffset); + groupBufferOffset += buffer.byteLength; + } + const weightsEntries = groupWeightsToFetch[i]; + weightsEntries.forEach(weightsEntry => { + const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes); + const nameToTensorMap = Object(io_utils["e" /* decodeWeights */])(byteBuffer, [weightsEntry.manifestEntry]); + for (const name in nameToTensorMap) { + weightsTensorMap[name] = nameToTensorMap[name]; + } + }); + bufferIndexOffset += numBuffers; + }); + return weightsTensorMap; + }; +} +//# sourceMappingURL=weights_loader.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/http.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * IOHandler implementations based on HTTP requests in the web browser. + * + * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API). + */ + + + + + +const OCTET_STREAM_MIME_TYPE = 'application/octet-stream'; +const JSON_TYPE = 'application/json'; +class http_HTTPRequest { + constructor(path, loadOptions) { + this.DEFAULT_METHOD = 'POST'; + if (loadOptions == null) { + loadOptions = {}; + } + this.weightPathPrefix = loadOptions.weightPathPrefix; + this.onProgress = loadOptions.onProgress; + if (loadOptions.fetchFunc != null) { + Object(util["assert"])(typeof loadOptions.fetchFunc === 'function', () => 'Must pass a function that matches the signature of ' + + '`fetch` (see ' + + 'https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)'); + this.fetch = loadOptions.fetchFunc; + } + else { + this.fetch = Object(environment["c" /* env */])().platform.fetch; + } + Object(util["assert"])(path != null && path.length > 0, () => 'URL path for http must not be null, undefined or ' + + 'empty.'); + if (Array.isArray(path)) { + Object(util["assert"])(path.length === 2, () => 'URL paths for http must have a length of 2, ' + + `(actual length is ${path.length}).`); + } + this.path = path; + if (loadOptions.requestInit != null && + loadOptions.requestInit.body != null) { + throw new Error('requestInit is expected to have no pre-existing body, but has one.'); + } + this.requestInit = loadOptions.requestInit || {}; + } + async save(modelArtifacts) { + if (modelArtifacts.modelTopology instanceof ArrayBuffer) { + throw new Error('BrowserHTTPRequest.save() does not support saving model topology ' + + 'in binary formats yet.'); + } + const init = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit); + init.body = new FormData(); + const weightsManifest 
= [{ + paths: ['./model.weights.bin'], + weights: modelArtifacts.weightSpecs, + }]; + const modelTopologyAndWeightManifest = { + modelTopology: modelArtifacts.modelTopology, + format: modelArtifacts.format, + generatedBy: modelArtifacts.generatedBy, + convertedBy: modelArtifacts.convertedBy, + userDefinedMetadata: modelArtifacts.userDefinedMetadata, + weightsManifest + }; + init.body.append('model.json', new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), 'model.json'); + if (modelArtifacts.weightData != null) { + init.body.append('model.weights.bin', new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), 'model.weights.bin'); + } + const response = await this.fetch(this.path, init); + if (response.ok) { + return { + modelArtifactsInfo: Object(io_utils["g" /* getModelArtifactsInfoForJSON */])(modelArtifacts), + responses: [response], + }; + } + else { + throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ` + + `${response.status}.`); + } + } + /** + * Load model artifacts via HTTP request(s). + * + * See the documentation to `tf.io.http` for details on the saved + * artifacts. + * + * @returns The loaded model artifacts (if loading succeeds). + */ + async load() { + const modelConfigRequest = await this.fetch(this.path, this.requestInit); + if (!modelConfigRequest.ok) { + throw new Error(`Request to ${this.path} failed with status code ` + + `${modelConfigRequest.status}. Please verify this URL points to ` + + `the model JSON of the model to load.`); + } + let modelConfig; + try { + modelConfig = await modelConfigRequest.json(); + } + catch (e) { + let message = `Failed to parse model JSON of response from ${this.path}.`; + // TODO(nsthorat): Remove this after some time when we're comfortable that + // .pb files are mostly gone. + if (this.path.endsWith('.pb')) { + message += ' Your path contains a .pb file extension. 
' + + 'Support for .pb models have been removed in TensorFlow.js 1.0 ' + + 'in favor of .json models. You can re-convert your Python ' + + 'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' + + 'or you can convert your.pb models with the \'pb2json\'' + + 'NPM script in the tensorflow/tfjs-converter repository.'; + } + else { + message += ' Please make sure the server is serving valid ' + + 'JSON for this request.'; + } + throw new Error(message); + } + const modelTopology = modelConfig.modelTopology; + const weightsManifest = modelConfig.weightsManifest; + const generatedBy = modelConfig.generatedBy; + const convertedBy = modelConfig.convertedBy; + const format = modelConfig.format; + const userDefinedMetadata = modelConfig.userDefinedMetadata; + // We do not allow both modelTopology and weightsManifest to be missing. + if (modelTopology == null && weightsManifest == null) { + throw new Error(`The JSON from HTTP path ${this.path} contains neither model ` + + `topology or manifest for weights.`); + } + let weightSpecs; + let weightData; + if (weightsManifest != null) { + const results = await this.loadWeights(weightsManifest); + [weightSpecs, weightData] = results; + } + return { + modelTopology, + weightSpecs, + weightData, + userDefinedMetadata, + generatedBy, + convertedBy, + format + }; + } + async loadWeights(weightsManifest) { + const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path; + const [prefix, suffix] = parseUrl(weightPath); + const pathPrefix = this.weightPathPrefix || prefix; + const weightSpecs = []; + for (const entry of weightsManifest) { + weightSpecs.push(...entry.weights); + } + const fetchURLs = []; + weightsManifest.forEach(weightsGroup => { + weightsGroup.paths.forEach(path => { + fetchURLs.push(pathPrefix + path + suffix); + }); + }); + const buffers = await loadWeightsAsArrayBuffer(fetchURLs, { + requestInit: this.requestInit, + fetchFunc: this.fetch, + onProgress: this.onProgress + }); + return [weightSpecs, Object(io_utils["d" /* concatenateArrayBuffers */])(buffers)]; + } +} +http_HTTPRequest.URL_SCHEME_REGEX = /^https?:\/\//; +/** + * Extract the prefix and suffix of the url, where the prefix is the path before + * the last file, and suffix is the search params after the last file. + * ``` + * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file' + * [prefix, suffix] = parseUrl(url) + * // prefix = 'http://tfhub.dev/model/1/' + * // suffix = '?tfjs-format=file' + * ``` + * @param url the model url to be parsed. + */ +function parseUrl(url) { + const lastSlash = url.lastIndexOf('/'); + const lastSearchParam = url.lastIndexOf('?'); + const prefix = url.substring(0, lastSlash); + const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : ''; + return [prefix + '/', suffix]; +} +function isHTTPScheme(url) { + return url.match(http_HTTPRequest.URL_SCHEME_REGEX) != null; +} +const httpRouter = (url, loadOptions) => { + if (typeof fetch === 'undefined' && + (loadOptions == null || loadOptions.fetchFunc == null)) { + // `http` uses `fetch` or `node-fetch`, if one wants to use it in + // an environment that is not the browser or node they have to setup a + // global fetch polyfill. 
+ return null; + } + else { + let isHTTP = true; + if (Array.isArray(url)) { + isHTTP = url.every(urlItem => isHTTPScheme(urlItem)); + } + else { + isHTTP = isHTTPScheme(url); + } + if (isHTTP) { + return http(url, loadOptions); + } + } + return null; +}; +IORouterRegistry.registerSaveRouter(httpRouter); +IORouterRegistry.registerLoadRouter(httpRouter); +/** + * Creates an IOHandler subtype that sends model artifacts to HTTP server. + * + * An HTTP request of the `multipart/form-data` mime type will be sent to the + * `path` URL. The form data includes artifacts that represent the topology + * and/or weights of the model. In the case of Keras-style `tf.Model`, two + * blobs (files) exist in form-data: + * - A JSON file consisting of `modelTopology` and `weightsManifest`. + * - A binary weights file consisting of the concatenated weight values. + * These files are in the same format as the one generated by + * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html). + * + * The following code snippet exemplifies the client-side code that uses this + * function: + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save(tf.io.http( + * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}})); + * console.log(saveResult); + * ``` + * + * If the default `POST` method is to be used, without any custom parameters + * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`: + * + * ```js + * const saveResult = await model.save('http://model-server:5000/upload'); + * ``` + * + * The following GitHub Gist + * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864 + * implements a server based on [flask](https://github.com/pallets/flask) that + * can receive the request. 
Upon receiving the model artifacts via the requst, + * this particular server reconsistutes instances of [Keras + * Models](https://keras.io/models/model/) in memory. + * + * + * @param path A URL path to the model. + * Can be an absolute HTTP path (e.g., + * 'http://localhost:8000/model-upload)') or a relative path (e.g., + * './model-upload'). + * @param requestInit Request configurations to be used when sending + * HTTP request to server using `fetch`. It can contain fields such as + * `method`, `credentials`, `headers`, `mode`, etc. See + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request + * for more information. `requestInit` must not have a body, because the + * body will be set by TensorFlow.js. File blobs representing the model + * topology (filename: 'model.json') and the weights of the model (filename: + * 'model.weights.bin') will be appended to the body. If `requestInit` has a + * `body`, an Error will be thrown. + * @param loadOptions Optional configuration for the loading. It includes the + * following fields: + * - weightPathPrefix Optional, this specifies the path prefix for weight + * files, by default this is calculated from the path param. + * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js, + * the `fetch` from node-fetch can be used here. + * - onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns An instance of `IOHandler`. + */ +/** + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +function http(path, loadOptions) { + return new http_HTTPRequest(path, loadOptions); +} +/** + * Deprecated. Use `tf.io.http`. 
+ * @param path + * @param loadOptions + */ +function browserHTTPRequest(path, loadOptions) { + return http(path, loadOptions); +} +//# sourceMappingURL=http.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/passthrough.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +class PassthroughLoader { + constructor(modelArtifacts) { + this.modelArtifacts = modelArtifacts; + } + async load() { + return this.modelArtifacts; + } +} +class PassthroughSaver { + constructor(saveHandler) { + this.saveHandler = saveHandler; + } + async save(modelArtifacts) { + return this.saveHandler(modelArtifacts); + } +} +/** + * Creates an IOHandler that loads model artifacts from memory. + * + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * const model = await tf.loadLayersModel(tf.io.fromMemory( + * modelTopology, weightSpecs, weightData)); + * ``` + * + * @param modelArtifacts a object containing model topology (i.e., parsed from + * the JSON format). + * @param weightSpecs An array of `WeightsManifestEntry` objects describing the + * names, shapes, types, and quantization of the weight data. 
+ * @param weightData A single `ArrayBuffer` containing the weight data, + * concatenated in the order described by the weightSpecs. + * @param trainingConfig Model training configuration. Optional. + * + * @returns A passthrough `IOHandler` that simply loads the provided data. + */ +function fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) { + if (arguments.length === 1) { + const isModelArtifacts = modelArtifacts.modelTopology != null || + modelArtifacts.weightSpecs != null; + if (isModelArtifacts) { + return new PassthroughLoader(modelArtifacts); + } + else { + // Legacy support: with only modelTopology. + // TODO(cais): Remove this deprecated API. + console.warn('Please call tf.io.fromMemory() with only one argument. ' + + 'The argument should be of type ModelArtifacts. ' + + 'The multi-argument signature of tf.io.fromMemory() has been ' + + 'deprecated and will be removed in a future release.'); + return new PassthroughLoader({ modelTopology: modelArtifacts }); + } + } + else { + // Legacy support. + // TODO(cais): Remove this deprecated API. + console.warn('Please call tf.io.fromMemory() with only one argument. ' + + 'The argument should be of type ModelArtifacts. ' + + 'The multi-argument signature of tf.io.fromMemory() has been ' + + 'deprecated and will be removed in a future release.'); + return new PassthroughLoader({ + modelTopology: modelArtifacts, + weightSpecs, + weightData, + trainingConfig + }); + } +} +/** + * Creates an IOHandler that passes saved model artifacts to a callback. + * + * ```js + * function handleSave(artifacts) { + * // ... do something with the artifacts ... + * return {modelArtifactsInfo: {...}, ...}; + * } + * + * const saveResult = model.save(tf.io.withSaveHandler(handleSave)); + * ``` + * + * @param saveHandler A function that accepts a `ModelArtifacts` and returns a + * `SaveResult`. 
+ */ +function withSaveHandler(saveHandler) { + return new PassthroughSaver(saveHandler); +} +//# sourceMappingURL=passthrough.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/io/io.js +/** + * @license + * Copyright 2018 Google LLC. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +// Importing local_storage and indexed_db is necessary for the routers to be +// registered. + + + + + + + + + + +//# sourceMappingURL=io.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/one_hot.js +/** + * @license + * Copyright 2020 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + +/** + * Creates a one-hot `tf.Tensor`. 
The locations represented by `indices` take + * value `onValue` (defaults to 1), while all other locations take value + * `offValue` (defaults to 0). If `indices` is rank `R`, the output has rank + * `R+1` with the last axis of size `depth`. + * + * ```js + * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print(); + * ``` + * + * @param indices `tf.Tensor` of indices with dtype `int32`. + * @param depth The depth of the one hot dimension. + * @param onValue A number used to fill in the output when the index matches + * the location. + * @param offValue A number used to fill in the output when the index does + * not match the location. + */ +/** @doc {heading: 'Tensors', subheading: 'Creation'} */ +function oneHot_(indices, depth, onValue = 1, offValue = 0) { + if (depth < 2) { + throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`); + } + let $indices = Object(tensor_util_env["a" /* convertToTensor */])(indices, 'indices', 'oneHot', 'int32'); + const outShape = [...$indices.shape, depth]; + $indices = $indices.flatten(); + const forward = (backend, save) => { + save([$indices]); + return reshape(backend.oneHot($indices, depth, onValue, offValue), outShape); + }; + const inputs = { indices: $indices }; + const attrs = { depth, onValue, offValue }; + return engine["a" /* ENGINE */].runKernelFunc(forward, inputs, null /* grad */, kernel_names["cb" /* OneHot */], attrs); +} +const oneHot = Object(operation["a" /* op */])({ oneHot_ }); +//# sourceMappingURL=one_hot.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/confusion_matrix.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + +/** + * Computes the confusion matrix from true labels and predicted labels. + * + * ```js + * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32'); + * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32'); + * const numClasses = 3; + * const out = tf.math.confusionMatrix(labels, predictions, numClasses); + * out.print(); + * // Expected output matrix: + * // [[2, 0, 0], + * // [0, 1, 1], + * // [0, 0, 1]] + * ``` + * + * @param labels The target labels, assumed to be 0-based integers + * for the classes. The shape is `[numExamples]`, where + * `numExamples` is the number of examples included. + * @param predictions The predicted classes, assumed to be + * 0-based integers for the classes. Must have the same shape as `labels`. + * @param numClasses Number of all classes, as an integer. + * Its value must be larger than the largest element in `labels` and + * `predictions`. + * @returns The confusion matrix as a int32-type 2D tensor. The value at + * row `r` and column `c` is the number of times examples of actual class + * `r` were predicted as class `c`. 
+ */ +/** @doc {heading: 'Operations', subheading: 'Evaluation'} */ +function confusionMatrix_(labels, predictions, numClasses) { + const $labels = Object(tensor_util_env["a" /* convertToTensor */])(labels, 'labels', 'confusionMatrix'); + const $predictions = Object(tensor_util_env["a" /* convertToTensor */])(predictions, 'predictions', 'confusionMatrix'); + util["assert"](numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, ` + + `but got ${numClasses}`); + util["assert"]($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`); + util["assert"]($predictions.rank === 1, () => `Expected the rank of predictions to be 1, ` + + `but got ${$predictions.rank}`); + util["assert"]($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ` + + `${$labels.shape[0]} vs. ${$predictions.shape[0]}. ` + + `Labels and predictions should have the same number of elements.`); + util["assert"](numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ` + + `${numClasses}`); + // TODO(cais): In the future, if oneHot supports tensors inputs for + // `numClasses`, `confusionMatrix` can make `numClasses` optional. + const oneHotLabels = oneHot($labels.asType('int32'), numClasses); + const oneHotPredictions = oneHot($predictions.asType('int32'), numClasses); + const oneHotLabelsT = oneHotLabels.transpose(); + return oneHotLabelsT.matMul(oneHotPredictions).asType('int32'); +} +const confusionMatrix = Object(operation["a" /* op */])({ confusionMatrix_ }); +//# sourceMappingURL=confusion_matrix.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/math.js +/** + * @license + * Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ +/** + * Exports under the tf.math.* namespace. + */ + + +//# sourceMappingURL=math.js.map +// CONCATENATED MODULE: ./node_modules/@tensorflow/tfjs-core/dist/ops/browser.js +/** + * @license + * Copyright 2019 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============================================================================= + */ + + + + + + + +let fromPixels2DContext; +/** + * Creates a `tf.Tensor` from an image. + * + * ```js + * const image = new ImageData(1, 1); + * image.data[0] = 100; + * image.data[1] = 150; + * image.data[2] = 200; + * image.data[3] = 255; + * + * tf.browser.fromPixels(image).print(); + * ``` + * + * @param pixels The input image to construct the tensor from. The + * supported image types are all 4-channel. 
You can also pass in an image + * object with following attributes: + * `{data: Uint8Array; width: number; height: number}` + * @param numChannels The number of channels of the output tensor. A + * numChannels value less than 4 allows you to ignore channels. Defaults to + * 3 (ignores alpha channel of input image). + */ +/** @doc {heading: 'Browser', namespace: 'browser', ignoreCI: true} */ +function fromPixels_(pixels, numChannels = 3) { + // Sanity checks. + if (numChannels > 4) { + throw new Error('Cannot construct Tensor with more than 4 channels from pixels.'); + } + if (pixels == null) { + throw new Error('pixels passed to tf.browser.fromPixels() can not be null'); + } + let isPixelData = false; + let isImageData = false; + let isVideo = false; + let isImage = false; + let isCanvasLike = false; + if (pixels.data instanceof Uint8Array) { + isPixelData = true; + } + else if (typeof (ImageData) !== 'undefined' && pixels instanceof ImageData) { + isImageData = true; + } + else if (typeof (HTMLVideoElement) !== 'undefined' && + pixels instanceof HTMLVideoElement) { + isVideo = true; + } + else if (typeof (HTMLImageElement) !== 'undefined' && + pixels instanceof HTMLImageElement) { + isImage = true; + // tslint:disable-next-line: no-any + } + else if (pixels.getContext != null) { + isCanvasLike = true; + } + else { + throw new Error('pixels passed to tf.browser.fromPixels() must be either an ' + + `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData ` + + `in browser, or OffscreenCanvas, ImageData in webworker` + + ` or {data: Uint32Array, width: number, height: number}, ` + + `but was ${pixels.constructor.name}`); + } + if (isVideo) { + const HAVE_CURRENT_DATA_READY_STATE = 2; + if (isVideo && + pixels.readyState < + HAVE_CURRENT_DATA_READY_STATE) { + throw new Error('The video element has not loaded data yet. Please wait for ' + + '`loadeddata` event on the