g(x,c))a[d]=x,a[n]=c,d=n;else break a}}return b}\nfunction g(a,b){var c=a.sortIndex-b.sortIndex;return 0!==c?c:a.id-b.id}if(\"object\"===typeof performance&&\"function\"===typeof performance.now){var l=performance;exports.unstable_now=function(){return l.now()}}else{var p=Date,q=p.now();exports.unstable_now=function(){return p.now()-q}}var r=[],t=[],u=1,v=null,y=3,z=!1,A=!1,B=!1,D=\"function\"===typeof setTimeout?setTimeout:null,E=\"function\"===typeof clearTimeout?clearTimeout:null,F=\"undefined\"!==typeof setImmediate?setImmediate:null;\n\"undefined\"!==typeof navigator&&void 0!==navigator.scheduling&&void 0!==navigator.scheduling.isInputPending&&navigator.scheduling.isInputPending.bind(navigator.scheduling);function G(a){for(var b=h(t);null!==b;){if(null===b.callback)k(t);else if(b.startTime<=a)k(t),b.sortIndex=b.expirationTime,f(r,b);else break;b=h(t)}}function H(a){B=!1;G(a);if(!A)if(null!==h(r))A=!0,I(J);else{var b=h(t);null!==b&&K(H,b.startTime-a)}}\nfunction J(a,b){A=!1;B&&(B=!1,E(L),L=-1);z=!0;var c=y;try{G(b);for(v=h(r);null!==v&&(!(v.expirationTime>b)||a&&!M());){var d=v.callback;if(\"function\"===typeof d){v.callback=null;y=v.priorityLevel;var e=d(v.expirationTime<=b);b=exports.unstable_now();\"function\"===typeof e?v.callback=e:v===h(r)&&k(r);G(b)}else k(r);v=h(r)}if(null!==v)var w=!0;else{var m=h(t);null!==m&&K(H,m.startTime-b);w=!1}return w}finally{v=null,y=c,z=!1}}var N=!1,O=null,L=-1,P=5,Q=-1;\nfunction M(){return exports.unstable_now()-Qa||125d?(a.sortIndex=c,f(t,a),null===h(r)&&a===h(t)&&(B?(E(L),L=-1):B=!0,K(H,c-d))):(a.sortIndex=e,f(r,a),A||z||(A=!0,I(J)));return a};\nexports.unstable_shouldYield=M;exports.unstable_wrapCallback=function(a){var b=y;return function(){var c=y;y=b;try{return a.apply(this,arguments)}finally{y=c}}};\n","'use strict';\n\nif (process.env.NODE_ENV === 'production') {\n module.exports = require('./cjs/scheduler.production.min.js');\n} else {\n module.exports = require('./cjs/scheduler.development.js');\n}\n","//\n\nmodule.exports = function shallowEqual(objA, objB, compare, compareContext) {\n var ret = compare ? compare.call(compareContext, objA, objB) : void 0;\n\n if (ret !== void 0) {\n return !!ret;\n }\n\n if (objA === objB) {\n return true;\n }\n\n if (typeof objA !== \"object\" || !objA || typeof objB !== \"object\" || !objB) {\n return false;\n }\n\n var keysA = Object.keys(objA);\n var keysB = Object.keys(objB);\n\n if (keysA.length !== keysB.length) {\n return false;\n }\n\n var bHasOwnProperty = Object.prototype.hasOwnProperty.bind(objB);\n\n // Test for A's keys different from B.\n for (var idx = 0; idx < keysA.length; idx++) {\n var key = keysA[idx];\n\n if (!bHasOwnProperty(key)) {\n return false;\n }\n\n var valueA = objA[key];\n var valueB = objB[key];\n\n ret = compare ? 
compare.call(compareContext, valueA, valueB, key) : void 0;\n\n if (ret === false || (ret === void 0 && valueA !== valueB)) {\n return false;\n }\n }\n\n return true;\n};\n","var parse = require('inline-style-parser');\n\n/**\n * Parses inline style to object.\n *\n * @example\n * // returns { 'line-height': '42' }\n * StyleToObject('line-height: 42;');\n *\n * @param {String} style - The inline style.\n * @param {Function} [iterator] - The iterator function.\n * @return {null|Object}\n */\nfunction StyleToObject(style, iterator) {\n var output = null;\n if (!style || typeof style !== 'string') {\n return output;\n }\n\n var declaration;\n var declarations = parse(style);\n var hasIterator = typeof iterator === 'function';\n var property;\n var value;\n\n for (var i = 0, len = declarations.length; i < len; i++) {\n declaration = declarations[i];\n property = declaration.property;\n value = declaration.value;\n\n if (hasIterator) {\n iterator(property, value, declaration);\n } else if (value) {\n output || (output = {});\n output[property] = value;\n }\n }\n\n return output;\n}\n\nmodule.exports = StyleToObject;\nmodule.exports.default = StyleToObject; // ESM support\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","// getDefaultExport function for compatibility with non-harmony modules\n__webpack_require__.n = (module) => {\n\tvar getter = module && module.__esModule ?\n\t\t() => (module['default']) :\n\t\t() => (module);\n\t__webpack_require__.d(getter, { a: getter });\n\treturn getter;\n};","var getProto = Object.getPrototypeOf ? 
(obj) => (Object.getPrototypeOf(obj)) : (obj) => (obj.__proto__);\nvar leafPrototypes;\n// create a fake namespace object\n// mode & 1: value is a module id, require it\n// mode & 2: merge all properties of value into the ns\n// mode & 4: return value when already ns object\n// mode & 16: return value when it's Promise-like\n// mode & 8|1: behave like require\n__webpack_require__.t = function(value, mode) {\n\tif(mode & 1) value = this(value);\n\tif(mode & 8) return value;\n\tif(typeof value === 'object' && value) {\n\t\tif((mode & 4) && value.__esModule) return value;\n\t\tif((mode & 16) && typeof value.then === 'function') return value;\n\t}\n\tvar ns = Object.create(null);\n\t__webpack_require__.r(ns);\n\tvar def = {};\n\tleafPrototypes = leafPrototypes || [null, getProto({}), getProto([]), getProto(getProto)];\n\tfor(var current = mode & 2 && value; typeof current == 'object' && !~leafPrototypes.indexOf(current); current = getProto(current)) {\n\t\tObject.getOwnPropertyNames(current).forEach((key) => (def[key] = () => (value[key])));\n\t}\n\tdef['default'] = () => (value);\n\t__webpack_require__.d(ns, def);\n\treturn ns;\n};","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","__webpack_require__.nc = undefined;","export default function _typeof(o) {\n \"@babel/helpers - typeof\";\n\n return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? function (o) {\n return typeof o;\n } : function (o) {\n return o && \"function\" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? \"symbol\" : typeof o;\n }, _typeof(o);\n}","import _typeof from \"./typeof.js\";\nimport toPrimitive from \"./toPrimitive.js\";\nexport default function toPropertyKey(t) {\n var i = toPrimitive(t, \"string\");\n return \"symbol\" == _typeof(i) ? i : String(i);\n}","import _typeof from \"./typeof.js\";\nexport default function toPrimitive(t, r) {\n if (\"object\" != _typeof(t) || !t) return t;\n var e = t[Symbol.toPrimitive];\n if (void 0 !== e) {\n var i = e.call(t, r || \"default\");\n if (\"object\" != _typeof(i)) return i;\n throw new TypeError(\"@@toPrimitive must return a primitive value.\");\n }\n return (\"string\" === r ? 
String : Number)(t);\n}","import toPropertyKey from \"./toPropertyKey.js\";\nexport default function _defineProperty(obj, key, value) {\n key = toPropertyKey(key);\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n return obj;\n}","(function(root, factory) {\n if (typeof module === 'object' && module.exports) {\n module.exports = factory();\n } else {\n root.nearley = factory();\n }\n}(this, function() {\n\n function Rule(name, symbols, postprocess) {\n this.id = ++Rule.highestId;\n this.name = name;\n this.symbols = symbols; // a list of literal | regex class | nonterminal\n this.postprocess = postprocess;\n return this;\n }\n Rule.highestId = 0;\n\n Rule.prototype.toString = function(withCursorAt) {\n var symbolSequence = (typeof withCursorAt === \"undefined\")\n ? this.symbols.map(getSymbolShortDisplay).join(' ')\n : ( this.symbols.slice(0, withCursorAt).map(getSymbolShortDisplay).join(' ')\n + \" ● \"\n + this.symbols.slice(withCursorAt).map(getSymbolShortDisplay).join(' ') );\n return this.name + \" → \" + symbolSequence;\n }\n\n\n // a State is a rule at a position from a given starting point in the input stream (reference)\n function State(rule, dot, reference, wantedBy) {\n this.rule = rule;\n this.dot = dot;\n this.reference = reference;\n this.data = [];\n this.wantedBy = wantedBy;\n this.isComplete = this.dot === rule.symbols.length;\n }\n\n State.prototype.toString = function() {\n return \"{\" + this.rule.toString(this.dot) + \"}, from: \" + (this.reference || 0);\n };\n\n State.prototype.nextState = function(child) {\n var state = new State(this.rule, this.dot + 1, this.reference, this.wantedBy);\n state.left = this;\n state.right = child;\n if (state.isComplete) {\n state.data = state.build();\n // Having right set here will prevent the right state and its children\n // form being garbage collected\n state.right = undefined;\n }\n return state;\n };\n\n State.prototype.build = function() {\n var children = [];\n var node = this;\n do {\n children.push(node.right.data);\n node = node.left;\n } while (node.left);\n children.reverse();\n return children;\n };\n\n State.prototype.finish = function() {\n if (this.rule.postprocess) {\n this.data = this.rule.postprocess(this.data, this.reference, Parser.fail);\n }\n };\n\n\n function Column(grammar, index) {\n this.grammar = grammar;\n this.index = index;\n this.states = [];\n this.wants = {}; // states indexed by the non-terminal they expect\n this.scannable = []; // list of states that expect a token\n this.completed = {}; // states that are nullable\n }\n\n\n Column.prototype.process = function(nextColumn) {\n var states = this.states;\n var wants = this.wants;\n var completed = this.completed;\n\n for (var w = 0; w < states.length; w++) { // nb. 
we push() during iteration\n var state = states[w];\n\n if (state.isComplete) {\n state.finish();\n if (state.data !== Parser.fail) {\n // complete\n var wantedBy = state.wantedBy;\n for (var i = wantedBy.length; i--; ) { // this line is hot\n var left = wantedBy[i];\n this.complete(left, state);\n }\n\n // special-case nullables\n if (state.reference === this.index) {\n // make sure future predictors of this rule get completed.\n var exp = state.rule.name;\n (this.completed[exp] = this.completed[exp] || []).push(state);\n }\n }\n\n } else {\n // queue scannable states\n var exp = state.rule.symbols[state.dot];\n if (typeof exp !== 'string') {\n this.scannable.push(state);\n continue;\n }\n\n // predict\n if (wants[exp]) {\n wants[exp].push(state);\n\n if (completed.hasOwnProperty(exp)) {\n var nulls = completed[exp];\n for (var i = 0; i < nulls.length; i++) {\n var right = nulls[i];\n this.complete(state, right);\n }\n }\n } else {\n wants[exp] = [state];\n this.predict(exp);\n }\n }\n }\n }\n\n Column.prototype.predict = function(exp) {\n var rules = this.grammar.byName[exp] || [];\n\n for (var i = 0; i < rules.length; i++) {\n var r = rules[i];\n var wantedBy = this.wants[exp];\n var s = new State(r, 0, this.index, wantedBy);\n this.states.push(s);\n }\n }\n\n Column.prototype.complete = function(left, right) {\n var copy = left.nextState(right);\n this.states.push(copy);\n }\n\n\n function Grammar(rules, start) {\n this.rules = rules;\n this.start = start || this.rules[0].name;\n var byName = this.byName = {};\n this.rules.forEach(function(rule) {\n if (!byName.hasOwnProperty(rule.name)) {\n byName[rule.name] = [];\n }\n byName[rule.name].push(rule);\n });\n }\n\n // So we can allow passing (rules, start) directly to Parser for backwards compatibility\n Grammar.fromCompiled = function(rules, start) {\n var lexer = rules.Lexer;\n if (rules.ParserStart) {\n start = rules.ParserStart;\n rules = rules.ParserRules;\n }\n var rules = rules.map(function (r) { return (new Rule(r.name, r.symbols, r.postprocess)); });\n var g = new Grammar(rules, start);\n g.lexer = lexer; // nb. storing lexer on Grammar is iffy, but unavoidable\n return g;\n }\n\n\n function StreamLexer() {\n this.reset(\"\");\n }\n\n StreamLexer.prototype.reset = function(data, state) {\n this.buffer = data;\n this.index = 0;\n this.line = state ? state.line : 1;\n this.lastLineBreak = state ? -state.col : 0;\n }\n\n StreamLexer.prototype.next = function() {\n if (this.index < this.buffer.length) {\n var ch = this.buffer[this.index++];\n if (ch === '\\n') {\n this.line += 1;\n this.lastLineBreak = this.index;\n }\n return {value: ch};\n }\n }\n\n StreamLexer.prototype.save = function() {\n return {\n line: this.line,\n col: this.index - this.lastLineBreak,\n }\n }\n\n StreamLexer.prototype.formatError = function(token, message) {\n // nb. 
this gets called after consuming the offending token,\n // so the culprit is index-1\n var buffer = this.buffer;\n if (typeof buffer === 'string') {\n var lines = buffer\n .split(\"\\n\")\n .slice(\n Math.max(0, this.line - 5), \n this.line\n );\n\n var nextLineBreak = buffer.indexOf('\\n', this.index);\n if (nextLineBreak === -1) nextLineBreak = buffer.length;\n var col = this.index - this.lastLineBreak;\n var lastLineDigits = String(this.line).length;\n message += \" at line \" + this.line + \" col \" + col + \":\\n\\n\";\n message += lines\n .map(function(line, i) {\n return pad(this.line - lines.length + i + 1, lastLineDigits) + \" \" + line;\n }, this)\n .join(\"\\n\");\n message += \"\\n\" + pad(\"\", lastLineDigits + col) + \"^\\n\";\n return message;\n } else {\n return message + \" at index \" + (this.index - 1);\n }\n\n function pad(n, length) {\n var s = String(n);\n return Array(length - s.length + 1).join(\" \") + s;\n }\n }\n\n function Parser(rules, start, options) {\n if (rules instanceof Grammar) {\n var grammar = rules;\n var options = start;\n } else {\n var grammar = Grammar.fromCompiled(rules, start);\n }\n this.grammar = grammar;\n\n // Read options\n this.options = {\n keepHistory: false,\n lexer: grammar.lexer || new StreamLexer,\n };\n for (var key in (options || {})) {\n this.options[key] = options[key];\n }\n\n // Setup lexer\n this.lexer = this.options.lexer;\n this.lexerState = undefined;\n\n // Setup a table\n var column = new Column(grammar, 0);\n var table = this.table = [column];\n\n // I could be expecting anything.\n column.wants[grammar.start] = [];\n column.predict(grammar.start);\n // TODO what if start rule is nullable?\n column.process();\n this.current = 0; // token index\n }\n\n // create a reserved token for indicating a parse fail\n Parser.fail = {};\n\n Parser.prototype.feed = function(chunk) {\n var lexer = this.lexer;\n lexer.reset(chunk, this.lexerState);\n\n var token;\n while (true) {\n try {\n token = lexer.next();\n if (!token) {\n break;\n }\n } catch (e) {\n // Create the next column so that the error reporter\n // can display the correctly predicted states.\n var nextColumn = new Column(this.grammar, this.current + 1);\n this.table.push(nextColumn);\n var err = new Error(this.reportLexerError(e));\n err.offset = this.current;\n err.token = e.token;\n throw err;\n }\n // We add new states to table[current+1]\n var column = this.table[this.current];\n\n // GC unused states\n if (!this.options.keepHistory) {\n delete this.table[this.current - 1];\n }\n\n var n = this.current + 1;\n var nextColumn = new Column(this.grammar, n);\n this.table.push(nextColumn);\n\n // Advance all tokens that expect the symbol\n var literal = token.text !== undefined ? token.text : token.value;\n var value = lexer.constructor === StreamLexer ? token.value : token;\n var scannable = column.scannable;\n for (var w = scannable.length; w--; ) {\n var state = scannable[w];\n var expect = state.rule.symbols[state.dot];\n // Try to consume the token\n // either regex or literal\n if (expect.test ? expect.test(value) :\n expect.type ? 
expect.type === token.type\n : expect.literal === literal) {\n // Add it\n var next = state.nextState({data: value, token: token, isToken: true, reference: n - 1});\n nextColumn.states.push(next);\n }\n }\n\n // Next, for each of the rules, we either\n // (a) complete it, and try to see if the reference row expected that\n // rule\n // (b) predict the next nonterminal it expects by adding that\n // nonterminal's start state\n // To prevent duplication, we also keep track of rules we have already\n // added\n\n nextColumn.process();\n\n // If needed, throw an error:\n if (nextColumn.states.length === 0) {\n // No states at all! This is not good.\n var err = new Error(this.reportError(token));\n err.offset = this.current;\n err.token = token;\n throw err;\n }\n\n // maybe save lexer state\n if (this.options.keepHistory) {\n column.lexerState = lexer.save()\n }\n\n this.current++;\n }\n if (column) {\n this.lexerState = lexer.save()\n }\n\n // Incrementally keep track of results\n this.results = this.finish();\n\n // Allow chaining, for whatever it's worth\n return this;\n };\n\n Parser.prototype.reportLexerError = function(lexerError) {\n var tokenDisplay, lexerMessage;\n // Planning to add a token property to moo's thrown error\n // even on erroring tokens to be used in error display below\n var token = lexerError.token;\n if (token) {\n tokenDisplay = \"input \" + JSON.stringify(token.text[0]) + \" (lexer error)\";\n lexerMessage = this.lexer.formatError(token, \"Syntax error\");\n } else {\n tokenDisplay = \"input (lexer error)\";\n lexerMessage = lexerError.message;\n }\n return this.reportErrorCommon(lexerMessage, tokenDisplay);\n };\n\n Parser.prototype.reportError = function(token) {\n var tokenDisplay = (token.type ? token.type + \" token: \" : \"\") + JSON.stringify(token.value !== undefined ? token.value : token);\n var lexerMessage = this.lexer.formatError(token, \"Syntax error\");\n return this.reportErrorCommon(lexerMessage, tokenDisplay);\n };\n\n Parser.prototype.reportErrorCommon = function(lexerMessage, tokenDisplay) {\n var lines = [];\n lines.push(lexerMessage);\n var lastColumnIndex = this.table.length - 2;\n var lastColumn = this.table[lastColumnIndex];\n var expectantStates = lastColumn.states\n .filter(function(state) {\n var nextSymbol = state.rule.symbols[state.dot];\n return nextSymbol && typeof nextSymbol !== \"string\";\n });\n\n if (expectantStates.length === 0) {\n lines.push('Unexpected ' + tokenDisplay + '. I did not expect any more input. Here is the state of my parse table:\\n');\n this.displayStateStack(lastColumn.states, lines);\n } else {\n lines.push('Unexpected ' + tokenDisplay + '. 
Instead, I was expecting to see one of the following:\\n');\n // Display a \"state stack\" for each expectant state\n // - which shows you how this state came to be, step by step.\n // If there is more than one derivation, we only display the first one.\n var stateStacks = expectantStates\n .map(function(state) {\n return this.buildFirstStateStack(state, []) || [state];\n }, this);\n // Display each state that is expecting a terminal symbol next.\n stateStacks.forEach(function(stateStack) {\n var state = stateStack[0];\n var nextSymbol = state.rule.symbols[state.dot];\n var symbolDisplay = this.getSymbolDisplay(nextSymbol);\n lines.push('A ' + symbolDisplay + ' based on:');\n this.displayStateStack(stateStack, lines);\n }, this);\n }\n lines.push(\"\");\n return lines.join(\"\\n\");\n }\n \n Parser.prototype.displayStateStack = function(stateStack, lines) {\n var lastDisplay;\n var sameDisplayCount = 0;\n for (var j = 0; j < stateStack.length; j++) {\n var state = stateStack[j];\n var display = state.rule.toString(state.dot);\n if (display === lastDisplay) {\n sameDisplayCount++;\n } else {\n if (sameDisplayCount > 0) {\n lines.push(' ^ ' + sameDisplayCount + ' more lines identical to this');\n }\n sameDisplayCount = 0;\n lines.push(' ' + display);\n }\n lastDisplay = display;\n }\n };\n\n Parser.prototype.getSymbolDisplay = function(symbol) {\n return getSymbolLongDisplay(symbol);\n };\n\n /*\n Builds a the first state stack. You can think of a state stack as the call stack\n of the recursive-descent parser which the Nearley parse algorithm simulates.\n A state stack is represented as an array of state objects. Within a\n state stack, the first item of the array will be the starting\n state, with each successive item in the array going further back into history.\n\n This function needs to be given a starting state and an empty array representing\n the visited states, and it returns an single state stack.\n\n */\n Parser.prototype.buildFirstStateStack = function(state, visited) {\n if (visited.indexOf(state) !== -1) {\n // Found cycle, return null\n // to eliminate this path from the results, because\n // we don't know how to display it meaningfully\n return null;\n }\n if (state.wantedBy.length === 0) {\n return [state];\n }\n var prevState = state.wantedBy[0];\n var childVisited = [state].concat(visited);\n var childResult = this.buildFirstStateStack(prevState, childVisited);\n if (childResult === null) {\n return null;\n }\n return [state].concat(childResult);\n };\n\n Parser.prototype.save = function() {\n var column = this.table[this.current];\n column.lexerState = this.lexerState;\n return column;\n };\n\n Parser.prototype.restore = function(column) {\n var index = column.index;\n this.current = index;\n this.table[index] = column;\n this.table.splice(index + 1);\n this.lexerState = column.lexerState;\n\n // Incrementally keep track of results\n this.results = this.finish();\n };\n\n // nb. deprecated: use save/restore instead!\n Parser.prototype.rewind = function(index) {\n if (!this.options.keepHistory) {\n throw new Error('set option `keepHistory` to enable rewinding')\n }\n // nb. 
recall column (table) indicies fall between token indicies.\n // col 0 -- token 0 -- col 1\n this.restore(this.table[index]);\n };\n\n Parser.prototype.finish = function() {\n // Return the possible parsings\n var considerations = [];\n var start = this.grammar.start;\n var column = this.table[this.table.length - 1]\n column.states.forEach(function (t) {\n if (t.rule.name === start\n && t.dot === t.rule.symbols.length\n && t.reference === 0\n && t.data !== Parser.fail) {\n considerations.push(t);\n }\n });\n return considerations.map(function(c) {return c.data; });\n };\n\n function getSymbolLongDisplay(symbol) {\n var type = typeof symbol;\n if (type === \"string\") {\n return symbol;\n } else if (type === \"object\") {\n if (symbol.literal) {\n return JSON.stringify(symbol.literal);\n } else if (symbol instanceof RegExp) {\n return 'character matching ' + symbol;\n } else if (symbol.type) {\n return symbol.type + ' token';\n } else if (symbol.test) {\n return 'token matching ' + String(symbol.test);\n } else {\n throw new Error('Unknown symbol type: ' + symbol);\n }\n }\n }\n\n function getSymbolShortDisplay(symbol) {\n var type = typeof symbol;\n if (type === \"string\") {\n return symbol;\n } else if (type === \"object\") {\n if (symbol.literal) {\n return JSON.stringify(symbol.literal);\n } else if (symbol instanceof RegExp) {\n return symbol.toString();\n } else if (symbol.type) {\n return '%' + symbol.type;\n } else if (symbol.test) {\n return '<' + String(symbol.test) + '>';\n } else {\n throw new Error('Unknown symbol type: ' + symbol);\n }\n }\n }\n\n return {\n Parser: Parser,\n Grammar: Grammar,\n Rule: Rule,\n };\n\n}));\n","(function(root, factory) {\n if (typeof define === 'function' && define.amd) {\n define([], factory) /* global define */\n } else if (typeof module === 'object' && module.exports) {\n module.exports = factory()\n } else {\n root.moo = factory()\n }\n}(this, function() {\n 'use strict';\n\n var hasOwnProperty = Object.prototype.hasOwnProperty\n var toString = Object.prototype.toString\n var hasSticky = typeof new RegExp().sticky === 'boolean'\n\n /***************************************************************************/\n\n function isRegExp(o) { return o && toString.call(o) === '[object RegExp]' }\n function isObject(o) { return o && typeof o === 'object' && !isRegExp(o) && !Array.isArray(o) }\n\n function reEscape(s) {\n return s.replace(/[-\\/\\\\^$*+?.()|[\\]{}]/g, '\\\\$&')\n }\n function reGroups(s) {\n var re = new RegExp('|' + s)\n return re.exec('').length - 1\n }\n function reCapture(s) {\n return '(' + s + ')'\n }\n function reUnion(regexps) {\n if (!regexps.length) return '(?!)'\n var source = regexps.map(function(s) {\n return \"(?:\" + s + \")\"\n }).join('|')\n return \"(?:\" + source + \")\"\n }\n\n function regexpOrLiteral(obj) {\n if (typeof obj === 'string') {\n return '(?:' + reEscape(obj) + ')'\n\n } else if (isRegExp(obj)) {\n // TODO: consider /u support\n if (obj.ignoreCase) throw new Error('RegExp /i flag not allowed')\n if (obj.global) throw new Error('RegExp /g flag is implied')\n if (obj.sticky) throw new Error('RegExp /y flag is implied')\n if (obj.multiline) throw new Error('RegExp /m flag is implied')\n return obj.source\n\n } else {\n throw new Error('Not a pattern: ' + obj)\n }\n }\n\n function pad(s, length) {\n if (s.length > length) {\n return s\n }\n return Array(length - s.length + 1).join(\" \") + s\n }\n\n function lastNLines(string, numLines) {\n var position = string.length\n var lineBreaks = 0;\n while (true) 
{\n var idx = string.lastIndexOf(\"\\n\", position - 1)\n if (idx === -1) {\n break;\n } else {\n lineBreaks++\n }\n position = idx\n if (lineBreaks === numLines) {\n break;\n }\n if (position === 0) {\n break;\n }\n }\n var startPosition = \n lineBreaks < numLines ?\n 0 : \n position + 1\n return string.substring(startPosition).split(\"\\n\")\n }\n\n function objectToRules(object) {\n var keys = Object.getOwnPropertyNames(object)\n var result = []\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i]\n var thing = object[key]\n var rules = [].concat(thing)\n if (key === 'include') {\n for (var j = 0; j < rules.length; j++) {\n result.push({include: rules[j]})\n }\n continue\n }\n var match = []\n rules.forEach(function(rule) {\n if (isObject(rule)) {\n if (match.length) result.push(ruleOptions(key, match))\n result.push(ruleOptions(key, rule))\n match = []\n } else {\n match.push(rule)\n }\n })\n if (match.length) result.push(ruleOptions(key, match))\n }\n return result\n }\n\n function arrayToRules(array) {\n var result = []\n for (var i = 0; i < array.length; i++) {\n var obj = array[i]\n if (obj.include) {\n var include = [].concat(obj.include)\n for (var j = 0; j < include.length; j++) {\n result.push({include: include[j]})\n }\n continue\n }\n if (!obj.type) {\n throw new Error('Rule has no type: ' + JSON.stringify(obj))\n }\n result.push(ruleOptions(obj.type, obj))\n }\n return result\n }\n\n function ruleOptions(type, obj) {\n if (!isObject(obj)) {\n obj = { match: obj }\n }\n if (obj.include) {\n throw new Error('Matching rules cannot also include states')\n }\n\n // nb. error and fallback imply lineBreaks\n var options = {\n defaultType: type,\n lineBreaks: !!obj.error || !!obj.fallback,\n pop: false,\n next: null,\n push: null,\n error: false,\n fallback: false,\n value: null,\n type: null,\n shouldThrow: false,\n }\n\n // Avoid Object.assign(), so we support IE9+\n for (var key in obj) {\n if (hasOwnProperty.call(obj, key)) {\n options[key] = obj[key]\n }\n }\n\n // type transform cannot be a string\n if (typeof options.type === 'string' && type !== options.type) {\n throw new Error(\"Type transform cannot be a string (type '\" + options.type + \"' for token '\" + type + \"')\")\n }\n\n // convert to array\n var match = options.match\n options.match = Array.isArray(match) ? match : match ? [match] : []\n options.match.sort(function(a, b) {\n return isRegExp(a) && isRegExp(b) ? 0\n : isRegExp(b) ? -1 : isRegExp(a) ? +1 : b.length - a.length\n })\n return options\n }\n\n function toRules(spec) {\n return Array.isArray(spec) ? arrayToRules(spec) : objectToRules(spec)\n }\n\n var defaultErrorRule = ruleOptions('error', {lineBreaks: true, shouldThrow: true})\n function compileRules(rules, hasStates) {\n var errorRule = null\n var fast = Object.create(null)\n var fastAllowed = true\n var unicodeFlag = null\n var groups = []\n var parts = []\n\n // If there is a fallback rule, then disable fast matching\n for (var i = 0; i < rules.length; i++) {\n if (rules[i].fallback) {\n fastAllowed = false\n }\n }\n\n for (var i = 0; i < rules.length; i++) {\n var options = rules[i]\n\n if (options.include) {\n // all valid inclusions are removed by states() preprocessor\n throw new Error('Inheritance is not allowed in stateless lexers')\n }\n\n if (options.error || options.fallback) {\n // errorRule can only be set once\n if (errorRule) {\n if (!options.fallback === !errorRule.fallback) {\n throw new Error(\"Multiple \" + (options.fallback ? 
\"fallback\" : \"error\") + \" rules not allowed (for token '\" + options.defaultType + \"')\")\n } else {\n throw new Error(\"fallback and error are mutually exclusive (for token '\" + options.defaultType + \"')\")\n }\n }\n errorRule = options\n }\n\n var match = options.match.slice()\n if (fastAllowed) {\n while (match.length && typeof match[0] === 'string' && match[0].length === 1) {\n var word = match.shift()\n fast[word.charCodeAt(0)] = options\n }\n }\n\n // Warn about inappropriate state-switching options\n if (options.pop || options.push || options.next) {\n if (!hasStates) {\n throw new Error(\"State-switching options are not allowed in stateless lexers (for token '\" + options.defaultType + \"')\")\n }\n if (options.fallback) {\n throw new Error(\"State-switching options are not allowed on fallback tokens (for token '\" + options.defaultType + \"')\")\n }\n }\n\n // Only rules with a .match are included in the RegExp\n if (match.length === 0) {\n continue\n }\n fastAllowed = false\n\n groups.push(options)\n\n // Check unicode flag is used everywhere or nowhere\n for (var j = 0; j < match.length; j++) {\n var obj = match[j]\n if (!isRegExp(obj)) {\n continue\n }\n\n if (unicodeFlag === null) {\n unicodeFlag = obj.unicode\n } else if (unicodeFlag !== obj.unicode && options.fallback === false) {\n throw new Error('If one rule is /u then all must be')\n }\n }\n\n // convert to RegExp\n var pat = reUnion(match.map(regexpOrLiteral))\n\n // validate\n var regexp = new RegExp(pat)\n if (regexp.test(\"\")) {\n throw new Error(\"RegExp matches empty string: \" + regexp)\n }\n var groupCount = reGroups(pat)\n if (groupCount > 0) {\n throw new Error(\"RegExp has capture groups: \" + regexp + \"\\nUse (?: … ) instead\")\n }\n\n // try and detect rules matching newlines\n if (!options.lineBreaks && regexp.test('\\n')) {\n throw new Error('Rule should declare lineBreaks: ' + regexp)\n }\n\n // store regex\n parts.push(reCapture(pat))\n }\n\n\n // If there's no fallback rule, use the sticky flag so we only look for\n // matches at the current index.\n //\n // If we don't support the sticky flag, then fake it using an irrefutable\n // match (i.e. an empty pattern).\n var fallbackRule = errorRule && errorRule.fallback\n var flags = hasSticky && !fallbackRule ? 'ym' : 'gm'\n var suffix = hasSticky || fallbackRule ? '' : '|'\n\n if (unicodeFlag === true) flags += \"u\"\n var combined = new RegExp(reUnion(parts) + suffix, flags)\n return {regexp: combined, groups: groups, fast: fast, error: errorRule || defaultErrorRule}\n }\n\n function compile(rules) {\n var result = compileRules(toRules(rules))\n return new Lexer({start: result}, 'start')\n }\n\n function checkStateGroup(g, name, map) {\n var state = g && (g.push || g.next)\n if (state && !map[state]) {\n throw new Error(\"Missing state '\" + state + \"' (in token '\" + g.defaultType + \"' of state '\" + name + \"')\")\n }\n if (g && g.pop && +g.pop !== 1) {\n throw new Error(\"pop must be 1 (in token '\" + g.defaultType + \"' of state '\" + name + \"')\")\n }\n }\n function compileStates(states, start) {\n var all = states.$all ? 
toRules(states.$all) : []\n delete states.$all\n\n var keys = Object.getOwnPropertyNames(states)\n if (!start) start = keys[0]\n\n var ruleMap = Object.create(null)\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i]\n ruleMap[key] = toRules(states[key]).concat(all)\n }\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i]\n var rules = ruleMap[key]\n var included = Object.create(null)\n for (var j = 0; j < rules.length; j++) {\n var rule = rules[j]\n if (!rule.include) continue\n var splice = [j, 1]\n if (rule.include !== key && !included[rule.include]) {\n included[rule.include] = true\n var newRules = ruleMap[rule.include]\n if (!newRules) {\n throw new Error(\"Cannot include nonexistent state '\" + rule.include + \"' (in state '\" + key + \"')\")\n }\n for (var k = 0; k < newRules.length; k++) {\n var newRule = newRules[k]\n if (rules.indexOf(newRule) !== -1) continue\n splice.push(newRule)\n }\n }\n rules.splice.apply(rules, splice)\n j--\n }\n }\n\n var map = Object.create(null)\n for (var i = 0; i < keys.length; i++) {\n var key = keys[i]\n map[key] = compileRules(ruleMap[key], true)\n }\n\n for (var i = 0; i < keys.length; i++) {\n var name = keys[i]\n var state = map[name]\n var groups = state.groups\n for (var j = 0; j < groups.length; j++) {\n checkStateGroup(groups[j], name, map)\n }\n var fastKeys = Object.getOwnPropertyNames(state.fast)\n for (var j = 0; j < fastKeys.length; j++) {\n checkStateGroup(state.fast[fastKeys[j]], name, map)\n }\n }\n\n return new Lexer(map, start)\n }\n\n function keywordTransform(map) {\n\n // Use a JavaScript Map to map keywords to their corresponding token type\n // unless Map is unsupported, then fall back to using an Object:\n var isMap = typeof Map !== 'undefined'\n var reverseMap = isMap ? new Map : Object.create(null)\n\n var types = Object.getOwnPropertyNames(map)\n for (var i = 0; i < types.length; i++) {\n var tokenType = types[i]\n var item = map[tokenType]\n var keywordList = Array.isArray(item) ? item : [item]\n keywordList.forEach(function(keyword) {\n if (typeof keyword !== 'string') {\n throw new Error(\"keyword must be string (in keyword '\" + tokenType + \"')\")\n }\n if (isMap) {\n reverseMap.set(keyword, tokenType)\n } else {\n reverseMap[keyword] = tokenType\n }\n })\n }\n return function(k) {\n return isMap ? reverseMap.get(k) : reverseMap[k]\n }\n }\n\n /***************************************************************************/\n\n var Lexer = function(states, state) {\n this.startState = state\n this.states = states\n this.buffer = ''\n this.stack = []\n this.reset()\n }\n\n Lexer.prototype.reset = function(data, info) {\n this.buffer = data || ''\n this.index = 0\n this.line = info ? info.line : 1\n this.col = info ? info.col : 1\n this.queuedToken = info ? info.queuedToken : null\n this.queuedText = info ? info.queuedText: \"\";\n this.queuedThrow = info ? info.queuedThrow : null\n this.setState(info ? info.state : this.startState)\n this.stack = info && info.stack ? 
info.stack.slice() : []\n return this\n }\n\n Lexer.prototype.save = function() {\n return {\n line: this.line,\n col: this.col,\n state: this.state,\n stack: this.stack.slice(),\n queuedToken: this.queuedToken,\n queuedText: this.queuedText,\n queuedThrow: this.queuedThrow,\n }\n }\n\n Lexer.prototype.setState = function(state) {\n if (!state || this.state === state) return\n this.state = state\n var info = this.states[state]\n this.groups = info.groups\n this.error = info.error\n this.re = info.regexp\n this.fast = info.fast\n }\n\n Lexer.prototype.popState = function() {\n this.setState(this.stack.pop())\n }\n\n Lexer.prototype.pushState = function(state) {\n this.stack.push(this.state)\n this.setState(state)\n }\n\n var eat = hasSticky ? function(re, buffer) { // assume re is /y\n return re.exec(buffer)\n } : function(re, buffer) { // assume re is /g\n var match = re.exec(buffer)\n // will always match, since we used the |(?:) trick\n if (match[0].length === 0) {\n return null\n }\n return match\n }\n\n Lexer.prototype._getGroup = function(match) {\n var groupCount = this.groups.length\n for (var i = 0; i < groupCount; i++) {\n if (match[i + 1] !== undefined) {\n return this.groups[i]\n }\n }\n throw new Error('Cannot find token type for matched text')\n }\n\n function tokenToString() {\n return this.value\n }\n\n Lexer.prototype.next = function() {\n var index = this.index\n\n // If a fallback token matched, we don't need to re-run the RegExp\n if (this.queuedGroup) {\n var token = this._token(this.queuedGroup, this.queuedText, index)\n this.queuedGroup = null\n this.queuedText = \"\"\n return token\n }\n\n var buffer = this.buffer\n if (index === buffer.length) {\n return // EOF\n }\n\n // Fast matching for single characters\n var group = this.fast[buffer.charCodeAt(index)]\n if (group) {\n return this._token(group, buffer.charAt(index), index)\n }\n\n // Execute RegExp\n var re = this.re\n re.lastIndex = index\n var match = eat(re, buffer)\n\n // Error tokens match the remaining buffer\n var error = this.error\n if (match == null) {\n return this._token(error, buffer.slice(index, buffer.length), index)\n }\n\n var group = this._getGroup(match)\n var text = match[0]\n\n if (error.fallback && match.index !== index) {\n this.queuedGroup = group\n this.queuedText = text\n\n // Fallback tokens contain the unmatched portion of the buffer\n return this._token(error, buffer.slice(index, match.index), index)\n }\n\n return this._token(group, text, index)\n }\n\n Lexer.prototype._token = function(group, text, offset) {\n // count line breaks\n var lineBreaks = 0\n if (group.lineBreaks) {\n var matchNL = /\\n/g\n var nl = 1\n if (text === '\\n') {\n lineBreaks = 1\n } else {\n while (matchNL.exec(text)) { lineBreaks++; nl = matchNL.lastIndex }\n }\n }\n\n var token = {\n type: (typeof group.type === 'function' && group.type(text)) || group.defaultType,\n value: typeof group.value === 'function' ? group.value(text) : text,\n text: text,\n toString: tokenToString,\n offset: offset,\n lineBreaks: lineBreaks,\n line: this.line,\n col: this.col,\n }\n // nb. 
adding more props to token object will make V8 sad!\n\n var size = text.length\n this.index += size\n this.line += lineBreaks\n if (lineBreaks !== 0) {\n this.col = size - nl + 1\n } else {\n this.col += size\n }\n\n // throw, if no rule with {error: true}\n if (group.shouldThrow) {\n var err = new Error(this.formatError(token, \"invalid syntax\"))\n throw err;\n }\n\n if (group.pop) this.popState()\n else if (group.push) this.pushState(group.push)\n else if (group.next) this.setState(group.next)\n\n return token\n }\n\n if (typeof Symbol !== 'undefined' && Symbol.iterator) {\n var LexerIterator = function(lexer) {\n this.lexer = lexer\n }\n\n LexerIterator.prototype.next = function() {\n var token = this.lexer.next()\n return {value: token, done: !token}\n }\n\n LexerIterator.prototype[Symbol.iterator] = function() {\n return this\n }\n\n Lexer.prototype[Symbol.iterator] = function() {\n return new LexerIterator(this)\n }\n }\n\n Lexer.prototype.formatError = function(token, message) {\n if (token == null) {\n // An undefined token indicates EOF\n var text = this.buffer.slice(this.index)\n var token = {\n text: text,\n offset: this.index,\n lineBreaks: text.indexOf('\\n') === -1 ? 0 : 1,\n line: this.line,\n col: this.col,\n }\n }\n \n var numLinesAround = 2\n var firstDisplayedLine = Math.max(token.line - numLinesAround, 1)\n var lastDisplayedLine = token.line + numLinesAround\n var lastLineDigits = String(lastDisplayedLine).length\n var displayedLines = lastNLines(\n this.buffer, \n (this.line - token.line) + numLinesAround + 1\n )\n .slice(0, 5)\n var errorLines = []\n errorLines.push(message + \" at line \" + token.line + \" col \" + token.col + \":\")\n errorLines.push(\"\")\n for (var i = 0; i < displayedLines.length; i++) {\n var line = displayedLines[i]\n var lineNo = firstDisplayedLine + i\n errorLines.push(pad(String(lineNo), lastLineDigits) + \" \" + line);\n if (lineNo === token.line) {\n errorLines.push(pad(\"\", lastLineDigits + token.col + 1) + \"^\")\n }\n }\n return errorLines.join(\"\\n\")\n }\n\n Lexer.prototype.clone = function() {\n return new Lexer(this.states, this.state)\n }\n\n Lexer.prototype.has = function(tokenType) {\n return true\n }\n\n\n return {\n compile: compile,\n states: compileStates,\n error: Object.freeze({error: true}),\n fallback: Object.freeze({fallback: true}),\n keywords: keywordTransform,\n }\n\n}));\n","import { Logger } from '.'\n\n/**\n * Each error name with corresponding type in info value\n */\ninterface PublicodesErrorTypes {\n\tInternalError: {\n\t\tdottedName?: string\n\t}\n\tEngineError: {}\n\tSyntaxError: {\n\t\tdottedName: string\n\t}\n\tEvaluationError: {\n\t\tdottedName: string\n\t}\n\tUnknownRule: {\n\t\tdottedName: string\n\t}\n\tPrivateRule: {\n\t\tdottedName: string\n\t}\n}\n\n/**\n * Return true if `error` is a PublicodesError,\n * use `name` parameter to check and narow error type\n * @example\n * try {\n * \tnew Engine().evaluate()\n * } catch (error) {\n * \tif (isPublicodesError(error, 'EngineError')) {\n * \t\tconsole.log(error.info)\n * \t}\n * }\n */\nexport const isPublicodesError = (\n\terror: unknown,\n\tname?: Name,\n): error is PublicodesError<\n\ttypeof name extends undefined ? keyof PublicodesErrorTypes : Name\n> =>\n\terror instanceof PublicodesError &&\n\t(name === undefined ? 
true : error.name === name)\n\n/**\n * Generic error for Publicodes\n */\nexport class PublicodesError<\n\tName extends keyof PublicodesErrorTypes,\n> extends Error {\n\tname: Name\n\tinfo: PublicodesErrorTypes[Name]\n\n\tconstructor(\n\t\tname: Name,\n\t\tmessage: string,\n\t\tinfo: PublicodesErrorTypes[Name],\n\t\toriginalError?: Error,\n\t) {\n\t\tsuper(buildMessage(name, message, info, originalError))\n\t\tthis.name = name\n\t\tthis.info = info\n\t}\n}\n\nconst buildMessage = (\n\tname: string,\n\tmessage: string,\n\tinfo?: PublicodesErrorTypes[keyof PublicodesErrorTypes],\n\toriginalError?: Error,\n) => {\n\tconst types: Partial> = {\n\t\tSyntaxError: 'Erreur syntaxique',\n\t\tEvaluationError: \"Erreur d'évaluation\",\n\t\tUnknownRule: 'Règle inconnue',\n\t\tPrivateRule: 'Règle privée',\n\t}\n\tconst isError = /error/i.test(name)\n\n\treturn (\n\t\t`\\n[ ${types[name] ?? name} ]` +\n\t\t(info && 'dottedName' in info && info.dottedName?.length ?\n\t\t\t`\\n➡️ Dans la règle \"${info.dottedName}\"`\n\t\t:\t'') +\n\t\t`\\n${isError ? '✖️' : '⚠️'} ${message}` +\n\t\t(originalError ?\n\t\t\t'\\n' + (isError ? ' ' : 'ℹ️ ') + originalError.message\n\t\t:\t'')\n\t)\n}\n\n/**\n * @deprecated Throw an internal server error, replace this by `throw new PublicodesError('InternalError', ...)`\n */\nexport class PublicodesInternalError extends PublicodesError<'InternalError'> {\n\tconstructor(payload: {}) {\n\t\tsuper(\n\t\t\t'InternalError',\n\t\t\t`\nErreur interne du moteur.\n\nCette erreur est le signe d'un bug dans publicodes. Pour nous aider à le résoudre, vous pouvez copier ce texte dans un nouveau ticket : https://github.com/betagouv/mon-entreprise/issues/new.\n\npayload:\n${JSON.stringify(payload, null, 2)}\n`,\n\t\t\tpayload,\n\t\t)\n\t}\n}\n\n/**\n * Use this error in default case of a switch to check exhaustivity statically\n * inspired by https://github.com/ts-essentials/ts-essentials#exhaustive-switch-cases\n */\nexport class UnreachableCaseError extends PublicodesInternalError {\n\tconstructor(value: never) {\n\t\tsuper(value)\n\t}\n}\n\nexport function warning(\n\tlogger: Logger,\n\tmessage: string,\n\tinformation: { dottedName: string },\n\toriginalError?: Error,\n) {\n\tlogger.warn(\n\t\tbuildMessage('Avertissement', message, information, originalError),\n\t)\n}\n\nexport function experimentalRuleWarning(logger: Logger, dottedName: string) {\n\tlogger.warn(\n\t\tbuildMessage(\n\t\t\t'Avertissement',\n\t\t\t\"Cette règle est tagguée comme experimentale. \\n\\nCela veut dire qu'elle peut être modifiée, renommée, ou supprimée sans qu'il n'y ait de changement de version majeure dans l'API.\\n\",\n\t\t\t{ dottedName },\n\t\t),\n\t)\n}\n","export function addToMapSet(map: Map>, key: T, value: T) {\n\tif (map.has(key)) {\n\t\tmap.get(key)!.add(value)\n\t\treturn\n\t}\n\tmap.set(key, new Set([value]))\n}\n\nexport function mergeWithArray<\n\tN extends string | number | symbol,\n\tM extends string | number | symbol,\n\tT,\n>(obj1: Record>, obj2: Record>): Record>\n\nexport function mergeWithArray<\n\tN extends string | number | symbol,\n\tM extends string | number | symbol,\n\tT,\n>(\n\tobj1: Partial>>,\n\tobj2: Partial>>,\n): Partial>>\n\nexport function mergeWithArray(\n\tobj1: Partial>>,\n\tobj2: Partial>>,\n): Partial>> {\n\treturn (Object.entries(obj2) as Array<[K, Array]>).reduce(\n\t\t(obj, [key, value]) => ({\n\t\t\t...obj,\n\t\t\t[key]: [...(obj[key] ?? 
[]), ...value],\n\t\t}),\n\t\tobj1,\n\t) as Partial>>\n}\n\nexport const weakCopyObj = >(obj: T): T => {\n\tconst copy = {} as T\n\tfor (const key in obj) {\n\t\tcopy[key] = obj[key]\n\t}\n\n\treturn copy\n}\n","import { ParsedRules } from '..'\nimport { UnreachableCaseError } from '../error'\nimport { TrancheNodes } from '../mecanisms/trancheUtils'\nimport { ReferenceNode } from '../reference'\nimport { ReplacementRule } from '../replacement'\nimport { weakCopyObj } from '../utils'\nimport {\n\tASTNode,\n\tASTTransformer,\n\tASTVisitor,\n\tNodeKind,\n\tTraverseFunction,\n} from './types'\n\n/**\n\tThis function creates a transormation of the AST from on a simpler\n\tcallback function `fn`\n\n\t`fn` will be called with the nodes of the ASTTree during the exploration\n\n\tThe outcome of the callback function has an influence on the exploration of the AST :\n\t- `false`, the node is not updated and the exploration does not continue further down this branch\n\t- `undefined`, the node is not updated but the exploration continues and its children will be transformed\n\t- `ASTNode`, the node is transformed to the new value and the exploration does not continue further down the branch\n\n\t`updateFn` : It is possible to specifically use the updated version of a child\n\tby using the function passed as second argument. The returned value will be the\n\ttransformed version of the node.\n\t*/\nexport function makeASTTransformer(\n\tfn: (node: ASTNode, transform: ASTTransformer) => ASTNode | undefined | false,\n\tstopOnUpdate = true,\n): ASTTransformer {\n\tfunction transform(node: ASTNode): ASTNode {\n\t\tconst updatedNode = fn(node, transform)\n\t\tif (updatedNode === false) {\n\t\t\treturn node\n\t\t}\n\t\tif (updatedNode === undefined) {\n\t\t\treturn traverseASTNode(transform, node)\n\t\t}\n\t\treturn stopOnUpdate ? updatedNode : traverseASTNode(transform, updatedNode)\n\t}\n\treturn transform\n}\nexport function makeASTVisitor(\n\tfn: (node: ASTNode, visit: ASTVisitor) => 'continue' | 'stop',\n): ASTVisitor {\n\tfunction visit(node: ASTNode) {\n\t\tswitch (fn(node, visit)) {\n\t\t\tcase 'continue':\n\t\t\t\ttraverseASTNode(transformizedVisit, node)\n\t\t\t\treturn\n\t\t\tcase 'stop':\n\t\t\t\treturn\n\t\t}\n\t}\n\tconst transformizedVisit: ASTTransformer = (node) => {\n\t\tvisit(node)\n\t\treturn node\n\t}\n\treturn visit\n}\n\n// Can be made more flexible with other args like a filter function (ASTNode -> Bool).\nexport function iterAST(\n\tchildrenSelector: (node: ASTNode) => Iterable,\n\tnode: ASTNode,\n): ASTNode[] {\n\tfunction* iterate(node: ASTNode): IterableIterator {\n\t\tyield node\n\t\tconst selectedSubNodes = childrenSelector(node)\n\t\tfor (const subNode of selectedSubNodes) yield* iterate(subNode)\n\t}\n\treturn [...iterate(node)]\n}\n\n/**\n * This function allows to construct a specific value while exploring the AST with\n * a simple reducing function as argument.\n *\n * `fn` will be called with the currently reduced value `acc` and the current node of the AST\n *\n * If the callback function returns:\n * - `undefined`, the exploration continues further down and all the children are reduced\n * \tsuccessively to a single value\n * - `T`, the reduced value is returned\n *\n * `reduceFn` : It is possible to specifically use the reduced value of a child\n * by using the function passed as second argument. 
The returned value will be the reduced version\n * of the node\n */\nexport function reduceAST(\n\tfn: (acc: T, n: ASTNode, reduceFn: (n: ASTNode) => T) => T | undefined,\n\tstart: T,\n\tnode: ASTNode,\n): T {\n\tfunction traverseFn(acc: T, node: ASTNode): T {\n\t\tconst result = fn(acc, node, traverseFn.bind(null, start))\n\t\tif (result === undefined) {\n\t\t\treturn getChildrenNodes(node).reduce(traverseFn, acc)\n\t\t}\n\t\treturn result\n\t}\n\treturn traverseFn(start, node)\n}\n\nexport function getChildrenNodes(node: ASTNode): ASTNode[] {\n\tconst nodes: ASTNode[] = []\n\ttraverseASTNode((node) => {\n\t\tnodes.push(node)\n\t\treturn node\n\t}, node)\n\treturn nodes\n}\n\nexport function traverseParsedRules(\n\tfn: ASTTransformer,\n\tparsedRules: ParsedRules,\n): ParsedRules {\n\tconst ret = {} as Record\n\tfor (const name in parsedRules) {\n\t\tret[name] = fn(parsedRules[name])\n\t}\n\n\treturn ret as ParsedRules\n}\n\n/**\n * Apply a transform function on children. Not recursive.\n */\nexport const traverseASTNode: TraverseFunction = (fn, node) => {\n\tnode = traverseSourceMap(fn, node)\n\tswitch (node.nodeKind) {\n\t\tcase 'rule':\n\t\t\treturn traverseRuleNode(fn, node)\n\t\tcase 'reference':\n\t\tcase 'constant':\n\t\t\treturn node\n\t\tcase 'arrondi':\n\t\t\treturn traverseArrondiNode(fn, node)\n\t\tcase 'simplifier unité':\n\t\tcase 'variable manquante':\n\t\tcase 'est non applicable':\n\t\tcase 'est non défini':\n\t\t\treturn traverseUnaryOperationNode(fn, node)\n\t\tcase 'barème':\n\t\tcase 'taux progressif':\n\t\tcase 'grille':\n\t\t\treturn traverseNodeWithTranches(fn, node)\n\t\tcase 'une possibilité':\n\t\t\treturn traverseArrayNode(fn, node)\n\t\tcase 'durée':\n\t\t\treturn traverseDuréeNode(fn, node)\n\t\tcase 'résoudre référence circulaire':\n\t\t\treturn traverseRésoudreRéférenceCirculaireNode(fn, node)\n\t\tcase 'inversion':\n\t\t\treturn traverseInversionNode(fn, node)\n\t\tcase 'operation':\n\t\t\treturn traverseOperationNode(fn, node)\n\n\t\tcase 'contexte':\n\t\t\treturn traverseContexteNode(fn, node)\n\t\tcase 'unité':\n\t\t\treturn traverseUnitéNode(fn, node)\n\t\tcase 'variations':\n\t\t\treturn traverseVariationNode(fn, node)\n\t\tcase 'replacementRule':\n\t\t\treturn traverseReplacementNode(fn, node)\n\t\tcase 'texte':\n\t\t\treturn traverseTextNode(fn, node)\n\t\tcase 'condition':\n\t\t\treturn traverseConditionNode(fn, node)\n\n\t\tdefault:\n\t\t\tthrow new UnreachableCaseError(node)\n\t}\n}\n\nconst traverseSourceMap: TraverseFunction = (fn, node) => {\n\tif (!('sourceMap' in node) || !node.sourceMap || !node.sourceMap.args) {\n\t\treturn node\n\t}\n\tconst sourceMap = node.sourceMap\n\n\tconst args = {}\n\tfor (const key in sourceMap.args) {\n\t\tconst value = sourceMap.args[key]\n\t\targs[key] = Array.isArray(value) ? 
value.map((v) => fn(v)) : fn(value)\n\t}\n\n\treturn {\n\t\t...node,\n\t\tsourceMap: {\n\t\t\t...sourceMap,\n\t\t\targs,\n\t\t},\n\t}\n}\n\nconst traverseRuleNode: TraverseFunction<'rule'> = (fn, node) => {\n\tconst copy = weakCopyObj(node)\n\tcopy.suggestions = {}\n\tfor (const key in node.suggestions) {\n\t\tcopy.suggestions[key] = fn(node.suggestions[key])\n\t}\n\tcopy.replacements = node.replacements.map(fn) as Array\n\tcopy.explanation = {\n\t\truleDisabledByItsParent: node.explanation.ruleDisabledByItsParent,\n\t\tnullableParent:\n\t\t\tnode.explanation.nullableParent ?\n\t\t\t\tfn(node.explanation.nullableParent)\n\t\t\t:\tundefined,\n\t\tparents: node.explanation.parents.map(fn),\n\t\tvaleur: fn(node.explanation.valeur),\n\t}\n\n\treturn copy\n}\n\nconst traverseReplacementNode: TraverseFunction<'replacementRule'> = (\n\tfn,\n\tnode,\n) =>\n\t({\n\t\t...node,\n\t\tdefinitionRule: fn(node.definitionRule),\n\t\treplacedReference: fn(node.replacedReference),\n\t\twhiteListedNames: node.whiteListedNames.map(fn),\n\t\tblackListedNames: node.blackListedNames.map(fn),\n\t}) as ReplacementRule\n\nconst traverseUnaryOperationNode: TraverseFunction<\n\t| 'simplifier unité'\n\t| 'est non applicable'\n\t| 'est non défini'\n\t| 'variable manquante'\n> = (fn, node) => ({\n\t...node,\n\texplanation: fn(node.explanation),\n})\n\nfunction traverseTranche(fn: (n: ASTNode) => ASTNode, tranches: TrancheNodes) {\n\treturn tranches.map((tranche) => ({\n\t\t...tranche,\n\t\t...(tranche.plafond && { plafond: fn(tranche.plafond) }),\n\t\t...('montant' in tranche && { montant: fn(tranche.montant) }),\n\t\t...('taux' in tranche && { taux: fn(tranche.taux) }),\n\t}))\n}\nconst traverseNodeWithTranches: TraverseFunction<\n\t'barème' | 'taux progressif' | 'grille'\n> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\tassiette: fn(node.explanation.assiette),\n\t\tmultiplicateur: fn(node.explanation.multiplicateur),\n\t\ttranches: traverseTranche(fn, node.explanation.tranches),\n\t},\n})\n\nconst traverseArrayNode: TraverseFunction<'une possibilité'> = (fn, node) => ({\n\t...node,\n\texplanation: node.explanation.map(fn),\n})\n\nconst traverseOperationNode: TraverseFunction<'operation'> = (fn, node) => {\n\tconst copy = weakCopyObj(node)\n\tcopy.explanation = [fn(node.explanation[0]), fn(node.explanation[1])]\n\n\treturn copy\n}\n\nconst traverseDuréeNode: TraverseFunction<'durée'> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\tdepuis: fn(node.explanation.depuis),\n\t\t\"jusqu'à\": fn(node.explanation[\"jusqu'à\"]),\n\t},\n})\n\nconst traverseInversionNode: TraverseFunction<'inversion'> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\t...node.explanation,\n\t\tinversionCandidates: node.explanation.inversionCandidates.map(fn) as any, // TODO\n\t},\n})\n\nconst traverseArrondiNode: TraverseFunction<'arrondi'> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\tvaleur: fn(node.explanation.valeur),\n\t\tarrondi: fn(node.explanation.arrondi),\n\t},\n})\n\nconst traverseRésoudreRéférenceCirculaireNode: TraverseFunction<\n\t'résoudre référence circulaire'\n> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\t...node.explanation,\n\t\tvaleur: fn(node.explanation.valeur),\n\t},\n})\n\nconst traverseTextNode: TraverseFunction<'texte'> = (fn, node) => ({\n\t...node,\n\texplanation: node.explanation.map((element) =>\n\t\ttypeof element === 'string' ? 
element : fn(element),\n\t),\n})\n\nconst traverseContexteNode: TraverseFunction<'contexte'> = (fn, node) => ({\n\t...node,\n\texplanation: {\n\t\t...node.explanation,\n\t\tcontexte: node.explanation.contexte.map(([name, value]) => [\n\t\t\tfn(name) as ReferenceNode,\n\t\t\tfn(value),\n\t\t]),\n\t\tnode: fn(node.explanation.node),\n\t},\n})\n\nconst traverseUnitéNode: TraverseFunction<'unité'> = (fn, node) => {\n\tconst copy = weakCopyObj(node)\n\tcopy.explanation = fn(node.explanation)\n\n\treturn copy\n}\n\nconst traverseVariationNode: TraverseFunction<'variations'> = (fn, node) => ({\n\t...node,\n\texplanation: node.explanation.map(({ condition, consequence }) => ({\n\t\tcondition: fn(condition),\n\t\tconsequence: consequence && fn(consequence),\n\t})),\n})\n\nconst traverseConditionNode: TraverseFunction<'condition'> = (fn, node) => {\n\tconst copy = weakCopyObj(node)\n\tcopy.explanation = {\n\t\tsi: fn(node.explanation.si),\n\t\talors: fn(node.explanation.alors),\n\t\tsinon: fn(node.explanation.sinon),\n\t}\n\n\treturn copy\n}\n","import { EvaluationFunction, PublicodesError } from '.'\nimport { ASTNode } from './AST/types'\n\nexport let evaluationFunctions = {\n\tconstant: (node) => node,\n} as any\n\nexport function registerEvaluationFunction<\n\tNodeName extends ASTNode['nodeKind'],\n>(nodeKind: NodeName, evaluationFunction: EvaluationFunction) {\n\tevaluationFunctions ??= {}\n\tif (evaluationFunctions[nodeKind]) {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t`Multiple evaluation functions registered for the nodeKind \\x1b[4m${nodeKind}`,\n\t\t\t{ dottedName: '' },\n\t\t)\n\t}\n\tevaluationFunctions[nodeKind] = evaluationFunction\n}\n","import { ParsedRules } from '.'\nimport { ASTNode, ConstantNode } from './AST/types'\n\nexport type NodesTypes = WeakMap\n\n// TODO: Currently only handle nullability, but the infering logic should be\n// extended to support the full unit type system.\nexport type InferedType = {\n\tisNullable: boolean | undefined\n} & Pick\n\nconst UNDEFINED_TYPE = {\n\tisNullable: undefined,\n\ttype: undefined,\n}\n\nexport default function inferNodesTypes(\n\tnewRulesNames: Array,\n\tparsedRules: ParsedRules,\n\tnodesTypes: NodesTypes,\n) {\n\tfunction inferNodeUnitAndCache(node: ASTNode): InferedType {\n\t\tif (!node || typeof node !== 'object') {\n\t\t\treturn UNDEFINED_TYPE\n\t\t}\n\t\tif (nodesTypes.has(node)) {\n\t\t\treturn nodesTypes.get(node)!\n\t\t}\n\t\t// Sometimes there are cycles, so we need to prevent infinite loop by setting a default\n\t\tnodesTypes.set(node, UNDEFINED_TYPE)\n\t\tconst type = inferNodeType(node)\n\t\tnodesTypes.set(node, type)\n\t\treturn type\n\t}\n\n\tfunction inferNodeType(node: ASTNode): InferedType {\n\t\tswitch (node.nodeKind) {\n\t\t\tcase 'barème':\n\t\t\tcase 'durée':\n\t\t\tcase 'grille':\n\t\t\tcase 'taux progressif':\n\t\t\t\treturn { isNullable: false, type: 'number' }\n\t\t\tcase 'est non défini':\n\t\t\tcase 'est non applicable':\n\t\t\t\treturn { isNullable: false, type: 'boolean' }\n\n\t\t\tcase 'constant':\n\t\t\t\treturn {\n\t\t\t\t\tisNullable: node.isNullable ?? 
node.nodeValue === null,\n\t\t\t\t\ttype: node.type,\n\t\t\t\t}\n\t\t\tcase 'operation':\n\t\t\t\treturn {\n\t\t\t\t\tisNullable:\n\t\t\t\t\t\t['<', '<=', '>', '>=', '/', '*'].includes(node.operationKind) ?\n\t\t\t\t\t\t\tinferNodeUnitAndCache(node.explanation[0]).isNullable ||\n\t\t\t\t\t\t\tinferNodeUnitAndCache(node.explanation[1]).isNullable\n\t\t\t\t\t\t: node.operationKind === '-' ?\n\t\t\t\t\t\t\tinferNodeUnitAndCache(node.explanation[0]).isNullable\n\t\t\t\t\t\t:\tfalse,\n\t\t\t\t\ttype:\n\t\t\t\t\t\t(\n\t\t\t\t\t\t\t['<', '<=', '>', '>=', '=', '!=', 'et', 'ou'].includes(\n\t\t\t\t\t\t\t\tnode.operationKind,\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t) ?\n\t\t\t\t\t\t\t'boolean'\n\t\t\t\t\t\t:\t'number',\n\t\t\t\t}\n\n\t\t\tcase 'inversion':\n\t\t\tcase 'contexte':\n\t\t\tcase 'replacementRule':\n\t\t\tcase 'résoudre référence circulaire':\n\t\t\t\treturn { isNullable: false, type: 'number' }\n\n\t\t\tcase 'texte':\n\t\t\tcase 'une possibilité':\n\t\t\t\treturn { isNullable: false, type: 'string' }\n\n\t\t\tcase 'rule':\n\t\t\tcase 'arrondi':\n\t\t\t\treturn inferNodeUnitAndCache(node.explanation.valeur)\n\t\t\tcase 'unité':\n\t\t\tcase 'simplifier unité':\n\t\t\tcase 'variable manquante':\n\t\t\t\treturn inferNodeUnitAndCache(node.explanation)\n\t\t\tcase 'condition':\n\t\t\t\treturn {\n\t\t\t\t\tisNullable: [\n\t\t\t\t\t\tnode.explanation.si,\n\t\t\t\t\t\tnode.explanation.alors,\n\t\t\t\t\t\tnode.explanation.sinon,\n\t\t\t\t\t].some((n) => inferNodeUnitAndCache(n).isNullable),\n\t\t\t\t\ttype:\n\t\t\t\t\t\tinferNodeUnitAndCache(node.explanation.alors).type ??\n\t\t\t\t\t\tinferNodeUnitAndCache(node.explanation.sinon).type,\n\t\t\t\t}\n\n\t\t\tcase 'variations':\n\t\t\t\tconst consequencesTypes = node.explanation.map(({ consequence }) =>\n\t\t\t\t\tinferNodeUnitAndCache(consequence),\n\t\t\t\t)\n\t\t\t\treturn {\n\t\t\t\t\tisNullable: consequencesTypes.some(\n\t\t\t\t\t\t(consequence) => consequence.isNullable,\n\t\t\t\t\t),\n\t\t\t\t\ttype: consequencesTypes\n\t\t\t\t\t\t.map((c) => c.type)\n\t\t\t\t\t\t.find((type) => type !== undefined),\n\t\t\t\t}\n\n\t\t\tcase 'reference':\n\t\t\t\treturn inferNodeUnitAndCache(parsedRules[node.dottedName as string])\n\t\t}\n\t}\n\n\tnewRulesNames.forEach((name) => {\n\t\tconst rule = parsedRules[name]\n\t\tinferNodeUnitAndCache(rule)\n\t\trule.explanation.parents.forEach(inferNodeUnitAndCache)\n\t})\n\n\treturn nodesTypes\n}\n","import { ASTNode, ConstantNode, EvaluatedNode, Evaluation } from './AST/types'\n\nexport const collectNodeMissing = (\n\tnode: EvaluatedNode | ASTNode,\n): Record =>\n\t'missingVariables' in node ? node.missingVariables : {}\n\nexport const bonus = (missings: Record = {}) =>\n\tObject.fromEntries(\n\t\tObject.entries(missings).map(([key, value]) => [key, value + 1]),\n\t)\nexport const mergeMissing = (\n\tleft: Record | undefined = {},\n\tright: Record | undefined = {},\n): Record =>\n\tObject.fromEntries(\n\t\t[...Object.keys(left), ...Object.keys(right)].map((key) => [\n\t\t\tkey,\n\t\t\t(left[key] ?? 0) + (right[key] ?? 
0),\n\t\t]),\n\t)\n\nexport const mergeAllMissing = (missings: Array) =>\n\tmissings.map(collectNodeMissing).reduce(mergeMissing, {})\n\nexport const defaultNode = (nodeValue: Evaluation) =>\n\t({\n\t\tnodeValue,\n\t\ttype: typeof nodeValue,\n\t\tisDefault: true,\n\t\tnodeKind: 'constant',\n\t}) as ConstantNode\n\nexport const notApplicableNode = {\n\tnodeKind: 'constant',\n\tnodeValue: null,\n\tmissingVariables: {},\n\ttype: undefined,\n\tisNullable: true,\n} as EvaluatedNode<'constant'>\n\nexport const undefinedNode = {\n\tnodeKind: 'constant',\n\tnodeValue: undefined,\n\tmissingVariables: {},\n\ttype: undefined,\n\tisNullable: false,\n} as EvaluatedNode<'constant'>\n\nexport const undefinedNumberNode = {\n\t...undefinedNode,\n\ttype: 'number',\n} as EvaluatedNode<'constant'>\n","import { PublicodesExpression } from '..'\nimport { makeASTTransformer } from '../AST'\nimport { ASTNode } from '../AST/types'\nimport { PublicodesError } from '../error'\nimport parse from '../parse'\nimport { Context, createContext } from '../parsePublicodes'\n\nexport function createParseInlinedMecanism(\n\tname: string,\n\targs: Record,\n\tbody: PublicodesExpression,\n) {\n\tlet parsedBody\n\tlet parsedDefaultArgs\n\tfunction parseInlineMecanism(providedArgs, context) {\n\t\tparsedBody ??= parse(body, createContext({ dottedName: 'INLINE_MECANISM' }))\n\t\tparsedDefaultArgs ??= {}\n\t\tfor (const name in args) {\n\t\t\tif ('par défaut' in args[name]) {\n\t\t\t\tparsedDefaultArgs[name] = parse(\n\t\t\t\t\targs[name]['par défaut'],\n\t\t\t\t\tcreateContext({}),\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\t// Case of unary mecanism\n\t\tif (Object.keys(args).length === 1 && 'valeur' in args) {\n\t\t\tprovidedArgs = {\n\t\t\t\tvaleur: providedArgs,\n\t\t\t}\n\t\t}\n\n\t\tconst parsedProvidedArgs = {}\n\t\tfor (const name in providedArgs) {\n\t\t\tparsedProvidedArgs[name] = parse(providedArgs[name], context)\n\t\t}\n\n\t\tconst parsedInlineMecanism = makeASTTransformer((node) => {\n\t\t\tif (node.nodeKind !== 'reference' || !(node.name in args)) {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tconst argName = node.name\n\t\t\tif (argName in parsedProvidedArgs) {\n\t\t\t\treturn parsedProvidedArgs[argName]\n\t\t\t}\n\t\t\tif (argName in parsedDefaultArgs) {\n\t\t\t\treturn parsedDefaultArgs[argName]\n\t\t\t}\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'SyntaxError',\n\t\t\t\t`Il manque la clé '${argName} dans le mécanisme ${name}`,\n\t\t\t\t{ dottedName: argName },\n\t\t\t)\n\t\t})(parsedBody)\n\n\t\tparsedInlineMecanism.sourceMap = {\n\t\t\tmecanismName: name,\n\t\t\targs: parsedProvidedArgs,\n\t\t}\n\n\t\treturn parsedInlineMecanism\n\t}\n\n\tparseInlineMecanism.nom = name\n\n\treturn Object.assign(parseInlineMecanism, 'name', {\n\t\tvalue: `parse${toCamelCase(name)}Inline`,\n\t})\n}\n\n/**\n Note : Les transformations de mécanisme opérant sur les listes sont plus couteuses que celles opérant sur des scalaires.\n\n Cela vient du fait qu'il n'y a pas la possibilité de définir ces transformations dans publicodes : il manque le type liste et les opérations de bases associées (reduce, map).\n\n On doit donc déplier l'opération statiquement, au parsing, ce qui prend plus de temps, au parsing et à l'évaluation. somme: [1,2,3] est transformé en (1 + 2) + 3).\n\n De manière général, les baisse en performances de cette PR sont attenduee : il s'agit d'une contrepartie logique de l'utilisation de mécanisme de base publicodes. Ce qu'on gagne en solidité de l'évaluation & en amélioration du typage, on le perd en performance. 
C'est logique puisque l'evaluation de ces mécanisme n'est plus du JS natif mais passe par une structure intermédiaire.\n\n Pour améliorer les perfs, il y a plusieurs pistes :\n\n\t- optimiser d'avantage les opérations de bases\n\t- ajouter les listes et les opérations sur les listes dans publicodes\n\t- ajouter une implémentation \"native\" de certains mécanismes utilisés (on gagne quand même à les décomposer en mécanismes de base pour la partie spécification et typage).\n */\nexport function createParseInlinedMecanismWithArray(\n\tname: string,\n\targs: Record,\n\tbody: (\n\t\targs: Record>,\n\t) => PublicodesExpression,\n) {\n\tfunction parseInlineMecanism(providedArgs, context: Context) {\n\t\t// Case of unary mecanism\n\t\tif (Object.keys(args).length === 1 && 'valeur' in args) {\n\t\t\tprovidedArgs = {\n\t\t\t\tvaleur: providedArgs,\n\t\t\t}\n\t\t}\n\n\t\tconst parsedProvidedArgs = {}\n\t\tfor (const name in providedArgs) {\n\t\t\tconst value = providedArgs[name]\n\t\t\tparsedProvidedArgs[name] =\n\t\t\t\tArray.isArray(value) ?\n\t\t\t\t\tvalue.map((v) => parse(v, context))\n\t\t\t\t:\tparse(value, context)\n\t\t}\n\n\t\tconst parsedInlineMecanism = parse(body(parsedProvidedArgs), context)\n\t\tparsedInlineMecanism.sourceMap = {\n\t\t\tmecanismName: name,\n\t\t\targs: parsedProvidedArgs,\n\t\t}\n\t\treturn parsedInlineMecanism\n\t}\n\n\tparseInlineMecanism.nom = name\n\n\treturn Object.assign(parseInlineMecanism, 'name', {\n\t\tvalue: `parse${toCamelCase(name)}Inline`,\n\t})\n}\n\nfunction toCamelCase(str: string) {\n\treturn str\n\t\t.replace(/(?:^\\w|[A-Z]|\\b\\w)/g, (ltr) => ltr.toUpperCase())\n\t\t.replace(/\\s+/g, '')\n}\n","import { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'abattement',\n\t{\n\t\tabattement: {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\t'-': ['valeur', 'abattement'],\n\t\tplancher: 0,\n\t},\n)\n","import { notApplicableNode } from '../evaluationUtils'\nimport { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'applicable si',\n\t{\n\t\t'applicable si': {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: 'applicable si != non',\n\t\t\talors: 'valeur',\n\t\t\tsinon: notApplicableNode,\n\t\t},\n\t},\n)\n","import { BaseUnit, Evaluation, Unit } from './AST/types'\nimport { PublicodesError } from './error'\n\nexport type getUnitKey = (writtenUnit: string) => string\nexport type formatUnit = (unit: string, count: number) => string\n\nexport const parseUnit = (\n\tstring: string,\n\tgetUnitKey: getUnitKey = (x) => x,\n): Unit => {\n\tif (string.includes(' /') || string.includes('/ ')) {\n\t\tthrow new Error(\n\t\t\t`L'unité \"${string}\" ne doit pas contenir d'espace avant et après \"/\"`,\n\t\t)\n\t}\n\tconst [a, ...b] = string.split('/')\n\t// denominator could be 'x/y' or 'x.y' or 'x.y/z'\n\tconst splitUnit = (string: string): string[] =>\n\t\tdecomposePower(\n\t\t\tstring\n\t\t\t\t.split('.')\n\t\t\t\t.filter(Boolean)\n\t\t\t\t.map((unit) => getUnitKey(unit)),\n\t\t)\n\tconst result = {\n\t\tnumerators: splitUnit(a),\n\t\tdenominators: b.flatMap((u) => splitUnit(u)),\n\t}\n\treturn result\n}\n\nconst lastNumberFromString = /(\\d+)(?!.*[A-Za-z])/g\n\n/**\n * Count the number of each unit, e.g. 
[m, m, kg, kg] -> {m: 2, kg: 2}\n */\nfunction getUnitCounts(baseUnits: Array): Record {\n\tlet countUnits = {}\n\tbaseUnits.forEach((e) => {\n\t\tconst powerMatch = e.match(lastNumberFromString)\n\t\tif (powerMatch != null) {\n\t\t\tconst power = powerMatch[0]\n\t\t\tconst primaryUnit = e.split(power)[0]\n\t\t\tcountUnits[primaryUnit] = (countUnits[primaryUnit] ?? 0) + +power\n\t\t} else {\n\t\t\tcountUnits[e] = (countUnits[e] ?? 0) + 1\n\t\t}\n\t})\n\treturn countUnits\n}\n\n/**\n * Decompose power of units, e.g. [m2] -> [m, m] or [kg2, m3] -> [kg, kg, m, m, m]\n */\nfunction decomposePower(baseUnits: Array): Array {\n\tlet unitCounts = getUnitCounts(baseUnits)\n\treturn Object.entries(unitCounts).flatMap(([primaryUnit, power]) =>\n\t\tArray(power).fill(primaryUnit),\n\t)\n}\n\n/**\n * Combine power of units, e.g. [m2, m] -> [m3] or [m, m, kg, kg] -> [m2, kg2]\n */\nfunction combinePower(baseUnit: Array): Array {\n\tlet unitCounts = getUnitCounts(baseUnit)\n\treturn Object.entries(unitCounts).map(([primaryUnit, power]) =>\n\t\tpower > 1 ? `${primaryUnit}${power}` : primaryUnit,\n\t)\n}\n\nconst printUnits = (\n\tunits: Array,\n\tcount: number,\n\tformatUnit: formatUnit = (x) => x,\n): string => {\n\treturn combinePower(units.map((unit) => formatUnit(unit, count))).join('.')\n}\n\nconst plural = 2\nexport function serializeUnit(\n\trawUnit: Unit | undefined | string,\n\tcount: number = plural,\n\tformatUnit: formatUnit = (x) => x,\n): string | undefined {\n\tif (rawUnit === null || typeof rawUnit !== 'object') {\n\t\treturn typeof rawUnit === 'string' ? formatUnit(rawUnit, count) : rawUnit\n\t}\n\tconst unit = simplify(rawUnit)\n\tconst { numerators = [], denominators = [] } = unit\n\n\tconst n = numerators.length > 0\n\tconst d = denominators.length > 0\n\tconst string =\n\t\t!n && !d ? ''\n\t\t: n && !d ? printUnits(numerators, count, formatUnit)\n\t\t: !n && d ? `/${printUnits(denominators, 1, formatUnit)}`\n\t\t: `${printUnits(numerators, plural, formatUnit)}/${printUnits(\n\t\t\t\tdenominators,\n\t\t\t\t1,\n\t\t\t\tformatUnit,\n\t\t\t)}`\n\n\treturn string\n}\n\ntype SupportedOperators = '*' | '/' | '+' | '-'\n\nconst noUnit = { numerators: [], denominators: [] }\nexport const inferUnit = (\n\toperator: SupportedOperators,\n\trawUnits: Array,\n): Unit | undefined => {\n\tif (operator === '/') {\n\t\tif (rawUnits.length !== 2) {\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'InternalError',\n\t\t\t\t'Infer units of a division with units.length !== 2)',\n\t\t\t\t{},\n\t\t\t)\n\t\t}\n\n\t\treturn inferUnit('*', [\n\t\t\trawUnits[0] || noUnit,\n\t\t\t{\n\t\t\t\tnumerators: (rawUnits[1] || noUnit).denominators,\n\t\t\t\tdenominators: (rawUnits[1] || noUnit).numerators,\n\t\t\t},\n\t\t])\n\t}\n\tconst units = rawUnits.filter(Boolean)\n\tif (units.length <= 1) {\n\t\treturn units[0]\n\t}\n\tif (operator === '*')\n\t\treturn simplify({\n\t\t\tnumerators: units.flatMap((u) => u?.numerators ?? []),\n\t\t\tdenominators: units.flatMap((u) => u?.denominators ?? 
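/*
 * A minimal usage sketch of the unit helpers above (assuming the default
 * identity `getUnitKey`); the values follow from the implementations:
 *
 *   parseUnit('€/mois')
 *   // => { numerators: ['€'], denominators: ['mois'] }
 *
 *   decomposePower(['m2'])          // => ['m', 'm']
 *   combinePower(['m', 'm', 'kg'])  // => ['m2', 'kg']
 *
 *   serializeUnit({ numerators: ['km'], denominators: ['h'] })
 *   // => 'km/h'
 */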
[]),\n\t\t})\n\n\tif (operator === '-' || operator === '+') {\n\t\treturn rawUnits.find((u) => u)\n\t}\n\n\treturn undefined\n}\n\nconst equals = (a: T, b: T) => {\n\tif (Array.isArray(a) && Array.isArray(b)) {\n\t\treturn a.length === b.length && a.every((_, i) => a[i] === b[i])\n\t} else {\n\t\treturn a === b\n\t}\n}\n\nexport const removeOnce =\n\t(element: T, eqFn: (a: T, b: T) => boolean = equals) =>\n\t(list: Array): Array => {\n\t\tconst index = list.findIndex((e) => eqFn(e, element))\n\t\treturn list.filter((_, i) => i !== index)\n\t}\n\nconst simplify = (\n\tunit: Unit,\n\teqFn: (a: string, b: string) => boolean = equals,\n): Unit => {\n\tconst simplifiedUnit = [...unit.numerators, ...unit.denominators].reduce(\n\t\t({ numerators, denominators }, next) =>\n\t\t\t(\n\t\t\t\tnumerators.find((u) => eqFn(next, u)) &&\n\t\t\t\tdenominators.find((u) => eqFn(next, u))\n\t\t\t) ?\n\t\t\t\t{\n\t\t\t\t\tnumerators: removeOnce(next, eqFn)(numerators),\n\t\t\t\t\tdenominators: removeOnce(next, eqFn)(denominators),\n\t\t\t\t}\n\t\t\t:\t{ numerators, denominators },\n\t\tunit,\n\t)\n\treturn simplifiedUnit\n}\n\nconst convertTable: ConvertTable = {\n\t'mois/an': 12,\n\t'jour/an': 365,\n\t'jour/mois': 365 / 12,\n\t'trimestre/an': 4,\n\t'mois/trimestre': 3,\n\t'jour/trimestre': (365 / 12) * 3,\n\t'€/k€': 10 ** 3,\n\t'g/kg': 10 ** 3,\n\t'mg/g': 10 ** 3,\n\t'mg/kg': 10 ** 6,\n\t'm/km': 10 ** 3,\n\t'cm/m': 10 ** 2,\n\t'mm/cm': 10 ** 1,\n\t'mm/m': 10 ** 3,\n\t'cm/km': 10 ** 5,\n\t'mm/km': 10 ** 6,\n}\n\nfunction singleUnitConversionFactor(\n\tfrom: string,\n\tto: string,\n): number | undefined {\n\treturn (\n\t\tconvertTable[`${to}/${from}`] ||\n\t\t(convertTable[`${from}/${to}`] && 1 / convertTable[`${from}/${to}`])\n\t)\n}\nfunction unitsConversionFactor(from: string[], to: string[]): number {\n\tlet factor =\n\t\t100 **\n\t\t// Factor is mutliplied or divided 100 for each '%' in units\n\t\t(to.filter((unit) => unit === '%').length -\n\t\t\tfrom.filter((unit) => unit === '%').length)\n\t;[factor] = from.reduce(\n\t\t([value, toUnits], fromUnit) => {\n\t\t\tconst index = toUnits.findIndex(\n\t\t\t\t(toUnit) => !!singleUnitConversionFactor(fromUnit, toUnit),\n\t\t\t)\n\t\t\tconst factor = singleUnitConversionFactor(fromUnit, toUnits[index]) || 1\n\t\t\treturn [\n\t\t\t\tvalue * factor,\n\t\t\t\t[...toUnits.slice(0, index + 1), ...toUnits.slice(index + 1)],\n\t\t\t]\n\t\t},\n\t\t[factor, to],\n\t)\n\treturn factor\n}\n\n// TODO(@clemog):\n// - Deal with other equivalent units : l: 'dm3',\n// - Convert unit instead of ignore warning\nconst equivalentTable = {\n\t'kW.h': 'kWh',\n\t'mn/h': 'noeud',\n}\n\nfunction areEquivalentSerializedUnit(\n\tserializedFrom: string | undefined,\n\tserializedTo: string | undefined,\n): Boolean {\n\tif (!serializedFrom || !serializedTo) return false\n\treturn (\n\t\tserializedFrom === serializedTo ||\n\t\tserializedFrom === equivalentTable[serializedTo] ||\n\t\tserializedTo === equivalentTable[serializedFrom]\n\t)\n}\n\nexport function convertUnit>(\n\tfrom: Unit | undefined,\n\tto: Unit | undefined,\n\tvalue: ValType,\n): ValType {\n\tconst serializedFrom = serializeUnit(from)\n\tconst serializedTo = serializeUnit(to)\n\tif (\n\t\t!areEquivalentSerializedUnit(serializedFrom, serializedTo) &&\n\t\t!areUnitConvertible(from, to)\n\t) {\n\t\tthrow new PublicodesError(\n\t\t\t'EngineError',\n\t\t\t`Impossible de convertir l'unité '${serializedFrom}' en '${serializedTo}'`,\n\t\t\t{},\n\t\t)\n\t}\n\tif (!value) {\n\t\treturn value\n\t}\n\tif (from === undefined) {\n\t\treturn 
value\n\t}\n\tconst [fromSimplified, factorTo] = simplifyUnitWithValue(from || noUnit)\n\tconst [toSimplified, factorFrom] = simplifyUnitWithValue(to || noUnit)\n\treturn round(\n\t\t(((value as number) * factorTo) / factorFrom) *\n\t\t\tunitsConversionFactor(\n\t\t\t\tfromSimplified.numerators,\n\t\t\t\ttoSimplified.numerators,\n\t\t\t) *\n\t\t\tunitsConversionFactor(\n\t\t\t\ttoSimplified.denominators,\n\t\t\t\tfromSimplified.denominators,\n\t\t\t),\n\t) as any\n}\n\nconst convertibleUnitClasses = unitClasses(convertTable)\ntype unitClasses = Array>\ntype ConvertTable = { readonly [index: string]: number }\n\n// Reduce the convertTable provided by the user into a list of compatibles\n// classes.\nfunction unitClasses(convertTable: ConvertTable) {\n\treturn Object.keys(convertTable).reduce(\n\t\t(classes: unitClasses, ratio: string) => {\n\t\t\tconst [a, b] = ratio.split('/')\n\t\t\tconst ia = classes.findIndex((units) => units.has(a))\n\t\t\tconst ib = classes.findIndex((units) => units.has(b))\n\t\t\tif (ia > -1 && ib > -1 && ia !== ib) {\n\t\t\t\tthrow new PublicodesError('EngineError', `Invalid ratio ${ratio}`, {})\n\t\t\t} else if (ia === -1 && ib === -1) {\n\t\t\t\tclasses.push(new Set([a, b]))\n\t\t\t} else if (ia > -1) {\n\t\t\t\tclasses[ia].add(b)\n\t\t\t} else if (ib > -1) {\n\t\t\t\tclasses[ib].add(a)\n\t\t\t}\n\t\t\treturn classes\n\t\t},\n\t\t[],\n\t)\n}\n\nfunction areSameClass(a: string, b: string) {\n\treturn (\n\t\ta === b ||\n\t\tconvertibleUnitClasses.some(\n\t\t\t(unitsClass) => unitsClass.has(a) && unitsClass.has(b),\n\t\t)\n\t)\n}\n\nfunction round(value: number) {\n\treturn +value.toFixed(16)\n}\n\nexport function simplifyUnit(unit: Unit): Unit {\n\tconst { numerators, denominators } = simplify(unit, areSameClass)\n\tif (numerators.length && numerators.every((symb) => symb === '%')) {\n\t\treturn { numerators: ['%'], denominators }\n\t}\n\treturn removePercentages({ numerators, denominators })\n}\n\nfunction simplifyUnitWithValue(unit: Unit, value = 1): [Unit, number] {\n\tconst factor = unitsConversionFactor(unit.numerators, unit.denominators)\n\treturn [\n\t\tsimplify(removePercentages(unit), areSameClass),\n\t\tvalue ? round(value * factor) : value,\n\t]\n}\n\nconst removePercentages = (unit: Unit): Unit => ({\n\tnumerators: unit.numerators.filter((e) => e !== '%'),\n\tdenominators: unit.denominators.filter((e) => e !== '%'),\n})\n\nexport function areUnitConvertible(a: Unit | undefined, b: Unit | undefined) {\n\tif (a == null || b == null) {\n\t\treturn true\n\t}\n\tconst countByUnitClass = (units: Array) =>\n\t\tunits.reduce((counters, unit) => {\n\t\t\tconst classIndex = convertibleUnitClasses.findIndex((unitClass) =>\n\t\t\t\tunitClass.has(unit),\n\t\t\t)\n\t\t\tconst key = classIndex === -1 ? unit : '' + classIndex\n\t\t\treturn { ...counters, [key]: 1 + (counters[key] ?? 
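/*
 * Worked example for convertUnit, relying only on the convertTable above
 * ('mois/an': 12) and involving no percentage units:
 *
 *   convertUnit(parseUnit('mois'), parseUnit('an'), 24)
 *   // => 2    (24 months expressed in years)
 *
 *   convertUnit(parseUnit('€/an'), parseUnit('€/mois'), 1200)
 *   // => 100  (denominators are converted in the opposite direction)
 */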
0) }\n\t\t}, {})\n\n\tconst [numA, denomA, numB, denomB] = [\n\t\ta.numerators,\n\t\ta.denominators,\n\t\tb.numerators,\n\t\tb.denominators,\n\t].map(countByUnitClass)\n\tconst uniq = (arr: Array): Array => [...new Set(arr)]\n\tconst unitClasses = [numA, denomA, numB, denomB].map(Object.keys).flat()\n\treturn uniq(unitClasses).every(\n\t\t(unitClass) =>\n\t\t\t(numA[unitClass] || 0) - (denomA[unitClass] || 0) ===\n\t\t\t\t(numB[unitClass] || 0) - (denomB[unitClass] || 0) || unitClass === '%',\n\t)\n}\n","import { EvaluationFunction, simplifyNodeUnit } from '..'\nimport { ASTNode, EvaluatedNode } from '../AST/types'\nimport { PublicodesError } from '../error'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport { serializeUnit } from '../units'\n\nexport type ArrondiNode = {\n\texplanation: {\n\t\tarrondi: ASTNode\n\t\tvaleur: ASTNode\n\t}\n\tnodeKind: 'arrondi'\n}\n\nfunction roundWithPrecision(n: number, fractionDigits: number) {\n\treturn +n.toFixed(fractionDigits)\n}\n\nconst evaluate: EvaluationFunction<'arrondi'> = function (node) {\n\t// We need to simplify the node unit to correctly round values containing\n\t// percentages units, see #1358\n\tconst valeur = simplifyNodeUnit(this.evaluateNode(node.explanation.valeur))\n\tconst nodeValue = valeur.nodeValue\n\tlet arrondi = node.explanation.arrondi\n\tif (nodeValue !== false) {\n\t\tarrondi = this.evaluateNode(arrondi)\n\n\t\tif (\n\t\t\ttypeof (arrondi as EvaluatedNode).nodeValue === 'number' &&\n\t\t\t!serializeUnit((arrondi as EvaluatedNode).unit)?.match(/décimales?/)\n\t\t) {\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'EvaluationError',\n\t\t\t\t`L'unité ${serializeUnit(\n\t\t\t\t\t(arrondi as EvaluatedNode).unit,\n\t\t\t\t)} de l'arrondi est inconnu. Vous devez utiliser l'unité “décimales”`,\n\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t)\n\t\t}\n\t}\n\n\treturn {\n\t\t...node,\n\t\tnodeValue:\n\t\t\ttypeof valeur.nodeValue !== 'number' || !('nodeValue' in arrondi) ?\n\t\t\t\tvaleur.nodeValue\n\t\t\t: typeof arrondi.nodeValue === 'number' ?\n\t\t\t\troundWithPrecision(valeur.nodeValue, arrondi.nodeValue)\n\t\t\t: arrondi.nodeValue === true ? roundWithPrecision(valeur.nodeValue, 0)\n\t\t\t: arrondi.nodeValue === undefined ? 
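/*
 * Rounding behaviour of the arrondi mechanism, in short:
 *
 *   roundWithPrecision(3.14159, 2)  // => 3.14
 *   roundWithPrecision(3.14159, 0)  // => 3
 *
 * An `arrondi` that evaluates to `true` rounds to 0 decimal places; a number
 * carrying a 'décimales' unit sets the precision (any other unit throws the
 * error above); an undefined `arrondi` yields undefined; anything else
 * passes the value through unchanged.
 */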
undefined\n\t\t\t: valeur.nodeValue,\n\t\texplanation: { valeur, arrondi },\n\t\tmissingVariables: mergeAllMissing([valeur, arrondi]),\n\t\tunit: valeur.unit,\n\t}\n}\n\nexport default function parseArrondi(v, context) {\n\tconst explanation = {\n\t\tvaleur: parse(v.valeur, context),\n\t\tarrondi: parse(v.arrondi, context),\n\t}\n\treturn {\n\t\texplanation,\n\t\tnodeKind: parseArrondi.nom,\n\t}\n}\n\nparseArrondi.nom = 'arrondi' as const\n\nregisterEvaluationFunction(parseArrondi.nom, evaluate)\n","import { EvaluatedNode, Unit } from './AST/types'\nimport { convertUnit, simplifyUnit } from './units'\n\nexport function simplifyNodeUnit(node) {\n\tif (!node.unit) {\n\t\treturn node\n\t}\n\tconst unit = simplifyUnit(node.unit)\n\n\treturn convertNodeToUnit(unit, node)\n}\n\nexport function convertNodeToUnit(\n\tto: Unit | undefined,\n\tnode: Node,\n): Node {\n\treturn {\n\t\t...node,\n\t\tnodeValue:\n\t\t\tnode.unit && typeof node.nodeValue === 'number' ?\n\t\t\t\tconvertUnit(node.unit, to, node.nodeValue)\n\t\t\t:\tnode.nodeValue,\n\t\tunit: to,\n\t}\n}\n","import { Evaluation, Unit } from './AST/types'\nimport { simplifyNodeUnit } from './nodeUnits'\nimport { formatUnit, serializeUnit } from './units'\n\nexport const numberFormatter =\n\t({\n\t\tstyle,\n\t\tmaximumFractionDigits = 2,\n\t\tminimumFractionDigits = 0,\n\t\tlanguage,\n\t}: {\n\t\tstyle?: string\n\t\tmaximumFractionDigits?: number\n\t\tminimumFractionDigits?: number\n\t\tlanguage?: string\n\t}) =>\n\t(value: number) => {\n\t\t// When we format currency we don't want to display a single decimal digit\n\t\t// ie 8,1€ but we want to display 8,10€\n\t\tconst adaptedMinimumFractionDigits =\n\t\t\t(\n\t\t\t\tstyle === 'currency' &&\n\t\t\t\tmaximumFractionDigits >= 2 &&\n\t\t\t\tminimumFractionDigits === 0 &&\n\t\t\t\t!Number.isInteger(value)\n\t\t\t) ?\n\t\t\t\t2\n\t\t\t:\tminimumFractionDigits\n\t\treturn Intl.NumberFormat(language, {\n\t\t\tstyle,\n\t\t\tcurrency: 'EUR',\n\t\t\tmaximumFractionDigits,\n\t\t\tminimumFractionDigits: adaptedMinimumFractionDigits,\n\t\t}).format(value)\n\t}\n\nexport const formatCurrency = (\n\tnodeValue: number | undefined,\n\tlanguage: string,\n) => {\n\treturn nodeValue == undefined ? '' : (\n\t\t\t(formatNumber({ unit: '€', language, nodeValue }) ?? '').replace(\n\t\t\t\t/^(-)?€/,\n\t\t\t\t'$1€\\u00A0',\n\t\t\t)\n\t\t)\n}\n\nexport const formatPercentage = (nodeValue: number | undefined) =>\n\tnodeValue == undefined ? '' : (\n\t\tformatNumber({ unit: '%', nodeValue, maximumFractionDigits: 2 })\n\t)\n\ntype formatValueOptions = {\n\tmaximumFractionDigits?: number\n\tminimumFractionDigits?: number\n\tlanguage?: string\n\tunit?: Unit | string\n\tformatUnit?: formatUnit\n\tnodeValue: number\n}\n\nfunction formatNumber({\n\tmaximumFractionDigits,\n\tminimumFractionDigits,\n\tlanguage,\n\tformatUnit,\n\tunit,\n\tnodeValue,\n}: formatValueOptions) {\n\tif (typeof nodeValue !== 'number') {\n\t\treturn nodeValue\n\t}\n\tconst serializedUnit =\n\t\tunit ? 
serializeUnit(unit, nodeValue, formatUnit) : undefined\n\tswitch (serializedUnit) {\n\t\tcase '€':\n\t\t\treturn numberFormatter({\n\t\t\t\tstyle: 'currency',\n\t\t\t\tmaximumFractionDigits,\n\t\t\t\tminimumFractionDigits,\n\t\t\t\tlanguage,\n\t\t\t})(nodeValue)\n\t\tcase '%':\n\t\t\treturn numberFormatter({\n\t\t\t\tstyle: 'percent',\n\t\t\t\tmaximumFractionDigits,\n\t\t\t\tlanguage,\n\t\t\t})(nodeValue / 100)\n\t\tdefault:\n\t\t\treturn (\n\t\t\t\tnumberFormatter({\n\t\t\t\t\tstyle: 'decimal',\n\t\t\t\t\tminimumFractionDigits,\n\t\t\t\t\tmaximumFractionDigits,\n\t\t\t\t\tlanguage,\n\t\t\t\t})(nodeValue) +\n\t\t\t\t(typeof serializedUnit === 'string' ? `\\u00A0${serializedUnit}` : '')\n\t\t\t)\n\t}\n}\n\nexport function capitalise0(name: undefined): undefined\nexport function capitalise0(name: string): string\nexport function capitalise0(name?: string) {\n\treturn name && name[0].toUpperCase() + name.slice(1)\n}\n\nconst booleanTranslations = {\n\tfr: { true: 'oui', false: 'non' },\n\ten: { true: 'yes', false: 'no' },\n}\n\ntype Options = {\n\tlanguage?: string\n\tdisplayedUnit?: string\n\tprecision?: number\n\tformatUnit?: formatUnit\n}\n\nexport function formatValue(\n\tvalue: number | { nodeValue: Evaluation; unit?: Unit } | undefined,\n\n\t{ language = 'fr', displayedUnit, formatUnit, precision = 2 }: Options = {},\n) {\n\tlet nodeValue =\n\t\t(\n\t\t\ttypeof value === 'number' ||\n\t\t\ttypeof value === 'undefined' ||\n\t\t\tvalue === null\n\t\t) ?\n\t\t\tvalue\n\t\t:\tvalue.nodeValue\n\n\tif (typeof nodeValue === 'number' && Number.isNaN(nodeValue)) {\n\t\treturn 'Erreur dans le calcul du nombre'\n\t}\n\tif (nodeValue === undefined) {\n\t\treturn 'Pas encore défini'\n\t}\n\tif (nodeValue === null) {\n\t\treturn 'Non applicable'\n\t}\n\tif (typeof nodeValue === 'string') {\n\t\treturn nodeValue.replace('\\\\n', '\\n')\n\t}\n\tif (typeof nodeValue === 'boolean')\n\t\treturn booleanTranslations[language][nodeValue]\n\tif (typeof nodeValue === 'number') {\n\t\tlet unit =\n\t\t\t(\n\t\t\t\ttypeof value === 'number' ||\n\t\t\t\ttypeof value === 'undefined' ||\n\t\t\t\t!('unit' in value)\n\t\t\t) ?\n\t\t\t\tundefined\n\t\t\t:\tvalue.unit\n\t\tif (unit) {\n\t\t\tconst simplifiedNode = simplifyNodeUnit({\n\t\t\t\tunit,\n\t\t\t\tnodeValue,\n\t\t\t})\n\t\t\tunit = simplifiedNode.unit\n\t\t\tnodeValue = simplifiedNode.nodeValue as number\n\t\t}\n\t\treturn formatNumber({\n\t\t\tminimumFractionDigits: 0,\n\t\t\tmaximumFractionDigits: precision,\n\t\t\tlanguage,\n\t\t\tformatUnit,\n\t\t\tnodeValue,\n\t\t\tunit: displayedUnit ?? unit,\n\t\t}).trim()\n\t}\n\treturn undefined\n}\n\nexport function serializeValue(\n\t{ nodeValue, unit }: { nodeValue: Evaluation; unit?: Unit },\n\t{ format }: { format: formatUnit },\n) {\n\tconst serializedUnit = (\n\t\tunit && typeof nodeValue === 'number' ?\n\t\t\tserializeUnit(unit, nodeValue, format)\n\t\t:\t'')?.replace(/\\s*\\/\\s*/g, '/')\n\treturn `${nodeValue} ${serializedUnit}`.trim()\n}\n","import { ParsedRules } from '.'\nimport { ASTNode } from './AST/types'\nimport { PublicodesError } from './error'\nimport { ReferencesMaps } from './parsePublicodes'\nimport { ReferenceNode } from './reference'\nimport { RuleNode } from './rule'\nimport { addToMapSet } from './utils'\n\nexport { cyclicDependencies } from './AST/graph'\n\nconst splitName = (str: string) => str.split(' . ')\nconst joinName = (strs: Array) => strs.join(' . 
')\n\n/**\n * Returns the last part of a dottedName (the leaf).\n */\nexport const nameLeaf = (dottedName: string) =>\n\tsplitName(dottedName).slice(-1)?.[0]\n\n/**\n * Encodes a dottedName for the URL to be secure.\n * @see {@link decodeRuleName}\n */\nexport const encodeRuleName = (dottedName: string): string =>\n\tdottedName\n\t\t?.replace(/\\s\\.\\s/g, '/')\n\t\t.replace(/-/g, '\\u2011') // replace with a insecable tiret to differenciate from space\n\t\t.replace(/\\s/g, '-')\n\n/**\n * Decodes an encoded dottedName.\n * @see {@link encodeRuleName}\n */\nexport const decodeRuleName = (dottedName: string): string =>\n\tdottedName\n\t\t.replace(/\\//g, ' . ')\n\t\t.replace(/-/g, ' ')\n\t\t.replace(/\\u2011/g, '-')\n\n/**\n * Return dottedName from contextName\n */\nexport const contextNameToDottedName = (contextName: string) =>\n\tcontextName.endsWith('$SITUATION') ? ruleParent(contextName) : contextName\n\n/**\n * Returns the parent dottedName\n */\nexport const ruleParent = (dottedName: string): string =>\n\tjoinName(splitName(dottedName).slice(0, -1))\n\n/**\n * Returns an array of dottedName from near parent to far parent.\n */\nexport function ruleParents(dottedName: string): Array {\n\treturn splitName(dottedName)\n\t\t.slice(0, -1)\n\t\t.map((_, i, arr) => joinName(arr.slice(0, i + 1)))\n\t\t.reverse()\n}\n\n/**\n * Returns an array of all child rules of a dottedName\n */\nexport const getChildrenRules = (\n\tparsedRules: ParsedRules,\n\tdottedName: string,\n) => {\n\tconst childrenRules = Object.keys(parsedRules).filter(\n\t\t(ruleDottedName) =>\n\t\t\truleDottedName.startsWith(dottedName) &&\n\t\t\tsplitName(ruleDottedName).length === splitName(dottedName).length + 1,\n\t)\n\n\treturn childrenRules\n}\n\n/**\n * Finds the common ancestor of two dottedName\n */\nexport function findCommonAncestor(dottedName1: string, dottedName2: string) {\n\tconst splitDottedName1 = splitName(dottedName1)\n\tconst splitDottedName2 = splitName(dottedName2)\n\tconst index = splitDottedName1.findIndex(\n\t\t(value, i) => splitDottedName2[i] !== value,\n\t)\n\n\treturn index === -1 ? dottedName1 : joinName(splitDottedName1.slice(0, index))\n}\n\n/**\n * Check wether a rule is accessible from a namespace.\n *\n * Takes into account that some namespace can be `private`, i.e. 
that they can only be\n * accessed by immediate parent, children or siblings.\n *\n * @param rules The parsed rules\n * @param contextName The context of the call\n * @param name The namespace checked for accessibility\n */\nexport function isAccessible(\n\trules: Record,\n\tcontextName: string,\n\tname: string,\n) {\n\tif (!(name in rules)) {\n\t\tthrow new PublicodesError(\n\t\t\t'InternalError',\n\t\t\t`La règle \"${name}\" n'existe pas`,\n\t\t\t{ dottedName: name },\n\t\t)\n\t}\n\n\tconst commonAncestor = findCommonAncestor(contextName, name)\n\tconst parents = [name, ...ruleParents(name), '']\n\tconst rulesToCheckForPrivacy = parents.slice(\n\t\t0,\n\t\tMath.max(parents.indexOf(commonAncestor) - 1, 0),\n\t)\n\n\treturn rulesToCheckForPrivacy.every(\n\t\t(dottedName) =>\n\t\t\t!(dottedName in rules) || rules[dottedName].private === false,\n\t)\n}\n\n/**\n * Check wether a rule is tagged as experimental.\n *\n * Takes into account the a children of an experimental rule is also experimental\n *\n * @param rules The parsed rules\n * @param name The namespace checked for experimental\n */\nexport function isExperimental(rules: Record, name: string) {\n\tif (!(name in rules)) {\n\t\tthrow new PublicodesError(\n\t\t\t'InternalError',\n\t\t\t`La règle \"${name}\" n'existe pas`,\n\t\t\t{ dottedName: name },\n\t\t)\n\t}\n\tconst parents = [name, ...ruleParents(name)]\n\treturn parents.some(\n\t\t(dottedName) =>\n\t\t\tdottedName in rules && rules[dottedName].rawNode?.experimental === 'oui',\n\t)\n}\n\nfunction dottedNameFromContext(context: string, partialName: string) {\n\treturn context ? context + ' . ' + partialName : partialName\n}\nexport function disambiguateReference>(\n\trules: R,\n\treferencedFrom = '',\n\tpartialName: string,\n): keyof R {\n\tconst possibleContexts = ruleParents(referencedFrom)\n\tpossibleContexts.push(referencedFrom)\n\n\t// If the partialName starts with ^ . ^ . ^ . , we want to go up in the parents\n\tif (partialName.startsWith('^ . ')) {\n\t\tconst numberParent = partialName.match(/^(\\^ \\. )+/)![0].length / 4\n\t\tpartialName = partialName.replace(/^(\\^ \\. 
)+/, '')\n\t\tpossibleContexts.splice(-numberParent)\n\t}\n\n\tconst rootContext = possibleContexts.pop()\n\tpossibleContexts.unshift(rootContext as string)\n\tpossibleContexts.push('')\n\n\tconst context = possibleContexts.find((context) => {\n\t\tconst dottedName = dottedNameFromContext(context, partialName)\n\t\tif (!(dottedName in rules)) {\n\t\t\treturn false\n\t\t}\n\t\tif (dottedName === referencedFrom) {\n\t\t\treturn false\n\t\t}\n\t\treturn isAccessible(rules, referencedFrom, dottedName)\n\t})\n\n\tif (context !== undefined) {\n\t\treturn dottedNameFromContext(context, partialName) as keyof R\n\t}\n\n\t// The last possibility we want to check is if the rule is referencing itself\n\tif (referencedFrom.endsWith(partialName)) {\n\t\treturn referencedFrom as keyof R\n\t}\n\n\tconst possibleDottedName = possibleContexts.map((c) =>\n\t\tdottedNameFromContext(c, partialName),\n\t)\n\n\tif (possibleDottedName.every((dottedName) => !(dottedName in rules))) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`La référence \"${partialName}\" est introuvable.\nVérifiez que l'orthographe et l'espace de nom sont corrects`,\n\t\t\t{ dottedName: contextNameToDottedName(referencedFrom) },\n\t\t)\n\t}\n\n\tthrow new PublicodesError(\n\t\t'SyntaxError',\n\t\t`La règle \"${possibleDottedName.find(\n\t\t\t(dottedName) => dottedName in rules,\n\t\t)}\" n'est pas accessible depuis \"${referencedFrom}\".\n\tCela vient du fait qu'elle est privée ou qu'un de ses parent est privé`,\n\t\t{ dottedName: contextNameToDottedName(referencedFrom) },\n\t)\n}\n\nexport function ruleWithDedicatedDocumentationPage(rule) {\n\treturn (\n\t\trule.virtualRule !== true &&\n\t\trule.type !== 'groupe' &&\n\t\trule.type !== 'texte' &&\n\t\trule.type !== 'paragraphe' &&\n\t\trule.type !== 'notification'\n\t)\n}\n\nexport function updateReferencesMapsFromReferenceNode(\n\tnode: ASTNode,\n\treferencesMaps: ReferencesMaps,\n\truleDottedName?: string,\n) {\n\tif (node.nodeKind === 'reference') {\n\t\taddToMapSet(\n\t\t\treferencesMaps.referencesIn,\n\t\t\truleDottedName ?? node.contextDottedName,\n\t\t\tnode.dottedName,\n\t\t)\n\t\taddToMapSet(\n\t\t\treferencesMaps.rulesThatUse,\n\t\t\tnode.dottedName,\n\t\t\truleDottedName ?? 
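/*
 * Quick reference for the dotted-name helpers above (illustrative names):
 *
 *   nameLeaf('contrat . salaire . brut')     // => 'brut'
 *   ruleParent('contrat . salaire . brut')   // => 'contrat . salaire'
 *   ruleParents('contrat . salaire . brut')  // => ['contrat . salaire', 'contrat']
 *
 *   encodeRuleName('aide exceptionnelle . montant')
 *   // => 'aide-exceptionnelle/montant'
 *   decodeRuleName('aide-exceptionnelle/montant')
 *   // => 'aide exceptionnelle . montant'
 */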
node.contextDottedName,\n\t\t)\n\t}\n}\nexport function disambiguateReferenceNode(\n\tnode: ASTNode,\n\tparsedRules: ParsedRules,\n): ReferenceNode | undefined {\n\tif (node.nodeKind !== 'reference') {\n\t\treturn\n\t}\n\tif (node.dottedName) {\n\t\treturn node\n\t}\n\n\tnode.dottedName = disambiguateReference(\n\t\tparsedRules,\n\t\tnode.contextDottedName,\n\t\tnode.name,\n\t)\n\tnode.title = parsedRules[node.dottedName].title\n\tnode.acronym = parsedRules[node.dottedName].rawNode.acronyme\n\treturn node\n}\n","/* eslint-disable prefer-rest-params */\n/* eslint-disable @typescript-eslint/no-this-alias */\n// Adapted from https://github.com/dagrejs/graphlib (MIT license)\n// and https://github.com/lodash/lodash (MIT license)\n\n// TODO: type this\n\nfunction has(obj, key) {\n\treturn obj != null && Object.prototype.hasOwnProperty.call(obj, key)\n}\nfunction constant(value) {\n\treturn function (...args) {\n\t\treturn value\n\t}\n}\n\nconst DEFAULT_EDGE_NAME = '\\x00'\nconst EDGE_KEY_DELIM = '\\x01'\n\nconst incrementOrInitEntry = (map, k) => {\n\tif (map[k]) {\n\t\tmap[k]++\n\t} else {\n\t\tmap[k] = 1\n\t}\n}\n\nconst decrementOrRemoveEntry = (map, k) => {\n\tif (!--map[k]) {\n\t\tdelete map[k]\n\t}\n}\n\nconst edgeArgsToId = (isDirected, v_, w_, name) => {\n\tlet v = '' + v_\n\tlet w = '' + w_\n\tif (!isDirected && v > w) {\n\t\tconst tmp = v\n\t\tv = w\n\t\tw = tmp\n\t}\n\treturn (\n\t\tv +\n\t\tEDGE_KEY_DELIM +\n\t\tw +\n\t\tEDGE_KEY_DELIM +\n\t\t(name === undefined ? DEFAULT_EDGE_NAME : name)\n\t)\n}\n\nconst edgeArgsToObj = (isDirected, v_, w_, name) => {\n\tlet v = '' + v_\n\tlet w = '' + w_\n\tif (!isDirected && v > w) {\n\t\tconst tmp = v\n\t\tv = w\n\t\tw = tmp\n\t}\n\tconst edgeObj: any = { v: v, w: w }\n\tif (name) {\n\t\tedgeObj.name = name\n\t}\n\treturn edgeObj\n}\n\nconst edgeObjToId = (isDirected, edgeObj) => {\n\treturn edgeArgsToId(isDirected, edgeObj.v, edgeObj.w, edgeObj.name)\n}\nexport class Graph {\n\tprivate _nodeCount = 0\n\tprivate _edgeCount = 0\n\n\tprivate _isDirected: any\n\n\tprivate _label: undefined\n\tprivate _defaultNodeLabelFn: (...args: any[]) => any\n\tprivate _defaultEdgeLabelFn: (...args: any[]) => any\n\tprivate _nodes: Record\n\tprivate _in: Record\n\tprivate _preds: Record>\n\tprivate _out: Record>\n\tprivate _sucs: Record>\n\tprivate _edgeObjs: Record\n\tprivate _edgeLabels: Record\n\n\tconstructor(opts: Record = {}) {\n\t\tthis._isDirected = has(opts, 'directed') ? 
opts.directed : true\n\n\t\t// Label for the graph itself\n\t\tthis._label = undefined\n\n\t\t// Defaults to be set when creating a new node\n\t\tthis._defaultNodeLabelFn = constant(undefined)\n\n\t\t// Defaults to be set when creating a new edge\n\t\tthis._defaultEdgeLabelFn = constant(undefined)\n\n\t\t// v -> label\n\t\tthis._nodes = {}\n\n\t\t// v -> edgeObj\n\t\tthis._in = {}\n\n\t\t// u -> v -> Number\n\t\tthis._preds = {}\n\n\t\t// v -> edgeObj\n\t\tthis._out = {} as Record>\n\n\t\t// v -> w -> Number\n\t\tthis._sucs = {}\n\n\t\t// e -> edgeObj\n\t\tthis._edgeObjs = {}\n\n\t\t// e -> label\n\t\tthis._edgeLabels = {}\n\t}\n\n\t/* === Graph functions ========= */\n\n\tisDirected() {\n\t\treturn this._isDirected\n\t}\n\tsetGraph(label) {\n\t\tthis._label = label\n\t\treturn this\n\t}\n\tgraph() {\n\t\treturn this._label\n\t}\n\n\t/* === Node functions ========== */\n\n\tnodeCount() {\n\t\treturn this._nodeCount\n\t}\n\tnodes() {\n\t\treturn Object.keys(this._nodes)\n\t}\n\tsetNode(v, value: any = undefined) {\n\t\tif (has(this._nodes, v)) {\n\t\t\tif (arguments.length > 1) {\n\t\t\t\tthis._nodes[v] = value\n\t\t\t}\n\t\t\treturn this\n\t\t}\n\n\t\tthis._nodes[v] = arguments.length > 1 ? value : this._defaultNodeLabelFn(v)\n\t\tthis._in[v] = {}\n\t\tthis._preds[v] = {}\n\t\tthis._out[v] = {}\n\t\tthis._sucs[v] = {}\n\t\t++this._nodeCount\n\t\treturn this\n\t}\n\tsetNodes(vs, value) {\n\t\tvs.forEach((v) => {\n\t\t\tif (value !== undefined) {\n\t\t\t\tthis.setNode(v, value)\n\t\t\t} else {\n\t\t\t\tthis.setNode(v)\n\t\t\t}\n\t\t})\n\t\treturn this\n\t}\n\tnode(v) {\n\t\treturn this._nodes[v]\n\t}\n\thasNode(v) {\n\t\treturn has(this._nodes, v)\n\t}\n\tsuccessors(v) {\n\t\tconst sucsV = this._sucs[v]\n\t\tif (sucsV) {\n\t\t\treturn Object.keys(sucsV)\n\t\t}\n\t}\n\n\t/* === Edge functions ========== */\n\n\tedgeCount() {\n\t\treturn this._edgeCount\n\t}\n\tedges() {\n\t\treturn Object.values(this._edgeObjs)\n\t}\n\tsetEdge(\n\t\tv: string,\n\t\tw: string,\n\t\tvalue: any = undefined,\n\t\tname: string | undefined = undefined,\n\t) {\n\t\tv = '' + v\n\t\tw = '' + w\n\n\t\tconst e = edgeArgsToId(this._isDirected, v, w, name)\n\t\tif (has(this._edgeLabels, e)) {\n\t\t\tif (value !== undefined) {\n\t\t\t\tthis._edgeLabels[e] = value\n\t\t\t}\n\t\t\treturn this\n\t\t}\n\n\t\t// It didn't exist, so we need to create it.\n\t\t// First ensure the nodes exist.\n\t\tthis.setNode(v)\n\t\tthis.setNode(w)\n\n\t\tthis._edgeLabels[e] =\n\t\t\tvalue !== undefined ? 
value : this._defaultEdgeLabelFn(v, w, name)\n\n\t\tconst edgeObj = edgeArgsToObj(this._isDirected, v, w, name)\n\t\t// Ensure we add undirected edges in a consistent way.\n\t\tv = edgeObj.v\n\t\tw = edgeObj.w\n\n\t\tObject.freeze(edgeObj)\n\t\tthis._edgeObjs[e] = edgeObj\n\t\tincrementOrInitEntry(this._preds[w], v)\n\t\tincrementOrInitEntry(this._sucs[v], w)\n\t\tthis._in[w][e] = edgeObj\n\t\tthis._out[v][e] = edgeObj\n\t\tthis._edgeCount++\n\t\treturn this\n\t}\n\n\tedge(v, w, name) {\n\t\tconst e =\n\t\t\targuments.length === 1 ?\n\t\t\t\tedgeObjToId(this._isDirected, arguments[0])\n\t\t\t:\tedgeArgsToId(this._isDirected, v, w, name)\n\t\treturn this._edgeLabels[e]\n\t}\n\n\thasEdge(v, w, name) {\n\t\tconst e =\n\t\t\targuments.length === 1 ?\n\t\t\t\tedgeObjToId(this._isDirected, arguments[0])\n\t\t\t:\tedgeArgsToId(this._isDirected, v, w, name)\n\t\treturn has(this._edgeLabels, e)\n\t}\n\n\tremoveEdge(v, w, name) {\n\t\tconst e =\n\t\t\targuments.length === 1 ?\n\t\t\t\tedgeObjToId(this._isDirected, arguments[0])\n\t\t\t:\tedgeArgsToId(this._isDirected, v, w, name)\n\t\tconst edge = this._edgeObjs[e]\n\t\tif (edge) {\n\t\t\tv = edge.v\n\t\t\tw = edge.w\n\t\t\tdelete this._edgeLabels[e]\n\t\t\tdelete this._edgeObjs[e]\n\t\t\tdecrementOrRemoveEntry(this._preds[w], v)\n\t\t\tdecrementOrRemoveEntry(this._sucs[v], w)\n\t\t\tdelete this._in[w][e]\n\t\t\tdelete this._out[v][e]\n\t\t\tthis._edgeCount--\n\t\t}\n\t\treturn this\n\t}\n\n\toutEdges(v: string, w: string | undefined = undefined) {\n\t\tconst outV = this._out[v]\n\t\tif (outV) {\n\t\t\tconst edges: any = Object.values(outV)\n\t\t\tif (w === undefined) {\n\t\t\t\treturn edges\n\t\t\t}\n\t\t\treturn edges.filter(function (edge) {\n\t\t\t\treturn edge.w === w\n\t\t\t})\n\t\t}\n\t}\n}\n\n/** Cycles stuff **/\n\nfunction tarjan(graph) {\n\tlet index = 0\n\tconst stack: any[] = []\n\tconst visited = {} // node id -> { onStack, lowlink, index }\n\tconst results: any[] = []\n\n\tfunction dfs(v) {\n\t\tconst entry = (visited[v] = {\n\t\t\tonStack: true,\n\t\t\tlowlink: index,\n\t\t\tindex: index++,\n\t\t})\n\t\tstack.push(v)\n\n\t\tgraph.successors(v).forEach(function (w) {\n\t\t\tif (!Object.prototype.hasOwnProperty.call(visited, w)) {\n\t\t\t\tdfs(w)\n\t\t\t\tentry.lowlink = Math.min(entry.lowlink, visited[w].lowlink)\n\t\t\t} else if (visited[w].onStack) {\n\t\t\t\tentry.lowlink = Math.min(entry.lowlink, visited[w].index)\n\t\t\t}\n\t\t})\n\n\t\tif (entry.lowlink === entry.index) {\n\t\t\tconst cmpt: any[] = []\n\t\t\tlet w\n\t\t\tdo {\n\t\t\t\tw = stack.pop()\n\t\t\t\tvisited[w].onStack = false\n\t\t\t\tcmpt.push(w)\n\t\t\t} while (v !== w)\n\t\t\tresults.push(cmpt)\n\t\t}\n\t}\n\n\tgraph.nodes().forEach(function (v) {\n\t\tif (!Object.prototype.hasOwnProperty.call(visited, v)) {\n\t\t\tdfs(v)\n\t\t}\n\t})\n\n\treturn results\n}\n\nexport function findCycles(graph): string[][] {\n\treturn tarjan(graph).filter(function (cmpt) {\n\t\treturn (\n\t\t\tcmpt.length > 1 || (cmpt.length === 1 && graph.hasEdge(cmpt[0], cmpt[0]))\n\t\t)\n\t})\n}\n","import parsePublicodes from '../parsePublicodes'\nimport { findCycles, Graph } from './findCycles'\n\ntype GraphCycles = string[][]\n\nfunction buildDependenciesGraph(rulesDeps: Map>) {\n\tconst g = new Graph()\n\t;[...rulesDeps.entries()].forEach(([ruleDottedName, dependencies]) => {\n\t\tdependencies.forEach((depDottedName) => {\n\t\t\tg.setEdge(ruleDottedName, depDottedName)\n\t\t})\n\t})\n\treturn g\n}\n\ntype RawRules = Parameters[0]\n\nexport function cyclesInDependenciesGraph(rawRules: 
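/*
 * Minimal sketch of the cycle detection above (illustrative node names):
 * build a dependency graph, then ask for its non-trivial strongly connected
 * components.
 *
 *   const g = new Graph()
 *   g.setEdge('a', 'b')
 *   g.setEdge('b', 'a')
 *   g.setEdge('b', 'c')
 *   findCycles(g)
 *   // => a single component containing 'a' and 'b'
 *   //    ('c' has no self-edge and is not part of any cycle)
 */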
RawRules): GraphCycles {\n\tconst { referencesMaps } = parsePublicodes(rawRules)\n\tconst dependenciesGraph = buildDependenciesGraph(referencesMaps.referencesIn)\n\tconst cycles = findCycles(dependenciesGraph)\n\n\treturn cycles.map((c) => c.reverse())\n}\n\n/**\n * Make the cycle as small as possible.\n */\nexport function squashCycle(\n\trulesDependenciesObject: Map>,\n\tcycle: string[],\n): string[] {\n\tfunction* loopFrom(i: number) {\n\t\tlet j = i\n\t\twhile (true) {\n\t\t\tyield cycle[j++ % cycle.length]\n\t\t}\n\t}\n\tconst smallCycleStartingAt: string[][] = []\n\tfor (let i = 0; i < cycle.length; i++) {\n\t\tconst smallCycle: string[] = []\n\t\tlet previousVertex: string | undefined = undefined\n\t\tfor (const vertex of loopFrom(i)) {\n\t\t\tif (previousVertex === undefined) {\n\t\t\t\tsmallCycle.push(vertex)\n\t\t\t\tpreviousVertex = vertex\n\t\t\t} else if (rulesDependenciesObject.get(previousVertex)?.has(vertex)) {\n\t\t\t\tif (smallCycle.includes(vertex)) {\n\t\t\t\t\tsmallCycle.splice(0, smallCycle.lastIndexOf(vertex))\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tsmallCycle.push(vertex)\n\t\t\t\tpreviousVertex = vertex\n\t\t\t}\n\t\t}\n\t\tsmallCycleStartingAt.push(smallCycle)\n\t}\n\n\tconst smallest = smallCycleStartingAt.reduce((minCycle, someCycle) =>\n\t\tsomeCycle.length > minCycle.length ? minCycle : someCycle,\n\t)\n\treturn smallest\n}\n\n/**\n * This function is useful so as to print the dependencies at each node of the\n * cycle.\n * ⚠️ Indeed, the findCycles function returns the cycle found using the\n * Tarjan method, which is **not necessarily the smallest cycle**. However, the\n * smallest cycle is more readable.\n */\nexport function cyclicDependencies(\n\trawRules: RawRules,\n): [GraphCycles, string[]] {\n\tconst { referencesMaps } = parsePublicodes(rawRules)\n\tconst dependenciesGraph = buildDependenciesGraph(referencesMaps.referencesIn)\n\tconst cycles = findCycles(dependenciesGraph)\n\n\tconst reversedCycles = cycles.map((c) => c.reverse())\n\n\tconst smallCycles = reversedCycles.map((cycle) =>\n\t\tsquashCycle(referencesMaps.referencesIn, cycle),\n\t)\n\n\tconst printableStronglyConnectedComponents = reversedCycles.map((c, i) =>\n\t\tprintInDotFormat(dependenciesGraph, c, smallCycles[i]),\n\t)\n\n\treturn [smallCycles, printableStronglyConnectedComponents]\n}\n\n/**\n * Is edge in the cycle, in the same order?\n */\nconst edgeIsInCycle = (cycle: string[], v: string, w: string): boolean => {\n\tfor (let i = 0; i < cycle.length + 1; i++) {\n\t\tif (v === cycle[i] && w === cycle[(i + 1) % cycle.length]) return true\n\t}\n\treturn false\n}\n\nexport function printInDotFormat(\n\tdependenciesGraph: Graph,\n\tcycle: string[],\n\tsubCycleToHighlight: string[],\n) {\n\tconst edgesSet = new Set()\n\tcycle.forEach((vertex) => {\n\t\tdependenciesGraph\n\t\t\t.outEdges(vertex)\n\t\t\t.filter(({ w }) => cycle.includes(w))\n\t\t\t.forEach(({ v, w }) => {\n\t\t\t\tedgesSet.add(\n\t\t\t\t\t`\"${v}\" -> \"${w}\"` +\n\t\t\t\t\t\t(edgeIsInCycle(subCycleToHighlight, v, w) ? 
' [color=red]' : ''),\n\t\t\t\t)\n\t\t\t})\n\t})\n\treturn `digraph Cycle {\\n\\t${[...edgesSet].join(';\\n\\t')};\\n}`\n}\n","import Engine from '.'\nimport { ASTNode, EvaluatedNode, MissingVariables } from './AST/types'\nimport { PublicodesError } from './error'\nimport { registerEvaluationFunction } from './evaluationFunctions'\nimport { defaultNode, mergeMissing, undefinedNode } from './evaluationUtils'\nimport { capitalise0 } from './format'\nimport parse, { mecanismKeys } from './parse'\nimport { Context, RawRule } from './parsePublicodes'\nimport {\n\tReplacementRule,\n\tparseRendNonApplicable,\n\tparseReplacements,\n} from './replacement'\nimport { isAccessible, nameLeaf, ruleParents } from './ruleUtils'\nimport { weakCopyObj } from './utils'\n\nexport type Rule = {\n\tformule?: Record | string\n\tvaleur?: Record | string\n\tquestion?: string\n\tdescription?: string\n\tunité?: string\n\tacronyme?: string\n\texemples?: any\n\trésumé?: string\n\ticônes?: string\n\ttitre?: string\n\tsévérité?: string\n\ttype?: string\n\texperimental?: 'oui'\n\t'possiblement non applicable'?: 'oui'\n\tprivé?: 'oui'\n\tnote?: string\n\tremplace?: Remplace | Array\n\t'rend non applicable'?: Remplace | Array\n\tsuggestions?: Record>\n\tréférences?: { [source: string]: string }\n\tAPI?: string\n\t'identifiant court'?: string\n} & Record\n\ntype Remplace =\n\t| {\n\t\t\t'références à': string\n\t\t\tdans?: Array | string\n\t\t\t'sauf dans'?: Array | string\n\t\t\tpriorité?: number\n\t }\n\t| string\n\nexport type RuleNode = {\n\tdottedName: Name\n\ttitle: string\n\tnodeKind: 'rule'\n\tvirtualRule: boolean\n\tprivate: boolean\n\trawNode: Rule\n\treplacements: Array\n\texplanation: {\n\t\tvaleur: ASTNode\n\t\tparents: Array\n\t\tnullableParent?: ASTNode\n\t\truleDisabledByItsParent: boolean\n\t}\n\tsuggestions: Record\n\t'identifiant court'?: string\n}\n\nfunction parseRule(nom: string, rawRule: Rule, context: Context): RuleNode {\n\tconst privateRule = rawRule.privé === 'oui' || nom.startsWith('[privé] ')\n\tnom = nom.replace(/^\\[privé\\] /, '')\n\tconst dottedName = [context.dottedName, nom].filter(Boolean).join(' . ')\n\n\tconst name = nameLeaf(dottedName)\n\tconst title = capitalise0(rawRule['titre'] ?? name)\n\n\tif (context.parsedRules[dottedName]) {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t`La référence '${dottedName}' a déjà été définie`,\n\t\t\t{ dottedName },\n\t\t)\n\t}\n\n\tconst ruleValue: Record = {}\n\n\tfor (const key in rawRule) {\n\t\tif (mecanismKeys.includes(key)) {\n\t\t\truleValue[key] = rawRule[key]\n\t\t}\n\t}\n\tif ('formule' in rawRule) {\n\t\truleValue.valeur = rawRule.formule\n\t}\n\tif (!privateRule && !dottedName.endsWith('$SITUATION')) {\n\t\t// We create a $SITUATION child rule for each rule that is not private\n\t\t// This value will be used to evaluate the rule in the current situation (`setSituation`)\n\t\truleValue['dans la situation'] = `${dottedName} . $SITUATION`\n\t\truleValue['avec'] =\n\t\t\tweakCopyObj(ruleValue['avec'] as Record) ?? 
{}\n\t\tconst situationValue = weakCopyObj(undefinedNode)\n\t\tsituationValue.isNullable = rawRule['possiblement non applicable'] === 'oui'\n\t\t;(ruleValue['avec'] as Record)['[privé] $SITUATION'] = {\n\t\t\tvaleur: situationValue,\n\t\t}\n\n\t\t// If the `par défaut` value is used, then the rule should be listed as a missingVariables\n\t\tif (ruleValue['par défaut'] != null) {\n\t\t\truleValue['par défaut'] = {\n\t\t\t\tvaleur: ruleValue['par défaut'],\n\t\t\t\t'variable manquante': dottedName,\n\t\t\t}\n\t\t}\n\t}\n\n\t// const ruleContext = weakCopyObj(context)\n\t// ruleContext.dottedName = dottedName\n\t// const ruleContext = { ...context, dottedName }\n\tconst currentDottedNameContext = context.dottedName\n\tcontext.dottedName = dottedName\n\n\t// The following ensures that nested rules appears after the root rule when\n\t// iterating over parsedRule\n\tcontext.parsedRules[dottedName] = undefined as any\n\n\tconst explanation = {\n\t\tvaleur: parse(ruleValue, context),\n\t\t// We include a list of references to the parents to implement the branch\n\t\t// desactivation feature. When evaluating a rule we only need to know the\n\t\t// first nullable parent, but this is something that we can't determine at\n\t\t// this stage :\n\t\t// - we need to run remplacements (which works on references in the ASTs\n\t\t// which is why we insert these “virtual” references)\n\t\t// - we need to infer unit of the rules\n\t\t//\n\t\t// An alternative implementation would be possible that would colocate the\n\t\t// code related to branch desactivation (ie find the first nullable parent\n\t\t// statically after rules parsing)\n\t\tparents: ruleParents(dottedName).map(\n\t\t\t(parent) =>\n\t\t\t\t({\n\t\t\t\t\tdottedName: parent,\n\t\t\t\t\tnodeKind: 'reference',\n\t\t\t\t\tcontextDottedName: context.dottedName,\n\t\t\t\t}) as ASTNode<'reference'>,\n\t\t),\n\t}\n\n\tconst suggestions = {} as Record\n\tif (rawRule.suggestions) {\n\t\tfor (const name in rawRule.suggestions) {\n\t\t\tsuggestions[name] = parse(rawRule.suggestions[name], context)\n\t\t}\n\t}\n\n\tcontext.parsedRules[dottedName] = {\n\t\tdottedName,\n\t\treplacements: [\n\t\t\t...parseRendNonApplicable(rawRule['rend non applicable'], context),\n\t\t\t...parseReplacements(rawRule.remplace, context),\n\t\t],\n\t\ttitle: title,\n\t\tprivate: privateRule,\n\t\tsuggestions,\n\t\tnodeKind: 'rule',\n\t\texplanation,\n\t\trawNode: rawRule,\n\t\tvirtualRule: privateRule,\n\t} as RuleNode\n\tcontext.dottedName = currentDottedNameContext\n\treturn context.parsedRules[dottedName]\n}\n\nexport function parseRules(\n\trules: Partial>,\n\tcontext: Context,\n) {\n\tfor (const dottedName in rules) {\n\t\tlet rule = rules[dottedName]\n\n\t\tif (typeof rule === 'string' || typeof rule === 'number') {\n\t\t\trule = { valeur: `${rule}` }\n\t\t}\n\t\tif (typeof rule !== 'object') {\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'SyntaxError',\n\t\t\t\t`Rule ${dottedName} is incorrectly written. Please give it a proper value.`,\n\t\t\t\t{ dottedName },\n\t\t\t)\n\t\t}\n\t\tconst copy = rule === null ? 
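/*
 * In short, for a public rule `prime` (illustrative name) the code above
 * rewrites its value with `dans la situation: prime . $SITUATION` and adds,
 * under `avec`, a private child rule `prime . $SITUATION` holding an
 * undefined constant; this is the node that `setSituation` later fills in.
 * A `par défaut` value is additionally wrapped with
 * `variable manquante: <dottedName>` so the rule is reported as missing
 * when its default is used.
 */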
{} : weakCopyObj(rule)\n\t\tparseRule(dottedName, copy, context)\n\t}\n}\n\nregisterEvaluationFunction('rule', function evaluate(node) {\n\tconst { ruleDisabledByItsParent, nullableParent, parentMissingVariables } =\n\t\tevaluateDisablingParent(this, node)\n\n\tlet valeurEvaluation: EvaluatedNode = {\n\t\t...node.explanation.valeur,\n\t\tnodeValue: null,\n\t\tmissingVariables: {},\n\t}\n\tif (!ruleDisabledByItsParent) {\n\t\tif (\n\t\t\tthis.cache._meta.evaluationRuleStack.filter(\n\t\t\t\t(dottedName) => dottedName === node.dottedName,\n\t\t\t).length > 1\n\t\t) {\n\t\t\t// TODO : remettre ce warning. Je ne sais pas pourquoi, mais la base de règle de mon-entreprise lève un warning sur quasiment toutes les cotisations\n\t\t\t// \t\t\twarning(\n\t\t\t// \t\t\t\tthis.context.logger,\n\t\t\t// \t\t\t\t`Un cycle a été détecté lors de l'évaluation de cette règle.\n\n\t\t\t// Par défaut cette règle sera évaluée à 'null'.\n\t\t\t// Pour indiquer au moteur de résoudre la référence circulaire en trouvant le point fixe\n\t\t\t// de la fonction, il vous suffit d'ajouter l'attribut suivant niveau de la règle :\n\n\t\t\t// \t${node.dottedName}:\n\t\t\t// \t\trésoudre la référence circulaire: oui\"\n\t\t\t// \t\t...\n\t\t\t// `,\n\t\t\t// \t\t\t\t{ dottedName: node.dottedName }\n\t\t\t// \t\t\t)\n\n\t\t\tvaleurEvaluation = {\n\t\t\t\tnodeValue: undefined,\n\t\t\t} as EvaluatedNode\n\t\t} else {\n\t\t\tthis.cache._meta.evaluationRuleStack.unshift(node.dottedName)\n\t\t\tvaleurEvaluation = this.evaluateNode(node.explanation.valeur)\n\t\t\tthis.cache._meta.evaluationRuleStack.shift()\n\t\t}\n\t}\n\tvaleurEvaluation.missingVariables ??= {}\n\tupdateRuleMissingVariables(this, node, valeurEvaluation)\n\tconst evaluation = {\n\t\t...valeurEvaluation,\n\t\tmissingVariables: mergeMissing(\n\t\t\tvaleurEvaluation.missingVariables,\n\t\t\tparentMissingVariables,\n\t\t),\n\t\t...node,\n\t\texplanation: {\n\t\t\tparents: node.explanation.parents,\n\t\t\tvaleur: valeurEvaluation,\n\t\t\tnullableParent,\n\t\t\truleDisabledByItsParent,\n\t\t},\n\t}\n\n\treturn evaluation\n})\n\n/*\n\tWe implement the terminal case for missing variables manually here as\n\ta rule is missing if it is undefined and has no other missing dependencies\n*/\nfunction updateRuleMissingVariables(\n\tengine: Engine,\n\tnode: RuleNode,\n\tvaleurEvaluation: EvaluatedNode,\n): void {\n\tif (\n\t\tnode.private === true ||\n\t\t!isAccessible(engine.context.parsedRules, '', node.dottedName)\n\t) {\n\t\treturn\n\t}\n\n\tif (\n\t\tvaleurEvaluation.nodeValue === undefined &&\n\t\t!Object.keys(valeurEvaluation.missingVariables).length\n\t) {\n\t\tvaleurEvaluation.missingVariables[node.dottedName] = 1\n\t}\n\n\treturn\n}\n\nexport function evaluateDisablingParent(\n\tengine: Engine,\n\tnode: RuleNode,\n): {\n\truleDisabledByItsParent: boolean\n\tparentMissingVariables: MissingVariables\n\tnullableParent?: ASTNode\n} {\n\tif (node.private) {\n\t\t// We do not need to check if a private rule is disabled by its parent :\n\t\t// they are accessible only from its sibling or parent\n\t\t// (which would already be disabled)\n\t\treturn { ruleDisabledByItsParent: false, parentMissingVariables: {} }\n\t}\n\n\tconst nodesTypes = engine.context.nodesTypes\n\tconst nullableParent = node.explanation.parents.find(\n\t\t(ref) =>\n\t\t\tnodesTypes.get(ref)?.isNullable ||\n\t\t\tnodesTypes.get(ref)?.type === 'boolean',\n\t)\n\n\tif (!nullableParent) {\n\t\treturn { ruleDisabledByItsParent: false, parentMissingVariables: {} }\n\t}\n\n\tif (\n\t\t// TODO: remove this 
condition and the associated \"parentRuleStack\", cycles\n\t\t// should be detected and avoided at parse time.\n\t\t!engine.cache._meta.parentRuleStack.includes(node.dottedName)\n\t) {\n\t\tengine.cache._meta.parentRuleStack.unshift(node.dottedName)\n\t\tlet parentIsNotApplicable = defaultNode(false) as EvaluatedNode\n\t\tif (nodesTypes.get(nullableParent)?.isNullable) {\n\t\t\tparentIsNotApplicable = engine.evaluateNode({\n\t\t\t\tnodeKind: 'est non applicable',\n\t\t\t\texplanation: nullableParent,\n\t\t\t})\n\t\t}\n\t\tif (\n\t\t\tparentIsNotApplicable.nodeValue !== true &&\n\t\t\tnodesTypes.get(nullableParent)?.type === 'boolean'\n\t\t) {\n\t\t\tparentIsNotApplicable = engine.evaluateNode({\n\t\t\t\tnodeKind: 'operation',\n\t\t\t\toperator: '=',\n\t\t\t\toperationKind: '=',\n\t\t\t\texplanation: [nullableParent, defaultNode(false)],\n\t\t\t})\n\t\t}\n\n\t\tengine.cache._meta.parentRuleStack.shift()\n\t\tif (parentIsNotApplicable.nodeValue === true) {\n\t\t\treturn {\n\t\t\t\truleDisabledByItsParent: true,\n\t\t\t\tparentMissingVariables: parentIsNotApplicable.missingVariables ?? {},\n\t\t\t\tnullableParent,\n\t\t\t}\n\t\t}\n\t}\n\n\tlet parentMissingVariables: MissingVariables = {}\n\n\tif (nodesTypes.get(nullableParent)?.type === 'boolean') {\n\t\tconst parentEvaluation = engine.evaluateNode(nullableParent)\n\t\tparentMissingVariables = parentEvaluation.missingVariables ?? {}\n\t\treturn {\n\t\t\truleDisabledByItsParent: parentEvaluation.nodeValue === false,\n\t\t\tparentMissingVariables,\n\t\t\tnullableParent,\n\t\t}\n\t}\n\n\treturn {\n\t\truleDisabledByItsParent: false,\n\t\tparentMissingVariables,\n\t\tnullableParent,\n\t}\n}\n","import parse from '../parse'\nimport { Context } from '../parsePublicodes'\nimport { parseRules } from '../rule'\n\nexport default function parseAvec(v, context: Context) {\n\tparseRules(v.avec, context)\n\tconst valeur = parse(v.valeur, context)\n\treturn valeur\n}\n\nparseAvec.nom = 'avec' as const\n","import Engine from '..'\nimport { ASTNode, Evaluation } from '../AST/types'\nimport { PublicodesError, warning } from '../error'\nimport { mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport { convertUnit, inferUnit } from '../units'\n\ntype TrancheNode = { taux: ASTNode } | { montant: ASTNode }\nexport type TrancheNodes = Array\n\nexport const parseTranches = (tranches, context): TrancheNodes => {\n\treturn tranches.map((node, i) => {\n\t\tif (!node.plafond && i > tranches.length) {\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'SyntaxError',\n\t\t\t\t`La tranche n°${i} du barème n'a pas de plafond précisé. Seule la dernière tranche peut ne pas être plafonnée`,\n\t\t\t\t{ dottedName: '' },\n\t\t\t)\n\t\t}\n\t\treturn {\n\t\t\t...node,\n\t\t\t...(node.taux !== undefined ? 
{ taux: parse(node.taux, context) } : {}),\n\t\t\t...(node.montant !== undefined ?\n\t\t\t\t{ montant: parse(node.montant, context) }\n\t\t\t:\t{}),\n\t\t\tplafond:\n\t\t\t\t'plafond' in node ?\n\t\t\t\t\tparse(node.plafond, context)\n\t\t\t\t:\t{\n\t\t\t\t\t\tnodeValue: Infinity,\n\t\t\t\t\t\tnodeKind: 'constant',\n\t\t\t\t\t\ttype: 'number',\n\t\t\t\t\t\tisNullable: false,\n\t\t\t\t\t},\n\t\t}\n\t})\n}\n\nexport function evaluatePlafondUntilActiveTranche(\n\tthis: Engine,\n\t{ multiplicateur, assiette, parsedTranches },\n) {\n\treturn parsedTranches.reduce(\n\t\t([tranches, activeTrancheFound], parsedTranche, i: number) => {\n\t\t\tif (activeTrancheFound) {\n\t\t\t\treturn [\n\t\t\t\t\t[...tranches, { ...parsedTranche, isAfterActive: true }],\n\t\t\t\t\tactiveTrancheFound,\n\t\t\t\t]\n\t\t\t}\n\n\t\t\tconst plafond = this.evaluateNode(parsedTranche.plafond)\n\t\t\tconst plancher =\n\t\t\t\ttranches[i - 1] ? tranches[i - 1].plafond : { nodeValue: 0 }\n\n\t\t\tlet plafondValue: Evaluation =\n\t\t\t\t(\n\t\t\t\t\tplafond.nodeValue === undefined ||\n\t\t\t\t\tmultiplicateur.nodeValue === undefined\n\t\t\t\t) ?\n\t\t\t\t\tundefined\n\t\t\t\t:\tplafond.nodeValue * multiplicateur.nodeValue\n\n\t\t\ttry {\n\t\t\t\tplafondValue =\n\t\t\t\t\tplafondValue === Infinity || plafondValue === 0 ?\n\t\t\t\t\t\tplafondValue\n\t\t\t\t\t:\tconvertUnit(\n\t\t\t\t\t\t\tinferUnit('*', [plafond.unit, multiplicateur.unit]),\n\t\t\t\t\t\t\tassiette.unit,\n\t\t\t\t\t\t\tplafondValue,\n\t\t\t\t\t\t)\n\t\t\t} catch (e) {\n\t\t\t\twarning(\n\t\t\t\t\tthis.context.logger,\n\t\t\t\t\t`L'unité du plafond de la tranche n°${\n\t\t\t\t\t\ti + 1\n\t\t\t\t\t} n'est pas compatible avec celle l'assiette`,\n\t\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t\t\te,\n\t\t\t\t)\n\t\t\t}\n\t\t\tconst plancherValue = tranches[i - 1] ? 
tranches[i - 1].plafondValue : 0\n\t\t\tconst isAfterActive =\n\t\t\t\tplancherValue === undefined || assiette.nodeValue === undefined ?\n\t\t\t\t\tundefined\n\t\t\t\t:\tplancherValue > assiette.nodeValue\n\n\t\t\tconst calculationValues = [plafond, assiette, multiplicateur, plancher]\n\t\t\tif (calculationValues.some((node) => node.nodeValue === undefined)) {\n\t\t\t\treturn [\n\t\t\t\t\t[\n\t\t\t\t\t\t...tranches,\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t...parsedTranche,\n\t\t\t\t\t\t\tplafond,\n\t\t\t\t\t\t\tplafondValue,\n\t\t\t\t\t\t\tplancherValue,\n\t\t\t\t\t\t\tnodeValue: undefined,\n\t\t\t\t\t\t\tisActive: undefined,\n\t\t\t\t\t\t\tisAfterActive,\n\t\t\t\t\t\t\tmissingVariables: mergeAllMissing(calculationValues),\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t\tfalse,\n\t\t\t\t]\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\t!!tranches[i - 1] &&\n\t\t\t\t!!plancherValue &&\n\t\t\t\t(plafondValue as number) <= plancherValue\n\t\t\t) {\n\t\t\t\tthrow new PublicodesError(\n\t\t\t\t\t'EvaluationError',\n\t\t\t\t\t`Le plafond de la tranche n°${\n\t\t\t\t\t\ti + 1\n\t\t\t\t\t} a une valeur inférieure à celui de la tranche précédente`,\n\t\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t\t)\n\t\t\t}\n\n\t\t\tconst tranche = {\n\t\t\t\t...parsedTranche,\n\t\t\t\tplafond,\n\t\t\t\tplancherValue,\n\t\t\t\tplafondValue,\n\t\t\t\tisAfterActive,\n\t\t\t\tisActive:\n\t\t\t\t\tassiette.nodeValue >= plancherValue &&\n\t\t\t\t\tassiette.nodeValue < (plafondValue as number),\n\t\t\t}\n\n\t\t\treturn [[...tranches, tranche], tranche.isActive]\n\t\t},\n\t\t[[], false],\n\t)[0]\n}\n","import { EvaluationFunction, PublicodesError } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { defaultNode, mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport { convertUnit, parseUnit } from '../units'\nimport {\n\tevaluatePlafondUntilActiveTranche,\n\tparseTranches,\n\tTrancheNodes,\n} from './trancheUtils'\n\n// Barème en taux marginaux.\nexport type BarèmeNode = {\n\texplanation: {\n\t\ttranches: TrancheNodes\n\t\tmultiplicateur: ASTNode\n\t\tassiette: ASTNode\n\t}\n\tnodeKind: 'barème'\n}\nexport default function parseBarème(v, context): BarèmeNode {\n\tconst explanation = {\n\t\tassiette: parse(v.assiette, context),\n\t\tmultiplicateur:\n\t\t\tv.multiplicateur ? 
parse(v.multiplicateur, context) : defaultNode(1),\n\t\ttranches: parseTranches(v.tranches, context),\n\t}\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'barème',\n\t}\n}\n\nfunction evaluateBarème(tranches, assiette, evaluate) {\n\treturn tranches.map((tranche) => {\n\t\tif (tranche.isAfterActive) {\n\t\t\treturn { ...tranche, nodeValue: 0 }\n\t\t}\n\t\tconst taux = evaluate(tranche.taux)\n\t\tconst missingVariables = mergeAllMissing([taux, tranche])\n\n\t\tif (\n\t\t\t[\n\t\t\t\tassiette.nodeValue,\n\t\t\t\ttaux.nodeValue,\n\t\t\t\ttranche.plafondValue,\n\t\t\t\ttranche.plancherValue,\n\t\t\t].some((value) => value === undefined)\n\t\t) {\n\t\t\treturn {\n\t\t\t\t...tranche,\n\t\t\t\ttaux,\n\t\t\t\tnodeValue: undefined,\n\t\t\t\tmissingVariables,\n\t\t\t}\n\t\t}\n\t\treturn {\n\t\t\t...tranche,\n\t\t\ttaux,\n\t\t\t...('unit' in assiette && { unit: assiette.unit }),\n\t\t\tnodeValue:\n\t\t\t\t(Math.min(assiette.nodeValue, tranche.plafondValue) -\n\t\t\t\t\ttranche.plancherValue) *\n\t\t\t\tconvertUnit(taux.unit, parseUnit(''), taux.nodeValue as number),\n\t\t\tmissingVariables,\n\t\t}\n\t})\n}\nconst evaluate: EvaluationFunction<'barème'> = function (node) {\n\tconst evaluate = this.evaluateNode.bind(this)\n\tconst assiette = this.evaluateNode(node.explanation.assiette)\n\tconst multiplicateur = this.evaluateNode(node.explanation.multiplicateur)\n\n\tif (multiplicateur.nodeValue === 0) {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t`Le multiplicateur ne peut pas être nul`,\n\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t)\n\t}\n\n\tconst tranches = evaluateBarème(\n\t\tevaluatePlafondUntilActiveTranche.call(this, {\n\t\t\tparsedTranches: node.explanation.tranches,\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t}),\n\t\tassiette,\n\t\tevaluate,\n\t)\n\tconst nodeValue = tranches.reduce(\n\t\t(value, { nodeValue }) =>\n\t\t\tnodeValue == undefined ? 
undefined : value + nodeValue,\n\t\t0,\n\t)\n\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\tmissingVariables: mergeAllMissing([assiette, multiplicateur, ...tranches]),\n\t\texplanation: {\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t\ttranches,\n\t\t},\n\t\tunit: assiette.unit,\n\t} as any\n}\n\nregisterEvaluationFunction('barème', evaluate)\n","import { EvaluationFunction } from '..'\nimport { ASTNode } from '../AST/types'\nimport { PublicodesError } from '../error'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { bonus, mergeAllMissing, mergeMissing } from '../evaluationUtils'\nimport parse from '../parse'\n\nexport type ConditionNode = {\n\texplanation: {\n\t\tsi: ASTNode\n\t\talors: ASTNode\n\t\tsinon: ASTNode\n\t}\n\tnodeKind: 'condition'\n}\n\nconst evaluate: EvaluationFunction<'condition'> = function (node) {\n\tlet evaluation\n\tconst condition = this.evaluateNode(node.explanation.si)\n\tlet alors = node.explanation.alors\n\tlet sinon = node.explanation.sinon\n\tif ('unit' in condition) {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t'La condition doit être de type booléen',\n\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t)\n\t}\n\tif (condition.nodeValue === true) {\n\t\talors = this.evaluateNode(node.explanation.alors)\n\t\t;(alors as any).isActive = true\n\t\tevaluation = alors\n\t} else if (condition.nodeValue === false) {\n\t\tsinon = this.evaluateNode(node.explanation.sinon)\n\t\tevaluation = sinon\n\t} else if (condition.nodeValue === null) {\n\t\tevaluation = condition\n\t} else if (condition.nodeValue === undefined) {\n\t\tsinon = this.evaluateNode(node.explanation.sinon)\n\t\talors = this.evaluateNode(node.explanation.alors)\n\t\tevaluation = {\n\t\t\t...condition,\n\t\t\tmissingVariables: mergeAllMissing([sinon, alors]),\n\t\t}\n\t} else {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t'La condition doit être de type booléen',\n\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t)\n\t}\n\tconst unit = evaluation.unit ?? (alors as any).unit\n\treturn {\n\t\tnodeValue: evaluation.nodeValue,\n\t\tmissingVariables: mergeMissing(\n\t\t\tbonus(condition.missingVariables),\n\t\t\tevaluation.missingVariables,\n\t\t),\n\t\t...(unit != undefined ? 
{ unit } : {}),\n\t\t...node,\n\t\texplanation: { si: condition, alors, sinon },\n\t}\n}\nexport default function parseCondition(v, context) {\n\tconst explanation = {\n\t\tsi: parse(v.si, context),\n\t\talors: parse(v.alors, context),\n\t\tsinon: parse(v.sinon, context),\n\t}\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'condition',\n\t} as ConditionNode\n}\n\nparseCondition.nom = 'condition'\n\nregisterEvaluationFunction('condition', evaluate)\n","import { EvaluationFunction } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { notApplicableNode } from '../evaluationUtils'\nimport parse from '../parse'\nimport { ReferenceNode } from '../reference'\nimport { serializeUnit } from '../units'\n\nexport type ContextNode = {\n\texplanation: {\n\t\tnode: ASTNode\n\t\tcontexte: Array<[ReferenceNode, ASTNode]>\n\t\tsubEngineId: number\n\t}\n\tnodeKind: 'contexte'\n}\n\nexport default function parseMecanismContexte(v, context) {\n\tconst contexte = Object.keys(v.contexte).map((dottedName) => [\n\t\tparse(dottedName, context),\n\t\tparse(v.contexte[dottedName], context),\n\t])\n\n\tconst node = parse(v.valeur, context)\n\n\treturn {\n\t\texplanation: {\n\t\t\tnode,\n\t\t\tcontexte,\n\t\t\tsubEngineId: context.subEngineIncrementingNumber++,\n\t\t},\n\t\tnodeKind: parseMecanismContexte.nom,\n\t} as ContextNode\n}\nparseMecanismContexte.nom = 'contexte' as const\n\nconst evaluateContexte: EvaluationFunction<'contexte'> = function (node) {\n\tif (this.cache._meta.currentEvaluationWithContext === node.explanation.node) {\n\t\treturn { ...notApplicableNode, ...node }\n\t}\n\tconst amendedSituation = Object.fromEntries(\n\t\tnode.explanation.contexte\n\t\t\t.filter(([originRule, replacement]) => {\n\t\t\t\tconst originRuleEvaluation = this.evaluateNode(originRule)\n\t\t\t\tconst replacementEvaluation = this.evaluateNode(replacement)\n\n\t\t\t\treturn (\n\t\t\t\t\toriginRuleEvaluation.nodeValue !== replacementEvaluation.nodeValue ||\n\t\t\t\t\tserializeUnit(originRuleEvaluation.unit) !==\n\t\t\t\t\t\tserializeUnit(replacementEvaluation.unit)\n\t\t\t\t)\n\t\t\t})\n\t\t\t.map(\n\t\t\t\t([originRule, replacement]) =>\n\t\t\t\t\t[originRule.dottedName, replacement] as [string, ASTNode],\n\t\t\t),\n\t)\n\n\tlet engine = this\n\tif (Object.keys(amendedSituation).length) {\n\t\tengine = this.shallowCopy().setSituation(amendedSituation, {\n\t\t\tkeepPreviousSituation: true,\n\t\t})\n\t\tengine.subEngineId = node.explanation.subEngineId\n\n\t\t// The value of the replaced ruled are computed **without the replacement active**\n\t\tObject.values(amendedSituation).forEach((value) =>\n\t\t\tengine.cache.nodes.set(value, this.evaluate(value)),\n\t\t)\n\n\t\tthis.subEngines[node.explanation.subEngineId] = engine\n\t}\n\tengine.cache._meta.currentEvaluationWithContext = node.explanation.node\n\tconst evaluatedNode = engine.evaluateNode(node.explanation.node)\n\n\tdelete engine.cache._meta.currentEvaluationWithContext\n\n\treturn {\n\t\t...node,\n\t\tnodeValue: evaluatedNode.nodeValue,\n\t\texplanation: {\n\t\t\t...node.explanation,\n\t\t\tnode: evaluatedNode,\n\t\t},\n\t\tmissingVariables: evaluatedNode.missingVariables,\n\t\t...('unit' in evaluatedNode && { unit: evaluatedNode.unit }),\n\t}\n}\nregisterEvaluationFunction('contexte', evaluateContexte)\n","import { PublicodesError } from './error'\n\nexport function normalizeDateString(dateString: string): string {\n\tlet [day, month, year] = dateString.split('/')\n\tif (!year) {\n\t\t;[day, 
month, year] = ['01', day, month]\n\t}\n\treturn normalizeDate(+year, +month, +day)\n}\n\nconst pad = (n: number): string => (+n < 10 ? `0${n}` : '' + n)\nexport function normalizeDate(\n\tyear: number,\n\tmonth: number,\n\tday: number,\n): string {\n\tconst date = new Date(+year, +month - 1, +day)\n\tif (!+date || date.getDate() !== +day) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`La date ${day}/${month}/${year} n'est pas valide`,\n\t\t\t{ dottedName: '' },\n\t\t)\n\t}\n\treturn `${pad(day)}/${pad(month)}/${pad(year)}`\n}\n\nexport function convertToDate(value: string): Date {\n\tconst [day, month, year] = normalizeDateString(value).split('/')\n\tconst result = new Date(+year, +month - 1, +day)\n\t// Reset date to utc midnight for exact calculation of day difference (no\n\t// daylight saving effect)\n\tresult.setMinutes(result.getMinutes() - result.getTimezoneOffset())\n\treturn result\n}\n\nexport function convertToString(date: Date): string {\n\treturn normalizeDate(date.getFullYear(), date.getMonth() + 1, date.getDate())\n}\n\nexport function getRelativeDate(date: string, dayDifferential: number): string {\n\tconst relativeDate = new Date(convertToDate(date))\n\trelativeDate.setDate(relativeDate.getDate() + dayDifferential)\n\treturn convertToString(relativeDate)\n}\n\nexport function getYear(date: string): number {\n\treturn +date.slice(-4)\n}\n\nexport function getDifferenceInDays(from: string, to: string): number {\n\tconst millisecondsPerDay = 1000 * 60 * 60 * 24\n\treturn (\n\t\t(convertToDate(from).getTime() - convertToDate(to).getTime()) /\n\t\tmillisecondsPerDay\n\t)\n}\n\nexport function getDifferenceInMonths(from: string, to: string): number {\n\t// We want to compute the difference in actual month between the two dates\n\t// For date that start during a month, a pro-rata will be done depending on\n\t// the duration of the month in days\n\tconst [dayFrom, monthFrom, yearFrom] = from.split('/').map((x) => +x)\n\tconst [dayTo, monthTo, yearTo] = to.split('/').map((x) => +x)\n\tconst numberOfFullMonth = monthTo - monthFrom + 12 * (yearTo - yearFrom)\n\tconst numDayMonthFrom = new Date(yearFrom, monthFrom, 0).getDate()\n\tconst numDayMonthTo = new Date(yearTo, monthTo, 0).getDate()\n\tconst prorataMonthFrom = (dayFrom - 1) / numDayMonthFrom\n\tconst prorataMonthTo = dayTo / numDayMonthTo\n\treturn numberOfFullMonth - prorataMonthFrom + prorataMonthTo\n}\n\nexport function getDifferenceInYears(from: string, to: string): number {\n\tconst differenceInDays = getDifferenceInDays(to, from)\n\n\tconst isLeapYear = (year: number) =>\n\t\t(year % 4 === 0 && year % 100 !== 0) || year % 400 === 0\n\tconst after1stMarch = (date: Date) =>\n\t\tdate >= new Date(date.getFullYear(), 2, 1)\n\n\tconst fromDate = convertToDate(from)\n\tconst toDate = convertToDate(to)\n\n\tconst fromYear = fromDate.getFullYear() + (after1stMarch(fromDate) ? 1 : 0)\n\tconst toYear = toDate.getFullYear() + (after1stMarch(fromDate) ? 
0 : -1)\n\n\tconst leapYearsCount = Array.from(\n\t\t{ length: toYear - fromYear + 1 },\n\t\t(_, i) => fromYear + i,\n\t).filter(isLeapYear).length\n\n\treturn (differenceInDays - leapYearsCount) / 365\n}\n","import { EvaluationFunction } from '..'\nimport { ASTNode, Unit } from '../AST/types'\nimport { convertToDate, convertToString } from '../date'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { defaultNode, mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport { parseUnit } from '../units'\n\nexport type DuréeNode = {\n\texplanation: {\n\t\tdepuis: ASTNode\n\t\t\"jusqu'à\": ASTNode\n\t}\n\tunit: Unit\n\tnodeKind: 'durée'\n}\nconst evaluate: EvaluationFunction<'durée'> = function (node) {\n\tconst from = this.evaluateNode(node.explanation.depuis)\n\tconst to = this.evaluateNode(node.explanation[\"jusqu'à\"])\n\tlet nodeValue\n\tif ([from, to].some(({ nodeValue }) => nodeValue === undefined)) {\n\t\tnodeValue = undefined\n\t} else {\n\t\tconst [fromDate, toDate] = ([from.nodeValue, to.nodeValue] as string[]).map(\n\t\t\tconvertToDate,\n\t\t)\n\t\tnodeValue = Math.max(\n\t\t\t0,\n\t\t\tMath.round(\n\t\t\t\t(toDate.getTime() - fromDate.getTime()) / (1000 * 60 * 60 * 24),\n\t\t\t),\n\t\t)\n\t}\n\treturn {\n\t\t...node,\n\t\tmissingVariables: mergeAllMissing([from, to]),\n\t\tnodeValue,\n\t\texplanation: {\n\t\t\tdepuis: from,\n\t\t\t\"jusqu'à\": to,\n\t\t},\n\t}\n}\n\nconst today = defaultNode(convertToString(new Date()))\nexport default (v, context) => {\n\tconst explanation = {\n\t\tdepuis: parse(v.depuis ?? today, context),\n\t\t\"jusqu'à\": parse(v[\"jusqu'à\"] ?? today, context),\n\t}\n\treturn {\n\t\texplanation,\n\t\tunit: parseUnit('jour'),\n\t\tnodeKind: 'durée',\n\t} as DuréeNode\n}\n\nregisterEvaluationFunction('durée', evaluate)\n","import { EvaluationFunction } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport parse from '../parse'\nimport { createParseInlinedMecanism } from './inlineMecanism'\n\nexport type EstNonDéfiniNode = {\n\texplanation: ASTNode\n\tnodeKind: 'est non défini'\n}\n\nexport function parseEstNonDéfini(v, context) {\n\tconst explanation = parse(v, context)\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'est non défini',\n\t} as EstNonDéfiniNode\n}\nparseEstNonDéfini.nom = 'est non défini'\n\nconst parseEstDéfini = createParseInlinedMecanism(\n\t'est défini',\n\t{\n\t\tvaleur: {},\n\t},\n\t{\n\t\t'=': [{ 'est non défini': 'valeur' }, 'non'],\n\t},\n)\n\nconst parseEstApplicable = createParseInlinedMecanism(\n\t'est applicable',\n\t{\n\t\tvaleur: {},\n\t},\n\t{\n\t\t'=': [{ 'est non applicable': 'valeur' }, 'non'],\n\t},\n)\n\nexport { parseEstDéfini, parseEstApplicable }\n\nconst evaluate: EvaluationFunction<'est non défini'> = function (node) {\n\tconst valeur = this.evaluateNode(node.explanation)\n\tlet nodeValue: boolean | undefined | null = false\n\tif (valeur.nodeValue === undefined) {\n\t\tnodeValue = true\n\t}\n\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\tmissingVariables: valeur.missingVariables,\n\t\texplanation: valeur,\n\t}\n}\nregisterEvaluationFunction('est non défini', evaluate)\n","import { EvaluationFunction } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { mergeMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport { evaluateDisablingParent } from '../rule'\n\nexport type EstNonApplicableNode = {\n\texplanation: 
ASTNode\n\tnodeKind: 'est non applicable'\n}\nexport function parseEstNonApplicable(v, context) {\n\tconst explanation = parse(v, context)\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'est non applicable' as const,\n\t} as EstNonApplicableNode\n}\nparseEstNonApplicable.nom = 'est non applicable'\n\nconst isNotApplicable = (node: ASTNode) => {\n\treturn {\n\t\tnodeKind: 'est non applicable' as const,\n\t\texplanation: node,\n\t}\n}\n\nconst evaluateIsNotApplicable: EvaluationFunction<'est non applicable'> =\n\tfunction (node) {\n\t\tconst valeur = node.explanation\n\n\t\tif (\n\t\t\tthis.context.nodesTypes.get(valeur)?.isNullable === false &&\n\t\t\tvaleur.nodeKind !== 'rule' &&\n\t\t\tvaleur.nodeKind !== 'reference'\n\t\t) {\n\t\t\treturn { ...node, nodeValue: false, missingVariables: {} }\n\t\t}\n\n\t\tif (\n\t\t\tthis.cache.nodes.has(valeur) &&\n\t\t\tthis.cache.nodes.get(valeur) !== undefined\n\t\t) {\n\t\t\treturn {\n\t\t\t\t...node,\n\t\t\t\tnodeValue: this.cache.nodes.get(valeur)?.nodeValue === null,\n\t\t\t\tmissingVariables: this.cache.nodes.get(valeur)?.missingVariables ?? {},\n\t\t\t}\n\t\t}\n\n\t\tswitch (valeur.nodeKind) {\n\t\t\tcase 'rule':\n\t\t\t\tconst { ruleDisabledByItsParent, parentMissingVariables } =\n\t\t\t\t\tevaluateDisablingParent(this, valeur)\n\n\t\t\t\tif (ruleDisabledByItsParent) {\n\t\t\t\t\treturn {\n\t\t\t\t\t\t...node,\n\t\t\t\t\t\tnodeValue: true,\n\t\t\t\t\t\tmissingVariables: parentMissingVariables,\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tconst isNotApplicableEvaluation = this.evaluateNode(\n\t\t\t\t\tisNotApplicable(valeur.explanation.valeur),\n\t\t\t\t)\n\t\t\t\tconst missingVariables = mergeMissing(\n\t\t\t\t\tparentMissingVariables,\n\t\t\t\t\tisNotApplicableEvaluation.missingVariables,\n\t\t\t\t)\n\n\t\t\t\t// If the rule can be disabled thought the situation, it should be listed inside the missing variables\n\t\t\t\tif (\n\t\t\t\t\tisNotApplicableEvaluation.nodeValue === false &&\n\t\t\t\t\tthis.context.nodesTypes.get(\n\t\t\t\t\t\tthis.context.parsedRules[`${valeur.dottedName} . $SITUATION`],\n\t\t\t\t\t)?.isNullable &&\n\t\t\t\t\t!Object.keys(isNotApplicableEvaluation.missingVariables).length\n\t\t\t\t) {\n\t\t\t\t\tmissingVariables[valeur.dottedName] = 1\n\t\t\t\t}\n\n\t\t\t\treturn {\n\t\t\t\t\t...node,\n\t\t\t\t\tnodeValue: isNotApplicableEvaluation.nodeValue,\n\t\t\t\t\tmissingVariables,\n\t\t\t\t}\n\n\t\t\tcase 'reference':\n\t\t\t\treturn {\n\t\t\t\t\t...this.evaluateNode(\n\t\t\t\t\t\tisNotApplicable(this.context.parsedRules[valeur.dottedName!]),\n\t\t\t\t\t),\n\t\t\t\t\t...node,\n\t\t\t\t}\n\n\t\t\tcase 'condition':\n\t\t\t\treturn {\n\t\t\t\t\t...this.evaluateNode({\n\t\t\t\t\t\t...valeur,\n\t\t\t\t\t\texplanation: {\n\t\t\t\t\t\t\tsi: valeur.explanation.si,\n\t\t\t\t\t\t\talors: isNotApplicable(valeur.explanation.alors),\n\t\t\t\t\t\t\tsinon: isNotApplicable(valeur.explanation.sinon),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t\t\t...node,\n\t\t\t\t}\n\t\t}\n\t\tconst evaluatedValeur = this.evaluateNode(valeur)\n\n\t\treturn {\n\t\t\t...node,\n\t\t\tnodeValue:\n\t\t\t\tevaluatedValeur.nodeValue === undefined ?\n\t\t\t\t\tundefined\n\t\t\t\t:\tevaluatedValeur.nodeValue === null,\n\t\t\tmissingVariables: evaluatedValeur.missingVariables,\n\t\t}\n\t}\n\nregisterEvaluationFunction('est non applicable', evaluateIsNotApplicable)\n","// We use a JavaScript implementation of the Brent method to find the root (the\n// \"zero\") of a monotone function. 
There are other methods like the\n// Newton-Raphson method, but they take the derivative of the function as an\n// input, wich in our case is costly to calculate. The Brent method doesn't\n// need to calculate the derivative.\n// An interesting description of the algorithm can be found here:\n// https://blogs.mathworks.com/cleve/2015/10/26/zeroin-part-2-brents-version/\n\n/**\n * Copied from https://gist.github.com/borgar/3317728\n *\n * Searches the interval from lowerLimit to upperLimit\n * for a root (i.e., zero) of the function func with respect to\n * its first argument using Brent's method root-finding algorithm.\n *\n * Translated from zeroin.c in http://www.netlib.org/c/brent.shar.\n *\n * Copyright (c) 2012 Borgar Thorsteinsson \n * MIT License, http://www.opensource.org/licenses/mit-license.php\n *\n * @param func function for which the root is sought.\n * @param lowerLimit the lower point of the interval to be searched.\n * @param upperLimit the upper point of the interval to be searched.\n * @param errorTol the desired accuracy (convergence tolerance).\n * @param maxIter the maximum number of iterations.\n * @param acceptableErrorTol return a result even if errorTol isn't reached after maxIter.\n * @returns an estimate for the root within accuracy.\n *\n */\nexport default function uniroot(\n\tfunc: (x: number) => number,\n\tlowerLimit: number,\n\tupperLimit: number,\n\terrorTol = 0,\n\tmaxIter = 100,\n\tacceptableErrorTol = 0,\n) {\n\tlet a = lowerLimit,\n\t\tb = upperLimit,\n\t\tc = a,\n\t\tfa = func(a),\n\t\tfb = func(b),\n\t\tfc = fa,\n\t\tactualTolerance: number,\n\t\tnewStep: number, // Step at this iteration\n\t\tprevStep: number, // Distance from the last but one to the last approximation\n\t\tp: number, // Interpolation step is calculated in the form p/q; division is delayed until the last moment\n\t\tq: number,\n\t\tfallback: number | undefined = undefined\n\n\twhile (maxIter-- > 0) {\n\t\tprevStep = b - a\n\n\t\tif (Math.abs(fc) < Math.abs(fb)) {\n\t\t\t// Swap data for b to be the best approximation\n\t\t\t;(a = b), (b = c), (c = a)\n\t\t\t;(fa = fb), (fb = fc), (fc = fa)\n\t\t}\n\n\t\tactualTolerance = 1e-15 * Math.abs(b) + errorTol / 2\n\t\tnewStep = (c - b) / 2\n\n\t\tif (Math.abs(newStep) <= actualTolerance || fb === 0) {\n\t\t\treturn b // Acceptable approx. 
is found\n\t\t}\n\n\t\t// Decide if the interpolation can be tried\n\t\tif (Math.abs(prevStep) >= actualTolerance && Math.abs(fa) > Math.abs(fb)) {\n\t\t\t// If prevStep was large enough and was in true direction, Interpolatiom may be tried\n\t\t\tlet t1: number, t2: number\n\t\t\tconst cb = c - b\n\t\t\tif (a === c) {\n\t\t\t\t// If we have only two distinct points linear interpolation can only be applied\n\t\t\t\tt1 = fb / fa\n\t\t\t\tp = cb * t1\n\t\t\t\tq = 1.0 - t1\n\t\t\t} else {\n\t\t\t\t// Quadric inverse interpolation\n\t\t\t\t;(q = fa / fc), (t1 = fb / fc), (t2 = fb / fa)\n\t\t\t\tp = t2 * (cb * q * (q - t1) - (b - a) * (t1 - 1))\n\t\t\t\tq = (q - 1) * (t1 - 1) * (t2 - 1)\n\t\t\t}\n\n\t\t\tif (p > 0) {\n\t\t\t\tq = -q // p was calculated with the opposite sign; make p positive\n\t\t\t} else {\n\t\t\t\tp = -p // and assign possible minus to q\n\t\t\t}\n\n\t\t\tif (\n\t\t\t\tp < 0.75 * cb * q - Math.abs(actualTolerance * q) / 2 &&\n\t\t\t\tp < Math.abs((prevStep * q) / 2)\n\t\t\t) {\n\t\t\t\t// If (b + p / q) falls in [b,c] and isn't too large it is accepted\n\t\t\t\tnewStep = p / q\n\t\t\t}\n\n\t\t\t// If p/q is too large then the bissection procedure can reduce [b,c] range to more extent\n\t\t}\n\n\t\tif (Math.abs(newStep) < actualTolerance) {\n\t\t\t// Adjust the step to be not less than tolerance\n\t\t\tnewStep = newStep > 0 ? actualTolerance : -actualTolerance\n\t\t}\n\n\t\t;(a = b), (fa = fb) // Save the previous approx.\n\t\t;(b += newStep), (fb = func(b)) // Do step to a new approxim.\n\n\t\tif ((fb > 0 && fc > 0) || (fb < 0 && fc < 0)) {\n\t\t\t;(c = a), (fc = fa) // Adjust c for it to have a sign opposite to that of b\n\t\t}\n\t\tif (Math.abs(fb) < errorTol) {\n\t\t\treturn b\n\t\t}\n\t\tif (Math.abs(fb) < acceptableErrorTol) {\n\t\t\tfallback = b\n\t\t}\n\t}\n\treturn fallback\n}\n","import { EvaluationFunction, PublicodesError } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { defaultNode, mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\nimport {\n\tevaluatePlafondUntilActiveTranche,\n\tparseTranches,\n\tTrancheNodes,\n} from './trancheUtils'\n\nexport type GrilleNode = {\n\texplanation: {\n\t\tassiette: ASTNode\n\t\tmultiplicateur: ASTNode\n\t\ttranches: TrancheNodes\n\t}\n\tnodeKind: 'grille'\n}\n\nexport default function parseGrille(v, context): GrilleNode {\n\tconst explanation = {\n\t\tassiette: parse(v.assiette, context),\n\t\tmultiplicateur:\n\t\t\tv.multiplicateur ? 
parse(v.multiplicateur, context) : defaultNode(1),\n\t\ttranches: parseTranches(v.tranches, context),\n\t}\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'grille',\n\t}\n}\n\nconst evaluate: EvaluationFunction<'grille'> = function (node) {\n\tconst evaluate = this.evaluateNode.bind(this)\n\tconst assiette = this.evaluateNode(node.explanation.assiette)\n\tconst multiplicateur = this.evaluateNode(node.explanation.multiplicateur)\n\n\tif (multiplicateur.nodeValue === 0) {\n\t\tthrow new PublicodesError(\n\t\t\t'EvaluationError',\n\t\t\t`Le multiplicateur ne peut pas être nul`,\n\t\t\t{\n\t\t\t\tdottedName: this.cache._meta.evaluationRuleStack[0],\n\t\t\t},\n\t\t)\n\t}\n\n\tconst tranches = evaluatePlafondUntilActiveTranche\n\t\t.call(this, {\n\t\t\tparsedTranches: node.explanation.tranches,\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t})\n\t\t.map((tranche) => {\n\t\t\tif (tranche.isActive === false) {\n\t\t\t\treturn tranche\n\t\t\t}\n\t\t\tconst montant = evaluate(tranche.montant)\n\t\t\treturn {\n\t\t\t\t...tranche,\n\t\t\t\tmontant,\n\t\t\t\tnodeValue: montant.nodeValue,\n\t\t\t\tunit: montant.unit,\n\t\t\t\tmissingVariables: mergeAllMissing([montant, tranche]),\n\t\t\t}\n\t\t})\n\n\tlet activeTranches\n\tconst activeTranche = tranches.find((tranche) => tranche.isActive)\n\tif (activeTranche) {\n\t\tactiveTranches = [activeTranche]\n\t} else if (tranches[tranches.length - 1].isAfterActive === false) {\n\t\tactiveTranches = [{ nodeValue: false }]\n\t} else {\n\t\tactiveTranches = tranches.filter(\n\t\t\t(tranche) => tranche.isActive === undefined,\n\t\t)\n\t}\n\n\tconst nodeValue =\n\t\t!activeTranches[0] ? false\n\t\t: activeTranches[0].isActive === undefined ? undefined\n\t\t: activeTranches[0].nodeValue\n\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\tmissingVariables: mergeAllMissing([\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t\t...activeTranches,\n\t\t]),\n\t\texplanation: {\n\t\t\t...node.explanation,\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t\ttranches,\n\t\t},\n\t\tunit: activeTranches[0]?.unit ?? undefined,\n\t} as any\n}\n\nregisterEvaluationFunction('grille', evaluate)\n","import { EvaluationFunction, PublicodesError } from '..'\nimport { EvaluatedNode, Unit } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { undefinedNumberNode } from '../evaluationUtils'\nimport parse from '../parse'\nimport { Context } from '../parsePublicodes'\nimport { ReferenceNode } from '../reference'\nimport uniroot from '../uniroot'\n\nexport type InversionNode = {\n\texplanation: {\n\t\truleToInverse: string\n\t\tinversionCandidates: Array\n\t\tunit?: Unit\n\t}\n\tnodeKind: 'inversion'\n}\n\n// The user of the inversion mechanism has to define a list of \"inversion\n// candidates\". At runtime, the evaluation function of the mechanism will look\n// at the situation value of these candidates, and use the first one that is\n// defined as its \"goal\" for the inversion\n//\n// The game is then to find an input such as the computed value of the \"goal\" is\n// equal to its situation value, mathematically we search for the zero of the\n// function x → f(x) - goal. 
The iteration logic between each test is\n// implemented in the `uniroot` file.\nexport const evaluateInversion: EvaluationFunction<'inversion'> = function (\n\tnode,\n) {\n\tconst inversionEngine = this.shallowCopy()\n\tif (\n\t\tthis.cache._meta.evaluationRuleStack\n\t\t\t.slice(1)\n\t\t\t.includes(node.explanation.ruleToInverse)\n\t) {\n\t\treturn {\n\t\t\t...undefinedNumberNode,\n\t\t\t...node,\n\t\t}\n\t}\n\tinversionEngine.cache._meta.parentRuleStack = [\n\t\t...this.cache._meta.parentRuleStack,\n\t]\n\tinversionEngine.cache._meta.evaluationRuleStack = [\n\t\t...this.cache._meta.evaluationRuleStack,\n\t]\n\tconst inversionGoal = node.explanation.inversionCandidates.find(\n\t\t(candidate) => {\n\t\t\tif (\n\t\t\t\tthis.cache._meta.evaluationRuleStack.includes(candidate.dottedName!)\n\t\t\t) {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tconst evaluation = inversionEngine.evaluateNode(\n\t\t\t\tinversionEngine.context.parsedRules[\n\t\t\t\t\t`${candidate.dottedName} . $SITUATION`\n\t\t\t\t],\n\t\t\t)\n\t\t\treturn (\n\t\t\t\ttypeof evaluation.nodeValue === 'number' &&\n\t\t\t\t!(candidate.dottedName! in evaluation.missingVariables)\n\t\t\t)\n\t\t},\n\t)\n\n\tif (inversionGoal === undefined) {\n\t\treturn {\n\t\t\t...node,\n\t\t\tnodeValue: undefined,\n\t\t\tmissingVariables: {\n\t\t\t\t...Object.fromEntries(\n\t\t\t\t\tnode.explanation.inversionCandidates.map((candidate) => [\n\t\t\t\t\t\tcandidate.dottedName,\n\t\t\t\t\t\t1,\n\t\t\t\t\t]),\n\t\t\t\t),\n\t\t\t\t[node.explanation.ruleToInverse]: 1,\n\t\t\t},\n\t\t}\n\t}\n\tconst evaluatedInversionGoal = inversionEngine.evaluateNode(inversionGoal)\n\tlet numberOfIteration = 0\n\n\tinversionEngine.setSituation(\n\t\t{\n\t\t\t[inversionGoal.dottedName!]: undefinedNumberNode,\n\t\t},\n\t\t{ keepPreviousSituation: true },\n\t)\n\tinversionEngine.cache.traversedVariablesStack =\n\t\tthis.cache.traversedVariablesStack ? [] : undefined\n\n\tlet lastEvaluation: EvaluatedNode\n\tconst evaluateWithValue = (n: number) => {\n\t\tnumberOfIteration++\n\t\tinversionEngine.setSituation(\n\t\t\t{\n\t\t\t\t[node.explanation.ruleToInverse]: {\n\t\t\t\t\tnodeValue: n,\n\t\t\t\t\tnodeKind: 'constant',\n\t\t\t\t\ttype: 'number',\n\t\t\t\t\tunit: evaluatedInversionGoal.unit,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{ keepPreviousSituation: true },\n\t\t)\n\t\tinversionEngine.cache.traversedVariablesStack =\n\t\t\tthis.cache.traversedVariablesStack ? [] : undefined\n\n\t\tlastEvaluation = inversionEngine.evaluateNode(inversionGoal)\n\t\treturn lastEvaluation\n\t}\n\n\tconst goal = evaluatedInversionGoal.nodeValue as number\n\tlet nodeValue: number | undefined | undefined = undefined\n\n\t// We do some blind attempts here to avoid using the default minimum and\n\t// maximum of +/- 10^8 that are required by the `uniroot` function. For the\n\t// first attempt we use the goal value as a very rough first approximation.\n\t// For the second attempt we do a proportionality coefficient with the result\n\t// from the first try and the goal value. The two attempts are then used in\n\t// the following way:\n\t// - if both results are `undefined` we assume that the inversion is impossible\n\t// because of missing variables\n\t// - otherwise, we calculate the missing variables of the node as the union of\n\t// the missings variables of our two attempts\n\t// - we cache the result of our two attempts so that `uniroot` doesn't\n\t// recompute them\n\tconst x1 = goal\n\tconst y1Node = evaluateWithValue(x1)\n\tconst y1 = y1Node.nodeValue as number\n\tconst coeff = y1 > goal ? 
0.9 : 1.2\n\tconst x2 = y1 !== undefined ? (x1 * goal * coeff) / y1 : 2000\n\tconst y2Node = evaluateWithValue(x2)\n\tconst y2 = y2Node.nodeValue as number\n\n\tconst maxIterations = this.context.inversionMaxIterations ?? 10\n\n\tif (y1 !== undefined || y2 !== undefined) {\n\t\t// The `uniroot` function parameter. It will be called with its `min` and\n\t\t// `max` arguments, so we can use our cached nodes if the function is called\n\t\t// with the already computed x1 or x2.\n\t\tconst test = (x: number): number => {\n\t\t\tconst y =\n\t\t\t\tx === x1 ? y1\n\t\t\t\t: x === x2 ? y2\n\t\t\t\t: evaluateWithValue(x).nodeValue\n\t\t\treturn (y as number) - goal\n\t\t}\n\n\t\tconst defaultMin = -1000000\n\t\tconst defaultMax = 100000000\n\t\tconst nearestBelowGoal =\n\t\t\ty2 !== undefined && y2 < goal && (y2 > y1 || y1 > goal) ? x2\n\t\t\t: y1 !== undefined && y1 < goal && (y1 > y2 || y2 > goal) ? x1\n\t\t\t: defaultMin\n\t\tconst nearestAboveGoal =\n\t\t\ty2 !== undefined && y2 > goal && (y2 < y1 || y1 < goal) ? x2\n\t\t\t: y1 !== undefined && y1 > goal && (y1 < y2 || y2 < goal) ? x1\n\t\t\t: defaultMax\n\n\t\tnodeValue = uniroot(\n\t\t\ttest,\n\t\t\tnearestBelowGoal,\n\t\t\tnearestAboveGoal,\n\t\t\t0.1,\n\t\t\tmaxIterations,\n\t\t\t1,\n\t\t)\n\t}\n\n\tif (nodeValue == undefined) {\n\t\tthis.cache.inversionFail = true\n\t}\n\n\t// Uncomment to display the two attempts and their result\n\t// console.table([\n\t// \t{ x: x1, y: y1 },\n\t// \t{ x: x2, y: y2 },\n\t// ])\n\t// console.log('iteration inversion:', numberOfIteration)\n\tif (this.cache.traversedVariablesStack) {\n\t\tconst traversedVariablesStack = this.cache.traversedVariablesStack[0]\n\t\tif (traversedVariablesStack) {\n\t\t\t;(lastEvaluation!.traversedVariables ?? []).forEach((v) =>\n\t\t\t\ttraversedVariablesStack.add(v),\n\t\t\t)\n\t\t}\n\t}\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\tunit: evaluatedInversionGoal.unit,\n\t\texplanation: {\n\t\t\t...node.explanation,\n\t\t\tinversionGoal,\n\t\t\tnumberOfIteration,\n\t\t},\n\t\tmissingVariables: lastEvaluation!.missingVariables,\n\t}\n}\n\nexport const mecanismInversion = (v, context: Context) => {\n\tlet avec = typeof v === 'object' && 'avec' in v ? 
v.avec : v\n\tif (v === null) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t\"Il manque les règles avec laquelle effectuer le calcul d'inversion dans le mécanisme `inversion numérique`\",\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\tif (!Array.isArray(avec)) {\n\t\tavec = [avec]\n\t}\n\treturn {\n\t\texplanation: {\n\t\t\truleToInverse: context.dottedName,\n\t\t\tinversionCandidates: avec.map((node) => ({\n\t\t\t\t...parse(node, context),\n\t\t\t})),\n\t\t},\n\t\tnodeKind: 'inversion',\n\t} as InversionNode\n}\n\nregisterEvaluationFunction('inversion', evaluateInversion)\n","import { PublicodesExpression } from '..'\nimport { notApplicableNode } from '../evaluationUtils'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\n\nexport const parseMaximumDe = createParseInlinedMecanismWithArray(\n\t'le maximum de',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) =>\n\t\t(valeur as Array).reduce(\n\t\t\t(acc, value) => ({\n\t\t\t\tcondition: {\n\t\t\t\t\tsi: {\n\t\t\t\t\t\t'est non applicable': '$INTERNAL valeur',\n\t\t\t\t\t},\n\t\t\t\t\talors: '$INTERNAL acc',\n\t\t\t\t\tsinon: {\n\t\t\t\t\t\tcondition: {\n\t\t\t\t\t\t\tsi: {\n\t\t\t\t\t\t\t\tou: [\n\t\t\t\t\t\t\t\t\t{ 'est non applicable': '$INTERNAL acc' },\n\t\t\t\t\t\t\t\t\t{ '>': ['$INTERNAL valeur', '$INTERNAL acc'] },\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\talors: '$INTERNAL valeur',\n\t\t\t\t\t\t\tsinon: '$INTERNAL acc',\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tavec: {\n\t\t\t\t\t'[privé] $INTERNAL valeur': { valeur: value },\n\t\t\t\t\t'[privé] $INTERNAL acc': { valeur: acc },\n\t\t\t\t},\n\t\t\t}),\n\t\t\tnotApplicableNode,\n\t\t),\n)\n\nexport const parseMinimumDe = createParseInlinedMecanismWithArray(\n\t'le minimum de',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) =>\n\t\t(valeur as Array).reduce(\n\t\t\t(acc, value) => ({\n\t\t\t\tcondition: {\n\t\t\t\t\tsi: {\n\t\t\t\t\t\t'est non applicable': '$INTERNAL valeur',\n\t\t\t\t\t},\n\t\t\t\t\talors: '$INTERNAL acc',\n\t\t\t\t\tsinon: {\n\t\t\t\t\t\tcondition: {\n\t\t\t\t\t\t\tsi: {\n\t\t\t\t\t\t\t\tou: [\n\t\t\t\t\t\t\t\t\t{ 'est non applicable': '$INTERNAL acc' },\n\t\t\t\t\t\t\t\t\t{ '<': ['$INTERNAL valeur', '$INTERNAL acc'] },\n\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\talors: '$INTERNAL valeur',\n\t\t\t\t\t\t\tsinon: '$INTERNAL acc',\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tavec: {\n\t\t\t\t\t'[privé] $INTERNAL valeur': { valeur: value },\n\t\t\t\t\t'[privé] $INTERNAL acc': { valeur: acc },\n\t\t\t\t},\n\t\t\t}),\n\t\t\tnotApplicableNode,\n\t\t),\n)\n","import { PublicodesExpression } from '..'\nimport { notApplicableNode } from '../evaluationUtils'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\n\nexport function reduceToSumNodes(\n\tvaleurs: Array,\n): PublicodesExpression {\n\treturn valeurs\n\t\t.reverse()\n\t\t.reduce((acc, value) => ({ '+': [value, acc] }), notApplicableNode)\n}\n\nexport default createParseInlinedMecanismWithArray(\n\t'somme',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) =>\n\t\treduceToSumNodes([...(valeur as Array)]),\n)\n","import { PublicodesExpression } from '..'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\nimport { reduceToSumNodes } from './somme'\n\nexport default createParseInlinedMecanismWithArray(\n\t'moyenne',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) => {\n\t\tconst valeurs = [...(valeur as Array)]\n\n\t\treturn {\n\t\t\t'/': 
[\n\t\t\t\treduceToSumNodes(valeurs),\n\t\t\t\treduceToSumNodes(valeurs.map(oneIfApplicable)),\n\t\t\t],\n\t\t}\n\t},\n)\n\nfunction oneIfApplicable(exp: PublicodesExpression): PublicodesExpression {\n\treturn {\n\t\t'applicable si': { 'est applicable': exp },\n\t\tvaleur: 1,\n\t}\n}\n","import { notApplicableNode } from '../evaluationUtils'\nimport { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'non applicable si',\n\t{\n\t\t'non applicable si': {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: 'non applicable si = non',\n\t\t\talors: 'valeur',\n\t\t\tsinon: notApplicableNode,\n\t\t},\n\t},\n)\n","import { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport parse from '../parse'\nimport { Context } from '../parsePublicodes'\n\nexport type PossibilityNode = {\n\texplanation: Array\n\t'choix obligatoire'?: 'oui' | 'non'\n\tcontext: string\n\tnodeKind: 'une possibilité'\n}\n// TODO : This isn't a real mecanism, cf. #963\nexport const mecanismOnePossibility = (v, context: Context) => {\n\tif (Array.isArray(v)) {\n\t\tv = {\n\t\t\tpossibilités: v,\n\t\t}\n\t}\n\treturn {\n\t\t...v,\n\t\texplanation: v.possibilités.map((p) => parse(p, context)),\n\t\tcontext: context.dottedName,\n\t\tnodeKind: 'une possibilité',\n\t} as PossibilityNode\n}\nregisterEvaluationFunction<'une possibilité'>('une possibilité', (node) => ({\n\t...node,\n\tmissingVariables: { [node.context]: 1 },\n\tnodeValue: undefined,\n}))\n","import { EvaluationFunction, PublicodesError } from '..'\nimport { ASTNode, EvaluatedNode } from '../AST/types'\nimport { convertToDate } from '../date'\nimport { warning } from '../error'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { mergeAllMissing } from '../evaluationUtils'\nimport { convertNodeToUnit } from '../nodeUnits'\nimport parse from '../parse'\nimport { inferUnit, serializeUnit } from '../units'\n\nconst knownOperations = {\n\t'*': [(a, b) => a * b, '×'],\n\t'/': [(a, b) => a / b, '∕'],\n\t'+': [(a, b) => a + b],\n\t'-': [(a, b) => a - b, '−'],\n\t'<': [(a, b) => a < b],\n\t'<=': [(a, b) => a <= b, '≤'],\n\t'>': [(a, b) => a > b],\n\t'>=': [(a, b) => a >= b, '≥'],\n\t'=': [(a, b) => (a ?? false) === (b ?? false)],\n\t'!=': [(a, b) => (a ?? false) !== (b ?? false), '≠'],\n\tet: [(a, b) => (a ?? false) && (b ?? false)],\n\tou: [(a, b) => (a ?? false) || (b ?? 
false)],\n} as const\n\nexport type OperationNode = {\n\tnodeKind: 'operation'\n\texplanation: [ASTNode, ASTNode]\n\toperationKind: keyof typeof knownOperations\n\toperator: string\n}\n\nconst parseOperation = (k, symbol) => (v, context) => {\n\tconst explanation = v.map((node) => parse(node, context))\n\n\treturn {\n\t\t...v,\n\t\tnodeKind: 'operation',\n\t\toperationKind: k,\n\t\toperator: symbol || k,\n\t\texplanation,\n\t} as OperationNode\n}\n\nconst evaluate: EvaluationFunction<'operation'> = function (node) {\n\tlet node1 = this.evaluateNode(node.explanation[0])\n\n\tlet evaluatedNode: EvaluatedNode & OperationNode = {\n\t\t...node,\n\t\tmissingVariables: {},\n\t} as EvaluatedNode & OperationNode\n\n\t// LAZY EVALUATION\n\tif (\n\t\t(node1.nodeValue === null &&\n\t\t\t['<', '>', '<=', '>=', '/', '*', '-', 'et'].includes(\n\t\t\t\tnode.operationKind,\n\t\t\t)) ||\n\t\t(node1.nodeValue === 0 && ['/', '*'].includes(node.operationKind)) ||\n\t\t(node1.nodeValue === false && node.operationKind === 'et') ||\n\t\t(node1.nodeValue === true && node.operationKind === 'ou')\n\t) {\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: node.operationKind === 'et' ? false : node1.nodeValue,\n\t\t\tmissingVariables: node1.missingVariables,\n\t\t}\n\t}\n\n\tlet node2 = this.evaluateNode(node.explanation[1])\n\tevaluatedNode.explanation = [node1, node2]\n\n\tif (node.operationKind === '/' && node2.nodeValue === 0) {\n\t\tthrow new PublicodesError('EvaluationError', `Division by zero`, {\n\t\t\tdottedName: this.cache._meta.evaluationRuleStack[0],\n\t\t})\n\t}\n\n\t// LAZY EVALUATION 2\n\tif (\n\t\t(node2.nodeValue === null &&\n\t\t\t['<', '>', '<=', '>=', '/', '*', 'et'].includes(node.operationKind)) ||\n\t\t(node2.nodeValue === 0 && ['*'].includes(node.operationKind)) ||\n\t\t(node2.nodeValue === false && node.operationKind === 'et') ||\n\t\t(node2.nodeValue === true && node.operationKind === 'ou')\n\t) {\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: node.operationKind === 'et' ? false : node2.nodeValue,\n\t\t\tmissingVariables: node2.missingVariables,\n\t\t}\n\t}\n\n\tevaluatedNode.missingVariables = mergeAllMissing([node1, node2])\n\n\tif (node1.nodeValue === undefined || node2.nodeValue === undefined) {\n\t\tevaluatedNode = {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: undefined,\n\t\t}\n\t}\n\n\tconst isAdditionOrSubstractionWithPercentage =\n\t\t['+', '-'].includes(node.operationKind) &&\n\t\tserializeUnit(node2.unit) === '%' &&\n\t\tserializeUnit(node1.unit) !== '%'\n\n\tif (\n\t\t!('nodeValue' in evaluatedNode) &&\n\t\t!['/', '*'].includes(node.operationKind) &&\n\t\t!isAdditionOrSubstractionWithPercentage\n\t) {\n\t\ttry {\n\t\t\tif (node1.unit && 'unit' in node2) {\n\t\t\t\tnode2 = convertNodeToUnit(node1.unit, node2)\n\t\t\t} else if (node2.unit) {\n\t\t\t\tnode1 = convertNodeToUnit(node2.unit, node1)\n\t\t\t}\n\t\t} catch (e) {\n\t\t\twarning(\n\t\t\t\tthis.context.logger,\n\t\t\t\t`Dans l'expression '${\n\t\t\t\t\tnode.operationKind\n\t\t\t\t}', la partie gauche (unité: ${serializeUnit(\n\t\t\t\t\tnode1.unit,\n\t\t\t\t)}) n'est pas compatible avec la partie droite (unité: ${serializeUnit(\n\t\t\t\t\tnode2.unit,\n\t\t\t\t)})`,\n\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t\te,\n\t\t\t)\n\t\t}\n\t}\n\n\tconst operatorFunction = knownOperations[node.operationKind][0]\n\n\tconst a = node1.nodeValue as string | boolean | null\n\tconst b = node2.nodeValue as string | boolean | null\n\n\tevaluatedNode.nodeValue =\n\t\t'nodeValue' in evaluatedNode ? 
evaluatedNode.nodeValue\n\t\t: (\n\t\t\t['<', '>', '<=', '>=', '*', '/'].includes(node.operationKind) &&\n\t\t\tnode2.nodeValue === null\n\t\t) ?\n\t\t\tnull\n\t\t: (\n\t\t\t[a, b].every(\n\t\t\t\t(value) =>\n\t\t\t\t\ttypeof value === 'string' &&\n\t\t\t\t\tvalue.match?.(/^[\\d]{2}\\/[\\d]{2}\\/[\\d]{4}$/),\n\t\t\t)\n\t\t) ?\n\t\t\t// We convert the date objects to timestamps to support comparison with the \"===\" operator:\n\t\t\t// new Date('2020-01-01') !== new Date('2020-01-01')\n\t\t\toperatorFunction(\n\t\t\t\tconvertToDate(a as string).getTime(),\n\t\t\t\tconvertToDate(b as string).getTime(),\n\t\t\t)\n\t\t:\toperatorFunction(a, b)\n\n\tif (\n\t\tnode.operationKind === '*' &&\n\t\tinferUnit('*', [node1.unit, node2.unit])?.numerators.includes('%')\n\t) {\n\t\tlet unit = inferUnit('*', [node1.unit, node2.unit])\n\t\tconst nodeValue = evaluatedNode.nodeValue\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: typeof nodeValue === 'number' ? nodeValue / 100 : nodeValue,\n\t\t\tunit: inferUnit('*', [unit, { numerators: [], denominators: ['%'] }]),\n\t\t}\n\t}\n\n\t// Addition or substraction of scalar with a percentage is a multiplication\n\t// TODO : this logic should be handle statically by changing sum with percentage into product.\n\t// It can be done when we'll have a sound type/unit inference\n\tif (isAdditionOrSubstractionWithPercentage) {\n\t\tlet unit = inferUnit('*', [node1.unit, node2.unit])\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue:\n\t\t\t\t(\n\t\t\t\t\ttypeof node1.nodeValue === 'number' &&\n\t\t\t\t\ttypeof node2.nodeValue === 'number'\n\t\t\t\t) ?\n\t\t\t\t\tnode1.nodeValue *\n\t\t\t\t\t(1 + (node2.nodeValue / 100) * (node.operationKind === '-' ? -1 : 1))\n\t\t\t\t:\tevaluatedNode.nodeValue,\n\t\t\tunit: inferUnit('*', [unit, { numerators: [], denominators: ['%'] }]),\n\t\t}\n\t}\n\n\tif (\n\t\tnode.operationKind === '*' ||\n\t\tnode.operationKind === '/' ||\n\t\tnode.operationKind === '-' ||\n\t\tnode.operationKind === '+'\n\t) {\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tunit: inferUnit(node.operationKind, [node1.unit, node2.unit]),\n\t\t}\n\t}\n\n\treturn evaluatedNode\n}\n\nregisterEvaluationFunction('operation', evaluate)\n\nconst operationDispatch = Object.fromEntries(\n\tObject.entries(knownOperations).map(([k, [f, symbol]]) => [\n\t\tk,\n\t\tparseOperation(k, symbol),\n\t]),\n)\n\nexport default operationDispatch\n","import { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'par défaut',\n\t{\n\t\t'par défaut': {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: { 'est non défini': 'valeur' },\n\t\t\talors: 'par défaut',\n\t\t\tsinon: 'valeur',\n\t\t},\n\t},\n)\n","import { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'plafond',\n\t{\n\t\tplafond: {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: { et: ['plafond != non', 'valeur > plafond'] },\n\t\t\talors: 'plafond',\n\t\t\tsinon: 'valeur',\n\t\t},\n\t},\n)\n","import { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'plancher',\n\t{\n\t\tplancher: {},\n\t\tvaleur: {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: { et: ['plancher != non', 'valeur < plancher'] },\n\t\t\talors: 'plancher',\n\t\t\tsinon: 'valeur',\n\t\t},\n\t},\n)\n","import { PublicodesExpression } from '..'\nimport { defaultNode } from '../evaluationUtils'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\n\nexport function 
reduceToProduitNodes(\n\tvaleurs: Array,\n): PublicodesExpression {\n\treturn valeurs.reduce((acc, value) => ({ '*': [value, acc] }), defaultNode(1))\n}\n\nexport default createParseInlinedMecanismWithArray(\n\t'produit',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) => ({\n\t\tvaleur: reduceToProduitNodes([...(valeur as Array)]),\n\t\t\"simplifier l'unité\": 'oui',\n\t}),\n)\n","import { EvaluationFunction } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { undefinedNumberNode } from '../evaluationUtils'\nimport parse from '../parse'\nimport { Context } from '../parsePublicodes'\nimport uniroot from '../uniroot'\n\nexport type RésoudreRéférenceCirculaireNode = {\n\texplanation: {\n\t\truleToSolve: string\n\t\tvaleur: ASTNode\n\t}\n\tnodeKind: 'résoudre référence circulaire'\n}\n\nexport const evaluateRésoudreRéférenceCirculaire: EvaluationFunction<'résoudre référence circulaire'> =\n\tfunction (node) {\n\t\tif (\n\t\t\tthis.cache._meta.evaluationRuleStack\n\t\t\t\t.slice(1)\n\t\t\t\t.includes(node.explanation.ruleToSolve)\n\t\t) {\n\t\t\treturn {\n\t\t\t\t...undefinedNumberNode,\n\t\t\t\t...node,\n\t\t\t}\n\t\t}\n\n\t\tlet numberOfIterations = 0\n\t\tconst calculationEngine = this.shallowCopy()\n\t\tcalculationEngine.cache._meta.parentRuleStack = [\n\t\t\t...this.cache._meta.parentRuleStack,\n\t\t]\n\t\tcalculationEngine.cache._meta.evaluationRuleStack = [\n\t\t\t...this.cache._meta.evaluationRuleStack,\n\t\t]\n\t\tconst maxIterations = this.context.inversionMaxIterations ?? 25\n\n\t\tconst evaluateWithValue = (n: number) => {\n\t\t\tnumberOfIterations++\n\t\t\tcalculationEngine.setSituation(\n\t\t\t\t{\n\t\t\t\t\t[node.explanation.ruleToSolve]: {\n\t\t\t\t\t\t...undefinedNumberNode,\n\t\t\t\t\t\tnodeValue: n,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t{ keepPreviousSituation: true },\n\t\t\t)\n\n\t\t\treturn calculationEngine.evaluateNode(node.explanation.valeur)\n\t\t}\n\n\t\tconst inversionFailed = Symbol('inversion failed')\n\n\t\tlet nodeValue: number | undefined | typeof inversionFailed = inversionFailed\n\n\t\tconst x0 = 1\n\t\tlet valeur = evaluateWithValue(x0)\n\t\tconst y0 = valeur.nodeValue as number\n\t\tconst unit = valeur.unit\n\t\tlet i = 0\n\t\tif (y0 !== undefined) {\n\t\t\t// The `uniroot` function parameter. 
It will be called with its `min` and\n\t\t\t// `max` arguments, so we can use our cached nodes if the function is called\n\t\t\t// with the already computed x1 or x2.\n\t\t\tconst test = (x: number): number => {\n\t\t\t\tif (x === x0) {\n\t\t\t\t\treturn y0 - x0\n\t\t\t\t}\n\t\t\t\tvaleur = evaluateWithValue(x)\n\t\t\t\tconst y = valeur.nodeValue\n\t\t\t\ti++\n\t\t\t\treturn (y as number) - x\n\t\t\t}\n\n\t\t\tconst defaultMin = -1_000_000\n\t\t\tconst defaultMax = 100_000_000\n\n\t\t\tnodeValue = uniroot(test, defaultMin, defaultMax, 0.5, maxIterations, 2)\n\t\t}\n\n\t\tif (nodeValue === inversionFailed) {\n\t\t\tnodeValue = undefined\n\t\t\tthis.cache.inversionFail = true\n\t\t}\n\t\tif (nodeValue !== undefined) {\n\t\t\tvaleur = evaluateWithValue(nodeValue)\n\t\t}\n\t\treturn {\n\t\t\t...node,\n\t\t\tunit,\n\t\t\tnodeValue,\n\t\t\texplanation: {\n\t\t\t\t...node.explanation,\n\t\t\t\tvaleur,\n\t\t\t\tnumberOfIterations,\n\t\t\t},\n\t\t\tmissingVariables: valeur.missingVariables,\n\t\t}\n\t}\n\nexport default function parseRésoudreRéférenceCirculaire(v, context: Context) {\n\treturn {\n\t\texplanation: {\n\t\t\truleToSolve: context.dottedName,\n\t\t\tvaleur: parse(v.valeur, context),\n\t\t},\n\t\tnodeKind: 'résoudre référence circulaire',\n\t} as RésoudreRéférenceCirculaireNode\n}\n\nparseRésoudreRéférenceCirculaire.nom = 'résoudre la référence circulaire'\n\nregisterEvaluationFunction(\n\t'résoudre référence circulaire',\n\tevaluateRésoudreRéférenceCirculaire,\n)\n","import { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport parse from '../parse'\nimport { convertUnit, simplifyUnit } from '../units'\n\nexport type SimplifierUnitéNode = {\n\texplanation: ASTNode\n\tnodeKind: 'simplifier unité'\n}\n\nexport default function parseSimplifierUnité(v, context): SimplifierUnitéNode {\n\tconst explanation = parse(v.valeur, context)\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'simplifier unité',\n\t}\n}\n\nparseSimplifierUnité.nom = \"simplifier l'unité\" as const\n\nregisterEvaluationFunction('simplifier unité', function evaluate(node) {\n\tconst valeur = this.evaluateNode(node.explanation)\n\tconst nodeValue = valeur.nodeValue\n\tconst defaultReturn = {\n\t\t...valeur,\n\t\t...node,\n\t\texplanation: valeur,\n\t}\n\tif (nodeValue == null) {\n\t\treturn defaultReturn\n\t}\n\n\tif (!valeur.unit) {\n\t\treturn {\n\t\t\t...defaultReturn,\n\t\t\tunit: valeur.unit,\n\t\t}\n\t}\n\tconst unit = simplifyUnit(valeur.unit)\n\n\treturn {\n\t\t...defaultReturn,\n\t\tnodeValue:\n\t\t\ttypeof nodeValue === 'number' ?\n\t\t\t\tconvertUnit(valeur.unit, unit, nodeValue)\n\t\t\t:\tnodeValue,\n\t\tunit,\n\t}\n})\n","import { createParseInlinedMecanism } from './inlineMecanism'\n\nexport default createParseInlinedMecanism(\n\t'dans la situation',\n\t{\n\t\tvaleur: {},\n\t\t'dans la situation': {},\n\t},\n\t{\n\t\tcondition: {\n\t\t\tsi: { 'est non défini': 'dans la situation' },\n\t\t\talors: 'valeur',\n\t\t\tsinon: 'dans la situation',\n\t\t},\n\t},\n)\n","import { EvaluationFunction, PublicodesError } from '..'\nimport { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { defaultNode, mergeAllMissing } from '../evaluationUtils'\nimport { convertNodeToUnit } from '../nodeUnits'\nimport parse from '../parse'\nimport { parseUnit } from '../units'\nimport {\n\tevaluatePlafondUntilActiveTranche,\n\tparseTranches,\n\tTrancheNodes,\n} from './trancheUtils'\n\nexport type TauxProgressifNode = 
{\n\texplanation: {\n\t\ttranches: TrancheNodes\n\t\tmultiplicateur: ASTNode\n\t\tassiette: ASTNode\n\t}\n\tnodeKind: 'taux progressif'\n}\nexport default function parseTauxProgressif(v, context): TauxProgressifNode {\n\tconst explanation = {\n\t\tassiette: parse(v.assiette, context),\n\t\tmultiplicateur:\n\t\t\tv.multiplicateur ? parse(v.multiplicateur, context) : defaultNode(1),\n\t\ttranches: parseTranches(v.tranches, context),\n\t} as TauxProgressifNode['explanation']\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'taux progressif',\n\t}\n}\n\nconst evaluate: EvaluationFunction<'taux progressif'> = function (node) {\n\tconst evaluate = this.evaluateNode.bind(this)\n\tconst assiette = this.evaluateNode(node.explanation.assiette)\n\tconst multiplicateur = this.evaluateNode(node.explanation.multiplicateur)\n\tif (multiplicateur.nodeValue === 0) {\n\t\tthrow new PublicodesError('EvaluationError', `Division by zero`, {\n\t\t\tdottedName: '',\n\t\t})\n\t}\n\tconst tranches = evaluatePlafondUntilActiveTranche.call(this, {\n\t\tparsedTranches: node.explanation.tranches,\n\t\tassiette,\n\t\tmultiplicateur,\n\t})\n\n\tconst evaluatedNode = {\n\t\t...node,\n\t\texplanation: {\n\t\t\ttranches,\n\t\t\tassiette,\n\t\t\tmultiplicateur,\n\t\t},\n\t\tunit: parseUnit('%'),\n\t}\n\n\tconst lastTranche = tranches[tranches.length - 1]\n\tif (\n\t\ttranches.every(({ isActive }) => isActive === false) ||\n\t\t(lastTranche.isActive && lastTranche.plafond.nodeValue === Infinity)\n\t) {\n\t\tconst taux = convertNodeToUnit(parseUnit('%'), evaluate(lastTranche.taux))\n\t\tconst { nodeValue, missingVariables } = taux\n\t\tlastTranche.taux = taux\n\t\tlastTranche.nodeValue = nodeValue\n\t\tlastTranche.missingVariables = missingVariables\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue,\n\t\t\tmissingVariables,\n\t\t}\n\t}\n\n\tif (\n\t\ttranches.every(({ isActive }) => isActive !== true) ||\n\t\ttypeof assiette.nodeValue !== 'number'\n\t) {\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: undefined,\n\t\t\tmissingVariables: mergeAllMissing(tranches),\n\t\t}\n\t}\n\n\tconst activeTrancheIndex = tranches.findIndex(\n\t\t({ isActive }) => isActive === true,\n\t)\n\tconst activeTranche = tranches[activeTrancheIndex]\n\tactiveTranche.taux = convertNodeToUnit(\n\t\tparseUnit('%'),\n\t\tevaluate(activeTranche.taux),\n\t)\n\n\tconst previousTranche = tranches[activeTrancheIndex - 1]\n\tif (previousTranche) {\n\t\tpreviousTranche.taux = convertNodeToUnit(\n\t\t\tparseUnit('%'),\n\t\t\tevaluate(previousTranche.taux),\n\t\t)\n\t\tpreviousTranche.isActive = true\n\t}\n\tconst previousTaux =\n\t\tpreviousTranche ? 
previousTranche.taux : activeTranche.taux\n\tconst calculationValues = [previousTaux, activeTranche.taux]\n\tif (calculationValues.some((n) => n.nodeValue === undefined)) {\n\t\tactiveTranche.nodeValue = undefined\n\t\treturn {\n\t\t\t...evaluatedNode,\n\t\t\tnodeValue: undefined,\n\t\t\tmissingVariables: mergeAllMissing(calculationValues),\n\t\t}\n\t}\n\n\tconst lowerTaux = previousTaux.nodeValue\n\tconst upperTaux = activeTranche.taux.nodeValue\n\tconst plancher = activeTranche.plancherValue\n\tconst plafond = activeTranche.plafondValue\n\tconst coeff = (upperTaux - lowerTaux) / (plafond - plancher)\n\tconst nodeValue = lowerTaux + (assiette.nodeValue - plancher) * coeff\n\tactiveTranche.nodeValue = nodeValue\n\treturn {\n\t\t...evaluatedNode,\n\t\tnodeValue,\n\t\tmissingVariables: {},\n\t}\n}\n\nregisterEvaluationFunction('taux progressif', evaluate)\n","import { ASTNode, formatValue } from '..'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { mergeAllMissing } from '../evaluationUtils'\nimport parse from '../parse'\n\nconst NAME = 'texte' as const\n\nexport type TexteNode = {\n\texplanation: Array\n\tnodeKind: typeof NAME\n}\n\nexport default function parseTexte(texte: string, context): TexteNode {\n\tconst explanation = [] as TexteNode['explanation']\n\tlet lastIndex = 0\n\tfor (const { 0: expression, index } of texte.matchAll(/{{(.|\\n)*?}}/g)) {\n\t\tconst publicodeExpression = expression.slice(2, -2).trim()\n\t\tconst parsedNode = parse(publicodeExpression, context)\n\t\texplanation.push(texte.substring(lastIndex, index), parsedNode)\n\t\tlastIndex = (index ?? 0) + expression.length\n\t}\n\texplanation.push(texte.slice(lastIndex))\n\treturn {\n\t\tnodeKind: NAME,\n\t\texplanation,\n\t}\n}\nparseTexte.nom = NAME\n\nregisterEvaluationFunction(NAME, function evaluate(node) {\n\tconst explanation = node.explanation.map((element) =>\n\t\ttypeof element === 'string' ? element : this.evaluateNode(element),\n\t)\n\n\treturn {\n\t\t...node,\n\t\texplanation,\n\t\tmissingVariables: mergeAllMissing(\n\t\t\tnode.explanation.filter(\n\t\t\t\t(element) => typeof element !== 'string',\n\t\t\t) as Array,\n\t\t),\n\t\tnodeValue: explanation\n\t\t\t.map((element) =>\n\t\t\t\ttypeof element === 'string' ? 
element : formatValue(element),\n\t\t\t)\n\t\t\t.join(''),\n\t}\n})\n","import { PublicodesExpression } from '..'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\n\nexport default createParseInlinedMecanismWithArray(\n\t'toutes ces conditions',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) =>\n\t\t(valeur as Array).reduce(\n\t\t\t(acc, value) => ({ et: [acc, value] }),\n\t\t\t'oui',\n\t\t),\n)\n","import { PublicodesExpression } from '..'\nimport { createParseInlinedMecanismWithArray } from './inlineMecanism'\n\nexport default createParseInlinedMecanismWithArray(\n\t'une de ces conditions',\n\t{\n\t\tvaleur: { type: 'liste' },\n\t},\n\t({ valeur }) =>\n\t\t(valeur as Array).reduce(\n\t\t\t(acc, value) => ({ ou: [acc, value] }),\n\t\t\t'non',\n\t\t),\n)\n","import { ASTNode, Unit } from '../AST/types'\nimport { warning } from '../error'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport parse from '../parse'\nimport { convertUnit, parseUnit } from '../units'\n\nexport type UnitéNode = {\n\tunit: Unit\n\texplanation: ASTNode\n\tnodeKind: 'unité'\n}\n\nexport default function parseUnité(v, context): UnitéNode {\n\tconst explanation = parse(v.valeur, context)\n\tconst unit = parseUnit(v.unité, context.getUnitKey)\n\n\treturn {\n\t\texplanation,\n\t\tunit,\n\t\tnodeKind: parseUnité.nom,\n\t}\n}\n\nparseUnité.nom = 'unité' as const\n\nregisterEvaluationFunction(parseUnité.nom, function evaluate(node) {\n\tconst valeur = this.evaluateNode(node.explanation)\n\n\tlet nodeValue = valeur.nodeValue\n\tif (nodeValue !== null && 'unit' in node) {\n\t\ttry {\n\t\t\tnodeValue = convertUnit(\n\t\t\t\tvaleur.unit,\n\t\t\t\tnode.unit,\n\t\t\t\tvaleur.nodeValue as number,\n\t\t\t)\n\t\t} catch (e) {\n\t\t\twarning(\n\t\t\t\tthis.context.logger,\n\t\t\t\t\"Erreur lors de la conversion d'unité explicite\",\n\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t\te,\n\t\t\t)\n\t\t}\n\t}\n\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\texplanation: valeur,\n\t\tmissingVariables: valeur.missingVariables,\n\t}\n})\n","import { ASTNode } from '../AST/types'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { mergeMissing } from '../evaluationUtils'\nimport parse from '../parse'\n\nexport type VariableManquanteNode = {\n\tmissingVariable: string\n\texplanation: ASTNode\n\tnodeKind: 'variable manquante'\n}\n\nexport default function parseVariableManquante(\n\tv,\n\tcontext,\n): VariableManquanteNode {\n\treturn {\n\t\tmissingVariable: v['variable manquante'],\n\t\tnodeKind: parseVariableManquante.nom,\n\t\texplanation: parse(v.valeur, context),\n\t}\n}\n\nparseVariableManquante.nom = 'variable manquante' as const\n\nregisterEvaluationFunction(parseVariableManquante.nom, function evaluate(node) {\n\tconst valeur = this.evaluateNode(node.explanation)\n\n\tconst maxMissingScore = Object.values(valeur.missingVariables).reduce(\n\t\t(a, b) => (a > b ? 
a : b),\n\t\t0,\n\t)\n\n\treturn {\n\t\t...node,\n\t\tnodeValue: valeur.nodeValue,\n\t\tunit: valeur.unit,\n\t\texplanation: valeur,\n\t\tmissingVariables: mergeMissing(valeur.missingVariables, {\n\t\t\t[node.missingVariable]: maxMissingScore + 1,\n\t\t}),\n\t}\n})\n","import { EvaluationFunction } from '..'\nimport { ASTNode, EvaluatedNode, Unit } from '../AST/types'\nimport { warning } from '../error'\nimport { registerEvaluationFunction } from '../evaluationFunctions'\nimport { bonus, defaultNode, mergeMissing } from '../evaluationUtils'\nimport { convertNodeToUnit } from '../nodeUnits'\nimport parse from '../parse'\n\nexport type VariationNode = {\n\texplanation: Array<{\n\t\tcondition: ASTNode\n\t\tconsequence: ASTNode\n\t\tsatisfied?: boolean\n\t}>\n\tnodeKind: 'variations'\n}\n\nexport default function parseVariations(v, context): VariationNode {\n\tconst explanation = v.map(({ si, alors, sinon }) =>\n\t\tsinon !== undefined ?\n\t\t\t{ consequence: parse(sinon, context), condition: defaultNode(true) }\n\t\t:\t{ consequence: parse(alors, context), condition: parse(si, context) },\n\t)\n\n\treturn {\n\t\texplanation,\n\t\tnodeKind: 'variations',\n\t}\n}\n\nconst evaluate: EvaluationFunction<'variations'> = function (node) {\n\tconst [nodeValue, explanation, unit] = node.explanation.reduce<\n\t\t[\n\t\t\tEvaluatedNode['nodeValue'],\n\t\t\tVariationNode['explanation'],\n\t\t\tUnit | undefined,\n\t\t\tboolean | undefined,\n\t\t]\n\t>(\n\t\t(\n\t\t\t[evaluation, explanations, unit, previousConditions],\n\t\t\t{ condition, consequence },\n\t\t\ti: number,\n\t\t) => {\n\t\t\tif (previousConditions === true) {\n\t\t\t\treturn [\n\t\t\t\t\tevaluation,\n\t\t\t\t\t[...explanations, { condition, consequence }],\n\t\t\t\t\tunit,\n\t\t\t\t\tpreviousConditions,\n\t\t\t\t]\n\t\t\t}\n\t\t\tconst evaluatedCondition = this.evaluateNode(condition)\n\t\t\tconst currentCondition =\n\t\t\t\tpreviousConditions === undefined ? previousConditions : (\n\t\t\t\t\t!previousConditions &&\n\t\t\t\t\t(evaluatedCondition.nodeValue === undefined ?\n\t\t\t\t\t\tundefined\n\t\t\t\t\t:\tevaluatedCondition.nodeValue !== false &&\n\t\t\t\t\t\tevaluatedCondition.nodeValue !== null)\n\t\t\t\t)\n\n\t\t\tif (currentCondition === false || currentCondition === null) {\n\t\t\t\treturn [\n\t\t\t\t\tevaluation,\n\t\t\t\t\t[...explanations, { condition: evaluatedCondition, consequence }],\n\t\t\t\t\tunit,\n\t\t\t\t\tpreviousConditions,\n\t\t\t\t]\n\t\t\t}\n\t\t\tlet evaluatedConsequence: EvaluatedNode | undefined = undefined\n\t\t\tif (\n\t\t\t\tevaluatedCondition.nodeValue !== false &&\n\t\t\t\tevaluatedCondition.nodeValue !== null\n\t\t\t) {\n\t\t\t\tevaluatedConsequence = this.evaluateNode(consequence!)\n\t\t\t\tif (unit) {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tevaluatedConsequence = convertNodeToUnit(\n\t\t\t\t\t\t\tunit,\n\t\t\t\t\t\t\tevaluatedConsequence!,\n\t\t\t\t\t\t)\n\t\t\t\t\t} catch (e) {\n\t\t\t\t\t\twarning(\n\t\t\t\t\t\t\tthis.context.logger,\n\t\t\t\t\t\t\t`L'unité de la branche n° ${\n\t\t\t\t\t\t\t\ti + 1\n\t\t\t\t\t\t\t} du mécanisme 'variations' n'est pas compatible avec celle d'une branche précédente`,\n\t\t\t\t\t\t\t{ dottedName: this.cache._meta.evaluationRuleStack[0] },\n\t\t\t\t\t\t\te,\n\t\t\t\t\t\t)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn [\n\t\t\t\tcurrentCondition && evaluatedConsequence?.nodeValue,\n\t\t\t\t[\n\t\t\t\t\t...explanations,\n\t\t\t\t\t{\n\t\t\t\t\t\tcondition: evaluatedCondition,\n\t\t\t\t\t\tconsequence: evaluatedConsequence ?? 
consequence,\n\t\t\t\t\t},\n\t\t\t\t],\n\t\t\t\tunit || evaluatedConsequence?.unit,\n\t\t\t\tpreviousConditions || currentCondition,\n\t\t\t]\n\t\t},\n\t\t[null, [], undefined, false],\n\t)\n\n\treturn {\n\t\t...node,\n\t\tnodeValue,\n\t\t...(unit !== undefined && { unit }),\n\t\texplanation,\n\t\tmissingVariables: explanation.reduce(\n\t\t\t(values, { condition, consequence }) =>\n\t\t\t\tmergeMissing(\n\t\t\t\t\tvalues,\n\t\t\t\t\tmergeMissing(\n\t\t\t\t\t\tbonus((condition as EvaluatedNode).missingVariables),\n\t\t\t\t\t\t(\n\t\t\t\t\t\t\t'nodeValue' in condition &&\n\t\t\t\t\t\t\t\tcondition.nodeValue !== false &&\n\t\t\t\t\t\t\t\tcondition.nodeValue !== null\n\t\t\t\t\t\t) ?\n\t\t\t\t\t\t\t(consequence as EvaluatedNode).missingVariables\n\t\t\t\t\t\t:\t{},\n\t\t\t\t\t),\n\t\t\t\t),\n\t\t\t{},\n\t\t),\n\t}\n}\n\nregisterEvaluationFunction('variations', evaluate)\n","import nearley from 'nearley'\nimport { PublicodesError } from './error'\nimport grammar from './grammar.codegen'\n\n// TODO: nearley is currently exported as a CommonJS module which is why we need\n// to destructure the default import instead of directly importing the symbols\n// we need. This is sub-optimal because we our bundler will not tree-shake\n// unused nearley symbols.\n// https://github.com/kach/nearley/issues/535\nconst { Grammar, Parser } = nearley\n\nconst compiledGrammar = Grammar.fromCompiled(grammar)\n\nconst parser = new Parser(compiledGrammar)\nconst initialState = parser.save()\n\ntype BinaryOp =\n\t| { '+': [ExprAST, ExprAST] }\n\t| { '-': [ExprAST, ExprAST] }\n\t| { '*': [ExprAST, ExprAST] }\n\t| { '/': [ExprAST, ExprAST] }\n\t| { '>': [ExprAST, ExprAST] }\n\t| { '<': [ExprAST, ExprAST] }\n\t| { '>=': [ExprAST, ExprAST] }\n\t| { '<=': [ExprAST, ExprAST] }\n\t| { '=': [ExprAST, ExprAST] }\n\t| { '!=': [ExprAST, ExprAST] }\n\ntype UnaryOp = { '-': [{ value: 0 }, ExprAST] }\n\n/** AST of a publicodes expression. */\nexport type ExprAST =\n\t| BinaryOp\n\t| UnaryOp\n\t| { variable: string }\n\t| { constant: { type: 'number'; nodeValue: number }; unité?: string }\n\t| { constant: { type: 'boolean'; nodeValue: boolean } }\n\t| { constant: { type: 'string' | 'date'; nodeValue: string } }\n\n/**\n * Parse a publicodes expression into an JSON object representing the AST.\n *\n * The parsing is done with the [nearley](https://nearley.js.org/) parser based\n * on the [grammar](https://github.com/betagouv/publicodes/blob/290c079d1f22baed77a96bdd834ef6cb44fa8da9/packages/core/src/grammar.ne)\n *\n * @param rawNode The expression to parse\n * @param dottedName The dottedName of the rule being parsed\n *\n * @returns The parsing result as a JSON object\n *\n * @throws A `SyntaxError` if the expression is invalid\n * @throws A `PublicodesInternalError` if the parser is unable to parse the expression\n *\n * @example\n * ```ts\n * parseExpression('20.3 * nombre', 'foo . bar')\n * // returns { \"*\": [ { constant: { type: \"number\", nodeValue: 20.3 } }, { variable:\"nombre\" } ] }\n * ```\n */\nexport function parseExpression(rawNode: string, dottedName: string): ExprAST {\n\t/* Strings correspond to infix expressions.\n\t * Indeed, a subset of expressions like simple arithmetic operations `3 + (quantity * 2)` or like `salary [month]` are more explicit that their prefixed counterparts.\n\t * This function makes them prefixed operations. 
*/\n\tconst singleLineExpression = (rawNode + '').replace(/\\s*\\n\\s*/g, ' ').trim()\n\n\ttry {\n\t\tparser.restore(initialState)\n\t\tconst [parseResult] = parser.feed(singleLineExpression).results\n\n\t\tif (parseResult == null) {\n\t\t\tthrow new PublicodesError(\n\t\t\t\t'InternalError',\n\t\t\t\t`\nUn problème est survenu lors du parsing de l'expression \\`${singleLineExpression}\\` :\n\n\tle parseur Nearley n'a pas réussi à parser l'expression.\n`,\n\t\t\t\t{ dottedName },\n\t\t\t)\n\t\t}\n\t\treturn parseResult\n\t} catch (e) {\n\t\tif (e instanceof PublicodesError) {\n\t\t\tthrow e\n\t\t}\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`\\`${singleLineExpression}\\` n'est pas une expression valide`,\n\t\t\t{ dottedName },\n\t\t\te,\n\t\t)\n\t}\n}\n","/* Those are postprocessor functions for the Nearley grammar.ne.\nThe advantage of putting them here is to get prettier's JS formatting, since Nealrey doesn't support it https://github.com/kach/nearley/issues/310 */\nimport { normalizeDateString } from './date.ts'\n\nexport const binaryOperation = ([A, , operator, , B]) => ({\n\t[operator.value.toLowerCase()]: [A, B],\n})\n\nexport const unaryOperation = ([operator, , A]) => ({\n\t[operator]: [number([{ value: '0' }]), A],\n})\n\nexport const variable = (arg) => {\n\treturn {\n\t\tvariable: arg.value,\n\t}\n}\n\nexport const JSONObject = ([{ value }]) => {\n\tconsole.log(value)\n\t// TODO\n}\nexport const number = ([{ value }]) => ({\n\tconstant: {\n\t\ttype: 'number',\n\t\tnodeValue: parseFloat(value),\n\t},\n})\n\nexport const numberWithUnit = (value) => ({\n\t...number(value),\n\tunité: value[2].value,\n})\n\nexport const date = ([{ value }]) => {\n\treturn {\n\t\tconstant: {\n\t\t\ttype: 'date',\n\t\t\tnodeValue: normalizeDateString(value),\n\t\t},\n\t}\n}\n\nexport const boolean = ([{ value }]) => ({\n\tconstant: {\n\t\ttype: 'boolean',\n\t\tnodeValue: value === 'oui',\n\t},\n})\n\nexport const string = ([{ value }]) => ({\n\tconstant: {\n\t\ttype: 'string',\n\t\tnodeValue: value.slice(1, -1),\n\t},\n})\n","// Generated automatically by nearley, version 2.20.1\n// http://github.com/Hardmath123/nearley\nfunction id(x) { return x[0]; }\n\nimport {\n string, date, variable, binaryOperation, unaryOperation, boolean, number, numberWithUnit, JSONObject\n} from './grammarFunctions.js';\nimport moo from \"moo\";\n\n\nconst dateRegexp = `(?:(?:0?[1-9]|[12][0-9]|3[01])\\\\/)?(?:0?[1-9]|1[012])\\\\/\\\\d{4}`\nconst letter = '[a-zA-Z\\u00C0-\\u017F€$%°]';\nconst letterOrNumber = '[a-zA-Z\\u00C0-\\u017F0-9\\',]';\nconst word = `${letter}(?:[-']?${letterOrNumber}+)*`;\n\nconst numberRegExp = '-?(?:[1-9][0-9]+|[0-9])(?:\\\\.[0-9]+)?';\nconst lexer = moo.compile({\n '(': '(',\n ')': ')',\n '[': '[',\n ']': ']',\n comparison: ['>','<','>=','<=','=','!='],\n date: new RegExp(dateRegexp),\n\tboolean: ['oui','non'],\n number: new RegExp(numberRegExp),\n word: new RegExp(word),\n string: [/'.*'/, /\".*\"/],\n parentSelector: \"^\",\n JSONObject: /{.*}/,\n additionSubstraction: /[\\+-]/,\n multiplicationDivision: ['*','/'],\n dot: ' . 
',\n \".\": '.',\n space: { match: /[\\s]+/, lineBreaks: true },\n});\n\nconst join = (args) => ({value: (args.map(x => x && x.value).join(\"\"))})\nconst flattenJoin = (args) => join(args.flat())\nlet Lexer = lexer;\nlet ParserRules = [\n {\"name\": \"main\", \"symbols\": [\"Comparison\"], \"postprocess\": id},\n {\"name\": \"main\", \"symbols\": [\"NumericValue\"], \"postprocess\": id},\n {\"name\": \"main\", \"symbols\": [\"Date\"], \"postprocess\": id},\n {\"name\": \"main\", \"symbols\": [\"NonNumericTerminal\"], \"postprocess\": id},\n {\"name\": \"main\", \"symbols\": [\"JSONObject\"], \"postprocess\": id},\n {\"name\": \"NumericValue\", \"symbols\": [\"AdditionSubstraction\"], \"postprocess\": id},\n {\"name\": \"NumericValue\", \"symbols\": [\"Negation\"], \"postprocess\": id},\n {\"name\": \"NumericTerminal\", \"symbols\": [\"Variable\"], \"postprocess\": id},\n {\"name\": \"NumericTerminal\", \"symbols\": [\"number\"], \"postprocess\": id},\n {\"name\": \"Negation\", \"symbols\": [{\"literal\":\"-\"}, (lexer.has(\"space\") ? {type: \"space\"} : space), \"Parentheses\"], \"postprocess\": unaryOperation},\n {\"name\": \"Parentheses\", \"symbols\": [{\"literal\":\"(\"}, (lexer.has(\"space\") ? {type: \"space\"} : space), \"NumericValue\", (lexer.has(\"space\") ? {type: \"space\"} : space), {\"literal\":\")\"}], \"postprocess\": ([,,e]) => e},\n {\"name\": \"Parentheses\", \"symbols\": [{\"literal\":\"(\"}, \"NumericValue\", {\"literal\":\")\"}], \"postprocess\": ([,e]) => e},\n {\"name\": \"Parentheses\", \"symbols\": [\"NumericTerminal\"], \"postprocess\": id},\n {\"name\": \"Date\", \"symbols\": [\"Variable\"], \"postprocess\": id},\n {\"name\": \"Date\", \"symbols\": [(lexer.has(\"date\") ? {type: \"date\"} : date)], \"postprocess\": date},\n {\"name\": \"Comparison\", \"symbols\": [\"Comparable\", (lexer.has(\"space\") ? {type: \"space\"} : space), (lexer.has(\"comparison\") ? {type: \"comparison\"} : comparison), (lexer.has(\"space\") ? {type: \"space\"} : space), \"Comparable\"], \"postprocess\": binaryOperation},\n {\"name\": \"Comparison\", \"symbols\": [\"Date\", (lexer.has(\"space\") ? {type: \"space\"} : space), (lexer.has(\"comparison\") ? {type: \"comparison\"} : comparison), (lexer.has(\"space\") ? {type: \"space\"} : space), \"Date\"], \"postprocess\": binaryOperation},\n {\"name\": \"Comparable$subexpression$1\", \"symbols\": [\"AdditionSubstraction\"]},\n {\"name\": \"Comparable$subexpression$1\", \"symbols\": [\"NonNumericTerminal\"]},\n {\"name\": \"Comparable\", \"symbols\": [\"Comparable$subexpression$1\"], \"postprocess\": ([[e]]) => e},\n {\"name\": \"NonNumericTerminal\", \"symbols\": [(lexer.has(\"boolean\") ? {type: \"boolean\"} : boolean)], \"postprocess\": boolean},\n {\"name\": \"NonNumericTerminal\", \"symbols\": [(lexer.has(\"string\") ? {type: \"string\"} : string)], \"postprocess\": string},\n {\"name\": \"Variable\", \"symbols\": [\"VariableWithoutParentSelector\"], \"postprocess\": ([x]) => variable(x)},\n {\"name\": \"Variable$ebnf$1\", \"symbols\": []},\n {\"name\": \"Variable$ebnf$1$subexpression$1\", \"symbols\": [(lexer.has(\"parentSelector\") ? {type: \"parentSelector\"} : parentSelector), (lexer.has(\"dot\") ? 
{type: \"dot\"} : dot)], \"postprocess\": join},\n {\"name\": \"Variable$ebnf$1\", \"symbols\": [\"Variable$ebnf$1\", \"Variable$ebnf$1$subexpression$1\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"Variable\", \"symbols\": [\"Variable$ebnf$1\", \"VariableWithoutParentSelector\"], \"postprocess\": x => variable(flattenJoin(x))},\n {\"name\": \"VariableWithoutParentSelector$ebnf$1\", \"symbols\": []},\n {\"name\": \"VariableWithoutParentSelector$ebnf$1$subexpression$1\", \"symbols\": [(lexer.has(\"dot\") ? {type: \"dot\"} : dot), \"Words\"], \"postprocess\": join},\n {\"name\": \"VariableWithoutParentSelector$ebnf$1\", \"symbols\": [\"VariableWithoutParentSelector$ebnf$1\", \"VariableWithoutParentSelector$ebnf$1$subexpression$1\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"VariableWithoutParentSelector\", \"symbols\": [\"Words\", \"VariableWithoutParentSelector$ebnf$1\"], \"postprocess\": x => flattenJoin(x)},\n {\"name\": \"Words$ebnf$1$subexpression$1$ebnf$1\", \"symbols\": [(lexer.has(\"space\") ? {type: \"space\"} : space)], \"postprocess\": id},\n {\"name\": \"Words$ebnf$1$subexpression$1$ebnf$1\", \"symbols\": [], \"postprocess\": function(d) {return null;}},\n {\"name\": \"Words$ebnf$1$subexpression$1\", \"symbols\": [\"Words$ebnf$1$subexpression$1$ebnf$1\", \"WordOrNumber\"], \"postprocess\": join},\n {\"name\": \"Words$ebnf$1\", \"symbols\": [\"Words$ebnf$1$subexpression$1\"]},\n {\"name\": \"Words$ebnf$1$subexpression$2$ebnf$1\", \"symbols\": [(lexer.has(\"space\") ? {type: \"space\"} : space)], \"postprocess\": id},\n {\"name\": \"Words$ebnf$1$subexpression$2$ebnf$1\", \"symbols\": [], \"postprocess\": function(d) {return null;}},\n {\"name\": \"Words$ebnf$1$subexpression$2\", \"symbols\": [\"Words$ebnf$1$subexpression$2$ebnf$1\", \"WordOrNumber\"], \"postprocess\": join},\n {\"name\": \"Words$ebnf$1\", \"symbols\": [\"Words$ebnf$1\", \"Words$ebnf$1$subexpression$2\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"Words\", \"symbols\": [\"WordOrKeyword\", \"Words$ebnf$1\"], \"postprocess\": flattenJoin},\n {\"name\": \"Words\", \"symbols\": [(lexer.has(\"word\") ? {type: \"word\"} : word)], \"postprocess\": id},\n {\"name\": \"WordOrKeyword\", \"symbols\": [(lexer.has(\"word\") ? {type: \"word\"} : word)], \"postprocess\": id},\n {\"name\": \"WordOrKeyword\", \"symbols\": [(lexer.has(\"boolean\") ? {type: \"boolean\"} : boolean)], \"postprocess\": id},\n {\"name\": \"WordOrNumber\", \"symbols\": [\"WordOrKeyword\"], \"postprocess\": id},\n {\"name\": \"WordOrNumber\", \"symbols\": [(lexer.has(\"number\") ? 
{type: \"number\"} : number)], \"postprocess\": id},\n {\"name\": \"Unit$ebnf$1\", \"symbols\": []},\n {\"name\": \"Unit$ebnf$1\", \"symbols\": [\"Unit$ebnf$1\", \"UnitNumerator\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"Unit$ebnf$2\", \"symbols\": []},\n {\"name\": \"Unit$ebnf$2\", \"symbols\": [\"Unit$ebnf$2\", \"UnitDenominator\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"Unit\", \"symbols\": [\"Unit$ebnf$1\", \"Unit$ebnf$2\"], \"postprocess\": flattenJoin},\n {\"name\": \"UnitNumerator\", \"symbols\": [\"Words\"], \"postprocess\": id},\n {\"name\": \"UnitNumerator\", \"symbols\": [{\"literal\":\".\"}, \"UnitNumerator\"], \"postprocess\": join},\n {\"name\": \"UnitDenominator$ebnf$1\", \"symbols\": []},\n {\"name\": \"UnitDenominator$ebnf$1$subexpression$1\", \"symbols\": [(lexer.has(\"space\") ? {type: \"space\"} : space)]},\n {\"name\": \"UnitDenominator$ebnf$1\", \"symbols\": [\"UnitDenominator$ebnf$1\", \"UnitDenominator$ebnf$1$subexpression$1\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"UnitDenominator$ebnf$2\", \"symbols\": [\"UnitNumerator\"]},\n {\"name\": \"UnitDenominator$ebnf$2\", \"symbols\": [\"UnitDenominator$ebnf$2\", \"UnitNumerator\"], \"postprocess\": function arrpush(d) {return d[0].concat([d[1]]);}},\n {\"name\": \"UnitDenominator\", \"symbols\": [\"UnitDenominator$ebnf$1\", {\"literal\":\"/\"}, \"UnitDenominator$ebnf$2\"], \"postprocess\": flattenJoin},\n {\"name\": \"AdditionSubstraction\", \"symbols\": [\"AdditionSubstraction\", (lexer.has(\"space\") ? {type: \"space\"} : space), (lexer.has(\"additionSubstraction\") ? {type: \"additionSubstraction\"} : additionSubstraction), (lexer.has(\"space\") ? {type: \"space\"} : space), \"MultiplicationDivision\"], \"postprocess\": binaryOperation},\n {\"name\": \"AdditionSubstraction\", \"symbols\": [\"MultiplicationDivision\"], \"postprocess\": id},\n {\"name\": \"MultiplicationDivision\", \"symbols\": [\"MultiplicationDivision\", (lexer.has(\"space\") ? {type: \"space\"} : space), (lexer.has(\"multiplicationDivision\") ? {type: \"multiplicationDivision\"} : multiplicationDivision), (lexer.has(\"space\") ? {type: \"space\"} : space), \"Parentheses\"], \"postprocess\": binaryOperation},\n {\"name\": \"MultiplicationDivision\", \"symbols\": [\"Parentheses\"], \"postprocess\": id},\n {\"name\": \"number\", \"symbols\": [(lexer.has(\"number\") ? {type: \"number\"} : number)], \"postprocess\": number},\n {\"name\": \"number$ebnf$1$subexpression$1\", \"symbols\": [(lexer.has(\"space\") ? {type: \"space\"} : space)]},\n {\"name\": \"number$ebnf$1\", \"symbols\": [\"number$ebnf$1$subexpression$1\"], \"postprocess\": id},\n {\"name\": \"number$ebnf$1\", \"symbols\": [], \"postprocess\": function(d) {return null;}},\n {\"name\": \"number\", \"symbols\": [(lexer.has(\"number\") ? {type: \"number\"} : number), \"number$ebnf$1\", \"Unit\"], \"postprocess\": numberWithUnit},\n {\"name\": \"JSONObject\", \"symbols\": [(lexer.has(\"JSONObject\") ? 
{type: \"JSONObject\"} : JSONObject)], \"postprocess\": JSONObject}\n];\nlet ParserStart = \"main\";\nexport default { Lexer, ParserRules, ParserStart };\n","import { ASTNode } from './AST/types'\nimport { PublicodesError } from './error'\nimport abattement from './mecanisms/abattement'\nimport applicable from './mecanisms/applicable'\nimport arrondi from './mecanisms/arrondi'\nimport avec from './mecanisms/avec'\nimport barème from './mecanisms/barème'\nimport condition from './mecanisms/condition'\nimport contexte from './mecanisms/contexte'\nimport durée from './mecanisms/durée'\nimport {\n\tparseEstApplicable,\n\tparseEstDéfini,\n\tparseEstNonDéfini,\n} from './mecanisms/est'\nimport { parseEstNonApplicable } from './mecanisms/est-non-applicable'\nimport grille from './mecanisms/grille'\nimport { mecanismInversion } from './mecanisms/inversion'\nimport { parseMaximumDe, parseMinimumDe } from './mecanisms/max-min'\nimport moyenne from './mecanisms/moyenne'\nimport nonApplicable from './mecanisms/non-applicable'\nimport { mecanismOnePossibility } from './mecanisms/one-possibility'\nimport operations from './mecanisms/operation'\nimport parDéfaut from './mecanisms/parDéfaut'\nimport plafond from './mecanisms/plafond'\nimport plancher from './mecanisms/plancher'\nimport produit from './mecanisms/product'\nimport résoudreRéférenceCirculaire from './mecanisms/résoudre-référence-circulaire'\nimport simplifierUnité from './mecanisms/simplifier-unité'\nimport situation from './mecanisms/situation'\nimport somme from './mecanisms/somme'\nimport tauxProgressif from './mecanisms/tauxProgressif'\nimport texte from './mecanisms/texte'\nimport toutesCesConditions from './mecanisms/toutes-ces-conditions'\nimport uneDeCesConditions from './mecanisms/une-de-ces-conditions'\nimport unité from './mecanisms/unité'\nimport variableManquante from './mecanisms/variablesManquantes'\nimport variations from './mecanisms/variations'\nimport { parseExpression } from './parseExpression'\nimport { Context } from './parsePublicodes'\nimport parseReference from './reference'\n\nexport default function parse(rawNode, context: Context): ASTNode {\n\tif (rawNode == undefined) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`\n\tUne des valeurs de la formule est vide.\n\tVérifiez que tous les champs à droite des deux points sont remplis`,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\tif (typeof rawNode === 'boolean') {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`\nLes valeurs booléennes true / false ne sont acceptées.\nUtilisez leur contrepartie française : 'oui' / 'non'`,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\tconst node =\n\t\ttypeof rawNode === 'object' ? 
rawNode : (\n\t\t\tparseExpression(rawNode, context.dottedName)\n\t\t)\n\tif ('nodeKind' in node) {\n\t\treturn node\n\t}\n\n\treturn {\n\t\t...parseChainedMecanisms(node, context),\n\t\trawNode,\n\t}\n}\n\nfunction parseMecanism(rawNode, context: Context) {\n\tif (Array.isArray(rawNode)) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`\nIl manque le nom du mécanisme pour le tableau : [${rawNode\n\t\t\t\t.map((x) => `'${x}'`)\n\t\t\t\t.join(', ')}]\nLes mécanisme possibles sont : 'somme', 'le maximum de', 'le minimum de', 'toutes ces conditions', 'une de ces conditions'.\n\t\t`,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\n\tconst keys = Object.keys(rawNode)\n\tif (keys.length > 1) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`\nLes mécanismes suivants se situent au même niveau : ${keys\n\t\t\t\t.map((x) => `'${x}'`)\n\t\t\t\t.join(', ')}\nCela vient probablement d'une erreur dans l'indentation\n\t`,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\tif (keys.length === 0) {\n\t\treturn { nodeKind: 'constant', nodeValue: undefined }\n\t}\n\n\tconst mecanismName = keys[0]\n\tconst values = rawNode[mecanismName]\n\tconst parseFn = parseFunctions[mecanismName]\n\n\tif (!parseFn) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t`Le mécanisme \"${mecanismName}\" est inconnu.\n\nVérifiez qu'il n'y ait pas d'erreur dans l'orthographe du nom.`,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n\ttry {\n\t\treturn parseFn(values, context)\n\t} catch (e) {\n\t\tif (e instanceof PublicodesError) {\n\t\t\tthrow e\n\t\t}\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\tmecanismName ?\n\t\t\t\t`➡️ Dans le mécanisme ${mecanismName}\n${e.message}`\n\t\t\t:\te.message,\n\t\t\t{ dottedName: context.dottedName },\n\t\t)\n\t}\n}\n\n// Chainable mecanisme in their composition order (first one is applyied first)\nconst chainableMecanisms = [\n\tcontexte,\n\tvariableManquante,\n\tavec,\n\tapplicable,\n\tnonApplicable,\n\tarrondi,\n\tunité,\n\tsimplifierUnité,\n\tplancher,\n\tplafond,\n\tparDéfaut,\n\tsituation,\n\trésoudreRéférenceCirculaire,\n\tabattement,\n]\n\nfunction parseChainedMecanisms(rawNode, context: Context): ASTNode {\n\tconst parseFn = chainableMecanisms.find((fn) => fn.nom in rawNode)\n\tif (!parseFn) {\n\t\treturn parseMecanism(rawNode, context)\n\t}\n\tconst { [parseFn.nom]: param, ...valeur } = rawNode\n\n\treturn parseMecanism(\n\t\t{\n\t\t\t[parseFn.nom]: {\n\t\t\t\tvaleur,\n\t\t\t\t[parseFn.nom]: param,\n\t\t\t},\n\t\t},\n\t\tcontext,\n\t)\n}\n\nconst parseFunctions = {\n\t...operations,\n\t...chainableMecanisms.reduce((acc, fn) => ({ [fn.nom]: fn, ...acc }), {}),\n\t'inversion numérique': mecanismInversion,\n\t'le maximum de': parseMaximumDe,\n\t'le minimum de': parseMinimumDe,\n\t'taux progressif': tauxProgressif,\n\t'toutes ces conditions': toutesCesConditions,\n\t'est non défini': parseEstNonDéfini,\n\t'est non applicable': parseEstNonApplicable,\n\t'est applicable': parseEstApplicable,\n\t'est défini': parseEstDéfini,\n\t'une de ces conditions': uneDeCesConditions,\n\t'une possibilité': mecanismOnePossibility,\n\tcondition,\n\tbarème,\n\tdurée,\n\tgrille,\n\tmultiplication: produit,\n\tproduit,\n\tsomme,\n\tmoyenne,\n\t[texte.nom]: texte,\n\tvaleur: parse,\n\tvariable: parseReference,\n\tvariations,\n\tconstant: (v) => ({\n\t\ttype: v.type,\n\t\t// In the documentation we want to display constants defined in the source\n\t\t// with their full precision. 
This is especially useful for percentages like\n\t\t// APEC 0,036 %.\n\t\tfullPrecision: true,\n\t\tisNullable: v.nodeValue == null,\n\t\tmissingVariables: {},\n\t\tnodeValue: v.nodeValue,\n\t\tnodeKind: 'constant',\n\t}),\n}\n\nexport const mecanismKeys = Object.keys(parseFunctions)\n","import { PublicodesError, PublicodesInternalError } from './error'\nimport { registerEvaluationFunction } from './evaluationFunctions'\nimport { Context } from './parsePublicodes'\n\nexport type ReferenceNode = {\n\tnodeKind: 'reference'\n\tname: string\n\tcontextDottedName: string\n\tdottedName?: string\n\ttitle?: string\n\tacronym?: string\n}\n\nexport default function parseReference(\n\tv: string,\n\tcontext: Context,\n): ReferenceNode {\n\tif (!context.dottedName) {\n\t\tthrow new PublicodesError(\n\t\t\t'InternalError',\n\t\t\t\"Une référence ne peut pas exister en dehors d'une règle (`context.dottedName` est vide)\",\n\t\t\t{\n\t\t\t\tdottedName: v,\n\t\t\t},\n\t\t)\n\t}\n\tif (!v) {\n\t\tthrow new PublicodesError(\n\t\t\t'SyntaxError',\n\t\t\t'Une référence ne peut pas être vide',\n\t\t\t{\n\t\t\t\tdottedName: context.dottedName,\n\t\t\t},\n\t\t)\n\t}\n\n\treturn {\n\t\tnodeKind: 'reference',\n\t\tname: v,\n\t\tcontextDottedName: context.dottedName,\n\t}\n}\n\nregisterEvaluationFunction('reference', function evaluateReference(node) {\n\tif (!node.dottedName) {\n\t\tthrow new PublicodesInternalError(node)\n\t}\n\tconst explanation = this.evaluateNode(\n\t\tthis.context.parsedRules[node.dottedName],\n\t)\n\tdelete explanation.sourceMap\n\treturn {\n\t\t...explanation,\n\t\t...node,\n\t}\n})\n","import { ParsedRules, PublicodesError } from '.'\nimport { makeASTTransformer, makeASTVisitor } from './AST'\nimport { ASTNode } from './AST/types'\nimport { PublicodesInternalError } from './error'\nimport { defaultNode, notApplicableNode } from './evaluationUtils'\nimport parse from './parse'\nimport { Context, ReferencesMaps, RulesReplacements } from './parsePublicodes'\nimport { Rule, RuleNode } from './rule'\nimport { updateReferencesMapsFromReferenceNode } from './ruleUtils'\nimport { mergeWithArray } from './utils'\n\nexport type ReplacementRule = {\n\tnodeKind: 'replacementRule'\n\tdefinitionRule: ASTNode<'reference'> & { dottedName: string }\n\treplacedReference: ASTNode<'reference'>\n\tpriority?: number\n\twhiteListedNames: Array>\n\trawNode: any\n\tblackListedNames: Array>\n\tremplacementRuleId: number\n\treplaceByNonApplicable: boolean\n}\n\n// Replacements depend on the context and their evaluation implies using\n// \"variations\" node everywhere there is a reference to the original rule.\n// However for performance reason we want to mutualize identical \"variations\"\n// nodes instead of duplicating them, to avoid wasteful computations.\n//\n// The implementation works by first attributing an identifier for each\n// replacementRule. We then use this identifier to create a cache key that\n// represents the combinaison of applicables replacements for a given reference.\n// For example if replacements 12, 13 et 643 are applicable we use the key\n// `12-13-643` as the cache identifier in the `inlineReplacements` function.\nlet remplacementRuleId = 0\nconst cache = {}\n\nexport function parseReplacements(\n\treplacements: Rule['remplace'],\n\tcontext: Context,\n): Array {\n\tif (!replacements) {\n\t\treturn []\n\t}\n\n\treturn (Array.isArray(replacements) ? 
replacements : [replacements]).map(\n\t\t(replacement) => {\n\t\t\tif (typeof replacement === 'string') {\n\t\t\t\treplacement = { 'références à': replacement }\n\t\t\t}\n\n\t\t\tconst replacedReference = parse(replacement['références à'], context)\n\n\t\t\tconst [whiteListedNames, blackListedNames] = [\n\t\t\t\treplacement.dans ?? [],\n\t\t\t\treplacement['sauf dans'] ?? [],\n\t\t\t]\n\t\t\t\t.map((dottedName) =>\n\t\t\t\t\tArray.isArray(dottedName) ? dottedName : [dottedName],\n\t\t\t\t)\n\t\t\t\t.map((refs) => refs.map((ref) => parse(ref, context)))\n\t\t\tif (\n\t\t\t\treplacement.priorité != null &&\n\t\t\t\t(typeof replacement.priorité !== 'number' || replacement.priorité < 0)\n\t\t\t) {\n\t\t\t\tthrow new PublicodesError(\n\t\t\t\t\t'SyntaxError',\n\t\t\t\t\t'La priorité du remplacement doit être un nombre positif',\n\t\t\t\t\tcontext,\n\t\t\t\t)\n\t\t\t}\n\t\t\treturn {\n\t\t\t\tnodeKind: 'replacementRule',\n\t\t\t\trawNode: replacement,\n\t\t\t\tpriority: replacement.priorité,\n\t\t\t\tdefinitionRule: parse(context.dottedName, context),\n\t\t\t\treplacedReference,\n\t\t\t\treplaceByNonApplicable: false,\n\t\t\t\twhiteListedNames,\n\t\t\t\tblackListedNames,\n\t\t\t\tremplacementRuleId: remplacementRuleId++,\n\t\t\t} as ReplacementRule\n\t\t},\n\t)\n}\n\nexport function parseRendNonApplicable(\n\trules: Rule['rend non applicable'],\n\tcontext: Context,\n): Array {\n\tconst rendNonApplicableReplacements = parseReplacements(rules, context)\n\trendNonApplicableReplacements.forEach(\n\t\t(r) => (r.replaceByNonApplicable = true),\n\t)\n\treturn rendNonApplicableReplacements\n}\n\nexport function getReplacements(\n\tparsedRules: Record,\n): RulesReplacements {\n\tconst ret = {}\n\tfor (const dottedName in parsedRules) {\n\t\tconst rule = parsedRules[dottedName]\n\t\tfor (const replacement of rule.replacements) {\n\t\t\tif (!replacement.replacedReference.dottedName) {\n\t\t\t\tthrow new PublicodesInternalError(replacement)\n\t\t\t}\n\t\t\tconst key = replacement.replacedReference.dottedName\n\t\t\tret[key] = [...(ret[key] ?? []), replacement]\n\t\t}\n\t}\n\n\treturn ret\n}\n\nexport function inlineReplacements<\n\tNewNames extends string,\n\tPreviousNames extends string,\n>({\n\tnewRules,\n\tpreviousReplacements,\n\tparsedRules,\n\treferencesMaps,\n}: {\n\tnewRules: ParsedRules\n\tpreviousReplacements: RulesReplacements\n\tparsedRules: ParsedRules\n\treferencesMaps: ReferencesMaps\n}): [\n\tParsedRules,\n\tRulesReplacements,\n] {\n\ttype Names = NewNames | PreviousNames\n\tconst newReplacements = getReplacements(newRules) as RulesReplacements\n\n\tconst ruleNamesWithNewReplacements = new Set([]) as Set\n\tfor (const replacedReference in newReplacements) {\n\t\tconst rulesThatUse =\n\t\t\treferencesMaps.rulesThatUse.get(replacedReference as NewNames | Names) ??\n\t\t\t[]\n\n\t\tfor (const value of rulesThatUse) {\n\t\t\truleNamesWithNewReplacements.add(value)\n\t\t}\n\t}\n\n\tconst newRuleNamesWithPreviousReplacements: Set = new Set(\n\t\t(Object.keys(newRules) as Array).filter((ruleName) =>\n\t\t\t[...(referencesMaps.referencesIn.get(ruleName) ?? new Set())].some(\n\t\t\t\t(reference) =>\n\t\t\t\t\t(previousReplacements[reference as PreviousNames] ?? 
[]).length,\n\t\t\t),\n\t\t),\n\t)\n\n\tconst replacements = mergeWithArray(previousReplacements, newReplacements)\n\tif (\n\t\t!newRuleNamesWithPreviousReplacements.size &&\n\t\t!ruleNamesWithNewReplacements.size\n\t) {\n\t\treturn [parsedRules, replacements]\n\t}\n\n\tconst inlinePreviousReplacement = makeReplacementInliner(\n\t\tpreviousReplacements,\n\t\treferencesMaps,\n\t)\n\tconst inlineNewReplacement = makeReplacementInliner(\n\t\tnewReplacements,\n\t\treferencesMaps,\n\t)\n\n\tnewRuleNamesWithPreviousReplacements.forEach((name) => {\n\t\tparsedRules[name] = inlinePreviousReplacement(\n\t\t\tparsedRules[name],\n\t\t) as RuleNode