| 1 | +'use strict'; |
| 2 | + |
| 3 | +Object.defineProperty(exports, '__esModule', { value: true }); |
| 4 | + |
| 5 | +var language = require('@codemirror/language'); |
| 6 | +var lr = require('@lezer/lr'); |
| 7 | +var highlight = require('@lezer/highlight'); |
| 8 | + |
| 9 | +// This file was generated by lezer-generator. You probably shouldn't edit it. |
// Token ids assigned by the generated mindmap grammar.
// `indent` is emitted by the `indentation` tokenizer, `newline$1` /
// `newlineEmpty` by the `newlines` tokenizer, and LineText1..5 are the
// depth-cycled line-text tokens chosen by `lineTextType`.
const indent = 30;
const LineText1 = 1;
const LineText2 = 2;
const LineText3 = 3;
const LineText4 = 4;
const LineText5 = 5;
const newline$1 = 31;
const newlineEmpty = 32;

// Line-text tokens indexed by (indent depth % 5).
const LineTextTokens = [LineText1, LineText2, LineText3, LineText4, LineText5];

// Character codes used by the hand-written external tokenizers below.
const newline = 10;
const carriageReturn = 13;
const space = 32;
const tab = 9;
const hash = 35;
const colon = 58;
const parenL = 40;
const parenR = 41;
const bracketL = 91;
const bracketR = 93;
const braceL = 123;
const braceR = 125;
// Tokenizes a line terminator. After consuming it, looks ahead past
// horizontal whitespace: if the rest of the line is blank (another
// newline / CR) or a `#` comment, emits `newlineEmpty`, otherwise the
// regular `newline$1` token. The skipped whitespace is given back via
// the negative end offset so the `indentation` tokenizer still sees it.
const newlines = new lr.ExternalTokenizer((input, _stack) => {
  if (input.next < 0) return; // end of input — nothing to tokenize
  input.advance();
  let skipped = 0;
  while (input.next === space || input.next === tab) {
    input.advance();
    skipped++;
  }
  const blankOrComment =
    input.next === newline ||
    input.next === carriageReturn ||
    input.next === hash;
  input.acceptToken(blankOrComment ? newlineEmpty : newline$1, -skipped);
}, { contextual: true, fallback: true });
// Scans a run of plain line text and emits the depth-specific LineTextN
// token recorded in the parse context. Emits nothing on a `:` or on an
// opening bracket/paren/brace with no preceding text, and ends the token
// just before any bracket character once at least one character was read.
const lineTextType = new lr.ExternalTokenizer((input, stack) => {
  const emit = () => input.acceptToken(stack.context.lineType);
  let consumed = 0;
  for (;;) {
    const ch = input.next;
    if (ch < 0 || ch === newline) {
      // End of line or input: the token covers whatever was read.
      emit();
      return;
    }
    if (ch === colon) return; // `:` lines are handled by other tokens
    if (ch === parenL || ch === bracketL || ch === braceL) {
      // Stop before a shape opener; only emit if there is text behind us.
      if (consumed > 0) emit();
      return;
    }
    if ((ch === parenR || ch === bracketR || ch === braceR) && consumed > 0) {
      emit(); // stop before a shape closer
      return;
    }
    input.advance();
    consumed++;
  }
});
// Emits an `indent` token covering the leading whitespace of a line.
// Fires only directly after a line terminator, and suppresses the token
// for blank lines and `#` comment lines so they do not affect the
// tracked indentation depth.
const indentation = new lr.ExternalTokenizer((input, _stack) => {
  const before = input.peek(-1);
  if (before !== newline && before !== carriageReturn) return;
  while (input.next === space || input.next === tab) input.advance();
  const blankOrComment =
    input.next === newline ||
    input.next === carriageReturn ||
    input.next === hash;
  if (!blankOrComment) input.acceptToken(indent);
});
// Parse context shared by the tokenizers: remembers which LineTextN
// token the current indentation depth maps to (depth 0 → LineText1).
// NOTE(review): lezer contexts are normally treated as immutable values,
// but this single object is mutated in place by `trackIndent` — confirm
// this is intentional and safe for incremental re-parsing.
const indentTracker = { lineType: LineText1 };
// Computes the visual indentation depth of a leading-whitespace string:
// a tab (char code 9) advances to the next multiple-of-8 tab stop, and
// every other character counts as one column.
//
// The parameter was renamed from `space`, which shadowed the module-level
// `space` char-code constant; the tab char code is inlined so the helper
// is self-contained.
const countIndent = (ws) => {
  let depth = 0;
  for (let i = 0; i < ws.length; i++) {
    depth += ws.charCodeAt(i) === 9 /* tab */ ? 8 - (depth % 8) : 1;
  }
  return depth;
};
// Maps an indentation depth to one of the cycling LineTextN tokens.
// Uses the array length instead of a hard-coded 5 so the modulus stays
// in sync if the token list ever changes.
const getLineType = (depth) => {
  return LineTextTokens[depth % LineTextTokens.length];
};
// Context tracker: every time an `indent` token is shifted, re-read its
// text (from token start to the stack position), measure its depth, and
// record the matching LineTextN token on the shared context so
// `lineTextType` emits the right term for the current nesting level.
const trackIndent = new lr.ContextTracker({
  start: indentTracker,
  shift(context, term, stack, input) {
    if (term !== indent) return context;
    const whitespace = input.read(input.pos, stack.pos);
    context.lineType = getLineType(countIndent(whitespace));
    return context;
  },
});
| 105 | + |
// One freshly defined highlighting tag per mindmap node kind, so themes
// can style the diagram name and each nesting depth independently.
const mindmapTags = Object.fromEntries(
  [
    'diagramName',
    'lineText1',
    'lineText2',
    'lineText3',
    'lineText4',
    'lineText5',
  ].map((key) => [key, highlight.Tag.define()]),
);
| 114 | + |
// Attaches the tags above to the grammar's node names for highlighting.
const mindmapHighlighting = highlight.styleTags({
  DiagramName: mindmapTags.diagramName,
  ...Object.fromEntries(
    [1, 2, 3, 4, 5].map((n) => [`LineText${n}`, mindmapTags[`lineText${n}`]]),
  ),
});
| 123 | + |
// This file was generated by lezer-generator. You probably shouldn't edit it.
// Specialized keyword table: maps the literal words `mindmap` and `icon`
// to their specialized term ids (consumed by `specialized` below).
// `__proto__: null` keeps the object free of inherited properties.
const spec_word = {__proto__:null,mindmap:72, icon:76};
// Machine-generated LR parser for the mindmap grammar. The `states`,
// `stateData`, `goto`, and `tokenData` strings are opaque serialized
// automata — regenerate with lezer-generator rather than hand-editing.
const parser = lr.LRParser.deserialize({
  version: 14,
  states: "&fOYQ[OOOOQW'#Cw'#CwQbQ[OOQgQ[OOOOQW'#Cc'#CcOOQW-E6u-E6uOlQ]O'#CdOOQW'#Cx'#CxQgQ[OOO!]Q^O,59OOOQW-E6v-E6vOOQW'#DR'#DRO!vQ[O'#CgO!{Q^O'#CjO!{Q^O'#CmO!{Q^O'#CnO!{Q^O'#CqO!{Q^O'#CrO!{Q^O'#CsO!{Q^O'#CvOOQW'#DU'#DUO#^Q[O1G.jOOQW1G.j1G.jO#hQ[O,59ROOQW'#Ch'#ChOOQW,59U,59UO#mQ[O,59XO#rQ[O,59YO#wQ[O,59]O#|Q[O,59^O$RQ[O,59_O$WQ[O,59bOOQW7+$U7+$UO!{Q^O1G.mOOQW1G.s1G.sOOQW1G.t1G.tOOQW1G.w1G.wOOQW1G.x1G.xOOQW1G.y1G.yOOQW1G.|1G.|O$]Q[O7+$XOOQW<<Gs<<Gs",
  stateData: "$b~OrOSpOS~OoPOtSO~OoPO~OoUO~OnXOmWXoWX~OXbO]_O`^OcaOd`OicO~OPZOQZORZOSZOTZOY[Ow]O~PwOvhO~OPZOQZORZOSZOTZO~OmWioWi~PwO]qO~O_rO~OXsO~OctO~OduO~O]vO~OhwO~OXyO~O",
  goto: "#[yPPPPPPPz}PP!R!UP!RPP!X!XPP!X!X!XPP!X!]!cPPPPPPPP!iPP#URROTVRWRfXRg[TdXeQQORTQQWRRYWQeXQi]Qj^Qk_Ql`QmaQnbQocRxqQfXRpe",
  nodeNames: "⚠ LineText1 LineText2 LineText3 LineText4 LineText5 MindmapDiagram DiagramName Line ) :: IconLine Icon ( ClassLine ] [ Square RoundedSquare )) (( Circle Bang Cloud }} {{ Hexagon",
  maxTerm: 40,
  // Indentation state is carried through the parse by the tracker above.
  context: trackIndent,
  // Bracket pairing metadata for the shape delimiters, keyed by node id.
  nodeProps: [
    ["openedBy", 9,"(",15,"[",19,"((",24,"{{"],
    ["closedBy", 13,")",16,"]",20,"))",25,"}}"]
  ],
  propSources: [mindmapHighlighting],
  skippedNodes: [0],
  repeatNodeCount: 2,
  tokenData: "$b~R]XYz[]zpqzxy!fyz!s![!]#Q!c!}#e!}#O#p#O#P!]#P#Q#u#T#o#e#o#p#z#q#r$V~!PSr~XYz[]zpqz#O#P!]~!`QYZz]^z~!kP]~xy!n~!sOd~~!xPX~yz!{~#QOc~~#TP![!]#W~#]PY~![!]#`~#eOw~~#jQs~!c!}#e#T#o#e~#uO`~~#zO_~~#}P#o#p$Q~$VOi~~$YP#q#r$]~$bOh~",
  // The three hand-written external tokenizers plus the generated one (0).
  tokenizers: [indentation, lineTextType, 0, newlines],
  topRules: {"MindmapDiagram":[0,6]},
  // Term 35 is specialized through the keyword table; -1 means "not a keyword".
  specialized: [{term: 35, get: value => spec_word[value] || -1}],
  tokenPrec: 0
});
| 147 | + |
// CodeMirror language definition for Mermaid mindmap diagrams, built on
// the generated lezer parser above.
const mindmapLanguage = language.LRLanguage.define({
  name: 'mindmap',
  parser, // property shorthand — same value as before
});

/**
 * Creates editor support for the mindmap language.
 *
 * @returns {language.LanguageSupport} A LanguageSupport wrapping
 *   `mindmapLanguage`, ready to be added to a CodeMirror configuration.
 */
function mindmap() {
  return new language.LanguageSupport(mindmapLanguage);
}
| 155 | + |
// Public API: the language-support entry point, the bare LRLanguage for
// composition, and the highlighting tags for theme authors.
exports.mindmap = mindmap;
exports.mindmapLanguage = mindmapLanguage;
exports.mindmapTags = mindmapTags;