From db6d9ba89a76febb0fa08682c83de6ba1abb1f53 Mon Sep 17 00:00:00 2001
From: Alexandr Yanenko
Date: Mon, 11 Sep 2017 16:57:16 +0300
Subject: [PATCH 1/3] Allow for spaces in LaTeX commands

---
 .../papeeria/papeeria_latex_highlight_rules.coffee | 12 ++++++------
 .../ext/papeeria/papeeria_latex_highlight_rules.js | 12 ++++++------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
index 63cfca6e4b3..19292f26dc6 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
@@ -124,7 +124,7 @@ define((require, exports, module) ->
         PARAMETER_TOKENTYPE
         RPAREN_TOKENTYPE
       ]
-      regex: "(\\\\(?:begin))({)(#{text})(})"
+      regex: "(\\\\begin\\s*)({)(#{text})(})"
       next: pushState(pushedState)
     }
@@ -149,7 +149,7 @@ define((require, exports, module) ->
         PARAMETER_TOKENTYPE
         RPAREN_TOKENTYPE
       ]
-      regex: "(\\\\(?:end))({)(#{text})(})"
+      regex: "(\\\\end\\s*)({)(#{text})(})"
       next: popState
     }
@@ -226,7 +226,7 @@ define((require, exports, module) ->
       "#{PARAMETER_TOKENTYPE}.#{ENVIRONMENT_TOKENTYPE}"
       RPAREN_TOKENTYPE
     ]
-    regex: "(\\\\(?:begin|end))({)(\\w*)(})"
+    regex: "(\\\\(?:begin|end)(?:\\s*))({)(\\w*)(})"
   }
@@ -245,7 +245,7 @@ define((require, exports, module) ->
         "#{PARAMETER_TOKENTYPE}.ref"
         RPAREN_TOKENTYPE
       ]
-      regex: "(\\\\(?:ref))({)(\\w*)(})"
+      regex: "(\\\\ref\\s*)({)(\\w*)(})"
     }
     # this rule is for `vref` and `vcite` citations
     {
@@ -255,7 +255,7 @@ define((require, exports, module) ->
         PARAMETER_TOKENTYPE
         RPAREN_TOKENTYPE
       ]
-      regex: "(\\\\(?:v?ref|cite(?:[^{]*)))(?:({)([^}]*)(}))?"
+      regex: "(\\\\(?:v?ref|cite(?:[^{]*))(?:\\s*))(?:({)([^}]*)(}))?"
     }
   ])
   @$rules[START_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [
@@ -269,7 +269,7 @@ define((require, exports, module) ->
         "#{STORAGE_TOKENTYPE}.type"
         RPAREN_TOKENTYPE
       ]
-      regex: "(\\\\(?:documentclass|usepackage|input))(?:(\\[)([^\\]]*)(\\]))?({)([^}]*)(})"
+      regex: "(\\\\(?:documentclass|usepackage|input)(?:\\s*))(?:(\\[)([^\\]]*)(\\]\\s*))?({)([^}]*)(})"
     }
     genericEnvironmentRule
   ])
diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
index c179b44eb27..9f679846b10 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
@@ -130,7 +130,7 @@
     beginRule = function(text, pushedState) {
       return {
        token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE],
-        regex: "(\\\\(?:begin))({)(" + text + ")(})",
+        regex: "(\\\\begin\\s*)({)(" + text + ")(})",
        next: pushState(pushedState)
      };
    };
@@ -145,7 +145,7 @@
     return [
       {
        token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE],
-        regex: "(\\\\(?:end))({)(" + text + ")(})",
+        regex: "(\\\\end\\s*)({)(" + text + ")(})",
        next: popState
      }
    ];
@@ -210,7 +210,7 @@
     listStartRules = [beginRule(LIST_ITEMIZE_REGEX, LIST_ITEMIZE_STATE), beginRule(LIST_ENUMERATE_REGEX, LIST_ENUMERATE_STATE)];
     genericEnvironmentRule = {
      token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + "." + ENVIRONMENT_TOKENTYPE, PARAMETER_TOKENTYPE + "." + ENVIRONMENT_TOKENTYPE, RPAREN_TOKENTYPE],
-      regex: "(\\\\(?:begin|end))({)(\\w*)(})"
+      regex: "(\\\\(?:begin|end)(?:\\s*))({)(\\w*)(})"
     };
     citationsRules = [];
     this.$rules = {};
@@ -220,16 +220,16 @@
     citationsRules = citationsRules.concat([
       {
        token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + ".ref", PARAMETER_TOKENTYPE + ".ref", RPAREN_TOKENTYPE],
-        regex: "(\\\\(?:ref))({)(\\w*)(})"
+        regex: "(\\\\ref\\s*)({)(\\w*)(})"
      }, {
        token: ["" + KEYWORD_TOKENTYPE, LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE],
-        regex: "(\\\\(?:v?ref|cite(?:[^{]*)))(?:({)([^}]*)(}))?"
+        regex: "(\\\\(?:v?ref|cite(?:[^{]*))(?:\\s*))(?:({)([^}]*)(}))?"
      }
     ]);
     this.$rules[START_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [
       {
        token: ["" + KEYWORD_TOKENTYPE, LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE, LPAREN_TOKENTYPE, STORAGE_TOKENTYPE + ".type", RPAREN_TOKENTYPE],
-        regex: "(\\\\(?:documentclass|usepackage|input))(?:(\\[)([^\\]]*)(\\]))?({)([^}]*)(})"
+        regex: "(\\\\(?:documentclass|usepackage|input)(?:\\s*))(?:(\\[)([^\\]]*)(\\]\\s*))?({)([^}]*)(})"
      }, genericEnvironmentRule
     ]);
     this.$rules[LIST_ITEMIZE_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [envEndRule(LIST_ITEMIZE_REGEX), genericEnvironmentRule]);

From 5db263713bb1d9ebecc8ee80db5db528fbbe6150 Mon Sep 17 00:00:00 2001
From: Alexandr Yanenko
Date: Thu, 14 Sep 2017 21:25:00 +0300
Subject: [PATCH 2/3] Tests for whitespaces in commands

---
 .../papeeria_latex_highlight_rules_test.js | 86 ++++++++++++++++++-
 1 file changed, 83 insertions(+), 3 deletions(-)

diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
index 63ab68b6c9e..560461b8ebd 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
@@ -8,6 +8,11 @@ define(function(require, exports, module) {
     var RulesModule = require("ace/ext/papeeria/papeeria_latex_highlight_rules");
     var PapeeriaLatexHighlightRules = RulesModule.PapeeriaLatexHighlightRules;
     var EQUATION_TOKENTYPE = RulesModule.EQUATION_TOKENTYPE;
+    var LIST_TOKENTYPE = RulesModule.LIST_TOKENTYPE;
+    var STORAGE_TOKENTYPE = RulesModule.STORAGE_TOKENTYPE;
+    var KEYWORD_TOKENTYPE = RulesModule.KEYWORD_TOKENTYPE;
+    var LPAREN_TOKENTYPE = RulesModule.LPAREN_TOKENTYPE;
+    var RPAREN_TOKENTYPE = RulesModule.RPAREN_TOKENTYPE;
     var MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE = RulesModule.MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE;
     var MATH_ENVIRONMENT_DISPLAYED_STATE = RulesModule.MATH_ENVIRONMENT_DISPLAYED_STATE;
     var MATH_TEX_INLINE_STATE = RulesModule.MATH_TEX_INLINE_STATE;
@@ -17,9 +22,10 @@ define(function(require, exports, module) {
     var assert = require("ace/test/assertions");
     var isType = function(token, type) {
-        return token.type.split(".").indexOf(type) > -1
+        return token.type.indexOf(type) > -1
     };
+
     var mathConstants = {};
     mathConstants[MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE] = {
         "start" : "\\begin{equation}",
@@ -220,8 +226,82 @@ define(function(require, exports, module) {
         var result = tokenizer.getLineTokens("\\cite{foo, bar} baz", "start");
         var tokens = result.tokens;
-        assert(!isType(tokens[4], "cite"))
-        assert.equal(" baz", tokens[4].value)
+        assert(!isType(tokens[4], "cite"));
+        assert.equal(" baz", tokens[4].value);
+    },
+
+    "test: spaces in 'begin' and 'end'": function() {
+        var tokenizer = new Tokenizer(new PapeeriaLatexHighlightRules().getRules());
+
+        var beginEndParameters = [
+            ["equation", EQUATION_TOKENTYPE],
+            ["equation*", EQUATION_TOKENTYPE],
+            ["itemize", LIST_TOKENTYPE],
+            ["enumerate", LIST_TOKENTYPE]
+        ];
+
+        for (var i = 0; i < beginEndParameters.length; ++i) {
+            var param = beginEndParameters[i][0];
+            var tokentype = beginEndParameters[i][1];
+            var line = "\\begin {" + param + "} hi \\end {" + param + "}";
+            var result = tokenizer.getLineTokens(line, "start").tokens;
+            assert(isType(result[0], STORAGE_TOKENTYPE));
+            assert(isType(result[2], "variable.parameter"));
+            assert(isType(result[4], tokentype));
+            assert(isType(result[5], STORAGE_TOKENTYPE));
+            assert(isType(result[7], "variable.parameter"));
+        }
+
+        var line = "\\begin {someenv} hi \\end {someenv}"
+        var result = tokenizer.getLineTokens(line, "start").tokens;
+        assert(isType(result[0], STORAGE_TOKENTYPE));
+        assert(isType(result[2], "variable.parameter"));
+        assert(isType(result[5], STORAGE_TOKENTYPE));
+        assert(isType(result[7], "variable.parameter"));
+    },
+
+    "test: spaces in 'ref' and 'cite'": function() {
+        var tokenizer = new Tokenizer(new PapeeriaLatexHighlightRules().getRules());
+
+        var result;
+
+        result = tokenizer.getLineTokens("\\ref {smth}", "start").tokens;
+        assert(isType(result[0], STORAGE_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE + ".ref"));
+        assert(isType(result[2], "variable.parameter.ref"));
+
+        result = tokenizer.getLineTokens("\\cite {smth}", "start").tokens;
+        assert(isType(result[0], KEYWORD_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE));
+        assert(isType(result[2], "variable.parameter"));
+
+        result = tokenizer.getLineTokens("\\vref {smth}", "start").tokens;
+        assert(isType(result[0], KEYWORD_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE));
+        assert(isType(result[2], "variable.parameter"));
+    },
+
+    "test: spaces in 'documentclass', 'usepackage' and 'input'": function() {
+        var tokenizer = new Tokenizer(new PapeeriaLatexHighlightRules().getRules());
+
+        var commands = [
+            "documentclass",
+            "usepackage",
+            "input"
+        ];
+
+        for (var i = 0; i < commands.length; ++i) {
+            var command = commands[i];
+            var line = "\\" + command + " [smth] {smth}";
+            var result = tokenizer.getLineTokens(line, "start").tokens;
+            assert(isType(result[0], KEYWORD_TOKENTYPE));
+            assert(isType(result[1], LPAREN_TOKENTYPE));
+            assert(isType(result[2], "variable.parameter"));
+            assert(isType(result[3], RPAREN_TOKENTYPE));
+            assert(isType(result[4], LPAREN_TOKENTYPE));
+            assert(isType(result[5], STORAGE_TOKENTYPE + ".type"));
+            assert(isType(result[6], RPAREN_TOKENTYPE));
+        }
     }
 };
 });

From 0019f5835a59e993a05d713eafc3412630597780 Mon Sep 17 00:00:00 2001
From: Alexandr Yanenko
Date: Thu, 19 Oct 2017 17:34:56 +0300
Subject: [PATCH 3/3] Fix 'cite', 'ref', 'vcite' and 'vref'

---
 .../papeeria_latex_highlight_rules.coffee  | 106 +++++++++---------
 .../papeeria_latex_highlight_rules.js      |  89 +++++++--------
 .../papeeria_latex_highlight_rules_test.js |  63 +++++++----
 3 files changed, 134 insertions(+), 124 deletions(-)

diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
index 19292f26dc6..0f6d64c6cc2 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.coffee
@@ -33,6 +33,18 @@ define((require, exports, module) ->
   MATH_LATEX_INLINE_OPENING_REGEX = "\\\\\\("
   MATH_LATEX_INLINE_CLOSING_REGEX = "\\\\\\)"
+  exports.CITE_STATE = CITE_STATE = "cite"
+  exports.CITE_COMMAND = CITE_COMMAND = "cite"
+
+  exports.REF_STATE = REF_STATE = "ref"
+  exports.REF_COMMAND = REF_COMMAND = "ref"
+
+  exports.VCITE_STATE = VCITE_STATE = "vcite"
+  exports.VCITE_COMMAND = VCITE_COMMAND = "vcite"
+
+  exports.VREF_STATE = VREF_STATE = "vref"
+  exports.VREF_COMMAND = VREF_COMMAND = "vref"
+
   exports.COMMENT_TOKENTYPE = COMMENT_TOKENTYPE = "comment"
   exports.ESCAPE_TOKENTYPE = ESCAPE_TOKENTYPE = "escape"
   exports.LPAREN_TOKENTYPE = LPAREN_TOKENTYPE = "lparen"
@@ -45,6 +57,10 @@ define((require, exports, module) ->
   exports.ERROR_TOKENTYPE = ERROR_TOKENTYPE = "error"
   exports.LABEL_TOKENTYPE = LABEL_TOKENTYPE = "label"
   exports.PARAMETER_TOKENTYPE = PARAMETER_TOKENTYPE = "variable.parameter"
+  exports.CITE_TOKENTYPE = CITE_TOKENTYPE = "cite.parameter"
+  exports.REF_TOKENTYPE = REF_TOKENTYPE = "ref.parameter"
+  exports.VCITE_TOKENTYPE = VCITE_TOKENTYPE = "vcite.parameter"
+  exports.VREF_TOKENTYPE = VREF_TOKENTYPE = "vref.parameter"
   exports.SPECIFIC_TOKEN_FOR_STATE = SPECIFIC_TOKEN_FOR_STATE = {}
   SPECIFIC_TOKEN_FOR_STATE[LIST_ITEMIZE_STATE] = LIST_TOKENTYPE
@@ -55,6 +71,10 @@ define((require, exports, module) ->
   SPECIFIC_TOKEN_FOR_STATE[MATH_TEX_DISPLAYED_STATE] = EQUATION_TOKENTYPE
   SPECIFIC_TOKEN_FOR_STATE[MATH_LATEX_INLINE_STATE] = EQUATION_TOKENTYPE
   SPECIFIC_TOKEN_FOR_STATE[MATH_LATEX_DISPLAYED_STATE] = EQUATION_TOKENTYPE
+  SPECIFIC_TOKEN_FOR_STATE[CITE_STATE] = CITE_TOKENTYPE
+  SPECIFIC_TOKEN_FOR_STATE[REF_STATE] = REF_TOKENTYPE
+  SPECIFIC_TOKEN_FOR_STATE[VCITE_STATE] = VCITE_TOKENTYPE
+  SPECIFIC_TOKEN_FOR_STATE[VREF_STATE] = VREF_TOKENTYPE
   PapeeriaLatexHighlightRules = ->
     ###
@@ -166,6 +186,25 @@ define((require, exports, module) ->
       { token: "string.#{RPAREN_TOKENTYPE}", regex: closingRegex, next: popState }
     ]
+    simpleCommandOpeningRules = (commandName, stateName, stateTokentype) -> [
+      {
+        token: [
+          "#{STORAGE_TOKENTYPE}.type"
+          "#{LPAREN_TOKENTYPE}.#{stateTokentype}"
+        ]
+        next: pushState(stateName)
+        regex: "(\\\\(?:#{commandName})\\s*)({)"
+      }
+    ]
+
+    simpleCommandInStateRules = [
+      {
+        token: RPAREN_TOKENTYPE
+        regex: "(})"
+        next: popState
+      }
+    ]
+
     mathEmptyLineRule = { token: "#{ERROR_TOKENTYPE}.#{EQUATION_TOKENTYPE}", regex : "^\\s*$" }
@@ -189,31 +228,6 @@ define((require, exports, module) ->
       mathStartRule(MATH_LATEX_INLINE_OPENING_REGEX, MATH_LATEX_INLINE_STATE)
     ]
-    ## This class generates rules for a simple command \commandName{commandBody}
-    ## Generated rules:
-    ## -- append given stateName to the list of token types of \commandName and left {
-    ## -- append given instateTokenType to the tokens in the command body
-    ## Rules are appended to the arrays which need to be passed afterwards to other rules
-    ## or to the state map @$rules
-    class SimpleCommandState
-      constructor: (@commandName, @stateName, @instateTokenType) -> {}
-      generateRules: (openingRules, instateRules) =>
-        opening =
-          token: [
-            "#{STORAGE_TOKENTYPE}.type"
-            "#{LPAREN_TOKENTYPE}.#{@stateName}"
-          ]
-          next: pushState(@stateName)
-          regex: "(\\\\(?:#{@commandName}))({)"
-        openingRules.push(opening)
-
-        closing =
-          token: RPAREN_TOKENTYPE
-          regex: "(})"
-          next: popState
-        instateRules.push(closing)
-        basicRules(@instateTokenType).forEach((rule) -> instateRules.push(rule))
-
     listStartRules = [
       beginRule(LIST_ITEMIZE_REGEX, LIST_ITEMIZE_STATE)
       beginRule(LIST_ENUMERATE_REGEX, LIST_ENUMERATE_STATE)
     ]
@@ -229,35 +243,16 @@ define((require, exports, module) ->
       regex: "(\\\\(?:begin|end)(?:\\s*))({)(\\w*)(})"
     }
+    citationsRules = [].concat(
+      simpleCommandOpeningRules(CITE_COMMAND, CITE_STATE, CITE_TOKENTYPE),
+      simpleCommandOpeningRules(VCITE_COMMAND, VCITE_STATE, VCITE_TOKENTYPE),
+      simpleCommandOpeningRules(REF_COMMAND, REF_STATE, REF_TOKENTYPE),
+      simpleCommandOpeningRules(VREF_COMMAND, VREF_STATE, VREF_TOKENTYPE)
+    )
-    citationsRules = []
-    @$rules = {}
-    citeCommandState = new SimpleCommandState("cite", "cite", "#{PARAMETER_TOKENTYPE}.cite")
-    citationsInstateRules = []
-    citeCommandState.generateRules(citationsRules, citationsInstateRules)
+    @$rules = {}
-    citationsRules = citationsRules.concat([
-      {
-        token: [
-          "#{STORAGE_TOKENTYPE}.type"
-          "#{LPAREN_TOKENTYPE}.ref"
-          "#{PARAMETER_TOKENTYPE}.ref"
-          RPAREN_TOKENTYPE
-        ]
-        regex: "(\\\\ref\\s*)({)(\\w*)(})"
-      }
-      # this rule is for `vref` and `vcite` citations
-      {
-        token: [
-          "#{KEYWORD_TOKENTYPE}"
-          LPAREN_TOKENTYPE
-          PARAMETER_TOKENTYPE
-          RPAREN_TOKENTYPE
-        ]
-        regex: "(\\\\(?:v?ref|cite(?:[^{]*))(?:\\s*))(?:({)([^}]*)(}))?"
-      }
-    ])
     @$rules[START_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [
       {
         token: [
@@ -274,6 +269,14 @@ define((require, exports, module) ->
           "#{STORAGE_TOKENTYPE}.type"
           RPAREN_TOKENTYPE
         ]
         regex: "(\\\\(?:documentclass|usepackage|input)(?:\\s*))(?:(\\[)([^\\]]*)(\\]\\s*))?({)([^}]*)(})"
       }
       genericEnvironmentRule
     ])
+    @$rules[CITE_STATE] = simpleCommandInStateRules
+
+    @$rules[REF_STATE] = simpleCommandInStateRules
+
+    @$rules[VCITE_STATE] = simpleCommandInStateRules
+
+    @$rules[VREF_STATE] = simpleCommandInStateRules
+
     @$rules[LIST_ITEMIZE_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [
       envEndRule(LIST_ITEMIZE_REGEX)
       genericEnvironmentRule
     ])
@@ -315,7 +318,6 @@ define((require, exports, module) ->
       # inside `basicRules` function for state of @$rules
       @$rules[state] = @$rules[state].concat(basicRules(SPECIFIC_TOKEN_FOR_STATE[state]))
-    @$rules[citeCommandState.stateName] = citationsInstateRules
     return
   oop.inherits(PapeeriaLatexHighlightRules, TextHighlightRules)
diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
index 9f679846b10..2c1c23fdd33 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules.js
@@ -1,13 +1,12 @@
 // Generated by CoffeeScript 1.12.6
 (function() {
-  var foo,
-    bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
+  var foo;
   foo = null;
   define(function(require, exports, module) {
     "use strict";
-    var COMMENT_TOKENTYPE, ENVIRONMENT_TOKENTYPE, EQUATION_TOKENTYPE, ERROR_TOKENTYPE, ESCAPE_TOKENTYPE, KEYWORD_TOKENTYPE, LABEL_TOKENTYPE, LIST_ENUMERATE_REGEX, LIST_ENUMERATE_STATE, LIST_ITEMIZE_REGEX, LIST_ITEMIZE_STATE, LIST_TOKENTYPE, LPAREN_TOKENTYPE, MATH_ENVIRONMENT_DISPLAYED_NUMBERED_REGEX, MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE, MATH_ENVIRONMENT_DISPLAYED_REGEX, MATH_ENVIRONMENT_DISPLAYED_STATE, MATH_LATEX_DISPLAYED_CLOSING_REGEX, MATH_LATEX_DISPLAYED_OPENING_REGEX, MATH_LATEX_DISPLAYED_STATE, MATH_LATEX_INLINE_CLOSING_REGEX, MATH_LATEX_INLINE_OPENING_REGEX, MATH_LATEX_INLINE_STATE, MATH_TEX_DISPLAYED_CLOSING_REGEX, MATH_TEX_DISPLAYED_OPENING_REGEX, MATH_TEX_DISPLAYED_STATE, MATH_TEX_INLINE_CLOSING_REGEX, MATH_TEX_INLINE_OPENING_REGEX, MATH_TEX_INLINE_STATE, PARAMETER_TOKENTYPE, PapeeriaLatexHighlightRules, RPAREN_TOKENTYPE, SPECIFIC_TOKEN_FOR_STATE, START_STATE, STORAGE_TOKENTYPE, TextHighlightRules, oop;
+    var CITE_COMMAND, CITE_STATE, CITE_TOKENTYPE, COMMENT_TOKENTYPE, ENVIRONMENT_TOKENTYPE, EQUATION_TOKENTYPE, ERROR_TOKENTYPE, ESCAPE_TOKENTYPE, KEYWORD_TOKENTYPE, LABEL_TOKENTYPE, LIST_ENUMERATE_REGEX, LIST_ENUMERATE_STATE, LIST_ITEMIZE_REGEX, LIST_ITEMIZE_STATE, LIST_TOKENTYPE, LPAREN_TOKENTYPE, MATH_ENVIRONMENT_DISPLAYED_NUMBERED_REGEX, MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE, MATH_ENVIRONMENT_DISPLAYED_REGEX, MATH_ENVIRONMENT_DISPLAYED_STATE, MATH_LATEX_DISPLAYED_CLOSING_REGEX, MATH_LATEX_DISPLAYED_OPENING_REGEX, MATH_LATEX_DISPLAYED_STATE, MATH_LATEX_INLINE_CLOSING_REGEX, MATH_LATEX_INLINE_OPENING_REGEX, MATH_LATEX_INLINE_STATE, MATH_TEX_DISPLAYED_CLOSING_REGEX, MATH_TEX_DISPLAYED_OPENING_REGEX, MATH_TEX_DISPLAYED_STATE, MATH_TEX_INLINE_CLOSING_REGEX, MATH_TEX_INLINE_OPENING_REGEX, MATH_TEX_INLINE_STATE, PARAMETER_TOKENTYPE, PapeeriaLatexHighlightRules, REF_COMMAND, REF_STATE, REF_TOKENTYPE, RPAREN_TOKENTYPE, SPECIFIC_TOKEN_FOR_STATE, START_STATE, STORAGE_TOKENTYPE, TextHighlightRules, VCITE_COMMAND, VCITE_STATE, VCITE_TOKENTYPE, VREF_COMMAND, VREF_STATE, VREF_TOKENTYPE, oop;
     oop = require("ace/lib/oop");
     TextHighlightRules = require("ace/mode/text_highlight_rules").TextHighlightRules;
     exports.START_STATE = START_STATE = "start";
@@ -29,6 +28,14 @@
     exports.MATH_LATEX_INLINE_STATE = MATH_LATEX_INLINE_STATE = "math.latex.inline";
     MATH_LATEX_INLINE_OPENING_REGEX = "\\\\\\(";
     MATH_LATEX_INLINE_CLOSING_REGEX = "\\\\\\)";
+    exports.CITE_STATE = CITE_STATE = "cite";
+    exports.CITE_COMMAND = CITE_COMMAND = "cite";
+    exports.REF_STATE = REF_STATE = "ref";
+    exports.REF_COMMAND = REF_COMMAND = "ref";
+    exports.VCITE_STATE = VCITE_STATE = "vcite";
+    exports.VCITE_COMMAND = VCITE_COMMAND = "vcite";
+    exports.VREF_STATE = VREF_STATE = "vref";
+    exports.VREF_COMMAND = VREF_COMMAND = "vref";
     exports.COMMENT_TOKENTYPE = COMMENT_TOKENTYPE = "comment";
     exports.ESCAPE_TOKENTYPE = ESCAPE_TOKENTYPE = "escape";
     exports.LPAREN_TOKENTYPE = LPAREN_TOKENTYPE = "lparen";
@@ -41,6 +48,10 @@
     exports.ERROR_TOKENTYPE = ERROR_TOKENTYPE = "error";
     exports.LABEL_TOKENTYPE = LABEL_TOKENTYPE = "label";
     exports.PARAMETER_TOKENTYPE = PARAMETER_TOKENTYPE = "variable.parameter";
+    exports.CITE_TOKENTYPE = CITE_TOKENTYPE = "cite.parameter";
+    exports.REF_TOKENTYPE = REF_TOKENTYPE = "ref.parameter";
+    exports.VCITE_TOKENTYPE = VCITE_TOKENTYPE = "vcite.parameter";
+    exports.VREF_TOKENTYPE = VREF_TOKENTYPE = "vref.parameter";
     exports.SPECIFIC_TOKEN_FOR_STATE = SPECIFIC_TOKEN_FOR_STATE = {};
     SPECIFIC_TOKEN_FOR_STATE[LIST_ITEMIZE_STATE] = LIST_TOKENTYPE;
     SPECIFIC_TOKEN_FOR_STATE[LIST_ENUMERATE_STATE] = LIST_TOKENTYPE;
@@ -50,6 +61,10 @@
     SPECIFIC_TOKEN_FOR_STATE[MATH_TEX_DISPLAYED_STATE] = EQUATION_TOKENTYPE;
     SPECIFIC_TOKEN_FOR_STATE[MATH_LATEX_INLINE_STATE] = EQUATION_TOKENTYPE;
     SPECIFIC_TOKEN_FOR_STATE[MATH_LATEX_DISPLAYED_STATE] = EQUATION_TOKENTYPE;
+    SPECIFIC_TOKEN_FOR_STATE[CITE_STATE] = CITE_TOKENTYPE;
+    SPECIFIC_TOKEN_FOR_STATE[REF_STATE] = REF_TOKENTYPE;
+    SPECIFIC_TOKEN_FOR_STATE[VCITE_STATE] = VCITE_TOKENTYPE;
+    SPECIFIC_TOKEN_FOR_STATE[VREF_STATE] = VREF_TOKENTYPE;
     PapeeriaLatexHighlightRules = function() {
       /*
@@ -63,7 +78,7 @@
        * @param {pushedState} string
        * @return {function} function, which correctly puts new type(pushedState) on stack
        */
-      var SimpleCommandState, basicRules, beginRule, citationsInstateRules, citationsRules, citeCommandState, envEndRule, equationStartRules, genericEnvironmentRule, listStartRules, mathEmptyLineRule, mathEndRules, mathEnvEndRules, mathLabelRule, mathStartRule, popState, pushState, state;
+      var basicRules, beginRule, citationsRules, envEndRule, equationStartRules, genericEnvironmentRule, listStartRules, mathEmptyLineRule, mathEndRules, mathEnvEndRules, mathLabelRule, mathStartRule, popState, pushState, simpleCommandInStateRules, simpleCommandOpeningRules, state;
      pushState = function(pushedState) {
        return function(currentState, stack) {
          stack.push(pushedState);
@@ -167,6 +182,22 @@
        }
      ];
    };
+      simpleCommandOpeningRules = function(commandName, stateName, stateTokentype) {
+        return [
+          {
+            token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + "." + stateTokentype],
+            next: pushState(stateName),
+            regex: "(\\\\(?:" + commandName + ")\\s*)({)"
+          }
+        ];
+      };
+      simpleCommandInStateRules = [
+        {
+          token: RPAREN_TOKENTYPE,
+          regex: "(})",
+          next: popState
+        }
+      ];
      mathEmptyLineRule = {
        token: ERROR_TOKENTYPE + "." + EQUATION_TOKENTYPE,
        regex: "^\\s*$"
      };
@@ -176,62 +207,23 @@
      mathLabelRule = {
        token: [KEYWORD_TOKENTYPE, LPAREN_TOKENTYPE, LABEL_TOKENTYPE, RPAREN_TOKENTYPE],
        regex: "(\\\\label\\s*)({)([^}]*)(})"
      };
      equationStartRules = [beginRule(MATH_ENVIRONMENT_DISPLAYED_NUMBERED_REGEX, MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE), beginRule(MATH_ENVIRONMENT_DISPLAYED_REGEX, MATH_ENVIRONMENT_DISPLAYED_STATE), mathStartRule(MATH_TEX_DISPLAYED_OPENING_REGEX, MATH_TEX_DISPLAYED_STATE), mathStartRule(MATH_TEX_INLINE_OPENING_REGEX, MATH_TEX_INLINE_STATE), mathStartRule(MATH_LATEX_DISPLAYED_OPENING_REGEX, MATH_LATEX_DISPLAYED_STATE), mathStartRule(MATH_LATEX_INLINE_OPENING_REGEX, MATH_LATEX_INLINE_STATE)];
-      SimpleCommandState = (function() {
-        function SimpleCommandState(commandName, stateName, instateTokenType) {
-          this.commandName = commandName;
-          this.stateName = stateName;
-          this.instateTokenType = instateTokenType;
-          this.generateRules = bind(this.generateRules, this);
-          ({});
-        }
-
-        SimpleCommandState.prototype.generateRules = function(openingRules, instateRules) {
-          var closing, opening;
-          opening = {
-            token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + "." + this.stateName],
-            next: pushState(this.stateName),
-            regex: "(\\\\(?:" + this.commandName + "))({)"
-          };
-          openingRules.push(opening);
-          closing = {
-            token: RPAREN_TOKENTYPE,
-            regex: "(})",
-            next: popState
-          };
-          instateRules.push(closing);
-          return basicRules(this.instateTokenType).forEach(function(rule) {
-            return instateRules.push(rule);
-          });
-        };
-
-        return SimpleCommandState;
-
-      })();
      listStartRules = [beginRule(LIST_ITEMIZE_REGEX, LIST_ITEMIZE_STATE), beginRule(LIST_ENUMERATE_REGEX, LIST_ENUMERATE_STATE)];
      genericEnvironmentRule = {
        token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + "." + ENVIRONMENT_TOKENTYPE, PARAMETER_TOKENTYPE + "." + ENVIRONMENT_TOKENTYPE, RPAREN_TOKENTYPE],
        regex: "(\\\\(?:begin|end)(?:\\s*))({)(\\w*)(})"
      };
-      citationsRules = [];
+      citationsRules = [].concat(simpleCommandOpeningRules(CITE_COMMAND, CITE_STATE, CITE_TOKENTYPE), simpleCommandOpeningRules(VCITE_COMMAND, VCITE_STATE, VCITE_TOKENTYPE), simpleCommandOpeningRules(REF_COMMAND, REF_STATE, REF_TOKENTYPE), simpleCommandOpeningRules(VREF_COMMAND, VREF_STATE, VREF_TOKENTYPE));
      this.$rules = {};
-      citeCommandState = new SimpleCommandState("cite", "cite", PARAMETER_TOKENTYPE + ".cite");
-      citationsInstateRules = [];
-      citeCommandState.generateRules(citationsRules, citationsInstateRules);
-      citationsRules = citationsRules.concat([
-        {
-          token: [STORAGE_TOKENTYPE + ".type", LPAREN_TOKENTYPE + ".ref", PARAMETER_TOKENTYPE + ".ref", RPAREN_TOKENTYPE],
-          regex: "(\\\\ref\\s*)({)(\\w*)(})"
-        }, {
-          token: ["" + KEYWORD_TOKENTYPE, LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE],
-          regex: "(\\\\(?:v?ref|cite(?:[^{]*))(?:\\s*))(?:({)([^}]*)(}))?"
-        }
-      ]);
      this.$rules[START_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [
        {
          token: ["" + KEYWORD_TOKENTYPE, LPAREN_TOKENTYPE, PARAMETER_TOKENTYPE, RPAREN_TOKENTYPE, LPAREN_TOKENTYPE, STORAGE_TOKENTYPE + ".type", RPAREN_TOKENTYPE],
          regex: "(\\\\(?:documentclass|usepackage|input)(?:\\s*))(?:(\\[)([^\\]]*)(\\]\\s*))?({)([^}]*)(})"
        }, genericEnvironmentRule
      ]);
+      this.$rules[CITE_STATE] = simpleCommandInStateRules;
+      this.$rules[REF_STATE] = simpleCommandInStateRules;
+      this.$rules[VCITE_STATE] = simpleCommandInStateRules;
+      this.$rules[VREF_STATE] = simpleCommandInStateRules;
      this.$rules[LIST_ITEMIZE_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [envEndRule(LIST_ITEMIZE_REGEX), genericEnvironmentRule]);
      this.$rules[LIST_ENUMERATE_STATE] = [].concat(equationStartRules, listStartRules, citationsRules, [envEndRule(LIST_ENUMERATE_REGEX), genericEnvironmentRule]);
      this.$rules[MATH_ENVIRONMENT_DISPLAYED_NUMBERED_STATE] = [mathEmptyLineRule, mathLabelRule].concat(mathEnvEndRules(MATH_ENVIRONMENT_DISPLAYED_NUMBERED_REGEX));
@@ -243,7 +235,6 @@
      for (state in this.$rules) {
        this.$rules[state] = this.$rules[state].concat(basicRules(SPECIFIC_TOKEN_FOR_STATE[state]));
      }
-      this.$rules[citeCommandState.stateName] = citationsInstateRules;
    };
    oop.inherits(PapeeriaLatexHighlightRules, TextHighlightRules);
    exports.PapeeriaLatexHighlightRules = PapeeriaLatexHighlightRules;
diff --git a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
index 560461b8ebd..42ef442935c 100644
--- a/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
+++ b/lib/ace/ext/papeeria/papeeria_latex_highlight_rules_test.js
@@ -199,35 +199,47 @@ define(function(require, exports, module) {
         }
     },
-    "test: cite tag empty": function() {
+    "test: cite tags empty": function() {
         var rules = new PapeeriaLatexHighlightRules().getRules();
         var tokenizer = new Tokenizer(rules);
-        var result = tokenizer.getLineTokens("\\cite{}", "start");
-        var tokens = result.tokens;
+        var commands = [RulesModule.CITE_COMMAND, RulesModule.REF_COMMAND, RulesModule.VCITE_COMMAND, RulesModule.VREF_COMMAND];
+        var tokentypes = [RulesModule.CITE_TOKENTYPE, RulesModule.REF_TOKENTYPE, RulesModule.VCITE_TOKENTYPE, RulesModule.VREF_TOKENTYPE];
+        for (var i = 0; i < commands.length; ++i) {
+            var result = tokenizer.getLineTokens("\\" + commands[i] + "{}", "start");
+            var tokens = result.tokens;
-        assert(isType(tokens[1], "cite"))
-        assert(isType(tokens[1], "lparen"))
+            assert(isType(tokens[1], tokentypes[i]));
+            assert(isType(tokens[1], "lparen"));
+        }
     },
-    "test: cite tag filled": function() {
+    "test: cite tags filled": function() {
         var rules = new PapeeriaLatexHighlightRules().getRules();
         var tokenizer = new Tokenizer(rules);
-        var result = tokenizer.getLineTokens("\\cite{foo, bar}", "start");
-        var tokens = result.tokens;
+        var commands = [RulesModule.CITE_COMMAND, RulesModule.REF_COMMAND, RulesModule.VCITE_COMMAND, RulesModule.VREF_COMMAND];
+        var tokentypes = [RulesModule.CITE_TOKENTYPE, RulesModule.REF_TOKENTYPE, RulesModule.VCITE_TOKENTYPE, RulesModule.VREF_TOKENTYPE];
+        for (var i = 0; i < commands.length; ++i) {
+            var result = tokenizer.getLineTokens("\\" + commands[i] + "{foo, bar}", "start");
+            var tokens = result.tokens;
-        assert(isType(tokens[2], "cite"))
-        assert(isType(tokens[2], "parameter"))
-        assert.equal("foo, bar", tokens[2].value)
+            assert(isType(tokens[2], tokentypes[i]));
+            assert(isType(tokens[2], "parameter"));
+            assert.equal("foo, bar", tokens[2].value);
+        }
     },
-    "test: cite tag closes": function() {
+    "test: cite tags closes": function() {
         var rules = new PapeeriaLatexHighlightRules().getRules();
         var tokenizer = new Tokenizer(rules);
-        var result = tokenizer.getLineTokens("\\cite{foo, bar} baz", "start");
-        var tokens = result.tokens;
+        var commands = [RulesModule.CITE_COMMAND, RulesModule.REF_COMMAND, RulesModule.VCITE_COMMAND, RulesModule.VREF_COMMAND];
+        var tokentypes = [RulesModule.CITE_TOKENTYPE, RulesModule.REF_TOKENTYPE, RulesModule.VCITE_TOKENTYPE, RulesModule.VREF_TOKENTYPE];
+        for (var i = 0; i < commands.length; ++i) {
+            var result = tokenizer.getLineTokens("\\" + commands[i] + "{foo, bar} baz", "start");
+            var tokens = result.tokens;
-        assert(!isType(tokens[4], "cite"));
-        assert.equal(" baz", tokens[4].value);
+            assert(!isType(tokens[4], tokentypes[i]));
+            assert.equal(" baz", tokens[4].value);
+        }
     },
     "test: spaces in 'begin' and 'end'": function() {
@@ -268,17 +280,22 @@ define(function(require, exports, module) {
         result = tokenizer.getLineTokens("\\ref {smth}", "start").tokens;
         assert(isType(result[0], STORAGE_TOKENTYPE));
         assert(isType(result[1], LPAREN_TOKENTYPE + ".ref"));
-        assert(isType(result[2], "variable.parameter.ref"));
+        assert(isType(result[2], "ref.parameter"));
         result = tokenizer.getLineTokens("\\cite {smth}", "start").tokens;
-        assert(isType(result[0], KEYWORD_TOKENTYPE));
-        assert(isType(result[1], LPAREN_TOKENTYPE));
-        assert(isType(result[2], "variable.parameter"));
+        assert(isType(result[0], STORAGE_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE + ".cite"));
+        assert(isType(result[2], "cite.parameter"));
         result = tokenizer.getLineTokens("\\vref {smth}", "start").tokens;
-        assert(isType(result[0], KEYWORD_TOKENTYPE));
-        assert(isType(result[1], LPAREN_TOKENTYPE));
-        assert(isType(result[2], "variable.parameter"));
+        assert(isType(result[0], STORAGE_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE + ".vref"));
+        assert(isType(result[2], "vref.parameter"));
+
+        result = tokenizer.getLineTokens("\\vcite {smth}", "start").tokens;
+        assert(isType(result[0], STORAGE_TOKENTYPE));
+        assert(isType(result[1], LPAREN_TOKENTYPE + ".vcite"));
+        assert(isType(result[2], "vcite.parameter"));
     },
     "test: spaces in 'documentclass', 'usepackage' and 'input'": function() {