|
/*
 * Copyright (c) 2015-2021 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Attribution Notice under the terms of the Apache License 2.0
 *
 * This work was created by the collective efforts of the openCypher community.
 * Without limiting the terms of Section 6, any Derivative Work that is not
 * approved by the public consensus process of the openCypher Implementers Group
 * should not be described as “Cypher” (and Cypher® is a registered trademark of
 * Neo4j Inc.) or as "openCypher". Extensions by implementers or prototypes or
 * proposals for change that have been documented or implemented should only be
 * described as "implementation extensions to Cypher" or as "proposed changes to
 * Cypher that are not yet approved by the openCypher community".
 */
| 28 | +package org.opencypher.tools.tck.inspection.coverage |
| 29 | + |
| 30 | +import org.antlr.v4.Tool |
| 31 | +import org.antlr.v4.runtime.CharStreams |
| 32 | +import org.antlr.v4.runtime.CommonTokenStream |
| 33 | +import org.antlr.v4.runtime.InterpreterRuleContext |
| 34 | +import org.antlr.v4.runtime.ParserInterpreter |
| 35 | +import org.antlr.v4.runtime.tree.ParseTree |
| 36 | +import org.antlr.v4.runtime.tree.TerminalNode |
| 37 | +import org.antlr.v4.tool.Grammar |
| 38 | +import org.opencypher.tools.grammar.Antlr4 |
| 39 | +import org.opencypher.tools.tck.api.CypherTCK |
| 40 | +import org.opencypher.tools.tck.api.ExecQuery |
| 41 | +import org.opencypher.tools.tck.api.Execute |
| 42 | + |
| 43 | +import java.io.ByteArrayOutputStream |
| 44 | +import java.nio.charset.StandardCharsets.UTF_8 |
| 45 | +import java.nio.file.Paths |
| 46 | + |
/**
 * Reports how thoroughly the TCK scenarios exercise the openCypher grammar:
 * for every grammar rule it prints the total number of times the rule occurs
 * in parse trees of TCK queries, and the number of occurrences counted at
 * most once per query.
 */
object SyntacticCoverage {
  /** Prefix the ANTLR grammar generator prepends to every openCypher rule name. */
  private val RULES_PREFIX = "oC_"
  /** Classpath resource containing the openCypher grammar definition. */
  private val GRAMMAR_SOURCE = "/cypher.xml"

  /** The openCypher grammar model, parsed from the bundled XML definition. */
  private val oCGrammar = {
    val url = this.getClass.getResource(GRAMMAR_SOURCE)
    org.opencypher.grammar.Grammar.parseXML(Paths.get(url.toURI))
  }

  /**
   * An ANTLR [[Grammar]] derived from the openCypher grammar, used to create
   * lexer and parser interpreters for TCK queries.
   */
  private val grammar: Grammar = {
    // Render the openCypher grammar as ANTLR4 grammar text entirely in memory.
    val grammarString = {
      val out = new ByteArrayOutputStream
      Antlr4.write(oCGrammar, out)
      out.toString(UTF_8.name)
    }
    val tool = new Tool()
    val ast = tool.parseGrammarFromString(grammarString)
    val grammar = tool.createGrammar(ast)
    tool.process(grammar, false)
    //println(grammar.getRuleNames.map(name => s"$name\t${grammar.getRule(name).numberOfAlts}").mkString(System.lineSeparator()))
    grammar
  }

  /**
   * Parses every query of every TCK scenario and prints one line per grammar
   * rule: the rule name, the total use count, and the per-query-distinct use
   * count, in aligned columns.
   */
  def main(args: Array[String]): Unit = {
    val scenarios = CypherTCK.allTckScenariosFromFilesystem("tck/features")
    val queries = scenarios.flatMap(_.steps.collect {
      case Execute(query, ExecQuery, _) => query
    })
    val (ruleUse, ruleUseDistinctByScenario) = collectRulesFromQueries(queries)
    // Keyed by rule index; only the group sizes are used below.
    val ruleCoverage: Map[Int, Seq[Int]] = ruleUse.groupBy(i => i)
    val scenarioDistinctRuleCoverage: Map[Int, Seq[Int]] = ruleUseDistinctByScenario.groupBy(i => i)

    // stripPrefix leaves a name unchanged when it lacks the prefix, whereas the
    // previous substring(RULES_PREFIX.length) would silently mangle such names.
    val rulesNames = grammar.getRuleNames.map(_.stripPrefix(RULES_PREFIX))
    val rulesNamesMaxLength = rulesNames.map(_.length).max
    val lines = rulesNames.indices.map(i =>
      // Left-pad the name to the widest rule name, then two 7-wide count columns.
      s"${rulesNames(i)}${" " * (rulesNamesMaxLength - rulesNames(i).length)}" +
      f"${ruleCoverage.get(i).map(_.size).getOrElse(0)}%7d" +
      f"${scenarioDistinctRuleCoverage.get(i).map(_.size).getOrElse(0)}%7d"
    )
    println(lines.mkString(System.lineSeparator()))
  }

  /**
   * Parses each query with the grammar's `oC_Cypher` start rule and collects
   * the indices of the grammar rules used.
   *
   * @param queries the query texts to parse
   * @return a pair of (all rule uses across all queries,
   *         rule uses counted at most once per query)
   */
  def collectRulesFromQueries(queries: Seq[String]): (Seq[Int], Seq[Int]) = {
    val startRuleIndex = grammar.getRule("oC_Cypher").index
    // Traverse each parse tree once and reuse the per-query rule list for both
    // tallies; the previous version walked every tree twice.
    val rulesPerQuery = queries.map { query =>
      collectRulesFromParseTree(Option(initParser(query).parse(startRuleIndex)))
    }
    (rulesPerQuery.flatten, rulesPerQuery.flatMap(_.distinct))
  }

  /**
   * Recursively collects the grammar rule indices occurring in a parse tree.
   *
   * Pure pass-through nodes of the expression-precedence chain — a rule whose
   * name ends in "Expression", that has a single alternative, no terminal
   * children, and fewer than two rule children — are skipped, so that e.g. a
   * bare literal does not count as a use of every expression level above it.
   *
   * @param tree the (sub)tree to inspect, or [[None]] for a missing tree
   * @return the rule indices found, in pre-order, possibly with duplicates
   */
  def collectRulesFromParseTree(tree: Option[ParseTree]): Seq[Int] = tree match {
    case None => List[Int]()
    case Some(tree) =>
      tree.getPayload match {
        case payload: InterpreterRuleContext =>
          val children = (0 until tree.getChildCount).map(i => Option(tree.getChild(i)))
          val childRules = children.flatMap(child => collectRulesFromParseTree(child))
          // Only the child counts matter, so tally instead of building lists.
          val (numRules, numTerminals) = children.foldLeft((0, 0)) {
            case ((r, t), Some(_: InterpreterRuleContext)) => (r + 1, t)
            case ((r, t), Some(_: TerminalNode)) => (r, t + 1)
            case (counts, _) => counts
          }
          val isToCount = {
            if (numTerminals > 0) {
              // The rule contributes concrete tokens, so it is a real use.
              true
            } else {
              val rule = grammar.getRule(payload.getRuleIndex)
              // Skip single-alternative expression rules that merely forward
              // to (at most) one nested rule.
              if (rule.name.endsWith("Expression") && numRules < 2 && rule.numberOfAlts < 2) {
                false
              } else {
                true
              }
            }
          }
          if (isToCount)
            payload.getRuleIndex +: childRules
          else
            childRules
        case _ => List[Int]()
      }
  }

  /**
   * Creates a [[ParserInterpreter]] for the given query text.
   *
   * Error listeners are removed from both lexer and parser so that parse
   * errors do not clutter the console output; failed parses simply yield
   * fewer counted rules.
   */
  def initParser(query: String): ParserInterpreter = {
    val lexer = grammar.createLexerInterpreter(CharStreams.fromString(query))
    val parser = grammar.createParserInterpreter(new CommonTokenStream(lexer))
    lexer.removeErrorListeners()
    parser.removeErrorListeners()
    parser
  }
}
0 commit comments