 from sphinx.util.docutils import SphinxDirective
 from sphinx.util.nodes import make_id

+def make_snippet(directive, options, content):
+    group_name = options['group']
+
+    # Docutils elements have a `rawsource` attribute that is supposed to be
+    # set to the original ReST source.
+    # Sphinx does the following with it:
+    # - if it's empty, set it to `self.astext()`
+    # - if it matches `self.astext()` when generating the output,
+    #   apply syntax highlighting (which is based on the plain-text content
+    #   and thus discards internal formatting, like references).
+    # To get around this, we set it to this non-empty string:
+    rawsource = 'You should not see this.'
+
+    literal = nodes.literal_block(
+        rawsource,
+        '',
+        # TODO: Use a dedicated CSS class here and for strings.
+        # and add it to the theme too
+        classes=['highlight'],
+    )
+
+    grammar_re = re.compile(
+        """
+        (?P<rule_name>^[a-zA-Z0-9_]+)        # identifier at start of line
+        (?=:)                                # ... followed by a colon
+        |
+        [`](?P<rule_ref>[a-zA-Z0-9_]+)[`]    # identifier in backquotes
+        |
+        (?P<single_quoted>'[^']*')           # string in 'quotes'
+        |
+        (?P<double_quoted>"[^"]*")           # string in "quotes"
+        """,
+        re.VERBOSE,
+    )
+
+    for line in content:
+        last_pos = 0
+        for match in grammar_re.finditer(line):
+            # Handle text between matches
+            if match.start() > last_pos:
+                literal += nodes.Text(line[last_pos:match.start()])
+            last_pos = match.end()
+
+            # Handle matches
+            groupdict = {
+                name: content
+                for name, content in match.groupdict().items()
+                if content is not None
+            }
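+            # Exactly one alternative of the pattern (and therefore exactly
+            # one named group) matches, so `groupdict` ends up with a single
+            # key that identifies the kind of token.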
+            match groupdict:
+                case {'rule_name': name}:
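+                    # A rule definition: show the name in bold and register
+                    # it as a link target.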
+                    name_node = addnodes.literal_strong()
+
+                    # Cargo-culted magic to make `name_node` a link target
+                    # similar to Sphinx `production`.
+                    # This needs to be the same as what Sphinx does
+                    # to avoid breaking existing links.
+                    domain = directive.env.domains['std']
+                    obj_name = f"{group_name}:{name}"
+                    prefix = f'grammar-token-{group_name}'
+                    node_id = make_id(directive.env, directive.state.document, prefix, name)
+                    name_node['ids'].append(node_id)
+                    directive.state.document.note_implicit_target(name_node, name_node)
+                    domain.note_object('token', obj_name, node_id, location=name_node)
+
+                    text_node = nodes.Text(name)
+                    name_node += text_node
+                    literal += name_node
+                case {'rule_ref': name}:
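+                    # A reference to a grammar rule: emit a pending_xref that
+                    # the std domain resolves to the corresponding 'token'
+                    # target at build time.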
+                    ref_node = addnodes.pending_xref(
+                        name,
+                        reftype="token",
+                        refdomain="std",
+                        reftarget=f"{group_name}:{name}",
+                    )
+                    ref_node += nodes.Text(name)
+                    literal += ref_node
+                case {'single_quoted': name} | {'double_quoted': name}:
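+                    # A literal string: reuse the Pygments 'nb' class so the
+                    # theme's existing highlighting CSS styles it (see the
+                    # TODO above about a dedicated class).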
+                    string_node = nodes.inline(classes=['nb'])
+                    string_node += nodes.Text(name)
+                    literal += string_node
+                case _:
+                    raise ValueError('unhandled match')
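+        # Append any text after the last match, plus the newline that was
+        # dropped when the directive content was split into lines.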
+        literal += nodes.Text(line[last_pos:] + '\n')
+
+    node = nodes.paragraph(
+        '', '',
+        literal,
+    )
+
+    return [node]
+

 class GrammarSnippetDirective(SphinxDirective):
     """Transform a grammar-snippet directive to a Sphinx productionlist
@@ -37,97 +129,23 @@ class GrammarSnippetDirective(SphinxDirective):
     final_argument_whitespace = True

     def run(self):
-        group_name = self.options['group']
-
-        # Docutils elements have a `rawsource` attribute that is supposed to be
-        # set to the original ReST source.
-        # Sphinx does the following with it:
-        # - if it's empty, set it to `self.astext()`
-        # - if it matches `self.astext()` when generating the output,
-        #   apply syntax highlighting (which is based on the plain-text content
-        #   and thus discards internal formatting, like references).
-        # To get around this, we set it to this non-empty string:
-        rawsource = 'You should not see this.'
-
-        literal = nodes.literal_block(
-            rawsource,
-            '',
-            # TODO: Use a dedicated CSS class here and for strings.
-            # and add it to the theme too
-            classes=['highlight'],
-        )
-
-        grammar_re = re.compile(
-            """
-            (?P<rule_name>^[a-zA-Z0-9_]+)        # identifier at start of line
-            (?=:)                                # ... followed by a colon
-            |
-            [`](?P<rule_ref>[a-zA-Z0-9_]+)[`]    # identifier in backquotes
-            |
-            (?P<single_quoted>'[^']*')           # string in 'quotes'
-            |
-            (?P<double_quoted>"[^"]*")           # string in "quotes"
-            """,
-            re.VERBOSE,
-        )
-
-        for line in self.content:
-            last_pos = 0
-            for match in grammar_re.finditer(line):
-                # Handle text between matches
-                if match.start() > last_pos:
-                    literal += nodes.Text(line[last_pos:match.start()])
-                last_pos = match.end()
-
-                # Handle matches
-                groupdict = {
-                    name: content
-                    for name, content in match.groupdict().items()
-                    if content is not None
-                }
-                match groupdict:
-                    case {'rule_name': name}:
-                        name_node = addnodes.literal_strong()
-
-                        # Cargo-culted magic to make `name_node` a link target
-                        # similar to Sphinx `production`.
-                        # This needs to be the same as what Sphinx does
-                        # to avoid breaking existing links.
-                        domain = self.env.domains['std']
-                        obj_name = f"{group_name}:{name}"
-                        prefix = f'grammar-token-{group_name}'
-                        node_id = make_id(self.env, self.state.document, prefix, name)
-                        name_node['ids'].append(node_id)
-                        self.state.document.note_implicit_target(name_node, name_node)
-                        domain.note_object('token', obj_name, node_id, location=name_node)
-
-                        text_node = nodes.Text(name)
-                        name_node += text_node
-                        literal += name_node
-                    case {'rule_ref': name}:
-                        ref_node = addnodes.pending_xref(
-                            name,
-                            reftype="token",
-                            refdomain="std",
-                            reftarget=f"{group_name}:{name}",
-                        )
-                        ref_node += nodes.Text(name)
-                        literal += ref_node
-                    case {'single_quoted': name} | {'double_quoted': name}:
-                        string_node = nodes.inline(classes=['nb'])
-                        string_node += nodes.Text(name)
-                        literal += string_node
-                    case _:
-                        raise ValueError('unhandled match')
-            literal += nodes.Text(line[last_pos:] + '\n')
-
-        node = nodes.paragraph(
-            '', '',
-            literal,
-        )
-
-        return [node]
+        return make_snippet(self, self.options, self.content)
+
+
+class CompatProductionList(SphinxDirective):
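+    # Overrides Sphinx's built-in `productionlist` directive (registered with
+    # override=True in setup() below): the directive argument is used as the
+    # grammar group, and the content is rendered by make_snippet().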
+    has_content = True
+    option_spec = {}
+
+    # We currently ignore arguments.
+    required_arguments = 1
+
+    def run(self):
+        options = {'group': self.arguments[0]}
+        content = self.content
+        return make_snippet(self, options, content)
+

 def setup(app):
     app.add_directive('grammar-snippet', GrammarSnippetDirective)
+    app.add_directive('productionlist', CompatProductionList, override=True)
     return {'version': '1.0', 'parallel_read_safe': True}
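
For illustration only, and not part of the commit above: the pattern compiled in
make_snippet() splits a grammar line into rule definitions, backquoted rule
references, and quoted strings. The sample line below is made up; running the
same regular expression over it prints the filtered groupdict values that the
match statement dispatches on.

    import re

    grammar_re = re.compile(
        """
        (?P<rule_name>^[a-zA-Z0-9_]+)        # identifier at start of line
        (?=:)                                # ... followed by a colon
        |
        [`](?P<rule_ref>[a-zA-Z0-9_]+)[`]    # identifier in backquotes
        |
        (?P<single_quoted>'[^']*')           # string in 'quotes'
        |
        (?P<double_quoted>"[^"]*")           # string in "quotes"
        """,
        re.VERBOSE,
    )

    # Made-up example of a line a grammar-snippet directive might contain.
    line = "if_stmt: 'if' `expression` ':' `block`"
    for match in grammar_re.finditer(line):
        print({k: v for k, v in match.groupdict().items() if v is not None})
    # {'rule_name': 'if_stmt'}
    # {'single_quoted': "'if'"}
    # {'rule_ref': 'expression'}
    # {'single_quoted': "':'"}
    # {'rule_ref': 'block'}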