7 | 7 | from sphinx.util.nodes import make_id |
8 | 8 |
9 | 9 |
10 | | -class GrammarSnippetDirective(SphinxDirective): |
| 10 | + |
| 11 | +class GrammarSnippetBase(SphinxDirective): |
| 12 | + """Common functionality for GrammarSnippetDirective & CompatProductionList. |
| 13 | + """ |
| 14 | + # The option/argument handling is left to the individual classes. |
| 15 | + |
| 16 | + def make_grammar_snippet(self, options, content): |
| 17 | + """Create a literal block from options & content.""" |
| 18 | + |
| 19 | + group_name = options['group'] |
| 20 | + |
| 21 | + # Docutils elements have a `rawsource` attribute that is supposed to be |
| 22 | + # set to the original ReST source. |
| 23 | + # Sphinx does the following with it: |
| 24 | + # - if it's empty, set it to `self.astext()` |
| 25 | + # - if it matches `self.astext()` when generating the output, |
| 26 | + # apply syntax highlighting (which is based on the plain-text content |
| 27 | + # and thus discards internal formatting, like references). |
| 28 | + # To get around this, we set it to this non-empty string: |
| 29 | + rawsource = 'You should not see this.' |
| 30 | + |
| 31 | + literal = nodes.literal_block( |
| 32 | + rawsource, |
| 33 | + '', |
| 34 | + # TODO: Use a dedicated CSS class here and for strings. |
| 35 | + # and add it to the theme too |
| 36 | + classes=['highlight'], |
| 37 | + ) |
| 38 | + |
| 39 | + grammar_re = re.compile( |
| 40 | + """ |
| 41 | + (?P<rule_name>^[a-zA-Z0-9_]+) # identifier at start of line |
| 42 | + (?=:) # ... followed by a colon |
| 43 | + | |
| 44 | + [`](?P<rule_ref>[a-zA-Z0-9_]+)[`] # identifier in backquotes |
| 45 | + | |
| 46 | + (?P<single_quoted>'[^']*') # string in 'quotes' |
| 47 | + | |
| 48 | + (?P<double_quoted>"[^"]*") # string in "quotes" |
| 49 | + """, |
| 50 | + re.VERBOSE, |
| 51 | + ) |
| 52 | + |
| 53 | + for line in content: |
| 54 | + last_pos = 0 |
| 55 | + for match in grammar_re.finditer(line): |
| 56 | + # Handle text between matches |
| 57 | + if match.start() > last_pos: |
| 58 | + literal += nodes.Text(line[last_pos : match.start()]) |
| 59 | + last_pos = match.end() |
| 60 | + |
| 61 | + # Handle matches |
| 62 | + groupdict = { |
| 63 | + name: content |
| 64 | + for name, content in match.groupdict().items() |
| 65 | + if content is not None |
| 66 | + } |
| 67 | + match groupdict: |
| 68 | + case {'rule_name': name}: |
| 69 | + literal += self.make_link_to_token(group_name, name) |
| 70 | + case {'rule_ref': name}: |
| 71 | + ref_node = addnodes.pending_xref( |
| 72 | + name, |
| 73 | + reftype="token", |
| 74 | + refdomain="std", |
| 75 | + reftarget=f"{group_name}:{name}", |
| 76 | + ) |
| 77 | + ref_node += nodes.Text(name) |
| 78 | + literal += ref_node |
| 79 | + case {'single_quoted': name} | {'double_quoted': name}: |
| 80 | + string_node = nodes.inline(classes=['nb']) |
| 81 | + string_node += nodes.Text(name) |
| 82 | + literal += string_node |
| 83 | + case _: |
| 84 | + raise ValueError('unhandled match') |
| 85 | + literal += nodes.Text(line[last_pos:] + '\n') |
| 86 | + |
| 87 | + node = nodes.paragraph( |
| 88 | + '', |
| 89 | + '', |
| 90 | + literal, |
| 91 | + ) |
| 92 | + |
| 93 | + return [node] |
| 94 | + |
| 95 | + def make_link_to_token(self, group_name, name): |
| 96 | + """Return a literal node that links to the given grammar token""" |
| 97 | + name_node = addnodes.literal_strong() |
| 98 | + |
| 99 | + # Cargo-culted magic to make `name_node` a link target |
| 100 | + # similar to Sphinx `production`. |
| 101 | + # This needs to be the same as what Sphinx does |
| 102 | + # to avoid breaking existing links. |
| 103 | + domain = self.env.domains['std'] |
| 104 | + obj_name = f"{group_name}:{name}" |
| 105 | + prefix = f'grammar-token-{group_name}' |
| 106 | + node_id = make_id( |
| 107 | + self.env, self.state.document, prefix, name |
| 108 | + ) |
| 109 | + name_node['ids'].append(node_id) |
| 110 | + self.state.document.note_implicit_target( |
| 111 | + name_node, name_node |
| 112 | + ) |
| 113 | + domain.note_object( |
| 114 | + 'token', obj_name, node_id, location=name_node |
| 115 | + ) |
| 116 | + |
| 117 | + text_node = nodes.Text(name) |
| 118 | + name_node += text_node |
| 119 | + return name_node |
| 120 | + |
| 121 | + |
| 122 | +class GrammarSnippetDirective(GrammarSnippetBase): |
11 | 123 | """Transform a grammar-snippet directive to a Sphinx literal_block |
12 | 124 |
13 | 125 | That is, turn something like: |
@@ -40,116 +152,10 @@ class GrammarSnippetDirective(SphinxDirective): |
40 | 152 | final_argument_whitespace = True |
41 | 153 |
42 | 154 | def run(self): |
43 | | - return make_snippet(self, self.options, self.content) |
44 | | - |
45 | | - |
46 | | -def make_snippet(directive, options, content): |
47 | | - """Create a literal block from options & content. |
48 | | -
49 | | - This implements the common functionality for GrammarSnippetDirective |
50 | | - and CompatProductionList. |
51 | | - """ |
52 | | - |
53 | | - group_name = options['group'] |
54 | | - |
55 | | - # Docutils elements have a `rawsource` attribute that is supposed to be |
56 | | - # set to the original ReST source. |
57 | | - # Sphinx does the following with it: |
58 | | - # - if it's empty, set it to `self.astext()` |
59 | | - # - if it matches `self.astext()` when generating the output, |
60 | | - # apply syntax highlighting (which is based on the plain-text content |
61 | | - # and thus discards internal formatting, like references). |
62 | | - # To get around this, we set it to this non-empty string: |
63 | | - rawsource = 'You should not see this.' |
64 | | - |
65 | | - literal = nodes.literal_block( |
66 | | - rawsource, |
67 | | - '', |
68 | | - # TODO: Use a dedicated CSS class here and for strings. |
69 | | - # and add it to the theme too |
70 | | - classes=['highlight'], |
71 | | - ) |
72 | | - |
73 | | - grammar_re = re.compile( |
74 | | - """ |
75 | | - (?P<rule_name>^[a-zA-Z0-9_]+) # identifier at start of line |
76 | | - (?=:) # ... followed by a colon |
77 | | - | |
78 | | - [`](?P<rule_ref>[a-zA-Z0-9_]+)[`] # identifier in backquotes |
79 | | - | |
80 | | - (?P<single_quoted>'[^']*') # string in 'quotes' |
81 | | - | |
82 | | - (?P<double_quoted>"[^"]*") # string in "quotes" |
83 | | - """, |
84 | | - re.VERBOSE, |
85 | | - ) |
86 | | - |
87 | | - for line in content: |
88 | | - last_pos = 0 |
89 | | - for match in grammar_re.finditer(line): |
90 | | - # Handle text between matches |
91 | | - if match.start() > last_pos: |
92 | | - literal += nodes.Text(line[last_pos : match.start()]) |
93 | | - last_pos = match.end() |
94 | | - |
95 | | - # Handle matches |
96 | | - groupdict = { |
97 | | - name: content |
98 | | - for name, content in match.groupdict().items() |
99 | | - if content is not None |
100 | | - } |
101 | | - match groupdict: |
102 | | - case {'rule_name': name}: |
103 | | - name_node = addnodes.literal_strong() |
104 | | - |
105 | | - # Cargo-culted magic to make `name_node` a link target |
106 | | - # similar to Sphinx `production`. |
107 | | - # This needs to be the same as what Sphinx does |
108 | | - # to avoid breaking existing links. |
109 | | - domain = directive.env.domains['std'] |
110 | | - obj_name = f"{group_name}:{name}" |
111 | | - prefix = f'grammar-token-{group_name}' |
112 | | - node_id = make_id( |
113 | | - directive.env, directive.state.document, prefix, name |
114 | | - ) |
115 | | - name_node['ids'].append(node_id) |
116 | | - directive.state.document.note_implicit_target( |
117 | | - name_node, name_node |
118 | | - ) |
119 | | - domain.note_object( |
120 | | - 'token', obj_name, node_id, location=name_node |
121 | | - ) |
122 | | - |
123 | | - text_node = nodes.Text(name) |
124 | | - name_node += text_node |
125 | | - literal += name_node |
126 | | - case {'rule_ref': name}: |
127 | | - ref_node = addnodes.pending_xref( |
128 | | - name, |
129 | | - reftype="token", |
130 | | - refdomain="std", |
131 | | - reftarget=f"{group_name}:{name}", |
132 | | - ) |
133 | | - ref_node += nodes.Text(name) |
134 | | - literal += ref_node |
135 | | - case {'single_quoted': name} | {'double_quoted': name}: |
136 | | - string_node = nodes.inline(classes=['nb']) |
137 | | - string_node += nodes.Text(name) |
138 | | - literal += string_node |
139 | | - case _: |
140 | | - raise ValueError('unhandled match') |
141 | | - literal += nodes.Text(line[last_pos:] + '\n') |
142 | | - |
143 | | - node = nodes.paragraph( |
144 | | - '', |
145 | | - '', |
146 | | - literal, |
147 | | - ) |
148 | | - |
149 | | - return [node] |
| 155 | + return self.make_grammar_snippet(self.options, self.content) |
150 | 156 |
151 | 157 |
152 | | -class CompatProductionList(SphinxDirective): |
| 158 | +class CompatProductionList(GrammarSnippetBase): |
153 | 159 | """Create grammar snippets from ReST productionlist syntax |
154 | 160 |
155 | 161 | This is intended to be a transitional directive, used while we switch |
@@ -181,7 +187,7 @@ def run(self): |
181 | 187 | else: |
182 | 188 | name_part = '' |
183 | 189 | content.append(f'{name_part:<{align_column}}{text}') |
184 | | - return make_snippet(self, options, content) |
| 190 | + return self.make_grammar_snippet(options, content) |
185 | 191 |
186 | 192 |
187 | 193 | def setup(app): |
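For reference, the tokenizing regex introduced in `make_grammar_snippet()` can be exercised on its own. The sketch below reuses the exact `grammar_re` pattern from the diff; the sample grammar line (`if_stmt: ...`) is invented for illustration and is not part of the change:

```python
import re

# Same pattern as `grammar_re` in make_grammar_snippet() above.
grammar_re = re.compile(
    """
    (?P<rule_name>^[a-zA-Z0-9_]+)  # identifier at start of line
    (?=:)                          # ... followed by a colon
    |
    [`](?P<rule_ref>[a-zA-Z0-9_]+)[`]  # identifier in backquotes
    |
    (?P<single_quoted>'[^']*')     # string in 'quotes'
    |
    (?P<double_quoted>"[^"]*")     # string in "quotes"
    """,
    re.VERBOSE,
)

# Hypothetical grammar line, for illustration only.
line = "if_stmt: 'if' `expression` ':' block"
for match in grammar_re.finditer(line):
    # Exactly one named group matches per alternative, so `lastgroup`
    # tells us which kind of token was found.
    print(match.lastgroup, repr(match.group(match.lastgroup)))

# Output:
#   rule_name 'if_stmt'
#   single_quoted "'if'"
#   rule_ref 'expression'
#   single_quoted "':'"
```

Anything the regex does not match (here, `block`) is left as plain text between matches, which `make_grammar_snippet()` emits as ordinary `nodes.Text`.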