diff options
author | Martin Schanzenbach <schanzen@gnunet.org> | 2023-09-08 20:29:38 +0200 |
---|---|---|
committer | Martin Schanzenbach <schanzen@gnunet.org> | 2023-09-08 20:29:38 +0200 |
commit | 531fca788194362f4d4230ee695479dacc348dd3 (patch) | |
tree | 1a9a9f55fa5549c35ec181aafec3436b2baf56aa | |
parent | 44f824b69eb84e6b2c4e7db579a3fc6ae05948fd (diff) | |
download | gana-531fca788194362f4d4230ee695479dacc348dd3.tar.gz gana-531fca788194362f4d4230ee695479dacc348dd3.zip |
exts forgotten
-rw-r--r-- | _exts/httpdomain/__init__.py | 14 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/__init__.py | 11 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/bottle.py | 114 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/common.py | 36 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/flask.py | 48 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/flask_base.py | 215 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/flaskqref.py | 80 | ||||
-rw-r--r-- | _exts/httpdomain/autohttp/tornado.py | 128 | ||||
-rw-r--r-- | _exts/httpdomain/httpdomain.py | 773 | ||||
-rw-r--r-- | _exts/typescriptdomain.py | 587 |
10 files changed, 2006 insertions, 0 deletions
diff --git a/_exts/httpdomain/__init__.py b/_exts/httpdomain/__init__.py new file mode 100644 index 0000000..b5a7dc2 --- /dev/null +++ b/_exts/httpdomain/__init__.py | |||
@@ -0,0 +1,14 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | """ | ||
3 | sphinxcontrib | ||
4 | ~~~~~~~~~~~~~ | ||
5 | |||
6 | This package is a namespace package that contains all extensions | ||
7 | distributed in the ``sphinx-contrib`` distribution. | ||
8 | |||
9 | :copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS. | ||
10 | :license: BSD, see LICENSE for details. | ||
11 | """ | ||
12 | |||
13 | __import__('pkg_resources').declare_namespace(__name__) | ||
14 | |||
diff --git a/_exts/httpdomain/autohttp/__init__.py b/_exts/httpdomain/autohttp/__init__.py new file mode 100644 index 0000000..95372d4 --- /dev/null +++ b/_exts/httpdomain/autohttp/__init__.py | |||
@@ -0,0 +1,11 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API reference builder | ||
6 | for sphinxcontrib.httpdomain. | ||
7 | |||
8 | :copyright: Copyright 2011 by Hong Minhee | ||
9 | :license: BSD, see LICENSE for details. | ||
10 | |||
11 | """ | ||
diff --git a/_exts/httpdomain/autohttp/bottle.py b/_exts/httpdomain/autohttp/bottle.py new file mode 100644 index 0000000..d8c1859 --- /dev/null +++ b/_exts/httpdomain/autohttp/bottle.py | |||
@@ -0,0 +1,114 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.bottle | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API reference builder (from Bottle) | ||
6 | for sphinxcontrib.httpdomain. | ||
7 | |||
8 | :copyright: Copyright 2012 by Jameel Al-Aziz | ||
9 | :license: BSD, see LICENSE for details. | ||
10 | |||
11 | """ | ||
12 | |||
13 | import re | ||
14 | import six | ||
15 | |||
16 | from docutils import nodes | ||
17 | from docutils.parsers.rst import directives | ||
18 | from docutils.statemachine import ViewList | ||
19 | |||
20 | from sphinx.util import force_decode | ||
21 | from sphinx.util.compat import Directive | ||
22 | from sphinx.util.nodes import nested_parse_with_titles | ||
23 | from sphinx.util.docstrings import prepare_docstring | ||
24 | from sphinx.pycode import ModuleAnalyzer | ||
25 | |||
26 | from sphinxcontrib import httpdomain | ||
27 | from sphinxcontrib.autohttp.common import http_directive, import_object | ||
28 | |||
29 | |||
30 | def translate_bottle_rule(app, rule): | ||
31 | buf = six.StringIO() | ||
32 | if hasattr(app.router, "parse_rule"): | ||
33 | iterator = app.router.parse_rule(rule) # bottle 0.11 | ||
34 | else: | ||
35 | iterator = app.router._itertokens(rule) # bottle 0.12 | ||
36 | for name, filter, conf in iterator: | ||
37 | if filter: | ||
38 | buf.write('(') | ||
39 | buf.write(name) | ||
40 | if (filter != app.router.default_filter and filter != 'default')\ | ||
41 | or conf: | ||
42 | buf.write(':') | ||
43 | buf.write(filter) | ||
44 | if conf: | ||
45 | buf.write(':') | ||
46 | buf.write(conf) | ||
47 | buf.write(')') | ||
48 | else: | ||
49 | buf.write(name) | ||
50 | return buf.getvalue() | ||
51 | |||
52 | |||
53 | def get_routes(app): | ||
54 | for route in app.routes: | ||
55 | path = translate_bottle_rule(app, route.rule) | ||
56 | yield route.method, path, route | ||
57 | |||
58 | |||
59 | class AutobottleDirective(Directive): | ||
60 | |||
61 | has_content = True | ||
62 | required_arguments = 1 | ||
63 | option_spec = {'endpoints': directives.unchanged, | ||
64 | 'undoc-endpoints': directives.unchanged, | ||
65 | 'include-empty-docstring': directives.unchanged} | ||
66 | |||
67 | @property | ||
68 | def endpoints(self): | ||
69 | endpoints = self.options.get('endpoints', None) | ||
70 | if not endpoints: | ||
71 | return None | ||
72 | return frozenset(re.split(r'\s*,\s*', endpoints)) | ||
73 | |||
74 | @property | ||
75 | def undoc_endpoints(self): | ||
76 | undoc_endpoints = self.options.get('undoc-endpoints', None) | ||
77 | if not undoc_endpoints: | ||
78 | return frozenset() | ||
79 | return frozenset(re.split(r'\s*,\s*', undoc_endpoints)) | ||
80 | |||
81 | def make_rst(self): | ||
82 | app = import_object(self.arguments[0]) | ||
83 | for method, path, target in get_routes(app): | ||
84 | endpoint = target.name or target.callback.__name__ | ||
85 | if self.endpoints and endpoint not in self.endpoints: | ||
86 | continue | ||
87 | if endpoint in self.undoc_endpoints: | ||
88 | continue | ||
89 | view = target.callback | ||
90 | docstring = view.__doc__ or '' | ||
91 | if not isinstance(docstring, six.text_type): | ||
92 | analyzer = ModuleAnalyzer.for_module(view.__module__) | ||
93 | docstring = force_decode(docstring, analyzer.encoding) | ||
94 | if not docstring and 'include-empty-docstring' not in self.options: | ||
95 | continue | ||
96 | docstring = prepare_docstring(docstring) | ||
97 | for line in http_directive(method, path, docstring): | ||
98 | yield line | ||
99 | |||
100 | def run(self): | ||
101 | node = nodes.section() | ||
102 | node.document = self.state.document | ||
103 | result = ViewList() | ||
104 | for line in self.make_rst(): | ||
105 | result.append(line, '<autobottle>') | ||
106 | nested_parse_with_titles(self.state, result, node) | ||
107 | return node.children | ||
108 | |||
109 | |||
110 | def setup(app): | ||
111 | if 'http' not in app.domains: | ||
112 | httpdomain.setup(app) | ||
113 | app.add_directive('autobottle', AutobottleDirective) | ||
114 | |||
diff --git a/_exts/httpdomain/autohttp/common.py b/_exts/httpdomain/autohttp/common.py new file mode 100644 index 0000000..199e297 --- /dev/null +++ b/_exts/httpdomain/autohttp/common.py | |||
@@ -0,0 +1,36 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.common | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The common functions for web framework reflection. | ||
6 | |||
7 | :copyright: Copyright 2011 by Hong Minhee | ||
8 | :license: BSD, see LICENSE for details. | ||
9 | |||
10 | """ | ||
11 | import six | ||
12 | from six.moves import builtins | ||
13 | from six.moves import reduce | ||
14 | |||
15 | def import_object(import_name): | ||
16 | module_name, expr = import_name.split(':', 1) | ||
17 | mod = __import__(module_name) | ||
18 | mod = reduce(getattr, module_name.split('.')[1:], mod) | ||
19 | globals = builtins | ||
20 | if not isinstance(globals, dict): | ||
21 | globals = globals.__dict__ | ||
22 | return eval(expr, globals, mod.__dict__) | ||
23 | |||
24 | |||
25 | def http_directive(method, path, content): | ||
26 | method = method.lower().strip() | ||
27 | if isinstance(content, six.string_types): | ||
28 | content = content.splitlines() | ||
29 | yield '' | ||
30 | paths = [path] if isinstance(path, six.string_types) else path | ||
31 | for path in paths: | ||
32 | yield '.. http:{method}:: {path}'.format(**locals()) | ||
33 | yield '' | ||
34 | for line in content: | ||
35 | yield ' ' + line | ||
36 | yield '' | ||
diff --git a/_exts/httpdomain/autohttp/flask.py b/_exts/httpdomain/autohttp/flask.py new file mode 100644 index 0000000..4bd5232 --- /dev/null +++ b/_exts/httpdomain/autohttp/flask.py | |||
@@ -0,0 +1,48 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.flask | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API reference builder (from Flask) | ||
6 | for sphinxcontrib.httpdomain. | ||
7 | |||
8 | :copyright: Copyright 2011 by Hong Minhee | ||
9 | :license: BSD, see LICENSE for details. | ||
10 | |||
11 | """ | ||
12 | from __future__ import absolute_import | ||
13 | |||
14 | import re | ||
15 | import itertools | ||
16 | import six | ||
17 | |||
18 | from docutils import nodes | ||
19 | from docutils.parsers.rst import directives | ||
20 | from docutils.statemachine import ViewList | ||
21 | |||
22 | from sphinx.util import force_decode | ||
23 | from sphinx.util.compat import Directive | ||
24 | from sphinx.util.nodes import nested_parse_with_titles | ||
25 | from sphinx.util.docstrings import prepare_docstring | ||
26 | from sphinx.pycode import ModuleAnalyzer | ||
27 | |||
28 | from sphinxcontrib import httpdomain | ||
29 | from sphinxcontrib.autohttp.common import http_directive, import_object | ||
30 | |||
31 | from .flask_base import AutoflaskBase | ||
32 | |||
33 | class AutoflaskDirective(AutoflaskBase): | ||
34 | |||
35 | def run(self): | ||
36 | node = nodes.section() | ||
37 | node.document = self.state.document | ||
38 | result = ViewList() | ||
39 | for line in self.make_rst(): | ||
40 | result.append(line, '<autoflask>') | ||
41 | nested_parse_with_titles(self.state, result, node) | ||
42 | return node.children | ||
43 | |||
44 | |||
45 | def setup(app): | ||
46 | if 'http' not in app.domains: | ||
47 | httpdomain.setup(app) | ||
48 | app.add_directive('autoflask', AutoflaskDirective) | ||
diff --git a/_exts/httpdomain/autohttp/flask_base.py b/_exts/httpdomain/autohttp/flask_base.py new file mode 100644 index 0000000..50454fe --- /dev/null +++ b/_exts/httpdomain/autohttp/flask_base.py | |||
@@ -0,0 +1,215 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.flask | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API reference builder (from Flask) | ||
6 | for sphinxcontrib.httpdomain. | ||
7 | |||
8 | :copyright: Copyright 2011 by Hong Minhee | ||
9 | :license: BSD, see LICENSE for details. | ||
10 | |||
11 | """ | ||
12 | |||
13 | import re | ||
14 | import itertools | ||
15 | import six | ||
16 | |||
17 | from docutils import nodes | ||
18 | from docutils.parsers.rst import directives | ||
19 | from docutils.statemachine import ViewList | ||
20 | |||
21 | from sphinx.util import force_decode | ||
22 | from sphinx.util.compat import Directive | ||
23 | from sphinx.util.nodes import nested_parse_with_titles | ||
24 | from sphinx.util.docstrings import prepare_docstring | ||
25 | from sphinx.pycode import ModuleAnalyzer | ||
26 | |||
27 | from sphinxcontrib import httpdomain | ||
28 | from sphinxcontrib.autohttp.common import http_directive, import_object | ||
29 | |||
30 | |||
31 | def translate_werkzeug_rule(rule): | ||
32 | from werkzeug.routing import parse_rule | ||
33 | buf = six.StringIO() | ||
34 | for conv, arg, var in parse_rule(rule): | ||
35 | if conv: | ||
36 | buf.write('(') | ||
37 | if conv != 'default': | ||
38 | buf.write(conv) | ||
39 | buf.write(':') | ||
40 | buf.write(var) | ||
41 | buf.write(')') | ||
42 | else: | ||
43 | buf.write(var) | ||
44 | return buf.getvalue() | ||
45 | |||
46 | |||
47 | def get_routes(app, endpoint=None, order=None): | ||
48 | endpoints = [] | ||
49 | for rule in app.url_map.iter_rules(endpoint): | ||
50 | url_with_endpoint = ( | ||
51 | six.text_type(next(app.url_map.iter_rules(rule.endpoint))), | ||
52 | rule.endpoint | ||
53 | ) | ||
54 | if url_with_endpoint not in endpoints: | ||
55 | endpoints.append(url_with_endpoint) | ||
56 | if order == 'path': | ||
57 | endpoints.sort() | ||
58 | endpoints = [e for _, e in endpoints] | ||
59 | for endpoint in endpoints: | ||
60 | methodrules = {} | ||
61 | for rule in app.url_map.iter_rules(endpoint): | ||
62 | methods = rule.methods.difference(['OPTIONS', 'HEAD']) | ||
63 | path = translate_werkzeug_rule(rule.rule) | ||
64 | for method in methods: | ||
65 | if method in methodrules: | ||
66 | methodrules[method].append(path) | ||
67 | else: | ||
68 | methodrules[method] = [path] | ||
69 | for method, paths in methodrules.items(): | ||
70 | yield method, paths, endpoint | ||
71 | |||
72 | |||
73 | def quickref_directive(method, path, content): | ||
74 | rcomp = re.compile("^\s*.. :quickref:\s*(?P<quick>.*)$") | ||
75 | method = method.lower().strip() | ||
76 | if isinstance(content, six.string_types): | ||
77 | content = content.splitlines() | ||
78 | description="" | ||
79 | name="" | ||
80 | ref = path.replace("<","(").replace(">",")").replace("/","-").replace(":","-") | ||
81 | for line in content: | ||
82 | qref = rcomp.match(line) | ||
83 | if qref: | ||
84 | quickref = qref.group("quick") | ||
85 | parts = quickref.split(";",1) | ||
86 | if len(parts)>1: | ||
87 | name = parts[0] | ||
88 | description= parts[1] | ||
89 | else: | ||
90 | description= quickref | ||
91 | break | ||
92 | |||
93 | row ={} | ||
94 | row['name'] = name | ||
95 | row['operation'] = ' - `%s %s <#%s-%s>`_' % (method.upper(), path, method.lower(), ref) | ||
96 | row['description'] = description | ||
97 | |||
98 | return row | ||
99 | |||
100 | class AutoflaskBase(Directive): | ||
101 | |||
102 | has_content = True | ||
103 | required_arguments = 1 | ||
104 | option_spec = {'endpoints': directives.unchanged, | ||
105 | 'blueprints': directives.unchanged, | ||
106 | 'modules': directives.unchanged, | ||
107 | 'order': directives.unchanged, | ||
108 | 'undoc-endpoints': directives.unchanged, | ||
109 | 'undoc-blueprints': directives.unchanged, | ||
110 | 'undoc-modules': directives.unchanged, | ||
111 | 'undoc-static': directives.unchanged, | ||
112 | 'include-empty-docstring': directives.unchanged} | ||
113 | |||
114 | @property | ||
115 | def endpoints(self): | ||
116 | endpoints = self.options.get('endpoints', None) | ||
117 | if not endpoints: | ||
118 | return None | ||
119 | return re.split(r'\s*,\s*', endpoints) | ||
120 | |||
121 | @property | ||
122 | def undoc_endpoints(self): | ||
123 | undoc_endpoints = self.options.get('undoc-endpoints', None) | ||
124 | if not undoc_endpoints: | ||
125 | return frozenset() | ||
126 | return frozenset(re.split(r'\s*,\s*', undoc_endpoints)) | ||
127 | |||
128 | @property | ||
129 | def blueprints(self): | ||
130 | blueprints = self.options.get('blueprints', None) | ||
131 | if not blueprints: | ||
132 | return None | ||
133 | return frozenset(re.split(r'\s*,\s*', blueprints)) | ||
134 | |||
135 | @property | ||
136 | def undoc_blueprints(self): | ||
137 | undoc_blueprints = self.options.get('undoc-blueprints', None) | ||
138 | if not undoc_blueprints: | ||
139 | return frozenset() | ||
140 | return frozenset(re.split(r'\s*,\s*', undoc_blueprints)) | ||
141 | |||
142 | @property | ||
143 | def modules(self): | ||
144 | modules = self.options.get('modules', None) | ||
145 | if not modules: | ||
146 | return frozenset() | ||
147 | return frozenset(re.split(r'\s*,\s*', modules)) | ||
148 | |||
149 | @property | ||
150 | def undoc_modules(self): | ||
151 | undoc_modules = self.options.get('undoc-modules', None) | ||
152 | if not undoc_modules: | ||
153 | return frozenset() | ||
154 | return frozenset(re.split(r'\s*,\s*', undoc_modules)) | ||
155 | |||
156 | @property | ||
157 | def order(self): | ||
158 | order = self.options.get('order', None) | ||
159 | if order not in (None, 'path'): | ||
160 | raise ValueError('Invalid value for :order:') | ||
161 | return order | ||
162 | |||
163 | def make_rst(self, qref=False): | ||
164 | app = import_object(self.arguments[0]) | ||
165 | if self.endpoints: | ||
166 | routes = itertools.chain(*[get_routes(app, endpoint, self.order) | ||
167 | for endpoint in self.endpoints]) | ||
168 | else: | ||
169 | routes = get_routes(app, order=self.order) | ||
170 | for method, paths, endpoint in routes: | ||
171 | try: | ||
172 | blueprint, _, endpoint_internal = endpoint.rpartition('.') | ||
173 | if self.blueprints and blueprint not in self.blueprints: | ||
174 | continue | ||
175 | if blueprint in self.undoc_blueprints: | ||
176 | continue | ||
177 | except ValueError: | ||
178 | pass # endpoint is not within a blueprint | ||
179 | |||
180 | if endpoint in self.undoc_endpoints: | ||
181 | continue | ||
182 | try: | ||
183 | static_url_path = app.static_url_path # Flask 0.7 or higher | ||
184 | except AttributeError: | ||
185 | static_url_path = app.static_path # Flask 0.6 or under | ||
186 | if ('undoc-static' in self.options and endpoint == 'static' and | ||
187 | static_url_path + '/(path:filename)' in paths): | ||
188 | continue | ||
189 | view = app.view_functions[endpoint] | ||
190 | |||
191 | if self.modules and view.__module__ not in self.modules: | ||
192 | continue | ||
193 | |||
194 | if self.undoc_modules and view.__module__ in self.modules: | ||
195 | continue | ||
196 | |||
197 | docstring = view.__doc__ or '' | ||
198 | if hasattr(view, 'view_class'): | ||
199 | meth_func = getattr(view.view_class, method.lower(), None) | ||
200 | if meth_func and meth_func.__doc__: | ||
201 | docstring = meth_func.__doc__ | ||
202 | if not isinstance(docstring, six.text_type): | ||
203 | analyzer = ModuleAnalyzer.for_module(view.__module__) | ||
204 | docstring = force_decode(docstring, analyzer.encoding) | ||
205 | |||
206 | if not docstring and 'include-empty-docstring' not in self.options: | ||
207 | continue | ||
208 | docstring = prepare_docstring(docstring) | ||
209 | if qref == True: | ||
210 | for path in paths: | ||
211 | row = quickref_directive(method, path, docstring) | ||
212 | yield row | ||
213 | else: | ||
214 | for line in http_directive(method, paths, docstring): | ||
215 | yield line | ||
diff --git a/_exts/httpdomain/autohttp/flaskqref.py b/_exts/httpdomain/autohttp/flaskqref.py new file mode 100644 index 0000000..c28bb15 --- /dev/null +++ b/_exts/httpdomain/autohttp/flaskqref.py | |||
@@ -0,0 +1,80 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.flaskqref | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API quick reference | ||
6 | builder (from Flask) | ||
7 | for sphinxcontrib.httpdomain. | ||
8 | |||
9 | :copyright: Copyright 2011 by Hong Minhee | ||
10 | :license: BSD, see LICENSE for details. | ||
11 | |||
12 | """ | ||
13 | |||
14 | from docutils import nodes | ||
15 | from docutils.statemachine import ViewList | ||
16 | |||
17 | from sphinxcontrib import httpdomain | ||
18 | from sphinx.util.nodes import nested_parse_with_titles | ||
19 | |||
20 | from .flask import AutoflaskBase | ||
21 | |||
22 | |||
23 | class QuickReferenceFlaskDirective(AutoflaskBase): | ||
24 | |||
25 | |||
26 | header = [ '', | ||
27 | '.. list-table::', | ||
28 | ' :widths: 20 45 35', | ||
29 | ' :header-rows: 1', | ||
30 | '', | ||
31 | ' * - Resource', | ||
32 | ' - Operation', | ||
33 | ' - Description' | ||
34 | ] | ||
35 | |||
36 | def run(self): | ||
37 | node = nodes.section() | ||
38 | node.document = self.state.document | ||
39 | result = ViewList() | ||
40 | for line in QuickReferenceFlaskDirective.header: | ||
41 | result.append(line, '<qrefflask>') | ||
42 | table={} | ||
43 | table_sorted_names=[] | ||
44 | |||
45 | for table_row in self.make_rst(qref=True): | ||
46 | name = table_row['name'] | ||
47 | if table.get(name) is None: | ||
48 | table[name]=[] | ||
49 | table[name].append(table_row) | ||
50 | if name not in table_sorted_names: | ||
51 | table_sorted_names.append(name) | ||
52 | |||
53 | table_sorted_names.sort() | ||
54 | |||
55 | for name in table_sorted_names: | ||
56 | # Keep table display clean by not repeating duplicate | ||
57 | # resource names and descriptions | ||
58 | display_name = name | ||
59 | previous_description=None | ||
60 | for row in table[name]: | ||
61 | result.append(' * - %s' % display_name, '<qrefflask>') | ||
62 | display_name ="" | ||
63 | result.append(row['operation'], '<qrefflask>') | ||
64 | description = row['description'] | ||
65 | if previous_description is not None and previous_description == description: | ||
66 | description ="" | ||
67 | else: | ||
68 | previous_description = description | ||
69 | |||
70 | result.append(' - %s' % description, '<qrefflask>') | ||
71 | |||
72 | result.append('', '<qrefflask>') | ||
73 | nested_parse_with_titles(self.state, result, node) | ||
74 | return node.children | ||
75 | |||
76 | def setup(app): | ||
77 | if 'http' not in app.domains: | ||
78 | httpdomain.setup(app) | ||
79 | app.add_directive('qrefflask', QuickReferenceFlaskDirective) | ||
80 | |||
diff --git a/_exts/httpdomain/autohttp/tornado.py b/_exts/httpdomain/autohttp/tornado.py new file mode 100644 index 0000000..9a514fe --- /dev/null +++ b/_exts/httpdomain/autohttp/tornado.py | |||
@@ -0,0 +1,128 @@ | |||
1 | """ | ||
2 | sphinxcontrib.autohttp.tornado | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The sphinx.ext.autodoc-style HTTP API reference builder (from Tornado) | ||
6 | for sphinxcontrib.httpdomain. | ||
7 | |||
8 | :copyright: Copyright 2013 by Rodrigo Machado | ||
9 | :license: BSD, see LICENSE for details. | ||
10 | |||
11 | """ | ||
12 | |||
13 | import inspect | ||
14 | import re | ||
15 | import six | ||
16 | |||
17 | from docutils import nodes | ||
18 | from docutils.parsers.rst import directives | ||
19 | from docutils.statemachine import ViewList | ||
20 | |||
21 | from sphinx.util import force_decode | ||
22 | from sphinx.util.compat import Directive | ||
23 | from sphinx.util.nodes import nested_parse_with_titles | ||
24 | from sphinx.util.docstrings import prepare_docstring | ||
25 | from sphinx.pycode import ModuleAnalyzer | ||
26 | |||
27 | from sphinxcontrib import httpdomain | ||
28 | from sphinxcontrib.autohttp.common import http_directive, import_object | ||
29 | |||
30 | |||
31 | def translate_tornado_rule(app, rule): | ||
32 | buf = six.StringIO() | ||
33 | for name, filter, conf in app.router.parse_rule(rule): | ||
34 | if filter: | ||
35 | buf.write('(') | ||
36 | buf.write(name) | ||
37 | if filter != app.router.default_filter or conf: | ||
38 | buf.write(':') | ||
39 | buf.write(filter) | ||
40 | if conf: | ||
41 | buf.write(':') | ||
42 | buf.write(conf) | ||
43 | buf.write(')') | ||
44 | else: | ||
45 | buf.write(name) | ||
46 | return buf.getvalue() | ||
47 | |||
48 | |||
49 | def get_routes(app): | ||
50 | for spec in app.handlers[0][1]: | ||
51 | handler = spec.handler_class | ||
52 | doc_methods = list(handler.SUPPORTED_METHODS) | ||
53 | if 'HEAD' in doc_methods: | ||
54 | doc_methods.remove('HEAD') | ||
55 | if 'OPTIONS' in doc_methods: | ||
56 | doc_methods.remove('OPTIONS') | ||
57 | |||
58 | for method in doc_methods: | ||
59 | maybe_method = getattr(handler, method.lower(), None) | ||
60 | if (inspect.isfunction(maybe_method) or | ||
61 | inspect.ismethod(maybe_method)): | ||
62 | yield method.lower(), spec.regex.pattern, handler | ||
63 | |||
64 | |||
65 | def normalize_path(path): | ||
66 | if path.endswith('$'): | ||
67 | path = path[:-1] | ||
68 | return path | ||
69 | |||
70 | |||
71 | class AutoTornadoDirective(Directive): | ||
72 | |||
73 | has_content = True | ||
74 | required_arguments = 1 | ||
75 | option_spec = {'endpoints': directives.unchanged, | ||
76 | 'undoc-endpoints': directives.unchanged, | ||
77 | 'include-empty-docstring': directives.unchanged} | ||
78 | |||
79 | @property | ||
80 | def endpoints(self): | ||
81 | endpoints = self.options.get('endpoints', None) | ||
82 | if not endpoints: | ||
83 | return None | ||
84 | return frozenset(re.split(r'\s*,\s*', endpoints)) | ||
85 | |||
86 | @property | ||
87 | def undoc_endpoints(self): | ||
88 | undoc_endpoints = self.options.get('undoc-endpoints', None) | ||
89 | if not undoc_endpoints: | ||
90 | return frozenset() | ||
91 | return frozenset(re.split(r'\s*,\s*', undoc_endpoints)) | ||
92 | |||
93 | def make_rst(self): | ||
94 | app = import_object(self.arguments[0]) | ||
95 | for method, path, handler in get_routes(app): | ||
96 | class_name = handler.__name__ | ||
97 | method_name = getattr(handler, method).__name__ | ||
98 | endpoint = '.'.join((class_name, method_name)) | ||
99 | |||
100 | if self.endpoints and endpoint not in self.endpoints: | ||
101 | continue | ||
102 | if endpoint in self.undoc_endpoints: | ||
103 | continue | ||
104 | |||
105 | docstring = getattr(handler, method).__doc__ or '' | ||
106 | #if not isinstance(docstring, unicode): | ||
107 | # analyzer = ModuleAnalyzer.for_module(view.__module__) | ||
108 | # docstring = force_decode(docstring, analyzer.encoding) | ||
109 | if not docstring and 'include-empty-docstring' not in self.options: | ||
110 | continue | ||
111 | docstring = prepare_docstring(docstring) | ||
112 | for line in http_directive(method, normalize_path(path), docstring): | ||
113 | yield line | ||
114 | |||
115 | def run(self): | ||
116 | node = nodes.section() | ||
117 | node.document = self.state.document | ||
118 | result = ViewList() | ||
119 | for line in self.make_rst(): | ||
120 | result.append(line, '<autotornado>') | ||
121 | nested_parse_with_titles(self.state, result, node) | ||
122 | return node.children | ||
123 | |||
124 | |||
125 | def setup(app): | ||
126 | if 'http' not in app.domains: | ||
127 | httpdomain.setup(app) | ||
128 | app.add_directive('autotornado', AutoTornadoDirective) | ||
diff --git a/_exts/httpdomain/httpdomain.py b/_exts/httpdomain/httpdomain.py new file mode 100644 index 0000000..b31142b --- /dev/null +++ b/_exts/httpdomain/httpdomain.py | |||
@@ -0,0 +1,773 @@ | |||
1 | """ | ||
2 | sphinxcontrib.httpdomain | ||
3 | ~~~~~~~~~~~~~~~~~~~~~~~~ | ||
4 | |||
5 | The HTTP domain for documenting RESTful HTTP APIs. | ||
6 | |||
7 | :copyright: Copyright 2011 by Hong Minhee | ||
8 | :license: BSD, see LICENSE for details. | ||
9 | |||
10 | """ | ||
11 | |||
12 | import re | ||
13 | |||
14 | from docutils import nodes | ||
15 | |||
16 | from pygments.lexer import RegexLexer, bygroups | ||
17 | from pygments.lexers import get_lexer_by_name | ||
18 | from pygments.token import Literal, Text, Operator, Keyword, Name, Number | ||
19 | from pygments.util import ClassNotFound | ||
20 | |||
21 | from sphinx import addnodes | ||
22 | from sphinx.roles import XRefRole | ||
23 | from sphinx.domains import Domain, ObjType, Index | ||
24 | from sphinx.directives import ObjectDescription, directives | ||
25 | from sphinx.util.nodes import make_refnode | ||
26 | from sphinx.util.docfields import GroupedField, TypedField | ||
27 | |||
28 | # The env.get_doctree() lookup results in a pickle.load() call which is | ||
29 | # expensive enough to dominate the runtime entirely when the number of endpoints | ||
30 | # and references is large enough. The doctrees are generated during the read- | ||
31 | # phase and we can cache their lookup during the write-phase significantly | ||
32 | # improving performance. | ||
33 | # Currently sphinxcontrib-httpdomain does not declare to support parallel read | ||
34 | # support (parallel_read_safe is the default False) so we can simply use a | ||
35 | # module global to hold the cache. | ||
36 | _doctree_cache = {} | ||
37 | |||
38 | |||
class DocRef(object):
    """A link into an external specification document.

    The string form is the full URL: ``<base_url>#<anchor><section>``.
    """

    def __init__(self, base_url, anchor, section):
        # Document URL, fragment-anchor prefix, and section suffix.
        self.base_url = base_url
        self.anchor = anchor
        self.section = section

    def __repr__(self):
        """Return the URL of the specification section this object points at."""
        return '%s#%s%s' % (self.base_url, self.anchor, self.section)
51 | |||
52 | |||
class RFC2616Ref(DocRef):
    """Reference into RFC 2616 (replaced in 2014 by RFCs 7230-7237)."""

    def __init__(self, section):
        # The W3C mirror splits RFC 2616 into one HTML page per top-level
        # section; the integer part of ``section`` selects the page while
        # the full value forms the fragment.
        page = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec%d.html' % int(section)
        super(RFC2616Ref, self).__init__(page, 'sec', section)
61 | |||
62 | |||
class IETFRef(DocRef):
    """Reference to a section of a specific IETF RFC."""

    def __init__(self, rfc, section):
        super(IETFRef, self).__init__(
            'https://tools.ietf.org/html/rfc%d' % rfc, 'section-', section)
69 | |||
70 | |||
class EventSourceRef(DocRef):
    """Reference into the W3C Server-Sent Events (eventsource) spec."""

    def __init__(self, section):
        # The section id *is* the fragment anchor; no numeric suffix.
        super(EventSourceRef, self).__init__(
            'http://www.w3.org/TR/eventsource/', section, '')
76 | |||
77 | |||
class CORSRef(DocRef):
    """Reference to the W3 Cross-Origin Resource Sharing recommendation."""

    def __init__(self, name, type):
        # Anchors in the CORS TR are formed as ``<name>-<type>``,
        # e.g. ``origin-request-header``.
        super(CORSRef, self).__init__('http://www.w3.org/TR/cors/',
                                      name, '-' + type)
84 | |||
85 | |||
#: Mapping from lowercase HTTP method name to :class:`DocRef` object which
#: maintains the URL which points to the section of the RFC which defines that
#: HTTP method.
METHOD_REFS = {
    'patch': IETFRef(5789, 2),
    'options': IETFRef(7231, '4.3.7'),
    'get': IETFRef(7231, '4.3.1'),
    'head': IETFRef(7231, '4.3.2'),
    'post': IETFRef(7231, '4.3.3'),
    'put': IETFRef(7231, '4.3.4'),
    'delete': IETFRef(7231, '4.3.5'),
    'trace': IETFRef(7231, '4.3.8'),
    'connect': IETFRef(7231, '4.3.6'),
    'copy': IETFRef(2518, 8.8),
    'any': ''  # documentation-only pseudo-method; no spec to link to
}


#: Mapping from HTTP header name to :class:`DocRef` object which
#: maintains the URL which points to the related section of the RFC.
HEADER_REFS = {
    'Accept': IETFRef(7231, '5.3.2'),
    'Accept-Charset': IETFRef(7231, '5.3.3'),
    'Accept-Encoding': IETFRef(7231, '5.3.4'),
    'Accept-Language': IETFRef(7231, '5.3.5'),
    'Accept-Ranges': IETFRef(7233, 2.3),
    'Age': IETFRef(7234, 5.1),
    'Allow': IETFRef(7231, '7.4.1'),
    'Authorization': IETFRef(7235, 4.2),
    'Cache-Control': IETFRef(7234, 5.2),
    'Connection': IETFRef(7230, 6.1),
    'Content-Encoding': IETFRef(7231, '3.1.2.2'),
    'Content-Language': IETFRef(7231, '3.1.3.2'),
    'Content-Length': IETFRef(7230, '3.3.2'),
    'Content-Location': IETFRef(7231, '3.1.4.2'),
    'Content-MD5': RFC2616Ref(14.15),  # removed
    'Content-Range': IETFRef(7233, 4.2),
    'Content-Type': IETFRef(7231, '3.1.1.5'),
    'Cookie': IETFRef(2109, '4.3.4'),  # also RFC6265 section 5.4
    'Date': IETFRef(7231, '7.1.1.2'),
    'Destination': IETFRef(2518, 9.3),
    'ETag': IETFRef(7232, 2.3),
    'Expect': IETFRef(7231, '5.1.1'),
    'Expires': IETFRef(7234, 5.3),
    # Fixed: RFC 7231 defines From in section 5.5.1; 5.5.2 is Referer.
    'From': IETFRef(7231, '5.5.1'),
    'Host': IETFRef(7230, 5.4),
    'If-Match': IETFRef(7232, 3.1),
    'If-Modified-Since': IETFRef(7232, 3.3),
    'If-None-Match': IETFRef(7232, 3.2),
    'If-Range': IETFRef(7233, 3.2),
    'If-Unmodified-Since': IETFRef(7232, 3.4),
    'Last-Event-ID': EventSourceRef('last-event-id'),
    'Last-Modified': IETFRef(7232, 2.2),
    'Link': IETFRef(5988, '5'),
    'Location': IETFRef(7231, '7.1.2'),
    'Max-Forwards': IETFRef(7231, '5.1.2'),
    'Pragma': IETFRef(7234, 5.4),
    'Proxy-Authenticate': IETFRef(7235, 4.3),
    'Proxy-Authorization': IETFRef(7235, 4.4),
    'Range': IETFRef(7233, 3.1),
    'Referer': IETFRef(7231, '5.5.2'),
    'Retry-After': IETFRef(7231, '7.1.3'),
    'Server': IETFRef(7231, '7.4.2'),
    'Set-Cookie': IETFRef(2109, '4.2.2'),
    'TE': IETFRef(7230, 4.3),
    'Trailer': IETFRef(7230, 4.4),
    'Transfer-Encoding': IETFRef(7230, '3.3.1'),
    'Upgrade': IETFRef(7230, 6.7),
    'User-Agent': IETFRef(7231, '5.5.3'),
    'Vary': IETFRef(7231, '7.1.4'),
    'Via': IETFRef(7230, '5.7.1'),
    'Warning': IETFRef(7234, 5.5),
    'WWW-Authenticate': IETFRef(7235, 4.1),
    'Access-Control-Allow-Origin': CORSRef('access-control-allow-origin',
                                           'response-header'),
    'Access-Control-Allow-Credentials': CORSRef('access-control-allow-credentials',
                                                'response-header'),
    'Access-Control-Expose-Headers': CORSRef('access-control-expose-headers',
                                             'response-header'),
    'Access-Control-Max-Age': CORSRef('access-control-max-age',
                                      'response-header'),
    'Access-Control-Allow-Methods': CORSRef('access-control-allow-methods',
                                            'response-header'),
    'Access-Control-Allow-Headers': CORSRef('access-control-allow-headers',
                                            'response-header'),
    'Origin': CORSRef('origin', 'request-header'),
    'Access-Control-Request-Method': CORSRef('access-control-request-method',
                                             'response-header'),
    'Access-Control-Request-Headers': CORSRef('access-control-request-headers',
                                              'response-header'),
}
177 | |||
178 | |||
#: Mapping from HTTP status code to its canonical reason phrase.
HTTP_STATUS_CODES = {
    100: 'Continue',
    101: 'Switching Protocols',
    102: 'Processing',
    200: 'OK',
    201: 'Created',
    202: 'Accepted',
    203: 'Non Authoritative Information',
    204: 'No Content',
    205: 'Reset Content',
    206: 'Partial Content',
    207: 'Multi Status',
    226: 'IM Used',  # see RFC 3229
    300: 'Multiple Choices',
    301: 'Moved Permanently',
    302: 'Found',
    303: 'See Other',
    304: 'Not Modified',
    305: 'Use Proxy',
    307: 'Temporary Redirect',
    308: 'Permanent Redirect',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',  # unused
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    406: 'Not Acceptable',
    407: 'Proxy Authentication Required',
    408: 'Request Timeout',
    409: 'Conflict',
    410: 'Gone',
    411: 'Length Required',
    412: 'Precondition Failed',
    413: 'Request Entity Too Large',
    414: 'Request URI Too Long',
    415: 'Unsupported Media Type',
    416: 'Requested Range Not Satisfiable',
    417: 'Expectation Failed',
    418: "I'm a teapot",  # see RFC 2324
    422: 'Unprocessable Entity',
    423: 'Locked',
    424: 'Failed Dependency',
    425: 'Too Early',  # RFC 8470
    426: 'Upgrade Required',
    429: 'Too Many Requests',
    449: 'Retry With',  # proprietary MS extension
    451: 'Unavailable For Legal Reasons',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Timeout',
    505: 'HTTP Version Not Supported',
    507: 'Insufficient Storage',
    510: 'Not Extended'
}

#: WebDAV status codes (RFC 4918).  The position in this list determines
#: which RFC section HTTPXRefStatusRole links to (section 11.<index + 1>).
WEBDAV_STATUS_CODES = [207, 422, 423, 424, 507]

#: Matches a ``(type:name)`` parameter marker inside a route signature;
#: the ``type`` part is optional.  The pattern contains no literal
#: whitespace, so re.VERBOSE is effectively inert here.
http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
                               re.VERBOSE)
241 | |||
242 | |||
def sort_by_method(entries):
    """Sort index entries into a canonical HTTP-method order.

    Each entry's first element starts with the method name (e.g.
    ``"GET /path"``).  Methods missing from the canonical order sort
    after all known ones; ``sorted`` is stable, so their relative order
    is preserved.
    """
    precedence = {m: i for i, m in enumerate(
        ('HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'PATCH',
         'OPTIONS', 'TRACE', 'CONNECT', 'COPY', 'ANY'))}

    def rank(entry):
        return precedence.get(entry[0].split(' ', 1)[0], 100)

    return sorted(entries, key=rank)
252 | |||
253 | |||
def http_resource_anchor(method, path):
    """Build the HTML anchor id for a documented resource.

    Route-syntax characters (``< > : /``) become dashes and curly braces
    are stripped, e.g. ``('GET', '/users/<id>')`` -> ``'get--users--id-'``.
    """
    slug = re.sub(r'[<>:/]', '-', path)
    slug = re.sub(r'[{}]', '', slug)
    return '%s-%s' % (method.lower(), slug)
257 | |||
258 | |||
class HTTPResource(ObjectDescription):
    """Base directive for documenting a single HTTP resource.

    Subclasses pin down :attr:`method`; the directive argument is the URL
    path, optionally containing ``(type:name)`` parameter markers parsed
    via :data:`http_sig_param_re`.
    """

    doc_field_types = [
        TypedField('parameter', label='Parameters',
                   names=('param', 'parameter', 'arg', 'argument'),
                   typerolename='obj', typenames=('paramtype', 'type')),
        TypedField('jsonparameter', label='JSON Parameters',
                   names=('jsonparameter', 'jsonparam', 'json'),
                   typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
        TypedField('requestjsonobject', label='Request JSON Object',
                   names=('reqjsonobj', 'reqjson', '<jsonobj', '<json'),
                   typerolename='obj', typenames=('reqjsonobj', '<jsonobj')),
        TypedField('requestjsonarray', label='Request JSON Array of Objects',
                   names=('reqjsonarr', '<jsonarr'),
                   typerolename='obj',
                   typenames=('reqjsonarrtype', '<jsonarrtype')),
        TypedField('responsejsonobject', label='Response JSON Object',
                   names=('resjsonobj', 'resjson', '>jsonobj', '>json'),
                   typerolename='obj', typenames=('resjsonobj', '>jsonobj')),
        TypedField('responsejsonarray', label='Response JSON Array of Objects',
                   names=('resjsonarr', '>jsonarr'),
                   typerolename='obj',
                   typenames=('resjsonarrtype', '>jsonarrtype')),
        TypedField('queryparameter', label='Query Parameters',
                   names=('queryparameter', 'queryparam', 'qparam', 'query'),
                   typerolename='obj',
                   typenames=('queryparamtype', 'querytype', 'qtype')),
        GroupedField('formparameter', label='Form Parameters',
                     names=('formparameter', 'formparam', 'fparam', 'form')),
        GroupedField('requestheader', label='Request Headers',
                     rolename='header',
                     names=('<header', 'reqheader', 'requestheader')),
        GroupedField('responseheader', label='Response Headers',
                     rolename='header',
                     names=('>header', 'resheader', 'responseheader')),
        GroupedField('statuscode', label='Status Codes',
                     rolename='statuscode',
                     names=('statuscode', 'status', 'code'))
    ]

    option_spec = {
        'deprecated': directives.flag,
        'noindex': directives.flag,
        'synopsis': lambda x: x,
    }

    # Overridden by each subclass with the lowercase HTTP verb.
    method = NotImplemented

    def handle_signature(self, sig, signode):
        """Render the signature line and return ``(fullname, method, path)``.

        Alternates literal path segments (desc_name) with parameter lists
        built from each ``(type:name)`` marker found in *sig*.
        """
        method = self.method.upper() + ' '
        signode += addnodes.desc_name(method, method)
        offset = 0
        path = None
        for match in http_sig_param_re.finditer(sig):
            # Literal path text preceding this parameter marker.
            path = sig[offset:match.start()]
            signode += addnodes.desc_name(path, path)
            params = addnodes.desc_parameterlist()
            typ = match.group('type')
            if typ:
                typ += ': '
                params += addnodes.desc_annotation(typ, typ)
            name = match.group('name')
            params += addnodes.desc_parameter(name, name)
            signode += params
            offset = match.end()
        if offset < len(sig):
            # Trailing literal path text after the last parameter.
            path = sig[offset:len(sig)]
            signode += addnodes.desc_name(path, path)
        assert path is not None, 'no matches for sig: %s' % sig
        fullname = self.method.upper() + ' ' + path
        signode['method'] = self.method
        signode['path'] = sig
        signode['fullname'] = fullname
        return (fullname, self.method, sig)

    def needs_arglist(self):
        # Parameters are embedded in the path; no separate argument list.
        return False

    def add_target_and_index(self, name_cls, sig, signode):
        """Register the anchor id and record the route in the domain data."""
        signode['ids'].append(http_resource_anchor(*name_cls[1:]))
        if 'noindex' not in self.options:
            self.env.domaindata['http'][self.method][sig] = (
                self.env.docname,
                self.options.get('synopsis', ''),
                'deprecated' in self.options)

    def get_index_text(self, modname, name):
        # Routes are listed through HTTPIndex, not the general index.
        return ''
347 | |||
348 | |||
# One trivial HTTPResource subclass per supported HTTP verb.  Each only
# sets ``method``, which handle_signature() and add_target_and_index()
# use to build the signature text and the domain-data key.


class HTTPOptions(HTTPResource):

    method = 'options'


class HTTPHead(HTTPResource):

    method = 'head'


class HTTPPatch(HTTPResource):

    method = 'patch'


class HTTPPost(HTTPResource):

    method = 'post'


class HTTPGet(HTTPResource):

    method = 'get'


class HTTPPut(HTTPResource):

    method = 'put'


class HTTPDelete(HTTPResource):

    method = 'delete'


class HTTPTrace(HTTPResource):

    method = 'trace'


class HTTPConnect(HTTPResource):

    method = 'connect'


class HTTPCopy(HTTPResource):

    method = 'copy'


class HTTPAny(HTTPResource):

    method = 'any'
403 | |||
class HTTPXRefRole(XRefRole):
    """Cross-reference role for one HTTP method (e.g. ``:http:get:``)."""

    def __init__(self, method, **kwargs):
        XRefRole.__init__(self, **kwargs)
        self.method = method

    def process_link(self, env, refnode, has_explicit_title, title, target):
        """Prefix the title with the upper-cased method name unless the
        author supplied an explicit title."""
        if has_explicit_title:
            return title, target
        return '%s %s' % (self.method.upper(), title), target
414 | |||
415 | |||
class HTTPXRefMethodRole(XRefRole):
    """Role linking an HTTP method name to its defining RFC section."""

    def result_nodes(self, document, env, node, is_ref):
        """Return ``(nodes, messages)`` for a ``:http:method:`` reference."""
        method = node[0][0].lower()
        rawsource = node[0].rawsource
        config = env.domains['http'].env.config
        if method not in METHOD_REFS:
            # Unknown method: plain emphasis in lax mode, error otherwise.
            if not config['http_strict_mode']:
                return [nodes.emphasis(method, method)], []
            reporter = document.reporter
            msg = reporter.error('%s is not valid HTTP method' % method,
                                 line=node.line)
            prb = nodes.problematic(method, method)
            return [prb], [msg]
        url = str(METHOD_REFS[method])
        if not url:
            # 'any' maps to an empty string: nothing to link to.
            return [nodes.emphasis(method, method)], []
        node = nodes.reference(rawsource, method.upper(), refuri=url)
        return [node], []
435 | |||
436 | |||
class HTTPXRefStatusRole(XRefRole):
    """Role linking an HTTP status code to its specification.

    Accepts either a bare code (``404``) or code plus reason phrase
    (``404 Not Found``); a supplied phrase must match the canonical one
    case-insensitively.  In strict mode malformed or unknown codes
    produce error diagnostics; otherwise they render as plain emphasis.
    """

    def result_nodes(self, document, env, node, is_ref):
        def get_code_status(text):
            """Parse *text* into ``(code, status)``; ``None`` marks failure."""
            if text.isdigit():
                code = int(text)
                return code, HTTP_STATUS_CODES.get(code)
            else:
                try:
                    # Fix: pass maxsplit by keyword — the positional form
                    # is deprecated since Python 3.13.
                    code, status = re.split(r'\s', text.strip(), maxsplit=1)
                    code = int(code)
                except ValueError:
                    return None, None
                known_status = HTTP_STATUS_CODES.get(code)
                if known_status is None:
                    return code, None
                elif known_status.lower() != status.lower():
                    return code, None
                else:
                    return code, status

        def report_unknown_code():
            if not config['http_strict_mode']:
                return [nodes.emphasis(text, text)], []
            reporter = document.reporter
            msg = reporter.error('%d is unknown HTTP status code' % code,
                                 line=node.line)
            prb = nodes.problematic(text, text)
            return [prb], [msg]

        def report_invalid_code():
            if not config['http_strict_mode']:
                return [nodes.emphasis(text, text)], []
            reporter = document.reporter
            msg = reporter.error(
                'HTTP status code must be an integer (e.g. `200`) or '
                'start with an integer (e.g. `200 OK`); %r is invalid' %
                text,
                line=node.line
            )
            prb = nodes.problematic(text, text)
            return [prb], [msg]

        text = node[0][0]
        rawsource = node[0].rawsource
        config = env.domains['http'].env.config

        code, status = get_code_status(text)
        if code is None:
            return report_invalid_code()
        elif status is None:
            return report_unknown_code()
        # Codes defined outside RFC 2616 get their own spec URLs.
        elif code == 226:
            url = 'http://www.ietf.org/rfc/rfc3229.txt'
        elif code == 418:
            url = 'http://www.ietf.org/rfc/rfc2324.txt'
        elif code == 429:
            url = 'http://tools.ietf.org/html/rfc6585#section-4'
        elif code == 449:
            url = 'http://msdn.microsoft.com/en-us/library/dd891478(v=prot.10).aspx'
        elif code == 451:
            url = 'http://www.ietf.org/rfc/rfc7725.txt'
        elif code in WEBDAV_STATUS_CODES:
            # RFC 4918 lists its codes in section 11 in the same order as
            # WEBDAV_STATUS_CODES, hence the index arithmetic.
            url = 'http://tools.ietf.org/html/rfc4918#section-11.%d' % (WEBDAV_STATUS_CODES.index(code) + 1)
        elif code in HTTP_STATUS_CODES:
            url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
                  '#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
        else:
            url = ''
        node = nodes.reference(rawsource, '%d %s' % (code, status), refuri=url)
        return [node], []
508 | |||
509 | |||
class HTTPXRefHeaderRole(XRefRole):
    """Role linking an HTTP header name to its specification.

    Headers absent from :data:`HEADER_REFS` are retried in canonical
    ``Title-Case`` form; headers unknown under either spelling render as
    plain emphasis instead of a link.
    """

    def result_nodes(self, document, env, node, is_ref):
        header = node[0][0]
        rawsource = node[0].rawsource
        if header not in HEADER_REFS:
            _header = '-'.join(map(lambda i: i.title(), header.split('-')))
            if _header not in HEADER_REFS:
                return [nodes.emphasis(header, header)], []
            # Bug fix: continue with the canonicalized spelling.  The
            # original looked up HEADER_REFS[header] with the raw (e.g.
            # lowercase) name, which is not a key, and raised KeyError.
            header = _header
        url = str(HEADER_REFS[header])
        node = nodes.reference(rawsource, header, refuri=url)
        return [node], []
522 | |||
523 | |||
class HTTPIndex(Index):
    """Domain index listing every documented route, grouped by path prefix."""

    name = 'routingtable'
    localname = 'HTTP Routing Table'
    shortname = 'routing table'

    def __init__(self, *args, **kwargs):
        super(HTTPIndex, self).__init__(*args, **kwargs)

        # Configured prefixes (split into non-empty path components) that
        # must not form a grouping bucket by themselves; longest first.
        self.ignore = [
            [l for l in x.split('/') if l]
            for x in self.domain.env.config['http_index_ignore_prefixes']]
        self.ignore.sort(reverse=True)

        # During HTML generation these values pick from class,
        # not from instance so we have a little hack the system
        cls = self.__class__
        cls.shortname = self.domain.env.config['http_index_shortname']
        cls.localname = self.domain.env.config['http_index_localname']

    def grouping_prefix(self, path):
        """Return the bucket heading for *path*, honouring ignored prefixes.

        An ignored prefix extends the bucket by one extra path component;
        otherwise the first component alone is the bucket.
        """
        letters = [x for x in path.split('/') if x]
        for prefix in self.ignore:
            if letters[:len(prefix)] == prefix:
                return '/' + '/'.join(letters[:len(prefix) + 1])
        return '/%s' % (letters[0] if letters else '',)

    def generate(self, docnames=None):
        """Build the ``(content, collapse)`` pair Sphinx expects of an index."""
        content = {}
        items = ((method, path, info)
                 for method, routes in self.domain.routes.items()
                 for path, info in routes.items())
        items = sorted(items, key=lambda item: item[1])
        for method, path, info in items:
            entries = content.setdefault(self.grouping_prefix(path), [])
            # IndexEntry layout: name, subtype, docname, anchor,
            # extra, qualifier ('Deprecated' flag), description.
            entries.append([
                method.upper() + ' ' + path, 0, info[0],
                http_resource_anchor(method, path),
                '', 'Deprecated' if info[2] else '', info[1]
            ])
        items = sorted(
            (path, sort_by_method(entries))
            for path, entries in content.items()
        )
        return (items, True)
569 | |||
570 | |||
class HTTPDomain(Domain):
    """HTTP domain."""

    name = 'http'
    label = 'HTTP'

    # One object type per HTTP verb; all also reachable via the 'obj' role.
    object_types = {
        'options': ObjType('options', 'options', 'obj'),
        'head': ObjType('head', 'head', 'obj'),
        'post': ObjType('post', 'post', 'obj'),
        'get': ObjType('get', 'get', 'obj'),
        'put': ObjType('put', 'put', 'obj'),
        'patch': ObjType('patch', 'patch', 'obj'),
        'delete': ObjType('delete', 'delete', 'obj'),
        'trace': ObjType('trace', 'trace', 'obj'),
        'connect': ObjType('connect', 'connect', 'obj'),
        'copy': ObjType('copy', 'copy', 'obj'),
        'any': ObjType('any', 'any', 'obj')
    }

    directives = {
        'options': HTTPOptions,
        'head': HTTPHead,
        'post': HTTPPost,
        'get': HTTPGet,
        'put': HTTPPut,
        'patch': HTTPPatch,
        'delete': HTTPDelete,
        'trace': HTTPTrace,
        'connect': HTTPConnect,
        'copy': HTTPCopy,
        'any': HTTPAny
    }

    roles = {
        'options': HTTPXRefRole('options'),
        'head': HTTPXRefRole('head'),
        'post': HTTPXRefRole('post'),
        'get': HTTPXRefRole('get'),
        'put': HTTPXRefRole('put'),
        'patch': HTTPXRefRole('patch'),
        'delete': HTTPXRefRole('delete'),
        'trace': HTTPXRefRole('trace'),
        'connect': HTTPXRefRole('connect'),
        'copy': HTTPXRefRole('copy'),
        'any': HTTPXRefRole('any'),
        'statuscode': HTTPXRefStatusRole(),
        'method': HTTPXRefMethodRole(),
        'header': HTTPXRefHeaderRole()
    }

    # Per-method route tables; values are
    # path: (docname, synopsis, deprecated) as written by
    # HTTPResource.add_target_and_index().
    initial_data = {
        'options': {},  # path: (docname, synopsis)
        'head': {},
        'post': {},
        'get': {},
        'put': {},
        'patch': {},
        'delete': {},
        'trace': {},
        'connect': {},
        'copy': {},
        'any': {}
    }

    indices = []

    @property
    def routes(self):
        """Map each method name to its ``{path: info}`` route table."""
        return dict((key, self.data[key]) for key in self.object_types)

    def clear_doc(self, docname):
        """Drop every route documented in *docname* (incremental rebuilds)."""
        for typ, routes in self.routes.items():
            for path, info in list(routes.items()):
                if info[0] == docname:
                    del routes[path]

    def resolve_xref(self, env, fromdocname, builder, typ, target,
                     node, contnode):
        """Resolve an ``http:`` cross-reference.

        Known targets link to the documented route; unknown ones fall back
        to the role's own rendering (e.g. an RFC link), and a resulting
        pending_xref means resolution failed and is reported as a warning.
        """
        try:
            info = self.data[str(typ)][target]
        except KeyError:
            text = contnode.rawsource
            role = self.roles.get(typ)
            if role is None:
                return None

            # env.get_doctree() is expensive (pickle.load); cache the
            # lookup per source document (see _doctree_cache note above).
            if fromdocname not in _doctree_cache:
                _doctree_cache[fromdocname] = env.get_doctree(fromdocname)
            doctree = _doctree_cache[fromdocname]

            resnode = role.result_nodes(doctree, env, node, None)[0][0]
            if isinstance(resnode, addnodes.pending_xref):
                text = node[0][0]
                reporter = doctree.reporter
                reporter.warning('Cannot resolve reference to %r' % text,
                                 line=node.line)
                return None
            return resnode
        else:
            anchor = http_resource_anchor(typ, target)
            title = typ.upper() + ' ' + target
            return make_refnode(builder, fromdocname, info[0], anchor,
                                contnode, title)

    def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
        """Resolve the pending_xref *node* with the given *target*.

        The reference comes from an "any" or similar role, which means that Sphinx
        don't know the type.

        For now sphinxcontrib-httpdomain doesn't resolve any xref nodes.

        :return:
            list of tuples ``('domain:role', newnode)``, where ``'domain:role'``
            is the name of a role that could have created the same reference,
        """
        return []

    def get_objects(self):
        """Yield ``(name, dispname, type, docname, anchor, priority)`` tuples."""
        for method, routes in self.routes.items():
            for path, info in routes.items():
                anchor = http_resource_anchor(method, path)
                yield (path, path, method, info[0], anchor, 1)
695 | |||
696 | |||
class HTTPLexer(RegexLexer):
    """Lexer for HTTP sessions."""

    name = 'HTTP'
    aliases = ['http']

    # DOTALL so the 'content' state's ``.+`` rule swallows the whole
    # remaining body, newlines included.
    flags = re.DOTALL

    def header_callback(self, match):
        """Tokenize a ``Name: value`` header line.

        Side effect: remembers the media type from a Content-Type header
        (parameters after ';' stripped) so content_callback can pick a
        matching body lexer later.
        """
        if match.group(1).lower() == 'content-type':
            content_type = match.group(5).strip()
            if ';' in content_type:
                content_type = content_type[:content_type.find(';')].strip()
            self.content_type = content_type
        yield match.start(1), Name.Attribute, match.group(1)
        yield match.start(2), Text, match.group(2)
        yield match.start(3), Operator, match.group(3)
        yield match.start(4), Text, match.group(4)
        yield match.start(5), Literal, match.group(5)
        yield match.start(6), Text, match.group(6)

    def continuous_header_callback(self, match):
        """Tokenize a folded (continuation) header line."""
        yield match.start(1), Text, match.group(1)
        yield match.start(2), Literal, match.group(2)
        yield match.start(3), Text, match.group(3)

    def content_callback(self, match):
        """Highlight the message body using the lexer for the previously
        seen Content-Type, falling back to plain text."""
        content_type = getattr(self, 'content_type', None)
        content = match.group()
        offset = match.start()
        if content_type:
            from pygments.lexers import get_lexer_for_mimetype
            try:
                lexer = get_lexer_for_mimetype(content_type)
            except ClassNotFound:
                # No lexer for this media type: fall through to plain text.
                pass
            else:
                for idx, token, value in lexer.get_tokens_unprocessed(content):
                    yield offset + idx, token, value
                return
        yield offset, Text, content

    tokens = {
        'root': [
            # Request line: METHOD SP path SP HTTP/1.x
            (r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE|COPY)( +)([^ ]+)( +)'
             r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
             bygroups(Name.Function, Text, Name.Namespace, Text,
                      Keyword.Reserved, Operator, Number, Text),
             'headers'),
            # Status line: HTTP/1.x code reason-phrase
            (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
             bygroups(Keyword.Reserved, Operator, Number, Text, Number,
                      Text, Name.Exception, Text),
             'headers'),
        ],
        'headers': [
            (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
            (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
            # A blank line terminates the header block.
            (r'\r?\n', Text, 'content')
        ],
        'content': [
            (r'.+', content_callback)
        ]
    }
760 | |||
761 | |||
def setup(app):
    """Sphinx extension entry point: register the HTTP domain, the HTTP
    Pygments lexer (when Pygments does not already ship one), and the
    extension's configuration values."""
    app.add_domain(HTTPDomain)

    try:
        get_lexer_by_name('http')
    except ClassNotFound:
        # NOTE(review): newer Sphinx versions expect the lexer *class*
        # rather than an instance here — confirm against the Sphinx
        # version this builds with.
        app.add_lexer('http', HTTPLexer())
    app.add_config_value('http_index_ignore_prefixes', [], None)
    app.add_config_value('http_index_shortname', 'routing table', True)
    app.add_config_value('http_index_localname', 'HTTP Routing Table', True)
    app.add_config_value('http_strict_mode', True, None)
    app.add_config_value('http_headers_ignore_prefixes', ['X-'], None)
diff --git a/_exts/typescriptdomain.py b/_exts/typescriptdomain.py new file mode 100644 index 0000000..50ffdc0 --- /dev/null +++ b/_exts/typescriptdomain.py | |||
@@ -0,0 +1,587 @@ | |||
1 | """ | ||
2 | TypeScript domain. | ||
3 | |||
4 | :copyright: Copyright 2019 by Taler Systems SA | ||
5 | :license: LGPLv3+ | ||
6 | :author: Florian Dold | ||
7 | """ | ||
8 | |||
9 | import re | ||
10 | |||
11 | from pathlib import Path | ||
12 | |||
13 | from docutils import nodes | ||
14 | from typing import List, Optional, Iterable, Dict, Tuple | ||
15 | from typing import cast | ||
16 | |||
17 | from pygments.lexers import get_lexer_by_name | ||
18 | from pygments.filter import Filter | ||
19 | from pygments.token import Literal, Text, Operator, Keyword, Name, Number | ||
20 | from pygments.token import Comment, Token, _TokenType | ||
21 | from pygments.token import * | ||
22 | from pygments.lexer import RegexLexer, bygroups, include | ||
23 | from pygments.formatters import HtmlFormatter | ||
24 | |||
25 | from docutils import nodes | ||
26 | from docutils.nodes import Element, Node | ||
27 | |||
28 | from sphinx.roles import XRefRole | ||
29 | from sphinx.domains import Domain, ObjType, Index | ||
30 | from sphinx.directives import directives | ||
31 | from sphinx.util.docutils import SphinxDirective | ||
32 | from sphinx.util.nodes import make_refnode | ||
33 | from sphinx.util import logging | ||
34 | from sphinx.highlighting import PygmentsBridge | ||
35 | from sphinx.builders.html import StandaloneHTMLBuilder | ||
36 | from sphinx.pygments_styles import SphinxStyle | ||
37 | |||
38 | logger = logging.getLogger(__name__) | ||
39 | |||
40 | |||
class TypeScriptDefinition(SphinxDirective):
    """
    Directive for a TypeScript definition code block with special
    highlighting or line numbering settings.

    The single required argument is the type name being defined; the
    directive registers it with the ``ts`` domain so ``:ts:type:``
    cross references can link to the rendered literal block.
    """

    has_content = True
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        "force": directives.flag,
        "linenos": directives.flag,
        "dedent": int,
        "lineno-start": int,
        "emphasize-lines": directives.unchanged_required,
        "caption": directives.unchanged_required,
        "class": directives.class_option,
    }

    def run(self) -> List[Node]:
        # Imported locally: these Sphinx helpers were never imported at
        # module level, so the "emphasize-lines", "dedent" and "caption"
        # options previously failed with NameError at runtime.
        from sphinx.locale import __
        from sphinx.util import parselinenos
        from sphinx.directives.code import container_wrapper, dedent_lines

        document = self.state.document
        code = "\n".join(self.content)
        location = self.state_machine.get_source_and_line(self.lineno)

        linespec = self.options.get("emphasize-lines")
        if linespec:
            try:
                nlines = len(self.content)
                hl_lines = parselinenos(linespec, nlines)
                if any(i >= nlines for i in hl_lines):
                    logger.warning(
                        __("line number spec is out of range(1-%d): %r")
                        % (nlines, self.options["emphasize-lines"]),
                        location=location,
                    )

                # parselinenos() is zero-based; highlight_args wants 1-based.
                hl_lines = [x + 1 for x in hl_lines if x < nlines]
            except ValueError as err:
                return [document.reporter.warning(err, line=self.lineno)]
        else:
            hl_lines = None

        if "dedent" in self.options:
            location = self.state_machine.get_source_and_line(self.lineno)
            lines = code.split("\n")
            lines = dedent_lines(lines, self.options["dedent"], location=location)
            code = "\n".join(lines)

        literal = nodes.literal_block(code, code)  # type: Element
        if "linenos" in self.options or "lineno-start" in self.options:
            literal["linenos"] = True
        literal["classes"] += self.options.get("class", [])
        literal["force"] = "force" in self.options
        # Always highlight with the cross-referencing "tsref" lexer
        # registered in setup().
        literal["language"] = "tsref"
        extra_args = literal["highlight_args"] = {}
        if hl_lines is not None:
            extra_args["hl_lines"] = hl_lines
        if "lineno-start" in self.options:
            extra_args["linenostart"] = self.options["lineno-start"]
        self.set_source_info(literal)

        caption = self.options.get("caption")
        if caption:
            try:
                literal = container_wrapper(self, literal, caption)
            except ValueError as exc:
                return [document.reporter.warning(exc, line=self.lineno)]

        # Register the definition under a stable anchor so _get_value()
        # in LinkingHtmlFormatter can link back to it.
        tsid = "tsref-type-" + self.arguments[0]
        literal["ids"].append(tsid)

        tsname = self.arguments[0]
        ts = self.env.get_domain("ts")
        ts.add_object("type", tsname, self.env.docname, tsid)

        return [literal]
119 | |||
class TypeScriptDomain(Domain):
    """TypeScript domain.

    Resolves ``ts:type`` cross references against the anchors registered
    by the ``ts:def`` directive.
    """

    name = "ts"
    label = "TypeScript"

    directives = {
        "def": TypeScriptDefinition,
    }

    roles = {
        "type": XRefRole(
            lowercase=False, warn_dangling=True, innernodeclass=nodes.inline
        ),
    }

    dangling_warnings = {
        "type": "undefined TypeScript type: %(target)s",
    }

    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        """Resolve an explicit typed reference; return None if unknown."""
        try:
            info = self.objects[(str(typ), str(target))]
        except KeyError:
            # Logger.warn() is deprecated; warning() is the supported API.
            logger.warning("type {}/{} not found".format(typ, target))
            return None
        else:
            # Anchor must match the id generated by TypeScriptDefinition.run().
            anchor = "tsref-type-{}".format(str(target))
            title = typ.upper() + " " + target
            return make_refnode(builder, fromdocname, info[0], anchor, contnode, title)

    def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
        """Resolve the pending_xref *node* with the given *target*.

        The reference comes from an "any" or similar role, which means that
        Sphinx doesn't know the type.  Only "type" objects are considered.

        :return:
            list of tuples ``('domain:role', newnode)``, where ``'domain:role'``
            is the name of a role that could have created the same reference,
        """
        ret = []
        try:
            info = self.objects[("type", str(target))]
        except KeyError:
            pass
        else:
            anchor = "tsref-type-{}".format(str(target))
            title = "TYPE" + " " + target
            node = make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
            ret.append(("ts:type", node))
        return ret

    @property
    def objects(self) -> Dict[Tuple[str, str], Tuple[str, str]]:
        # (objtype, name) -> (docname, labelid); stored in self.data so it
        # participates in the Sphinx environment's persistence.
        return self.data.setdefault("objects", {})

    def add_object(self, objtype: str, name: str, docname: str, labelid: str) -> None:
        """Record that *name* of kind *objtype* is anchored at *labelid* in *docname*."""
        self.objects[objtype, name] = (docname, labelid)
184 | |||
class BetterTypeScriptLexer(RegexLexer):
    """
    For `TypeScript <https://www.typescriptlang.org/>`_ source code.

    Compared to a stock JavaScript-style lexer this adds a "typeexp" state
    so that type annotations (after ``:`` and after ``type X =``) are
    emitted as Keyword.Type tokens, which LinkFilter later turns into
    cross references.
    """

    name = "TypeScript"
    aliases = ["ts"]
    filenames = ["*.ts"]
    mimetypes = ["text/x-typescript"]

    flags = re.DOTALL
    tokens = {
        "commentsandwhitespace": [
            (r"\s+", Text),
            (r"<!--", Comment),
            (r"//.*?\n", Comment.Single),
            (r"/\*.*?\*/", Comment.Multiline),
        ],
        # Entered wherever a '/' must be a regex literal, not division.
        "slashstartsregex": [
            include("commentsandwhitespace"),
            (
                r"/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/" r"([gim]+\b|\B)",
                String.Regex,
                "#pop",
            ),
            (r"(?=/)", Text, ("#pop", "badregex")),
            # NOTE(review): empty-pattern rule; modern Pygments prefers
            # default("#pop") here -- confirm against the Pygments version in use.
            (r"", Text, "#pop"),
        ],
        "badregex": [(r"\n", Text, "#pop")],
        # Type expression after ':' or 'type X =': identifiers become
        # Keyword.Type until a newline or ';' ends the annotation.
        "typeexp": [
            (r"[a-zA-Z0-9_?.$]+", Keyword.Type),
            (r"\s+", Text),
            (r"[|]", Text),
            (r"\n", Text, "#pop"),
            (r";", Text, "#pop"),
            # NOTE(review): empty-pattern rule, see remark above.
            (r"", Text, "#pop"),
        ],
        "root": [
            (r"^(?=\s|/|<!--)", Text, "slashstartsregex"),
            include("commentsandwhitespace"),
            (
                r"\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|"
                r"(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?",
                Operator,
                "slashstartsregex",
            ),
            (r"[{(\[;,]", Punctuation, "slashstartsregex"),
            (r"[})\].]", Punctuation),
            (
                r"(for|in|while|do|break|return|continue|switch|case|default|if|else|"
                r"throw|try|catch|finally|new|delete|typeof|instanceof|void|"
                r"this)\b",
                Keyword,
                "slashstartsregex",
            ),
            (
                r"(var|let|const|with|function)\b",
                Keyword.Declaration,
                "slashstartsregex",
            ),
            (
                r"(abstract|boolean|byte|char|class|const|debugger|double|enum|export|"
                r"extends|final|float|goto|implements|import|int|interface|long|native|"
                r"package|private|protected|public|short|static|super|synchronized|throws|"
                r"transient|volatile)\b",
                Keyword.Reserved,
            ),
            (r"(true|false|null|NaN|Infinity|undefined)\b", Keyword.Constant),
            (
                r"(Array|Boolean|Date|Error|Function|Math|netscape|"
                r"Number|Object|Packages|RegExp|String|sun|decodeURI|"
                r"decodeURIComponent|encodeURI|encodeURIComponent|"
                r"Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|"
                r"window)\b",
                Name.Builtin,
            ),
            # Match stuff like: module name {...}
            (
                r"\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)",
                bygroups(Keyword.Reserved, Text, Name.Other, Text),
                "slashstartsregex",
            ),
            # Match variable type keywords
            (r"\b(string|bool|number)\b", Keyword.Type),
            # Match stuff like: constructor
            (r"\b(constructor|declare|interface|as|AS)\b", Keyword.Reserved),
            # Match stuff like: super(argument, list)
            # NOTE(review): the pattern has three groups but bygroups() gets
            # only two actions -- the argument-list text appears to be
            # dropped from the output; confirm intended behavior.
            (
                r"(super)(\s*)\(([a-zA-Z0-9,_?.$\s]+\s*)\)",
                bygroups(Keyword.Reserved, Text),
                "slashstartsregex",
            ),
            # Match stuff like: function() {...}
            (r"([a-zA-Z_?.$][\w?.$]*)\(\) \{", Name.Other, "slashstartsregex"),
            # Match stuff like: (function: return type)
            (
                r"([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)",
                bygroups(Name.Other, Text),
                "typeexp",
            ),
            # Match stuff like: type Foo = Bar | Baz
            (
                r"\b(type)(\s*)([a-zA-Z0-9_?.$]+)(\s*)(=)(\s*)",
                bygroups(Keyword.Reserved, Text, Name.Other, Text, Operator, Text),
                "typeexp",
            ),
            (r"[$a-zA-Z_][a-zA-Z0-9_]*", Name.Other),
            (r"[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?", Number.Float),
            (r"0x[0-9a-fA-F]+", Number.Hex),
            (r"[0-9]+", Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ],
    }
299 | |||
300 | |||
# Map from token id() to (token, props-dict).
# Properties can't be added to tokens directly
# since they derive from Python's tuple; keeping the token object itself
# in the value pins its id() for the lifetime of the table.
token_props = {}
305 | |||
306 | |||
class LinkFilter(Filter):
    """Pygments filter that annotates tokens for cross-linking.

    Type identifiers and RST-style references inside comments are copied
    (via copy_token) and tagged with properties (xref target, caption,
    literal flag) through tok_setprop(); LinkingHtmlFormatter later reads
    those properties to emit hyperlinks.
    """

    def __init__(self, app, **options):
        # The Sphinx application; kept for potential later use.
        self.app = app
        Filter.__init__(self, **options)

    def _filter_one_literal(self, ttype, value):
        # Split ``literal`` spans out of a comment chunk, marking them
        # with is_literal so the formatter renders them bold.
        last = 0
        for m in re.finditer(literal_reg, value):
            pre = value[last : m.start()]
            if pre:
                yield ttype, pre
            t = copy_token(ttype)
            tok_setprop(t, "is_literal", True)
            # Yield only the text between the double backticks.
            yield t, m.group(1)
            last = m.end()
        post = value[last:]
        if post:
            yield ttype, post

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype in Token.Keyword.Type:
                # Type identifiers become xref candidates pointing at
                # their own (stripped) text.
                t = copy_token(ttype)
                tok_setprop(t, "xref", value.strip())
                tok_setprop(t, "is_identifier", True)
                yield t, value
            elif ttype in Token.Comment:
                # Scan comments for `caption <target>` style links; the
                # stretches between links are still checked for literals.
                last = 0
                for m in re.finditer(link_reg, value):
                    pre = value[last : m.start()]
                    if pre:
                        yield from self._filter_one_literal(ttype, pre)
                    t = copy_token(ttype)
                    x1, x2 = m.groups()
                    x0 = m.group(0)
                    if x2 is None:
                        # `target` form: caption and target coincide.
                        caption = x1.strip()
                        xref = x1.strip()
                    else:
                        # `caption <target>` form.
                        caption = x1.strip()
                        xref = x2.strip()
                    tok_setprop(t, "xref", xref)
                    tok_setprop(t, "caption", caption)
                    if x0.endswith("_"):
                        # Trailing underscore is flagged; the formatter
                        # warns that this RST syntax is unsupported here.
                        tok_setprop(t, "trailing_underscore", True)
                    yield t, m.group(1)
                    last = m.end()
                post = value[last:]
                if post:
                    yield from self._filter_one_literal(ttype, post)
            else:
                yield ttype, value
360 | |||
# str.translate() table used to HTML-escape token text before it is
# embedded in the formatter output (see LinkingHtmlFormatter._format_lines).
# The values must be the HTML entities; a mapping of each character to
# itself would make the escaping a no-op and allow markup injection.
_escape_html_table = {
    ord("&"): u"&amp;",
    ord("<"): u"&lt;",
    ord(">"): u"&gt;",
    ord('"'): u"&quot;",
    ord("'"): u"&#39;",
}
368 | |||
369 | |||
class LinkingHtmlFormatter(HtmlFormatter):
    """HtmlFormatter that renders tokens annotated by LinkFilter as links.

    Xref resolution order: ts-domain types, std-domain labels, std-domain
    anonymous labels.  Unresolved xrefs emit a build warning with the
    location recorded on the bridge.
    """

    def __init__(self, **kwargs):
        # _builder/_bridge are passed in through formatter_args by
        # MyPygmentsBridge.
        super(LinkingHtmlFormatter, self).__init__(**kwargs)
        self._builder = kwargs["_builder"]
        self._bridge = kwargs["_bridge"]

    def _get_value(self, value, tok):
        """Return the (possibly hyperlinked) HTML for one token's text."""
        xref = tok_getprop(tok, "xref")
        caption = tok_getprop(tok, "caption")

        # ``literal`` spans: bold, never linked.
        if tok_getprop(tok, "is_literal"):
            return '<span style="font-weight: bolder">%s</span>' % (value,)

        if tok_getprop(tok, "trailing_underscore"):
            # Logger.warn() is deprecated; warning() is the supported API.
            logger.warning(
                "{}:{}: code block contains xref to '{}' with unsupported trailing underscore".format(
                    self._bridge.path, self._bridge.line, xref
                )
            )

        if tok_getprop(tok, "is_identifier"):
            # Skip identifiers that can't be (or never need to be) linked:
            # string literals, numbers, and builtin TypeScript types.
            if xref.startswith('"'):
                return value
            if re.match("^[0-9]+$", xref) is not None:
                return value
            if xref in (
                "number",
                "object",
                "string",
                "boolean",
                "any",
                "true",
                "false",
                "null",
                "undefined",
                "Array",
                "unknown",
            ):
                return value

        if self._bridge.docname is None:
            return value
        if xref is None:
            return value
        content = caption if caption is not None else value

        # 1) ts-domain type anchors registered by TypeScriptDefinition.
        ts = self._builder.env.get_domain("ts")
        r1 = ts.objects.get(("type", xref), None)
        if r1 is not None:
            rel_uri = (
                self._builder.get_relative_uri(self._bridge.docname, r1[0])
                + "#"
                + r1[1]
            )
            return (
                '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
                % (rel_uri, content)
            )

        # 2) std-domain explicit labels, 3) anonymous labels.
        std = self._builder.env.get_domain("std")
        r2 = std.labels.get(xref.lower(), None)
        if r2 is not None:
            rel_uri = (
                self._builder.get_relative_uri(self._bridge.docname, r2[0])
                + "#"
                + r2[1]
            )
            return (
                '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
                % (rel_uri, content)
            )
        r3 = std.anonlabels.get(xref.lower(), None)
        if r3 is not None:
            rel_uri = (
                self._builder.get_relative_uri(self._bridge.docname, r3[0])
                + "#"
                + r3[1]
            )
            return (
                '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
                % (rel_uri, content)
            )

        logger.warning(
            "{}:{}: code block contains unresolved xref '{}'".format(
                self._bridge.path, self._bridge.line, xref
            )
        )

        return value

    def _fmt(self, value, tok):
        """Wrap *value* in the CSS class span for *tok*, if any."""
        cls = self._get_css_class(tok)
        value = self._get_value(value, tok)
        if cls is None or cls == "":
            return value
        return '<span class="%s">%s</span>' % (cls, value)

    def _format_lines(self, tokensource):
        """
        Just format the tokens, without any wrapping tags.
        Yield individual lines.
        """
        lsep = self.lineseparator
        escape_table = _escape_html_table

        line = ""
        for ttype, value in tokensource:
            # Escape first, then split: a token's text may span lines.
            parts = value.translate(escape_table).split("\n")

            if len(parts) == 0:
                # empty token, usually should not happen
                pass
            elif len(parts) == 1:
                # no newline before or after token
                line += self._fmt(parts[0], ttype)
            else:
                line += self._fmt(parts[0], ttype)
                yield 1, line + lsep
                for part in parts[1:-1]:
                    yield 1, self._fmt(part, ttype) + lsep
                line = self._fmt(parts[-1], ttype)

        if line:
            yield 1, line + lsep
496 | |||
497 | |||
class MyPygmentsBridge(PygmentsBridge):
    """PygmentsBridge that records the location of the block being highlighted.

    LinkingHtmlFormatter needs the current document (for relative URIs)
    and the source position (for warnings), so highlight_block() stashes
    path/line/docname on the bridge before delegating to the base class.
    """

    def __init__(self, builder, trim_doctest_flags):
        # Deliberately does not call super().__init__(): the attributes are
        # re-created here so that LinkingHtmlFormatter and the
        # _builder/_bridge back-references can be injected into
        # formatter_args.
        self.dest = "html"
        self.trim_doctest_flags = trim_doctest_flags
        self.formatter_args = {
            "style": SphinxStyle,
            "_builder": builder,
            "_bridge": self,
        }
        self.formatter = LinkingHtmlFormatter
        self.builder = builder
        # Location of the block currently being highlighted; None until
        # highlight_block() is first called.
        self.path = None
        self.line = None
        self.docname = None

    def highlight_block(
        self, source, lang, opts=None, force=False, location=None, **kwargs
    ):
        # Sphinx passes either a (docname, line) tuple or a docutils node;
        # normalize both into path/line/docname for the formatter.
        if isinstance(location, tuple):
            docname, line = location
            self.line = line
            self.path = self.builder.env.doc2path(docname)
            self.docname = docname
        elif isinstance(location, Element):
            self.line = location.line
            self.path = location.source
            self.docname = self.builder.env.path2doc(self.path)
        return super().highlight_block(source, lang, opts, force, location, **kwargs)
526 | |||
527 | |||
class MyHtmlBuilder(StandaloneHTMLBuilder):
    """HTML builder that installs the cross-linking Pygments bridge."""

    name = "html-linked"

    def init_highlighter(self):
        # Unlike the base implementation we do not consult
        # config.pygments_style or the theme here: MyPygmentsBridge
        # hard-codes SphinxStyle in its formatter args, so the style
        # computation the base class performs would be dead code.
        self.highlighter = MyPygmentsBridge(self, self.config.trim_doctest_flags)
        self.dark_highlighter = None
540 | |||
541 | |||
def get_annotation(tok, key):
    """Return the annotation stored under *key* on *tok*'s ``kv`` mapping.

    Returns None when *tok* has no ``kv`` attribute or the key is absent.
    """
    try:
        mapping = tok.kv
    except AttributeError:
        return None
    return mapping.get(key)
546 | |||
547 | |||
def copy_token(tok):
    """Return a fresh _TokenType equal to *tok* but with a distinct id().

    A copy is needed because properties are keyed on id() in token_props;
    annotating the shared token object would leak onto unrelated uses of
    the same token type.
    """
    new_tok = _TokenType(tok)
    # This part is very fragile against API changes ...
    new_tok.subtypes = set(tok.subtypes)
    new_tok.parent = tok.parent
    return new_tok
554 | |||
555 | |||
def tok_setprop(tok, key, value):
    """Attach (*key*, *value*) to *tok* via the module-level token_props table.

    The entry stores the token itself alongside its property dict so the
    token stays alive and its id() remains a valid key.
    """
    entry = token_props.setdefault(id(tok), (tok, {}))
    entry[1][key] = value
563 | |||
564 | |||
def tok_getprop(tok, key):
    """Look up the property *key* previously attached to *tok*.

    Returns None when the token was never annotated or the key is absent.
    """
    entry = token_props.get(id(tok))
    return None if entry is None else entry[1].get(key)
572 | |||
573 | |||
# RST-style reference in a comment: `caption <target>` or `target`, with an
# optional trailing underscore.  Single backticks only -- the (?<!`) guard
# keeps ``literal`` spans from matching.  Group 1 is the caption (or the
# bare target), group 2 the optional explicit <target>.
link_reg = re.compile(r"(?<!`)`([^`<]+)\s*(?:<([^>]+)>)?\s*`_?")
# RST inline literal: ``text``.  Group 1 is the text between the backticks.
literal_reg = re.compile(r"``([^`]+)``")
576 | |||
577 | |||
def setup(app):
    """Sphinx entry point: wire the TypeScript tooling into *app*."""

    class TsrefLexer(BetterTypeScriptLexer):
        """TypeScript lexer whose token stream is run through LinkFilter."""

        def __init__(self, **options):
            super().__init__(**options)
            # The filter needs the application to be in scope, hence the
            # closure-based class definition.
            self.add_filter(LinkFilter(app))

    app.add_lexer("tsref", TsrefLexer)
    app.add_domain(TypeScriptDomain)
    app.add_builder(MyHtmlBuilder)