commit 5fc07133c8d3ead0cdc82ff1f92c7f6b5ae04fc2
parent 4e5c8c6d4a0befd55fba632b8b1bed482ea2bac7
Author: Martin Schanzenbach <schanzen@gnunet.org>
Date: Thu, 6 Oct 2022 13:33:47 +0900
update namestore API doc
Diffstat:
15 files changed, 2118 insertions(+), 163 deletions(-)
diff --git a/_exts/__pycache__/typescriptdomain.cpython-311.pyc b/_exts/__pycache__/typescriptdomain.cpython-311.pyc
Binary files differ.
diff --git a/_exts/httpdomain/__init__.py b/_exts/httpdomain/__init__.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+ sphinxcontrib
+ ~~~~~~~~~~~~~
+
+ This package is a namespace package that contains all extensions
+ distributed in the ``sphinx-contrib`` distribution.
+
+ :copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+__import__('pkg_resources').declare_namespace(__name__)
+
diff --git a/_exts/httpdomain/__pycache__/__init__.cpython-311.pyc b/_exts/httpdomain/__pycache__/__init__.cpython-311.pyc
Binary files differ.
diff --git a/_exts/httpdomain/__pycache__/httpdomain.cpython-311.pyc b/_exts/httpdomain/__pycache__/httpdomain.cpython-311.pyc
Binary files differ.
diff --git a/_exts/httpdomain/autohttp/__init__.py b/_exts/httpdomain/autohttp/__init__.py
@@ -0,0 +1,11 @@
+"""
+ sphinxcontrib.autohttp
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API reference builder
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
diff --git a/_exts/httpdomain/autohttp/bottle.py b/_exts/httpdomain/autohttp/bottle.py
@@ -0,0 +1,114 @@
+"""
+ sphinxcontrib.autohttp.bottle
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API reference builder (from Bottle)
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2012 by Jameel Al-Aziz
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import re
+import six
+
+from docutils import nodes
+from docutils.parsers.rst import directives
+from docutils.statemachine import ViewList
+
+from sphinx.util import force_decode
+from sphinx.util.compat import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.pycode import ModuleAnalyzer
+
+from sphinxcontrib import httpdomain
+from sphinxcontrib.autohttp.common import http_directive, import_object
+
+
+def translate_bottle_rule(app, rule):
+ buf = six.StringIO()
+ if hasattr(app.router, "parse_rule"):
+ iterator = app.router.parse_rule(rule) # bottle 0.11
+ else:
+ iterator = app.router._itertokens(rule) # bottle 0.12
+ for name, filter, conf in iterator:
+ if filter:
+ buf.write('(')
+ buf.write(name)
+ if (filter != app.router.default_filter and filter != 'default')\
+ or conf:
+ buf.write(':')
+ buf.write(filter)
+ if conf:
+ buf.write(':')
+ buf.write(conf)
+ buf.write(')')
+ else:
+ buf.write(name)
+ return buf.getvalue()
+
+
+def get_routes(app):
+ for route in app.routes:
+ path = translate_bottle_rule(app, route.rule)
+ yield route.method, path, route
+
+
+class AutobottleDirective(Directive):
+
+ has_content = True
+ required_arguments = 1
+ option_spec = {'endpoints': directives.unchanged,
+ 'undoc-endpoints': directives.unchanged,
+ 'include-empty-docstring': directives.unchanged}
+
+ @property
+ def endpoints(self):
+ endpoints = self.options.get('endpoints', None)
+ if not endpoints:
+ return None
+ return frozenset(re.split(r'\s*,\s*', endpoints))
+
+ @property
+ def undoc_endpoints(self):
+ undoc_endpoints = self.options.get('undoc-endpoints', None)
+ if not undoc_endpoints:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', undoc_endpoints))
+
+ def make_rst(self):
+ app = import_object(self.arguments[0])
+ for method, path, target in get_routes(app):
+ endpoint = target.name or target.callback.__name__
+ if self.endpoints and endpoint not in self.endpoints:
+ continue
+ if endpoint in self.undoc_endpoints:
+ continue
+ view = target.callback
+ docstring = view.__doc__ or ''
+ if not isinstance(docstring, six.text_type):
+ analyzer = ModuleAnalyzer.for_module(view.__module__)
+ docstring = force_decode(docstring, analyzer.encoding)
+ if not docstring and 'include-empty-docstring' not in self.options:
+ continue
+ docstring = prepare_docstring(docstring)
+ for line in http_directive(method, path, docstring):
+ yield line
+
+ def run(self):
+ node = nodes.section()
+ node.document = self.state.document
+ result = ViewList()
+ for line in self.make_rst():
+ result.append(line, '<autobottle>')
+ nested_parse_with_titles(self.state, result, node)
+ return node.children
+
+
+def setup(app):
+ if 'http' not in app.domains:
+ httpdomain.setup(app)
+ app.add_directive('autobottle', AutobottleDirective)
+
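
For orientation, a minimal sketch of the kind of Bottle application the
``autobottle`` directive above is pointed at: the directive argument is resolved
by ``import_object`` ("module:expr", defined in common.py below), and
``make_rst()`` walks the app's routes, emitting one ``http:`` directive per
method and path with the view docstring as its body. The module and route names
here are illustrative only, and Bottle is assumed to be installed.

    # Hypothetical app that ".. autobottle:: myservice.web:app" would document.
    from bottle import Bottle

    app = Bottle()

    @app.get('/users/<uid:int>')
    def get_user(uid):
        """Return a single user as JSON.

        :status 200: the user was found
        """
        return {}
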
diff --git a/_exts/httpdomain/autohttp/common.py b/_exts/httpdomain/autohttp/common.py
@@ -0,0 +1,36 @@
+"""
+ sphinxcontrib.autohttp.common
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The common functions for web framework reflection.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
+import six
+from six.moves import builtins
+from six.moves import reduce
+
+def import_object(import_name):
+ module_name, expr = import_name.split(':', 1)
+ mod = __import__(module_name)
+ mod = reduce(getattr, module_name.split('.')[1:], mod)
+ globals = builtins
+ if not isinstance(globals, dict):
+ globals = globals.__dict__
+ return eval(expr, globals, mod.__dict__)
+
+
+def http_directive(method, path, content):
+ method = method.lower().strip()
+ if isinstance(content, six.string_types):
+ content = content.splitlines()
+ yield ''
+ paths = [path] if isinstance(path, six.string_types) else path
+ for path in paths:
+ yield '.. http:{method}:: {path}'.format(**locals())
+ yield ''
+ for line in content:
+ yield ' ' + line
+ yield ''
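
To make the helper concrete, a standalone sketch of what ``http_directive``
yields; the import path assumes this repository's ``_exts`` directory is on
``sys.path`` (conf.py adds it) and may need adjusting elsewhere.

    # The reST lines emitted for a single endpoint.
    from httpdomain.autohttp.common import http_directive

    lines = list(http_directive('GET', '/users/(int:id)', 'Fetch a single user.'))
    # lines == ['',
    #           '.. http:get:: /users/(int:id)',
    #           '',
    #           '   Fetch a single user.',
    #           '']
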
diff --git a/_exts/httpdomain/autohttp/flask.py b/_exts/httpdomain/autohttp/flask.py
@@ -0,0 +1,48 @@
+"""
+ sphinxcontrib.autohttp.flask
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API reference builder (from Flask)
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
+from __future__ import absolute_import
+
+import re
+import itertools
+import six
+
+from docutils import nodes
+from docutils.parsers.rst import directives
+from docutils.statemachine import ViewList
+
+from sphinx.util import force_decode
+from sphinx.util.compat import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.pycode import ModuleAnalyzer
+
+from sphinxcontrib import httpdomain
+from sphinxcontrib.autohttp.common import http_directive, import_object
+
+from .flask_base import AutoflaskBase
+
+class AutoflaskDirective(AutoflaskBase):
+
+ def run(self):
+ node = nodes.section()
+ node.document = self.state.document
+ result = ViewList()
+ for line in self.make_rst():
+ result.append(line, '<autoflask>')
+ nested_parse_with_titles(self.state, result, node)
+ return node.children
+
+
+def setup(app):
+ if 'http' not in app.domains:
+ httpdomain.setup(app)
+ app.add_directive('autoflask', AutoflaskDirective)
diff --git a/_exts/httpdomain/autohttp/flask_base.py b/_exts/httpdomain/autohttp/flask_base.py
@@ -0,0 +1,215 @@
+"""
+ sphinxcontrib.autohttp.flask
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API reference builder (from Flask)
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import re
+import itertools
+import six
+
+from docutils import nodes
+from docutils.parsers.rst import directives
+from docutils.statemachine import ViewList
+
+from sphinx.util import force_decode
+from sphinx.util.compat import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.pycode import ModuleAnalyzer
+
+from sphinxcontrib import httpdomain
+from sphinxcontrib.autohttp.common import http_directive, import_object
+
+
+def translate_werkzeug_rule(rule):
+ from werkzeug.routing import parse_rule
+ buf = six.StringIO()
+ for conv, arg, var in parse_rule(rule):
+ if conv:
+ buf.write('(')
+ if conv != 'default':
+ buf.write(conv)
+ buf.write(':')
+ buf.write(var)
+ buf.write(')')
+ else:
+ buf.write(var)
+ return buf.getvalue()
+
+
+def get_routes(app, endpoint=None, order=None):
+ endpoints = []
+ for rule in app.url_map.iter_rules(endpoint):
+ url_with_endpoint = (
+ six.text_type(next(app.url_map.iter_rules(rule.endpoint))),
+ rule.endpoint
+ )
+ if url_with_endpoint not in endpoints:
+ endpoints.append(url_with_endpoint)
+ if order == 'path':
+ endpoints.sort()
+ endpoints = [e for _, e in endpoints]
+ for endpoint in endpoints:
+ methodrules = {}
+ for rule in app.url_map.iter_rules(endpoint):
+ methods = rule.methods.difference(['OPTIONS', 'HEAD'])
+ path = translate_werkzeug_rule(rule.rule)
+ for method in methods:
+ if method in methodrules:
+ methodrules[method].append(path)
+ else:
+ methodrules[method] = [path]
+ for method, paths in methodrules.items():
+ yield method, paths, endpoint
+
+
+def quickref_directive(method, path, content):
+ rcomp = re.compile(r"^\s*\.\. :quickref:\s*(?P<quick>.*)$")
+ method = method.lower().strip()
+ if isinstance(content, six.string_types):
+ content = content.splitlines()
+ description=""
+ name=""
+ ref = path.replace("<","(").replace(">",")").replace("/","-").replace(":","-")
+ for line in content:
+ qref = rcomp.match(line)
+ if qref:
+ quickref = qref.group("quick")
+ parts = quickref.split(";",1)
+ if len(parts)>1:
+ name = parts[0]
+ description= parts[1]
+ else:
+ description= quickref
+ break
+
+ row ={}
+ row['name'] = name
+ row['operation'] = ' - `%s %s <#%s-%s>`_' % (method.upper(), path, method.lower(), ref)
+ row['description'] = description
+
+ return row
+
+class AutoflaskBase(Directive):
+
+ has_content = True
+ required_arguments = 1
+ option_spec = {'endpoints': directives.unchanged,
+ 'blueprints': directives.unchanged,
+ 'modules': directives.unchanged,
+ 'order': directives.unchanged,
+ 'undoc-endpoints': directives.unchanged,
+ 'undoc-blueprints': directives.unchanged,
+ 'undoc-modules': directives.unchanged,
+ 'undoc-static': directives.unchanged,
+ 'include-empty-docstring': directives.unchanged}
+
+ @property
+ def endpoints(self):
+ endpoints = self.options.get('endpoints', None)
+ if not endpoints:
+ return None
+ return re.split(r'\s*,\s*', endpoints)
+
+ @property
+ def undoc_endpoints(self):
+ undoc_endpoints = self.options.get('undoc-endpoints', None)
+ if not undoc_endpoints:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', undoc_endpoints))
+
+ @property
+ def blueprints(self):
+ blueprints = self.options.get('blueprints', None)
+ if not blueprints:
+ return None
+ return frozenset(re.split(r'\s*,\s*', blueprints))
+
+ @property
+ def undoc_blueprints(self):
+ undoc_blueprints = self.options.get('undoc-blueprints', None)
+ if not undoc_blueprints:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', undoc_blueprints))
+
+ @property
+ def modules(self):
+ modules = self.options.get('modules', None)
+ if not modules:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', modules))
+
+ @property
+ def undoc_modules(self):
+ undoc_modules = self.options.get('undoc-modules', None)
+ if not undoc_modules:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', undoc_modules))
+
+ @property
+ def order(self):
+ order = self.options.get('order', None)
+ if order not in (None, 'path'):
+ raise ValueError('Invalid value for :order:')
+ return order
+
+ def make_rst(self, qref=False):
+ app = import_object(self.arguments[0])
+ if self.endpoints:
+ routes = itertools.chain(*[get_routes(app, endpoint, self.order)
+ for endpoint in self.endpoints])
+ else:
+ routes = get_routes(app, order=self.order)
+ for method, paths, endpoint in routes:
+ try:
+ blueprint, _, endpoint_internal = endpoint.rpartition('.')
+ if self.blueprints and blueprint not in self.blueprints:
+ continue
+ if blueprint in self.undoc_blueprints:
+ continue
+ except ValueError:
+ pass # endpoint is not within a blueprint
+
+ if endpoint in self.undoc_endpoints:
+ continue
+ try:
+ static_url_path = app.static_url_path # Flask 0.7 or higher
+ except AttributeError:
+ static_url_path = app.static_path # Flask 0.6 or under
+ if ('undoc-static' in self.options and endpoint == 'static' and
+ static_url_path + '/(path:filename)' in paths):
+ continue
+ view = app.view_functions[endpoint]
+
+ if self.modules and view.__module__ not in self.modules:
+ continue
+
+ if self.undoc_modules and view.__module__ in self.undoc_modules:
+ continue
+
+ docstring = view.__doc__ or ''
+ if hasattr(view, 'view_class'):
+ meth_func = getattr(view.view_class, method.lower(), None)
+ if meth_func and meth_func.__doc__:
+ docstring = meth_func.__doc__
+ if not isinstance(docstring, six.text_type):
+ analyzer = ModuleAnalyzer.for_module(view.__module__)
+ docstring = force_decode(docstring, analyzer.encoding)
+
+ if not docstring and 'include-empty-docstring' not in self.options:
+ continue
+ docstring = prepare_docstring(docstring)
+ if qref == True:
+ for path in paths:
+ row = quickref_directive(method, path, docstring)
+ yield row
+ else:
+ for line in http_directive(method, paths, docstring):
+ yield line
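
The ``:quickref:`` marker parsed by ``quickref_directive`` above follows a
"name; description" convention inside the view docstring. A sketch with a
hypothetical Flask view; the call itself is left as a comment because importing
this module pulls in Sphinx internals.

    # Hypothetical Flask view whose docstring carries the ".. :quickref:" line.
    def get_user(user_id):
        """Return one user.

        .. :quickref: Users; Fetch a single user

        :status 200: the user was found
        """

    # quickref_directive('GET', '/users/(int:user_id)', get_user.__doc__)
    # would return roughly:
    #   {'name': 'Users',
    #    'operation': ' - `GET /users/(int:user_id) <#get--users-(int-user_id)>`_',
    #    'description': ' Fetch a single user'}  # leading space from the ";" split
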
diff --git a/_exts/httpdomain/autohttp/flaskqref.py b/_exts/httpdomain/autohttp/flaskqref.py
@@ -0,0 +1,80 @@
+"""
+ sphinxcontrib.autohttp.flaskqref
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API quick reference
+ builder (from Flask)
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from docutils import nodes
+from docutils.statemachine import ViewList
+
+from sphinxcontrib import httpdomain
+from sphinx.util.nodes import nested_parse_with_titles
+
+from .flask import AutoflaskBase
+
+
+class QuickReferenceFlaskDirective(AutoflaskBase):
+
+
+ header = [ '',
+ '.. list-table::',
+ ' :widths: 20 45 35',
+ ' :header-rows: 1',
+ '',
+ ' * - Resource',
+ ' - Operation',
+ ' - Description'
+ ]
+
+ def run(self):
+ node = nodes.section()
+ node.document = self.state.document
+ result = ViewList()
+ for line in QuickReferenceFlaskDirective.header:
+ result.append(line, '<qrefflask>')
+ table={}
+ table_sorted_names=[]
+
+ for table_row in self.make_rst(qref=True):
+ name = table_row['name']
+ if table.get(name) is None:
+ table[name]=[]
+ table[name].append(table_row)
+ if name not in table_sorted_names:
+ table_sorted_names.append(name)
+
+ table_sorted_names.sort()
+
+ for name in table_sorted_names:
+ # Keep table display clean by not repeating duplicate
+ # resource names and descriptions
+ display_name = name
+ previous_description=None
+ for row in table[name]:
+ result.append(' * - %s' % display_name, '<qrefflask>')
+ display_name =""
+ result.append(row['operation'], '<qrefflask>')
+ description = row['description']
+ if previous_description is not None and previous_description == description:
+ description =""
+ else:
+ previous_description = description
+
+ result.append(' - %s' % description, '<qrefflask>')
+
+ result.append('', '<qrefflask>')
+ nested_parse_with_titles(self.state, result, node)
+ return node.children
+
+def setup(app):
+ if 'http' not in app.domains:
+ httpdomain.setup(app)
+ app.add_directive('qrefflask', QuickReferenceFlaskDirective)
+
diff --git a/_exts/httpdomain/autohttp/tornado.py b/_exts/httpdomain/autohttp/tornado.py
@@ -0,0 +1,128 @@
+"""
+ sphinxcontrib.autohttp.tornado
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The sphinx.ext.autodoc-style HTTP API reference builder (from Tornado)
+ for sphinxcontrib.httpdomain.
+
+ :copyright: Copyright 2013 by Rodrigo Machado
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import inspect
+import re
+import six
+
+from docutils import nodes
+from docutils.parsers.rst import directives
+from docutils.statemachine import ViewList
+
+from sphinx.util import force_decode
+from sphinx.util.compat import Directive
+from sphinx.util.nodes import nested_parse_with_titles
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.pycode import ModuleAnalyzer
+
+from sphinxcontrib import httpdomain
+from sphinxcontrib.autohttp.common import http_directive, import_object
+
+
+def translate_tornado_rule(app, rule):
+ buf = six.StringIO()
+ for name, filter, conf in app.router.parse_rule(rule):
+ if filter:
+ buf.write('(')
+ buf.write(name)
+ if filter != app.router.default_filter or conf:
+ buf.write(':')
+ buf.write(filter)
+ if conf:
+ buf.write(':')
+ buf.write(conf)
+ buf.write(')')
+ else:
+ buf.write(name)
+ return buf.getvalue()
+
+
+def get_routes(app):
+ for spec in app.handlers[0][1]:
+ handler = spec.handler_class
+ doc_methods = list(handler.SUPPORTED_METHODS)
+ if 'HEAD' in doc_methods:
+ doc_methods.remove('HEAD')
+ if 'OPTIONS' in doc_methods:
+ doc_methods.remove('OPTIONS')
+
+ for method in doc_methods:
+ maybe_method = getattr(handler, method.lower(), None)
+ if (inspect.isfunction(maybe_method) or
+ inspect.ismethod(maybe_method)):
+ yield method.lower(), spec.regex.pattern, handler
+
+
+def normalize_path(path):
+ if path.endswith('$'):
+ path = path[:-1]
+ return path
+
+
+class AutoTornadoDirective(Directive):
+
+ has_content = True
+ required_arguments = 1
+ option_spec = {'endpoints': directives.unchanged,
+ 'undoc-endpoints': directives.unchanged,
+ 'include-empty-docstring': directives.unchanged}
+
+ @property
+ def endpoints(self):
+ endpoints = self.options.get('endpoints', None)
+ if not endpoints:
+ return None
+ return frozenset(re.split(r'\s*,\s*', endpoints))
+
+ @property
+ def undoc_endpoints(self):
+ undoc_endpoints = self.options.get('undoc-endpoints', None)
+ if not undoc_endpoints:
+ return frozenset()
+ return frozenset(re.split(r'\s*,\s*', undoc_endpoints))
+
+ def make_rst(self):
+ app = import_object(self.arguments[0])
+ for method, path, handler in get_routes(app):
+ class_name = handler.__name__
+ method_name = getattr(handler, method).__name__
+ endpoint = '.'.join((class_name, method_name))
+
+ if self.endpoints and endpoint not in self.endpoints:
+ continue
+ if endpoint in self.undoc_endpoints:
+ continue
+
+ docstring = getattr(handler, method).__doc__ or ''
+ #if not isinstance(docstring, unicode):
+ # analyzer = ModuleAnalyzer.for_module(view.__module__)
+ # docstring = force_decode(docstring, analyzer.encoding)
+ if not docstring and 'include-empty-docstring' not in self.options:
+ continue
+ docstring = prepare_docstring(docstring)
+ for line in http_directive(method, normalize_path(path), docstring):
+ yield line
+
+ def run(self):
+ node = nodes.section()
+ node.document = self.state.document
+ result = ViewList()
+ for line in self.make_rst():
+ result.append(line, '<autotornado>')
+ nested_parse_with_titles(self.state, result, node)
+ return node.children
+
+
+def setup(app):
+ if 'http' not in app.domains:
+ httpdomain.setup(app)
+ app.add_directive('autotornado', AutoTornadoDirective)
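
A minimal, hypothetical Tornado handler of the shape ``get_routes`` above
expects; note that it reads ``app.handlers``, i.e. the pre-5.x routing layout,
and Tornado is assumed to be installed.

    import tornado.web

    class UserHandler(tornado.web.RequestHandler):
        def get(self, user_id):
            """Return a single user as JSON."""
            self.write({})

    app = tornado.web.Application([(r"/users/([0-9]+)", UserHandler)])
    # get_routes(app) yields roughly ('get', '/users/([0-9]+)$', UserHandler);
    # the trailing '$' added by Tornado is stripped again by normalize_path().
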
diff --git a/_exts/httpdomain/httpdomain.py b/_exts/httpdomain/httpdomain.py
@@ -0,0 +1,773 @@
+"""
+ sphinxcontrib.httpdomain
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The HTTP domain for documenting RESTful HTTP APIs.
+
+ :copyright: Copyright 2011 by Hong Minhee
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import re
+
+from docutils import nodes
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.lexers import get_lexer_by_name
+from pygments.token import Literal, Text, Operator, Keyword, Name, Number
+from pygments.util import ClassNotFound
+
+from sphinx import addnodes
+from sphinx.roles import XRefRole
+from sphinx.domains import Domain, ObjType, Index
+from sphinx.directives import ObjectDescription, directives
+from sphinx.util.nodes import make_refnode
+from sphinx.util.docfields import GroupedField, TypedField
+
+# The env.get_doctree() lookup results in a pickle.load() call which is
+# expensive enough to dominate the runtime entirely when the number of endpoints
+# and references is large enough. The doctrees are generated during the read-
+# phase and we can cache their lookup during the write-phase, significantly
+# improving performance.
+# Currently sphinxcontrib-httpdomain does not declare to support parallel read
+# support (parallel_read_safe is the default False) so we can simply use a
+# module global to hold the cache.
+_doctree_cache = {}
+
+
+class DocRef(object):
+ """Represents a reference to an abstract specification."""
+
+ def __init__(self, base_url, anchor, section):
+ self.base_url = base_url
+ self.anchor = anchor
+ self.section = section
+
+ def __repr__(self):
+ """Returns the URL onto related specification section for the related
+ object."""
+ return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section)
+
+
+class RFC2616Ref(DocRef):
+ """Represents a reference to RFC2616.
+ In 2014, RFC2616 was replaced by multiple RFCs (7230-7237)."""
+
+ def __init__(self, section):
+ url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec{0:d}.html'
+ url = url.format(int(section))
+ super(RFC2616Ref, self).__init__(url, 'sec', section)
+
+
+class IETFRef(DocRef):
+ """Represents a reference to the specific IETF RFC."""
+
+ def __init__(self, rfc, section):
+ url = 'https://tools.ietf.org/html/rfc{0:d}'.format(rfc)
+ super(IETFRef, self).__init__(url, 'section-', section)
+
+
+class EventSourceRef(DocRef):
+
+ def __init__(self, section):
+ url = 'http://www.w3.org/TR/eventsource/'
+ super(EventSourceRef, self).__init__(url, section, '')
+
+
+class CORSRef(DocRef):
+ """Represents a reference to W3 Cross-Origin Resource Sharing recommendation."""
+
+ def __init__(self, name, type):
+ url = 'http://www.w3.org/TR/cors/'
+ super(CORSRef, self).__init__(url, name, '-' + type)
+
+
+#: Mapping from lowercase HTTP method name to :class:`DocRef` object which
+#: maintains the URL which points to the section of the RFC which defines that
+#: HTTP method.
+METHOD_REFS = {
+ 'patch': IETFRef(5789, 2),
+ 'options': IETFRef(7231, '4.3.7'),
+ 'get': IETFRef(7231, '4.3.1'),
+ 'head': IETFRef(7231, '4.3.2'),
+ 'post': IETFRef(7231, '4.3.3'),
+ 'put': IETFRef(7231, '4.3.4'),
+ 'delete': IETFRef(7231, '4.3.5'),
+ 'trace': IETFRef(7231, '4.3.8'),
+ 'connect': IETFRef(7231, '4.3.6'),
+ 'copy': IETFRef(2518, 8.8),
+ 'any': ''
+}
+
+
+#: Mapping from HTTP header name to :class:`DocRef` object which
+#: maintains the URL which points to the related section of the RFC.
+HEADER_REFS = {
+ 'Accept': IETFRef(7231, '5.3.2'),
+ 'Accept-Charset': IETFRef(7231, '5.3.3'),
+ 'Accept-Encoding': IETFRef(7231, '5.3.4'),
+ 'Accept-Language': IETFRef(7231, '5.3.5'),
+ 'Accept-Ranges': IETFRef(7233, 2.3),
+ 'Age': IETFRef(7234, 5.1),
+ 'Allow': IETFRef(7231, '7.4.1'),
+ 'Authorization': IETFRef(7235, 4.2),
+ 'Cache-Control': IETFRef(7234, 5.2),
+ 'Connection': IETFRef(7230, 6.1),
+ 'Content-Encoding': IETFRef(7231, '3.1.2.2'),
+ 'Content-Language': IETFRef(7231, '3.1.3.2'),
+ 'Content-Length': IETFRef(7230, '3.3.2'),
+ 'Content-Location': IETFRef(7231, '3.1.4.2'),
+ 'Content-MD5': RFC2616Ref(14.15), # removed
+ 'Content-Range': IETFRef(7233, 4.2),
+ 'Content-Type': IETFRef(7231, '3.1.1.5'),
+ 'Cookie': IETFRef(2109, '4.3.4'), # also RFC6265 section 5.4
+ 'Date': IETFRef(7231, '7.1.1.2'),
+ 'Destination': IETFRef(2518, 9.3),
+ 'ETag': IETFRef(7232, 2.3),
+ 'Expect': IETFRef(7231, '5.1.1'),
+ 'Expires': IETFRef(7234, 5.3),
+ 'From': IETFRef(7231, '5.5.2'),
+ 'Host': IETFRef(7230, 5.4),
+ 'If-Match': IETFRef(7232, 3.1),
+ 'If-Modified-Since': IETFRef(7232, 3.3),
+ 'If-None-Match': IETFRef(7232, 3.2),
+ 'If-Range': IETFRef(7233, 3.2),
+ 'If-Unmodified-Since': IETFRef(7232, 3.4),
+ 'Last-Event-ID': EventSourceRef('last-event-id'),
+ 'Last-Modified': IETFRef(7232, 2.2),
+ 'Link': IETFRef(5988, '5'),
+ 'Location': IETFRef(7231, '7.1.2'),
+ 'Max-Forwards': IETFRef(7231, '5.1.2'),
+ 'Pragma': IETFRef(7234, 5.4),
+ 'Proxy-Authenticate': IETFRef(7235, 4.3),
+ 'Proxy-Authorization': IETFRef(7235, 4.4),
+ 'Range': IETFRef(7233, 3.1),
+ 'Referer': IETFRef(7231, '5.5.2'),
+ 'Retry-After': IETFRef(7231, '7.1.3'),
+ 'Server': IETFRef(7231, '7.4.2'),
+ 'Set-Cookie': IETFRef(2109, '4.2.2'),
+ 'TE': IETFRef(7230, 4.3),
+ 'Trailer': IETFRef(7230, 4.4),
+ 'Transfer-Encoding': IETFRef(7230, '3.3.1'),
+ 'Upgrade': IETFRef(7230, 6.7),
+ 'User-Agent': IETFRef(7231, '5.5.3'),
+ 'Vary': IETFRef(7231, '7.1.4'),
+ 'Via': IETFRef(7230, '5.7.1'),
+ 'Warning': IETFRef(7234, 5.5),
+ 'WWW-Authenticate': IETFRef(7235, 4.1),
+ 'Access-Control-Allow-Origin': CORSRef('access-control-allow-origin',
+ 'response-header'),
+ 'Access-Control-Allow-Credentials': CORSRef('access-control-allow-credentials',
+ 'response-header'),
+ 'Access-Control-Expose-Headers': CORSRef('access-control-expose-headers',
+ 'response-header'),
+ 'Access-Control-Max-Age': CORSRef('access-control-max-age',
+ 'response-header'),
+ 'Access-Control-Allow-Methods': CORSRef('access-control-allow-methods',
+ 'response-header'),
+ 'Access-Control-Allow-Headers': CORSRef('access-control-allow-headers',
+ 'response-header'),
+ 'Origin': CORSRef('origin', 'request-header'),
+ 'Access-Control-Request-Method': CORSRef('access-control-request-method',
+ 'response-header'),
+ 'Access-Control-Request-Headers': CORSRef('access-control-request-headers',
+ 'response-header'),
+}
+
+
+HTTP_STATUS_CODES = {
+ 100: 'Continue',
+ 101: 'Switching Protocols',
+ 102: 'Processing',
+ 200: 'OK',
+ 201: 'Created',
+ 202: 'Accepted',
+ 203: 'Non Authoritative Information',
+ 204: 'No Content',
+ 205: 'Reset Content',
+ 206: 'Partial Content',
+ 207: 'Multi Status',
+ 226: 'IM Used', # see RFC 3229
+ 300: 'Multiple Choices',
+ 301: 'Moved Permanently',
+ 302: 'Found',
+ 303: 'See Other',
+ 304: 'Not Modified',
+ 305: 'Use Proxy',
+ 307: 'Temporary Redirect',
+ 308: 'Permanent Redirect',
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 402: 'Payment Required', # unused
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 405: 'Method Not Allowed',
+ 406: 'Not Acceptable',
+ 407: 'Proxy Authentication Required',
+ 408: 'Request Timeout',
+ 409: 'Conflict',
+ 410: 'Gone',
+ 411: 'Length Required',
+ 412: 'Precondition Failed',
+ 413: 'Request Entity Too Large',
+ 414: 'Request URI Too Long',
+ 415: 'Unsupported Media Type',
+ 416: 'Requested Range Not Satisfiable',
+ 417: 'Expectation Failed',
+ 418: "I'm a teapot", # see RFC 2324
+ 422: 'Unprocessable Entity',
+ 423: 'Locked',
+ 424: 'Failed Dependency',
+ 425: 'Too Early', # RFC 8470
+ 426: 'Upgrade Required',
+ 429: 'Too Many Requests',
+ 449: 'Retry With', # proprietary MS extension
+ 451: 'Unavailable For Legal Reasons',
+ 500: 'Internal Server Error',
+ 501: 'Not Implemented',
+ 502: 'Bad Gateway',
+ 503: 'Service Unavailable',
+ 504: 'Gateway Timeout',
+ 505: 'HTTP Version Not Supported',
+ 507: 'Insufficient Storage',
+ 510: 'Not Extended'
+}
+
+WEBDAV_STATUS_CODES = [207, 422, 423, 424, 507]
+
+http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
+ re.VERBOSE)
+
+
+def sort_by_method(entries):
+ def cmp(item):
+ order = ['HEAD', 'GET', 'POST', 'PUT', 'DELETE', 'PATCH',
+ 'OPTIONS', 'TRACE', 'CONNECT', 'COPY', 'ANY']
+ method = item[0].split(' ', 1)[0]
+ if method in order:
+ return order.index(method)
+ return 100
+ return sorted(entries, key=cmp)
+
+
+def http_resource_anchor(method, path):
+ path = re.sub(r'[{}]', '', re.sub(r'[<>:/]', '-', path))
+ return method.lower() + '-' + path
+
+
+class HTTPResource(ObjectDescription):
+
+ doc_field_types = [
+ TypedField('parameter', label='Parameters',
+ names=('param', 'parameter', 'arg', 'argument'),
+ typerolename='obj', typenames=('paramtype', 'type')),
+ TypedField('jsonparameter', label='JSON Parameters',
+ names=('jsonparameter', 'jsonparam', 'json'),
+ typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
+ TypedField('requestjsonobject', label='Request JSON Object',
+ names=('reqjsonobj', 'reqjson', '<jsonobj', '<json'),
+ typerolename='obj', typenames=('reqjsonobj', '<jsonobj')),
+ TypedField('requestjsonarray', label='Request JSON Array of Objects',
+ names=('reqjsonarr', '<jsonarr'),
+ typerolename='obj',
+ typenames=('reqjsonarrtype', '<jsonarrtype')),
+ TypedField('responsejsonobject', label='Response JSON Object',
+ names=('resjsonobj', 'resjson', '>jsonobj', '>json'),
+ typerolename='obj', typenames=('resjsonobj', '>jsonobj')),
+ TypedField('responsejsonarray', label='Response JSON Array of Objects',
+ names=('resjsonarr', '>jsonarr'),
+ typerolename='obj',
+ typenames=('resjsonarrtype', '>jsonarrtype')),
+ TypedField('queryparameter', label='Query Parameters',
+ names=('queryparameter', 'queryparam', 'qparam', 'query'),
+ typerolename='obj',
+ typenames=('queryparamtype', 'querytype', 'qtype')),
+ GroupedField('formparameter', label='Form Parameters',
+ names=('formparameter', 'formparam', 'fparam', 'form')),
+ GroupedField('requestheader', label='Request Headers',
+ rolename='header',
+ names=('<header', 'reqheader', 'requestheader')),
+ GroupedField('responseheader', label='Response Headers',
+ rolename='header',
+ names=('>header', 'resheader', 'responseheader')),
+ GroupedField('statuscode', label='Status Codes',
+ rolename='statuscode',
+ names=('statuscode', 'status', 'code'))
+ ]
+
+ option_spec = {
+ 'deprecated': directives.flag,
+ 'noindex': directives.flag,
+ 'synopsis': lambda x: x,
+ }
+
+ method = NotImplemented
+
+ def handle_signature(self, sig, signode):
+ method = self.method.upper() + ' '
+ signode += addnodes.desc_name(method, method)
+ offset = 0
+ path = None
+ for match in http_sig_param_re.finditer(sig):
+ path = sig[offset:match.start()]
+ signode += addnodes.desc_name(path, path)
+ params = addnodes.desc_parameterlist()
+ typ = match.group('type')
+ if typ:
+ typ += ': '
+ params += addnodes.desc_annotation(typ, typ)
+ name = match.group('name')
+ params += addnodes.desc_parameter(name, name)
+ signode += params
+ offset = match.end()
+ if offset < len(sig):
+ path = sig[offset:len(sig)]
+ signode += addnodes.desc_name(path, path)
+ assert path is not None, 'no matches for sig: %s' % sig
+ fullname = self.method.upper() + ' ' + path
+ signode['method'] = self.method
+ signode['path'] = sig
+ signode['fullname'] = fullname
+ return (fullname, self.method, sig)
+
+ def needs_arglist(self):
+ return False
+
+ def add_target_and_index(self, name_cls, sig, signode):
+ signode['ids'].append(http_resource_anchor(*name_cls[1:]))
+ if 'noindex' not in self.options:
+ self.env.domaindata['http'][self.method][sig] = (
+ self.env.docname,
+ self.options.get('synopsis', ''),
+ 'deprecated' in self.options)
+
+ def get_index_text(self, modname, name):
+ return ''
+
+
+class HTTPOptions(HTTPResource):
+
+ method = 'options'
+
+
+class HTTPHead(HTTPResource):
+
+ method = 'head'
+
+
+class HTTPPatch(HTTPResource):
+
+ method = 'patch'
+
+
+class HTTPPost(HTTPResource):
+
+ method = 'post'
+
+
+class HTTPGet(HTTPResource):
+
+ method = 'get'
+
+
+class HTTPPut(HTTPResource):
+
+ method = 'put'
+
+
+class HTTPDelete(HTTPResource):
+
+ method = 'delete'
+
+
+class HTTPTrace(HTTPResource):
+
+ method = 'trace'
+
+
+class HTTPConnect(HTTPResource):
+
+ method = 'connect'
+
+
+class HTTPCopy(HTTPResource):
+
+ method = 'copy'
+
+
+class HTTPAny(HTTPResource):
+
+ method = 'any'
+
+
+class HTTPXRefRole(XRefRole):
+
+ def __init__(self, method, **kwargs):
+ XRefRole.__init__(self, **kwargs)
+ self.method = method
+
+ def process_link(self, env, refnode, has_explicit_title, title, target):
+ if not has_explicit_title:
+ title = self.method.upper() + ' ' + title
+ return title, target
+
+
+class HTTPXRefMethodRole(XRefRole):
+
+ def result_nodes(self, document, env, node, is_ref):
+ method = node[0][0].lower()
+ rawsource = node[0].rawsource
+ config = env.domains['http'].env.config
+ if method not in METHOD_REFS:
+ if not config['http_strict_mode']:
+ return [nodes.emphasis(method, method)], []
+ reporter = document.reporter
+ msg = reporter.error('%s is not a valid HTTP method' % method,
+ line=node.line)
+ prb = nodes.problematic(method, method)
+ return [prb], [msg]
+ url = str(METHOD_REFS[method])
+ if not url:
+ return [nodes.emphasis(method, method)], []
+ node = nodes.reference(rawsource, method.upper(), refuri=url)
+ return [node], []
+
+
+class HTTPXRefStatusRole(XRefRole):
+
+ def result_nodes(self, document, env, node, is_ref):
+ def get_code_status(text):
+ if text.isdigit():
+ code = int(text)
+ return code, HTTP_STATUS_CODES.get(code)
+ else:
+ try:
+ code, status = re.split(r'\s', text.strip(), 1)
+ code = int(code)
+ except ValueError:
+ return None, None
+ known_status = HTTP_STATUS_CODES.get(code)
+ if known_status is None:
+ return code, None
+ elif known_status.lower() != status.lower():
+ return code, None
+ else:
+ return code, status
+
+ def report_unknown_code():
+ if not config['http_strict_mode']:
+ return [nodes.emphasis(text, text)], []
+ reporter = document.reporter
+ msg = reporter.error('%d is not a known HTTP status code' % code,
+ line=node.line)
+ prb = nodes.problematic(text, text)
+ return [prb], [msg]
+
+ def report_invalid_code():
+ if not config['http_strict_mode']:
+ return [nodes.emphasis(text, text)], []
+ reporter = document.reporter
+ msg = reporter.error(
+ 'HTTP status code must be an integer (e.g. `200`) or '
+ 'start with an integer (e.g. `200 OK`); %r is invalid' %
+ text,
+ line=node.line
+ )
+ prb = nodes.problematic(text, text)
+ return [prb], [msg]
+
+ text = node[0][0]
+ rawsource = node[0].rawsource
+ config = env.domains['http'].env.config
+
+ code, status = get_code_status(text)
+ if code is None:
+ return report_invalid_code()
+ elif status is None:
+ return report_unknown_code()
+ elif code == 226:
+ url = 'http://www.ietf.org/rfc/rfc3229.txt'
+ elif code == 418:
+ url = 'http://www.ietf.org/rfc/rfc2324.txt'
+ elif code == 429:
+ url = 'http://tools.ietf.org/html/rfc6585#section-4'
+ elif code == 449:
+ url = 'http://msdn.microsoft.com/en-us/library/dd891478(v=prot.10).aspx'
+ elif code == 451:
+ url = 'http://www.ietf.org/rfc/rfc7725.txt'
+ elif code in WEBDAV_STATUS_CODES:
+ url = 'http://tools.ietf.org/html/rfc4918#section-11.%d' % (WEBDAV_STATUS_CODES.index(code) + 1)
+ elif code in HTTP_STATUS_CODES:
+ url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
+ '#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
+ else:
+ url = ''
+ node = nodes.reference(rawsource, '%d %s' % (code, status), refuri=url)
+ return [node], []
+
+
+class HTTPXRefHeaderRole(XRefRole):
+
+ def result_nodes(self, document, env, node, is_ref):
+ header = node[0][0]
+ rawsource = node[0].rawsource
+ if header not in HEADER_REFS:
+ _header = '-'.join(map(lambda i: i.title(), header.split('-')))
+ if _header not in HEADER_REFS:
+ return [nodes.emphasis(header, header)], []
+ url = str(HEADER_REFS[header])
+ node = nodes.reference(rawsource, header, refuri=url)
+ return [node], []
+
+
+class HTTPIndex(Index):
+
+ name = 'routingtable'
+ localname = 'HTTP Routing Table'
+ shortname = 'routing table'
+
+ def __init__(self, *args, **kwargs):
+ super(HTTPIndex, self).__init__(*args, **kwargs)
+
+ self.ignore = [
+ [l for l in x.split('/') if l]
+ for x in self.domain.env.config['http_index_ignore_prefixes']]
+ self.ignore.sort(reverse=True)
+
+ # During HTML generation these values are picked up from the class,
+ # not from the instance, so we have to hack the system a little
+ cls = self.__class__
+ cls.shortname = self.domain.env.config['http_index_shortname']
+ cls.localname = self.domain.env.config['http_index_localname']
+
+ def grouping_prefix(self, path):
+ letters = [x for x in path.split('/') if x]
+ for prefix in self.ignore:
+ if letters[:len(prefix)] == prefix:
+ return '/' + '/'.join(letters[:len(prefix) + 1])
+ return '/%s' % (letters[0] if letters else '',)
+
+ def generate(self, docnames=None):
+ content = {}
+ items = ((method, path, info)
+ for method, routes in self.domain.routes.items()
+ for path, info in routes.items())
+ items = sorted(items, key=lambda item: item[1])
+ for method, path, info in items:
+ entries = content.setdefault(self.grouping_prefix(path), [])
+ entries.append([
+ method.upper() + ' ' + path, 0, info[0],
+ http_resource_anchor(method, path),
+ '', 'Deprecated' if info[2] else '', info[1]
+ ])
+ items = sorted(
+ (path, sort_by_method(entries))
+ for path, entries in content.items()
+ )
+ return (items, True)
+
+
+class HTTPDomain(Domain):
+ """HTTP domain."""
+
+ name = 'http'
+ label = 'HTTP'
+
+ object_types = {
+ 'options': ObjType('options', 'options', 'obj'),
+ 'head': ObjType('head', 'head', 'obj'),
+ 'post': ObjType('post', 'post', 'obj'),
+ 'get': ObjType('get', 'get', 'obj'),
+ 'put': ObjType('put', 'put', 'obj'),
+ 'patch': ObjType('patch', 'patch', 'obj'),
+ 'delete': ObjType('delete', 'delete', 'obj'),
+ 'trace': ObjType('trace', 'trace', 'obj'),
+ 'connect': ObjType('connect', 'connect', 'obj'),
+ 'copy': ObjType('copy', 'copy', 'obj'),
+ 'any': ObjType('any', 'any', 'obj')
+ }
+
+ directives = {
+ 'options': HTTPOptions,
+ 'head': HTTPHead,
+ 'post': HTTPPost,
+ 'get': HTTPGet,
+ 'put': HTTPPut,
+ 'patch': HTTPPatch,
+ 'delete': HTTPDelete,
+ 'trace': HTTPTrace,
+ 'connect': HTTPConnect,
+ 'copy': HTTPCopy,
+ 'any': HTTPAny
+ }
+
+ roles = {
+ 'options': HTTPXRefRole('options'),
+ 'head': HTTPXRefRole('head'),
+ 'post': HTTPXRefRole('post'),
+ 'get': HTTPXRefRole('get'),
+ 'put': HTTPXRefRole('put'),
+ 'patch': HTTPXRefRole('patch'),
+ 'delete': HTTPXRefRole('delete'),
+ 'trace': HTTPXRefRole('trace'),
+ 'connect': HTTPXRefRole('connect'),
+ 'copy': HTTPXRefRole('copy'),
+ 'any': HTTPXRefRole('any'),
+ 'statuscode': HTTPXRefStatusRole(),
+ 'method': HTTPXRefMethodRole(),
+ 'header': HTTPXRefHeaderRole()
+ }
+
+ initial_data = {
+ 'options': {}, # path: (docname, synopsis)
+ 'head': {},
+ 'post': {},
+ 'get': {},
+ 'put': {},
+ 'patch': {},
+ 'delete': {},
+ 'trace': {},
+ 'connect': {},
+ 'copy': {},
+ 'any': {}
+ }
+
+ indices = []
+
+ @property
+ def routes(self):
+ return dict((key, self.data[key]) for key in self.object_types)
+
+ def clear_doc(self, docname):
+ for typ, routes in self.routes.items():
+ for path, info in list(routes.items()):
+ if info[0] == docname:
+ del routes[path]
+
+ def resolve_xref(self, env, fromdocname, builder, typ, target,
+ node, contnode):
+ try:
+ info = self.data[str(typ)][target]
+ except KeyError:
+ text = contnode.rawsource
+ role = self.roles.get(typ)
+ if role is None:
+ return None
+
+ if fromdocname not in _doctree_cache:
+ _doctree_cache[fromdocname] = env.get_doctree(fromdocname)
+ doctree = _doctree_cache[fromdocname]
+
+ resnode = role.result_nodes(doctree, env, node, None)[0][0]
+ if isinstance(resnode, addnodes.pending_xref):
+ text = node[0][0]
+ reporter = doctree.reporter
+ reporter.warning('Cannot resolve reference to %r' % text,
+ line=node.line)
+ return None
+ return resnode
+ else:
+ anchor = http_resource_anchor(typ, target)
+ title = typ.upper() + ' ' + target
+ return make_refnode(builder, fromdocname, info[0], anchor,
+ contnode, title)
+
+ def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
+ """Resolve the pending_xref *node* with the given *target*.
+
+ The reference comes from an "any" or similar role, which means that Sphinx
+ doesn't know the type.
+
+ For now sphinxcontrib-httpdomain doesn't resolve any xref nodes.
+
+ :return:
+ list of tuples ``('domain:role', newnode)``, where ``'domain:role'``
+ is the name of a role that could have created the same reference,
+ """
+ return []
+
+ def get_objects(self):
+ for method, routes in self.routes.items():
+ for path, info in routes.items():
+ anchor = http_resource_anchor(method, path)
+ yield (path, path, method, info[0], anchor, 1)
+
+
+class HTTPLexer(RegexLexer):
+ """Lexer for HTTP sessions."""
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ try:
+ lexer = get_lexer_for_mimetype(content_type)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE|COPY)( +)([^ ]+)( +)'
+ r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
+ (r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number,
+ Text, Name.Exception, Text),
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+
+def setup(app):
+ app.add_domain(HTTPDomain)
+
+ try:
+ get_lexer_by_name('http')
+ except ClassNotFound:
+ app.add_lexer('http', HTTPLexer())
+ app.add_config_value('http_index_ignore_prefixes', [], None)
+ app.add_config_value('http_index_shortname', 'routing table', True)
+ app.add_config_value('http_index_localname', 'HTTP Routing Table', True)
+ app.add_config_value('http_strict_mode', True, None)
+ app.add_config_value('http_headers_ignore_prefixes', ['X-'], None)
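
Two helpers above determine how endpoints are anchored and how the routing table
is ordered. A standalone sketch; the import path assumes this repository's
``_exts`` directory is on ``sys.path`` and that Sphinx and Pygments are
installed.

    from httpdomain.httpdomain import http_resource_anchor, sort_by_method

    # Anchors: '<', '>', ':' and '/' become '-', curly braces are dropped.
    assert http_resource_anchor('GET', '/users/(int:id)') == 'get--users-(int-id)'

    # Index entries are listed per path in a fixed verb order, not alphabetically.
    entries = [['POST /users', 0], ['GET /users', 0], ['DELETE /users', 0]]
    assert [e[0] for e in sort_by_method(entries)] == \
        ['GET /users', 'POST /users', 'DELETE /users']
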
diff --git a/_exts/typescriptdomain.py b/_exts/typescriptdomain.py
@@ -0,0 +1,587 @@
+"""
+TypeScript domain.
+
+:copyright: Copyright 2019 by Taler Systems SA
+:license: LGPLv3+
+:author: Florian Dold
+"""
+
+import re
+
+from pathlib import Path
+
+from docutils import nodes
+from typing import List, Optional, Iterable, Dict, Tuple
+from typing import cast
+
+from pygments.lexers import get_lexer_by_name
+from pygments.filter import Filter
+from pygments.token import Literal, Text, Operator, Keyword, Name, Number
+from pygments.token import Comment, Token, _TokenType
+from pygments.token import *
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.formatters import HtmlFormatter
+
+from docutils import nodes
+from docutils.nodes import Element, Node
+
+from sphinx.roles import XRefRole
+from sphinx.domains import Domain, ObjType, Index
+from sphinx.directives import directives
+from sphinx.util.docutils import SphinxDirective
+from sphinx.util.nodes import make_refnode
+from sphinx.util import logging
+from sphinx.highlighting import PygmentsBridge
+from sphinx.builders.html import StandaloneHTMLBuilder
+from sphinx.pygments_styles import SphinxStyle
+
+logger = logging.getLogger(__name__)
+
+
+class TypeScriptDefinition(SphinxDirective):
+ """
+ Directive that renders a TypeScript definition as a highlighted code block
+ and registers it as a cross-referenceable ``ts:type`` object.
+ """
+
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {
+ "force": directives.flag,
+ "linenos": directives.flag,
+ "dedent": int,
+ "lineno-start": int,
+ "emphasize-lines": directives.unchanged_required,
+ "caption": directives.unchanged_required,
+ "class": directives.class_option,
+ }
+
+ def run(self) -> List[Node]:
+ document = self.state.document
+ code = "\n".join(self.content)
+ location = self.state_machine.get_source_and_line(self.lineno)
+
+ linespec = self.options.get("emphasize-lines")
+ if linespec:
+ try:
+ nlines = len(self.content)
+ hl_lines = parselinenos(linespec, nlines)
+ if any(i >= nlines for i in hl_lines):
+ logger.warning(
+ __("line number spec is out of range(1-%d): %r")
+ % (nlines, self.options["emphasize-lines"]),
+ location=location,
+ )
+
+ hl_lines = [x + 1 for x in hl_lines if x < nlines]
+ except ValueError as err:
+ return [document.reporter.warning(err, line=self.lineno)]
+ else:
+ hl_lines = None
+
+ if "dedent" in self.options:
+ location = self.state_machine.get_source_and_line(self.lineno)
+ lines = code.split("\n")
+ lines = dedent_lines(lines, self.options["dedent"], location=location)
+ code = "\n".join(lines)
+
+ literal = nodes.literal_block(code, code) # type: Element
+ if "linenos" in self.options or "lineno-start" in self.options:
+ literal["linenos"] = True
+ literal["classes"] += self.options.get("class", [])
+ literal["force"] = "force" in self.options
+ literal["language"] = "tsref"
+ extra_args = literal["highlight_args"] = {}
+ if hl_lines is not None:
+ extra_args["hl_lines"] = hl_lines
+ if "lineno-start" in self.options:
+ extra_args["linenostart"] = self.options["lineno-start"]
+ self.set_source_info(literal)
+
+ caption = self.options.get("caption")
+ if caption:
+ try:
+ literal = container_wrapper(self, literal, caption)
+ except ValueError as exc:
+ return [document.reporter.warning(exc, line=self.lineno)]
+
+ tsid = "tsref-type-" + self.arguments[0]
+ literal["ids"].append(tsid)
+
+ tsname = self.arguments[0]
+ ts = self.env.get_domain("ts")
+ ts.add_object("type", tsname, self.env.docname, tsid)
+
+ return [literal]
+
+
+class TypeScriptDomain(Domain):
+ """TypeScript domain."""
+
+ name = "ts"
+ label = "TypeScript"
+
+ directives = {
+ "def": TypeScriptDefinition,
+ }
+
+ roles = {
+ "type": XRefRole(
+ lowercase=False, warn_dangling=True, innernodeclass=nodes.inline
+ ),
+ }
+
+ dangling_warnings = {
+ "type": "undefined TypeScript type: %(target)s",
+ }
+
+ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
+ try:
+ info = self.objects[(str(typ), str(target))]
+ except KeyError:
+ logger.warn("type {}/{} not found".format(typ, target))
+ return None
+ else:
+ anchor = "tsref-type-{}".format(str(target))
+ title = typ.upper() + " " + target
+ return make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
+
+ def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
+ """Resolve the pending_xref *node* with the given *target*.
+
+ The reference comes from an "any" or similar role, which means that Sphinx
+ don't know the type.
+
+ For now sphinxcontrib-httpdomain doesn't resolve any xref nodes.
+
+ :return:
+ list of tuples ``('domain:role', newnode)``, where ``'domain:role'``
+ is the name of a role that could have created the same reference,
+ """
+ ret = []
+ try:
+ info = self.objects[("type", str(target))]
+ except KeyError:
+ pass
+ else:
+ anchor = "tsref-type-{}".format(str(target))
+ title = "TYPE" + " " + target
+ node = make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
+ ret.append(("ts:type", node))
+ return ret
+
+ @property
+ def objects(self) -> Dict[Tuple[str, str], Tuple[str, str]]:
+ return self.data.setdefault(
+ "objects", {}
+ ) # (objtype, name) -> docname, labelid
+
+ def add_object(self, objtype: str, name: str, docname: str, labelid: str) -> None:
+ self.objects[objtype, name] = (docname, labelid)
+
+
+class BetterTypeScriptLexer(RegexLexer):
+ """
+ For `TypeScript <https://www.typescriptlang.org/>`_ source code.
+ """
+
+ name = "TypeScript"
+ aliases = ["ts"]
+ filenames = ["*.ts"]
+ mimetypes = ["text/x-typescript"]
+
+ flags = re.DOTALL
+ tokens = {
+ "commentsandwhitespace": [
+ (r"\s+", Text),
+ (r"<!--", Comment),
+ (r"//.*?\n", Comment.Single),
+ (r"/\*.*?\*/", Comment.Multiline),
+ ],
+ "slashstartsregex": [
+ include("commentsandwhitespace"),
+ (
+ r"/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/" r"([gim]+\b|\B)",
+ String.Regex,
+ "#pop",
+ ),
+ (r"(?=/)", Text, ("#pop", "badregex")),
+ (r"", Text, "#pop"),
+ ],
+ "badregex": [(r"\n", Text, "#pop")],
+ "typeexp": [
+ (r"[a-zA-Z0-9_?.$]+", Keyword.Type),
+ (r"\s+", Text),
+ (r"[|]", Text),
+ (r"\n", Text, "#pop"),
+ (r";", Text, "#pop"),
+ (r"", Text, "#pop"),
+ ],
+ "root": [
+ (r"^(?=\s|/|<!--)", Text, "slashstartsregex"),
+ include("commentsandwhitespace"),
+ (
+ r"\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|"
+ r"(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?",
+ Operator,
+ "slashstartsregex",
+ ),
+ (r"[{(\[;,]", Punctuation, "slashstartsregex"),
+ (r"[})\].]", Punctuation),
+ (
+ r"(for|in|while|do|break|return|continue|switch|case|default|if|else|"
+ r"throw|try|catch|finally|new|delete|typeof|instanceof|void|"
+ r"this)\b",
+ Keyword,
+ "slashstartsregex",
+ ),
+ (
+ r"(var|let|const|with|function)\b",
+ Keyword.Declaration,
+ "slashstartsregex",
+ ),
+ (
+ r"(abstract|boolean|byte|char|class|const|debugger|double|enum|export|"
+ r"extends|final|float|goto|implements|import|int|interface|long|native|"
+ r"package|private|protected|public|short|static|super|synchronized|throws|"
+ r"transient|volatile)\b",
+ Keyword.Reserved,
+ ),
+ (r"(true|false|null|NaN|Infinity|undefined)\b", Keyword.Constant),
+ (
+ r"(Array|Boolean|Date|Error|Function|Math|netscape|"
+ r"Number|Object|Packages|RegExp|String|sun|decodeURI|"
+ r"decodeURIComponent|encodeURI|encodeURIComponent|"
+ r"Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|"
+ r"window)\b",
+ Name.Builtin,
+ ),
+ # Match stuff like: module name {...}
+ (
+ r"\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)",
+ bygroups(Keyword.Reserved, Text, Name.Other, Text),
+ "slashstartsregex",
+ ),
+ # Match variable type keywords
+ (r"\b(string|bool|number)\b", Keyword.Type),
+ # Match stuff like: constructor
+ (r"\b(constructor|declare|interface|as|AS)\b", Keyword.Reserved),
+ # Match stuff like: super(argument, list)
+ (
+ r"(super)(\s*)\(([a-zA-Z0-9,_?.$\s]+\s*)\)",
+ bygroups(Keyword.Reserved, Text),
+ "slashstartsregex",
+ ),
+ # Match stuff like: function() {...}
+ (r"([a-zA-Z_?.$][\w?.$]*)\(\) \{", Name.Other, "slashstartsregex"),
+ # Match stuff like: (function: return type)
+ (
+ r"([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)",
+ bygroups(Name.Other, Text),
+ "typeexp",
+ ),
+ # Match stuff like: type Foo = Bar | Baz
+ (
+ r"\b(type)(\s*)([a-zA-Z0-9_?.$]+)(\s*)(=)(\s*)",
+ bygroups(Keyword.Reserved, Text, Name.Other, Text, Operator, Text),
+ "typeexp",
+ ),
+ (r"[$a-zA-Z_][a-zA-Z0-9_]*", Name.Other),
+ (r"[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?", Number.Float),
+ (r"0x[0-9a-fA-F]+", Number.Hex),
+ (r"[0-9]+", Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ }
+
+
+# Map from token id to props.
+# Properties can't be added to tokens
+# since they derive from Python's tuple.
+token_props = {}
+
+
+class LinkFilter(Filter):
+ def __init__(self, app, **options):
+ self.app = app
+ Filter.__init__(self, **options)
+
+ def _filter_one_literal(self, ttype, value):
+ last = 0
+ for m in re.finditer(literal_reg, value):
+ pre = value[last : m.start()]
+ if pre:
+ yield ttype, pre
+ t = copy_token(ttype)
+ tok_setprop(t, "is_literal", True)
+ yield t, m.group(1)
+ last = m.end()
+ post = value[last:]
+ if post:
+ yield ttype, post
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype in Token.Keyword.Type:
+ t = copy_token(ttype)
+ tok_setprop(t, "xref", value.strip())
+ tok_setprop(t, "is_identifier", True)
+ yield t, value
+ elif ttype in Token.Comment:
+ last = 0
+ for m in re.finditer(link_reg, value):
+ pre = value[last : m.start()]
+ if pre:
+ yield from self._filter_one_literal(ttype, pre)
+ t = copy_token(ttype)
+ x1, x2 = m.groups()
+ x0 = m.group(0)
+ if x2 is None:
+ caption = x1.strip()
+ xref = x1.strip()
+ else:
+ caption = x1.strip()
+ xref = x2.strip()
+ tok_setprop(t, "xref", xref)
+ tok_setprop(t, "caption", caption)
+ if x0.endswith("_"):
+ tok_setprop(t, "trailing_underscore", True)
+ yield t, m.group(1)
+ last = m.end()
+ post = value[last:]
+ if post:
+ yield from self._filter_one_literal(ttype, post)
+ else:
+ yield ttype, value
+
+
+_escape_html_table = {
+ ord("&"): u"&amp;",
+ ord("<"): u"&lt;",
+ ord(">"): u"&gt;",
+ ord('"'): u"&quot;",
+ ord("'"): u"&#39;",
+}
+
+
+class LinkingHtmlFormatter(HtmlFormatter):
+ def __init__(self, **kwargs):
+ super(LinkingHtmlFormatter, self).__init__(**kwargs)
+ self._builder = kwargs["_builder"]
+ self._bridge = kwargs["_bridge"]
+
+ def _get_value(self, value, tok):
+ xref = tok_getprop(tok, "xref")
+ caption = tok_getprop(tok, "caption")
+
+ if tok_getprop(tok, "is_literal"):
+ return '<span style="font-weight: bolder">%s</span>' % (value,)
+
+ if tok_getprop(tok, "trailing_underscore"):
+ logger.warn(
+ "{}:{}: code block contains xref to '{}' with unsupported trailing underscore".format(
+ self._bridge.path, self._bridge.line, xref
+ )
+ )
+
+ if tok_getprop(tok, "is_identifier"):
+ if xref.startswith('"'):
+ return value
+ if re.match("^[0-9]+$", xref) is not None:
+ return value
+ if xref in (
+ "number",
+ "object",
+ "string",
+ "boolean",
+ "any",
+ "true",
+ "false",
+ "null",
+ "undefined",
+ "Array",
+ "unknown",
+ ):
+ return value
+
+ if self._bridge.docname is None:
+ return value
+ if xref is None:
+ return value
+ content = caption if caption is not None else value
+ ts = self._builder.env.get_domain("ts")
+ r1 = ts.objects.get(("type", xref), None)
+ if r1 is not None:
+ rel_uri = (
+ self._builder.get_relative_uri(self._bridge.docname, r1[0])
+ + "#"
+ + r1[1]
+ )
+ return (
+ '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
+ % (rel_uri, content)
+ )
+
+ std = self._builder.env.get_domain("std")
+ r2 = std.labels.get(xref.lower(), None)
+ if r2 is not None:
+ rel_uri = (
+ self._builder.get_relative_uri(self._bridge.docname, r2[0])
+ + "#"
+ + r2[1]
+ )
+ return (
+ '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
+ % (rel_uri, content)
+ )
+ r3 = std.anonlabels.get(xref.lower(), None)
+ if r3 is not None:
+ rel_uri = (
+ self._builder.get_relative_uri(self._bridge.docname, r3[0])
+ + "#"
+ + r3[1]
+ )
+ return (
+ '<a style="color:inherit;text-decoration:underline" href="%s">%s</a>'
+ % (rel_uri, content)
+ )
+
+ logger.warn(
+ "{}:{}: code block contains unresolved xref '{}'".format(
+ self._bridge.path, self._bridge.line, xref
+ )
+ )
+
+ return value
+
+ def _fmt(self, value, tok):
+ cls = self._get_css_class(tok)
+ value = self._get_value(value, tok)
+ if cls is None or cls == "":
+ return value
+ return '<span class="%s">%s</span>' % (cls, value)
+
+ def _format_lines(self, tokensource):
+ """
+ Just format the tokens, without any wrapping tags.
+ Yield individual lines.
+ """
+ lsep = self.lineseparator
+ escape_table = _escape_html_table
+
+ line = ""
+ for ttype, value in tokensource:
+ link = get_annotation(ttype, "link")
+
+ parts = value.translate(escape_table).split("\n")
+
+ if len(parts) == 0:
+ # empty token, usually should not happen
+ pass
+ elif len(parts) == 1:
+ # no newline before or after token
+ line += self._fmt(parts[0], ttype)
+ else:
+ line += self._fmt(parts[0], ttype)
+ yield 1, line + lsep
+ for part in parts[1:-1]:
+ yield 1, self._fmt(part, ttype) + lsep
+ line = self._fmt(parts[-1], ttype)
+
+ if line:
+ yield 1, line + lsep
+
+
+class MyPygmentsBridge(PygmentsBridge):
+ def __init__(self, builder, trim_doctest_flags):
+ self.dest = "html"
+ self.trim_doctest_flags = trim_doctest_flags
+ self.formatter_args = {
+ "style": SphinxStyle,
+ "_builder": builder,
+ "_bridge": self,
+ }
+ self.formatter = LinkingHtmlFormatter
+ self.builder = builder
+ self.path = None
+ self.line = None
+ self.docname = None
+
+ def highlight_block(
+ self, source, lang, opts=None, force=False, location=None, **kwargs
+ ):
+ if isinstance(location, tuple):
+ docname, line = location
+ self.line = line
+ self.path = self.builder.env.doc2path(docname)
+ self.docname = docname
+ elif isinstance(location, Element):
+ self.line = location.line
+ self.path = location.source
+ self.docname = self.builder.env.path2doc(self.path)
+ return super().highlight_block(source, lang, opts, force, location, **kwargs)
+
+
+class MyHtmlBuilder(StandaloneHTMLBuilder):
+ name = "html-linked"
+
+ def init_highlighter(self):
+ if self.config.pygments_style is not None:
+ style = self.config.pygments_style
+ elif self.theme:
+ style = self.theme.get_confstr("theme", "pygments_style", "none")
+ else:
+ style = "sphinx"
+ self.highlighter = MyPygmentsBridge(self, self.config.trim_doctest_flags)
+ self.dark_highlighter = None
+
+
+def get_annotation(tok, key):
+ if not hasattr(tok, "kv"):
+ return None
+ return tok.kv.get(key)
+
+
+def copy_token(tok):
+ new_tok = _TokenType(tok)
+ # This part is very fragile against API changes ...
+ new_tok.subtypes = set(tok.subtypes)
+ new_tok.parent = tok.parent
+ return new_tok
+
+
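+# Extra per-token properties live out-of-band in token_props, keyed by
+# id(tok); the token object is kept in the stored tuple so the id stays
+# valid for the lifetime of the entry.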
+def tok_setprop(tok, key, value):
+ tokid = id(tok)
+ e = token_props.get(tokid)
+ if e is None:
+ e = token_props[tokid] = (tok, {})
+ _, kv = e
+ kv[key] = value
+
+
+def tok_getprop(tok, key):
+ tokid = id(tok)
+ e = token_props.get(tokid)
+ if e is None:
+ return None
+ _, kv = e
+ return kv.get(key)
+
+
+link_reg = re.compile(r"(?<!`)`([^`<]+)\s*(?:<([^>]+)>)?\s*`_?")
+literal_reg = re.compile(r"``([^`]+)``")
+
+
+def setup(app):
+
+ class TsrefLexer(BetterTypeScriptLexer):
+ def __init__(self, **options):
+ super().__init__(**options)
+ self.add_filter(LinkFilter(app))
+
+ app.add_lexer("tsref", TsrefLexer)
+ app.add_domain(TypeScriptDomain)
+ app.add_builder(MyHtmlBuilder)
diff --git a/conf.py b/conf.py
@@ -16,11 +16,14 @@
# -- Project information -----------------------------------------------------
+import sys
+import os
project = 'GNUnet'
copyright = '2022, GNUnet Project'
author = 'GNUnet Project'
+sys.path.append(os.path.abspath("_exts"))
# -- General configuration ---------------------------------------------------
@@ -28,6 +31,8 @@ author = 'GNUnet Project'
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
+ 'httpdomain.httpdomain',
+ 'typescriptdomain',
'sphinx.ext.todo',
'sphinx_rtd_theme',
#'breathe'
diff --git a/developers/rest/namestore.rst b/developers/rest/namestore.rst
@@ -1,188 +1,132 @@
Namestore API Service
=====================
-Definition
-~~~~~~~~~~
+.. _ref-gnsrecord:
-Variables in single quotes ``'...'`` can or must be changed according to your specific case.
+Record Set
+----------
-``name`` is the name of a zone. A zone is the name of an identity in this case.
+Namestore entries are GNS record sets. A record set consists of a ``record_name`` and a ``data`` field.
-.. _ref-gnsrecord:
+ .. ts:def:: RecordSet
-GNS Record
-----------
+ interface RecordSet {
+
+ // The name of the record set
+ record_name: string;
-Namestore entries are GNS records. GNS records have a ``record_type``, a ``value``, an ``expiration_time``, a ``flag`` and a ``record_name``.
-Adding a GNS records with the same record_name overwrites the old GNS record.
+ // The record set data array.
+ data: RecordData[];
-A GNS record is sent in the JSON format: ``{"value": "value", "record_type": "type", "expiration_time": "time", "flag": flag, "record_name": "rname"}``
+ }
-``type`` is the type of the record, e.g. "PKEY" for private key. It can be ANY, PKEY, NICK, LEHO, VPN, GNS2DNS, BOX, PLACE, PHONE, ID_ATTR, ID_TOKEN, ID_TOKEN_METADATA, CREDENTIAL, POLICY, ATTRIBUTE, ABE_KEY, ABE_MASTER. You can find more information in header ``src/gnunet_gnsrecord_lib.h``
+ interface RecordData {
-``value`` is the value of the specific type of the record, e.g. the private key of an identity.
+ // The string representation of the record data value, e.g. "1.2.3.4" for an A record
+ value: string;
-``time`` is the expiration time of the record either "never" or fancy time (see GNUNET_STRINGS_fancy_time_to_absolute)
+ // The string representation of the record type, e.g. "A" for an IPv4 address
+ record_type: string;
-``flag`` is the option of the record. Either 0 for none, 2 for private, 8 for relative expiration or 16 if all other records have expired. ``flag`` must be a number.
+ // The expiration time, e.g. "1 day"
+ expiration_time: string;
-``rname`` is the name of the record.
+ // Whether or not this is a private record
+ private: boolean;
+
+ // Whether or not the expiration time is relative (else absolute)
+ relative_expiration: boolean;
+
+ // Whether or not this is a supplemental record
+ supplemental: boolean;
+
+ // Whether or not this is a shadow record
+ shadow: boolean;
+
+ }
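+
+A record set for the label ``www`` holding a single public ``A`` record could
+look like this (all values are illustrative):
+
+.. code-block:: json
+
+   {
+     "record_name": "www",
+     "data": [
+       {
+         "value": "1.2.3.4",
+         "record_type": "A",
+         "expiration_time": "1 day",
+         "private": false,
+         "relative_expiration": true,
+         "supplemental": false,
+         "shadow": false
+       }
+     ]
+   }
+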
Error Response
--------------
-An error response is sent in the JSON format: ``{"error":"*error_description*"}``
+All error responses are sent with a `NamestoreError` body.
-Following numbers are added for references inside the documentation only.
+ .. ts:def:: NamestoreError
-Error descriptions are::
-
- Nr. Error Description - Explanation
- 1) Unknown Error - Error is not specified
- 2) No identity found - Identity was not found with given name, this is combined with the HTTP Error Code 404 Not Found
- 3) No default zone specified - Identity name was not given and no identiy was added to the subsystem namestore
- 4) Namestore action failed - The task of the namestore API (not REST API) failed
- 5) No data - Missing data
- 6) Data invalid - Wrong data given
- 7) Error storing records - POST request failed
- 8) Deleting record failed - DELETE request failed
- 9) No record found - Delete failed due to missing record, this is combined with the HTTP Error Code 404 Not Found
+ interface NamestoreError {
-Error ``1)`` is always possible and is not listed in following requests.
+ // The error description
+ error: string;
-ATTENTION: Any error message from the Namestore API (not REST API) can occur and can be returned in the error response. These responses are not listed here.
+ }
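+
+For example, an error response body could look like this (the error string is
+illustrative):
+
+.. code-block:: json
+
+   {
+     "error": "The zone was not found"
+   }
+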
Requests
-~~~~~~~~
-
-GET Request
-------------
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Title** |Returns all namestore entries of default identity |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** |**GET** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**|[{"value": "*value*", "record_type": "*type*", "expiration_time": "*time*", "flag": *flag*, "record_name": "*rname*"},...] |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`3; 4` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-|
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Title** |Returns all namestore entries for one zone specified by its name |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore/'name'` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** |**GET** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**|[{"value": "*value*", "record_type": "*type*", "expiration_time": "*time*", "flag": *flag*, "record_name": "*rname*"},...] |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`2; 4` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-POST Request
-------------
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Title** |Creates a namestore entry for default identity |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** |**POST** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** |{"value": "*value*", "record_type": "*type*", "expiration_time": "*time*", "flag": *flag*, "record_name": "*rname*"} |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**|Response Code: :literal:`204` (No Content) |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`3; 4; 5; 6; 7` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-|
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Title** |Creates a namestore entry for one zone specified by its name |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore/'name'` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** |**POST** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** |{"value": "*value*", "record_type": "*type*", "expiration_time": "*time*", "flag": *flag*, "record_name": "*rname*"} |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**|Response Code: :literal:`204` (No Content) |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`2; 4; 5; 6; 7` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-
-DELETE Request
---------------
+--------
+
+.. http:get:: /namestore/$ZNAME
+
+ This endpoint returns all namestore entries for the zone identified by $ZNAME.
+
+ **Response**
+
+ :http:statuscode:`200 OK`:
+ The body is a `RecordSet` array.
+ :http:statuscode:`404 Not Found`:
+ The zone $ZNAME was not found.
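+
+   For instance, a hypothetical ``GET /namestore/myzone`` could return an
+   array with a single record set (zone name and values are illustrative):
+
+   .. code-block:: json
+
+      [
+        {
+          "record_name": "www",
+          "data": [
+            {
+              "value": "::1",
+              "record_type": "AAAA",
+              "expiration_time": "1 day",
+              "private": false,
+              "relative_expiration": true,
+              "supplemental": false,
+              "shadow": false
+            }
+          ]
+        }
+      ]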
+
+
+.. http:post:: /namestore/$ZNAME
+
+ Create a record set under a label, or append records to an existing record set.
+
+ **Request**
+ The request body is a single `RecordSet`.
+
+ **Response**
+
+ :http:statuscode:`204 No Content`:
+ The record set was successfully added.
+ :http:statuscode:`404 Not Found`:
+ The zone $ZNAME was not found.
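+
+   As an illustration, posting the following body to ``/namestore/myzone`` (a
+   hypothetical zone) would add a private ``TXT`` record under the label
+   ``test`` (values are illustrative):
+
+   .. code-block:: json
+
+      {
+        "record_name": "test",
+        "data": [
+          {
+            "value": "Hello GNUnet",
+            "record_type": "TXT",
+            "expiration_time": "never",
+            "private": true,
+            "relative_expiration": false,
+            "supplemental": false,
+            "shadow": false
+          }
+        ]
+      }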
+
+.. http:put:: /namestore/$ZNAME
+
+ Create a record set under a label, or replace an existing record set.
+
+ **Request**
+ The request body is a single `RecordSet`.
+
+ **Response**
+
+ :http:statuscode:`204 No Content`:
+ The record set was successfully updated.
+ :http:statuscode:`404 Not Found`:
+ The zone $ZNAME was not found.
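+
+   Sending the body from the POST example above via PUT would replace any
+   records previously stored under the label ``test`` instead of appending to
+   them.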
+
+
+
+.. http:delete:: /namestore/$ZNAME?record_name=$RNAME
+
+ Delete the record set stored under the label $RNAME in the zone $ZNAME.
+
+ **Response**
+
+ :http:statuscode:`204 No Content`:
+ The record set under $RNAME was successfully deleted.
+ :http:statuscode:`404 Not Found`:
+ The zone $ZNAME was not found.
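+
+   For example, ``DELETE /namestore/myzone?record_name=www`` would remove the
+   record set stored under the label ``www`` in the (hypothetical) zone
+   ``myzone``.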
+
+.. http:post:: /namestore/import/$ZNAME
+
+ Bulk import of record sets (existing record sets are not replaced).
+
+ **Request**
+ The request body is a `RecordSet` array.
+
+ **Response**
+
+ :http:statuscode:`204 No Content`:
+ The record sets were successfully added.
+ :http:statuscode:`404 Not Found`:
+ The zone $ZNAME was not found.
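+
+   For example, the array from the GET example above could be posted unchanged
+   to ``/namestore/import/myzone`` to import those record sets into the
+   (hypothetical) zone ``myzone``.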
+
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-| **Title** | Deletes specific namestore entry for default identity |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore?record_name='rname'` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** | **DELETE** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** | none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** | none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**| Response Code: :literal:`204` (No Content) |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`3; 4; 6; 8; 9` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-|
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-| **Title** | Deletes specific namestore entry in specific zone |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore/'name'?record_name='rname'` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** | **DELETE** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** | none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** | none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**| Response Code: :literal:`204` (No Content) |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** | {"error":"*error_desc*"} :sup:`2; 4; 6; 8; 9` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-
-
-OPTIONS Request
----------------
-
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Title** |Gets request options |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL** |:literal:`/namestore` |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Method** |**OPTIONS** |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**URL Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Data Params** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Success Response**| |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+
-|**Error Response** |none |
-+--------------------+---------------------------------------------------------------------------------------------------------------------------+