Added Black formatting

pull/161/head
Ashley Whetter 5 years ago
parent c4fe76ef23
commit f77bf19800

@ -0,0 +1,11 @@
repos:
- repo: https://github.com/ambv/black
rev: 18.9b0
hooks:
- id: black
language_version: python3.6
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v1.2.3
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer

@ -11,6 +11,9 @@ Sphinx AutoAPI
.. image:: https://ci.appveyor.com/api/projects/status/5nd33gp2eq7411t1?svg=true
:target: https://ci.appveyor.com/project/ericholscher/sphinx-autoapi
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
.. warning:: This is a pre-release version. Some or all features might not work yet.
Sphinx AutoAPI aims to provide "autodoc" or "javadoc" style documentation for Sphinx.

@ -1,22 +1,27 @@
from .mappers import DotNetSphinxMapper, PythonSphinxMapper, GoSphinxMapper, JavaScriptSphinxMapper
from .mappers import (
DotNetSphinxMapper,
PythonSphinxMapper,
GoSphinxMapper,
JavaScriptSphinxMapper,
)
default_file_mapping = {
'python': ['*.py'],
'dotnet': ['project.json', '*.csproj', '*.vbproj'],
'go': ['*.go'],
'javascript': ['*.js'],
"python": ["*.py"],
"dotnet": ["project.json", "*.csproj", "*.vbproj"],
"go": ["*.go"],
"javascript": ["*.js"],
}
default_ignore_patterns = {
'dotnet': ['*toc.yml', '*index.yml'],
'python': ['*migrations*'],
"dotnet": ["*toc.yml", "*index.yml"],
"python": ["*migrations*"],
}
default_backend_mapping = {
'python': PythonSphinxMapper,
'dotnet': DotNetSphinxMapper,
'go': GoSphinxMapper,
'javascript': JavaScriptSphinxMapper,
"python": PythonSphinxMapper,
"dotnet": DotNetSphinxMapper,
"go": GoSphinxMapper,
"javascript": JavaScriptSphinxMapper,
}

@ -21,9 +21,9 @@ class AutoapiSummary(Directive):
final_argument_whitespace = False
has_content = True
option_spec = {
'toctree': directives.unchanged,
'nosignatures': directives.flag,
'template': directives.unchanged,
"toctree": directives.unchanged,
"nosignatures": directives.flag,
"template": directives.unchanged,
}
def warn(self, msg):
@ -44,7 +44,7 @@ class AutoapiSummary(Directive):
"""
for line in self.content:
line = line.strip()
if line and re.search('^[a-zA-Z0-9]', line):
if line and re.search("^[a-zA-Z0-9]", line):
yield line
def run(self):
@ -56,68 +56,66 @@ class AutoapiSummary(Directive):
objects = [mapper.all_objects[name] for name in self._get_names()]
nodes_ = self._get_table(objects)
if 'toctree' in self.options:
if "toctree" in self.options:
dirname = posixpath.dirname(env.docname)
tree_prefix = self.options['toctree'].strip()
tree_prefix = self.options["toctree"].strip()
docnames = []
for obj in objects:
docname = posixpath.join(tree_prefix, obj.name)
docname = posixpath.normpath(posixpath.join(dirname, docname))
if docname not in env.found_docs:
self.warn(
'toctree references unknown document {}'.format(docname)
)
self.warn("toctree references unknown document {}".format(docname))
docnames.append(docname)
tocnode = addnodes.toctree()
tocnode['includefiles'] = docnames
tocnode['entries'] = [(None, docn) for docn in docnames]
tocnode['maxdepth'] = -1
tocnode['glob'] = None
tocnode["includefiles"] = docnames
tocnode["entries"] = [(None, docn) for docn in docnames]
tocnode["maxdepth"] = -1
tocnode["glob"] = None
tocnode = sphinx.ext.autosummary.autosummary_toc('', '', tocnode)
tocnode = sphinx.ext.autosummary.autosummary_toc("", "", tocnode)
nodes_.append(tocnode)
return self.warnings + nodes_
def _get_row(self, obj):
template = ':{}:`{} <{}>`\\ {}'
if 'nosignatures' in self.options:
template = ':{}:`{} <{}>`'
template = ":{}:`{} <{}>`\\ {}"
if "nosignatures" in self.options:
template = ":{}:`{} <{}>`"
col1 = template.format(
'obj', obj.short_name, obj.name, escape('({})'.format(obj.args)),
"obj", obj.short_name, obj.name, escape("({})".format(obj.args))
)
col2 = obj.summary
row = nodes.row('')
row = nodes.row("")
for text in (col1, col2):
node = nodes.paragraph('')
node = nodes.paragraph("")
view_list = ViewList()
view_list.append(text, '<autosummary>')
view_list.append(text, "<autosummary>")
self.state.nested_parse(view_list, 0, node)
try:
if isinstance(node[0], nodes.paragraph):
node = node[0]
except IndexError:
pass
row.append(nodes.entry('', node))
row.append(nodes.entry("", node))
return row
def _get_table(self, objects):
table_spec = addnodes.tabular_col_spec()
table_spec['spec'] = r'p{0.5\linewidth}p{0.5\linewidth}'
table_spec["spec"] = r"p{0.5\linewidth}p{0.5\linewidth}"
table = sphinx.ext.autosummary.autosummary_table('')
real_table = nodes.table('', classes=['longtable'])
table = sphinx.ext.autosummary.autosummary_table("")
real_table = nodes.table("", classes=["longtable"])
table.append(real_table)
group = nodes.tgroup('', cols=2)
group = nodes.tgroup("", cols=2)
real_table.append(group)
group.append(nodes.colspec('', colwidth=10))
group.append(nodes.colspec('', colwidth=90))
body = nodes.tbody('')
group.append(nodes.colspec("", colwidth=10))
group.append(nodes.colspec("", colwidth=90))
body = nodes.tbody("")
group.append(body)
for obj in objects:

@ -13,8 +13,9 @@ from . import utils
class AutoapiDocumenter(autodoc.Documenter):
def get_attr(self, obj, name, *defargs):
if (hasattr(self.env.app, 'registry')
and hasattr(self.env.app.registry, 'autodocattrgettrs')):
if hasattr(self.env.app, "registry") and hasattr(
self.env.app.registry, "autodocattrgettrs"
):
attrgetters = self.env.app.registry.autodoc_attrgettrs
else:
attrgetters = autodoc.AutoDirective._special_attrgetters
@ -23,7 +24,7 @@ class AutoapiDocumenter(autodoc.Documenter):
if isinstance(obj, type_):
return func(obj, name, *defargs)
if name == '__doc__':
if name == "__doc__":
return obj.docstring
for child in obj.children:
@ -36,9 +37,9 @@ class AutoapiDocumenter(autodoc.Documenter):
raise AttributeError(name)
def import_object(self):
max_splits = self.fullname.count('.')
max_splits = self.fullname.count(".")
for num_splits in range(max_splits, -1, -1):
path_stack = list(reversed(self.fullname.rsplit('.', num_splits)))
path_stack = list(reversed(self.fullname.rsplit(".", num_splits)))
objects = self.env.autoapi_mapper.objects
parent = objects[path_stack.pop()]
while parent and path_stack:
@ -63,7 +64,7 @@ class AutoapiDocumenter(autodoc.Documenter):
for line in docstring:
yield line
yield ''
yield ""
def get_object_members(self, want_all):
children = ((child.name, child) for child in self.object.children)
@ -72,16 +73,14 @@ class AutoapiDocumenter(autodoc.Documenter):
if not self.options.members:
return False, []
children = (
child for child in children if child[0] in self.options.members
)
children = (child for child in children if child[0] in self.options.members)
return False, sorted(children)
class AutoapiFunctionDocumenter(AutoapiDocumenter, autodoc.FunctionDocumenter):
objtype = 'apifunction'
directivetype = 'function'
objtype = "apifunction"
directivetype = "function"
# Always prefer AutoapiDocumenters
priority = autodoc.FunctionDocumenter.priority * 100 + 100
@ -90,12 +89,12 @@ class AutoapiFunctionDocumenter(AutoapiDocumenter, autodoc.FunctionDocumenter):
return isinstance(member, PythonFunction)
def format_args(self):
return '(' + self.object.args + ')'
return "(" + self.object.args + ")"
class AutoapiClassDocumenter(AutoapiDocumenter, autodoc.ClassDocumenter):
objtype = 'apiclass'
directivetype = 'class'
objtype = "apiclass"
directivetype = "class"
doc_as_attr = False
priority = autodoc.ClassDocumenter.priority * 100 + 100
@ -104,29 +103,24 @@ class AutoapiClassDocumenter(AutoapiDocumenter, autodoc.ClassDocumenter):
return isinstance(member, PythonClass)
def format_args(self):
return '(' + self.object.args + ')'
return "(" + self.object.args + ")"
def add_directive_header(self, sig):
autodoc.Documenter.add_directive_header(self, sig)
if self.options.show_inheritance:
sourcename = self.get_sourcename()
self.add_line(u'', sourcename)
self.add_line(u"", sourcename)
# TODO: Change sphinx to allow overriding of getting base names
if self.object.bases:
bases = [
':class:`{}`'.format(base) for base in self.object.bases
]
self.add_line(
' ' + 'Bases: {}'.format(', '.join(bases)),
sourcename,
)
bases = [":class:`{}`".format(base) for base in self.object.bases]
self.add_line(" " + "Bases: {}".format(", ".join(bases)), sourcename)
class AutoapiMethodDocumenter(AutoapiDocumenter, autodoc.MethodDocumenter):
objtype = 'apimethod'
directivetype = 'method'
objtype = "apimethod"
directivetype = "method"
priority = autodoc.MethodDocumenter.priority * 100 + 100
@classmethod
@ -134,13 +128,13 @@ class AutoapiMethodDocumenter(AutoapiDocumenter, autodoc.MethodDocumenter):
return isinstance(member, PythonMethod)
def format_args(self):
return '(' + self.object.args + ')'
return "(" + self.object.args + ")"
def import_object(self):
result = super(AutoapiMethodDocumenter, self).import_object()
if result:
if self.object.method_type != 'method':
if self.object.method_type != "method":
self.directivetype = self.object.method_type
# document class and static members before ordinary ones
self.member_order = self.member_order - 1
@ -149,8 +143,8 @@ class AutoapiMethodDocumenter(AutoapiDocumenter, autodoc.MethodDocumenter):
class AutoapiDataDocumenter(AutoapiDocumenter, autodoc.DataDocumenter):
objtype = 'apidata'
directivetype = 'data'
objtype = "apidata"
directivetype = "data"
priority = autodoc.DataDocumenter.priority * 100 + 100
@classmethod
@ -164,21 +158,17 @@ class AutoapiDataDocumenter(AutoapiDocumenter, autodoc.DataDocumenter):
# TODO: Change sphinx to allow overriding of object description
if self.object.value is not None:
self.add_line(
' :annotation: = {}'.format(self.object.value),
sourcename,
" :annotation: = {}".format(self.object.value), sourcename
)
elif self.options.annotation is autodoc.SUPPRESS:
pass
else:
self.add_line(
' :annotation: %s' % self.options.annotation,
sourcename,
)
self.add_line(" :annotation: %s" % self.options.annotation, sourcename)
class AutoapiAttributeDocumenter(AutoapiDocumenter, autodoc.AttributeDocumenter):
objtype = 'apiattribute'
directivetype = 'attribute'
objtype = "apiattribute"
directivetype = "attribute"
_datadescriptor = True
priority = autodoc.AttributeDocumenter.priority * 100 + 100
@ -193,28 +183,25 @@ class AutoapiAttributeDocumenter(AutoapiDocumenter, autodoc.AttributeDocumenter)
# TODO: Change sphinx to allow overriding of object description
if self.object.value is not None:
self.add_line(
' :annotation: = {}'.format(self.object.value),
sourcename,
" :annotation: = {}".format(self.object.value), sourcename
)
elif self.options.annotation is autodoc.SUPPRESS:
pass
else:
self.add_line(
' :annotation: %s' % self.options.annotation,
sourcename,
)
self.add_line(" :annotation: %s" % self.options.annotation, sourcename)
class AutoapiModuleDocumenter(AutoapiDocumenter, autodoc.ModuleDocumenter):
objtype = 'apimodule'
directivetype = 'module'
objtype = "apimodule"
directivetype = "module"
priority = autodoc.ModuleDocumenter.priority * 100 + 100
class AutoapiExceptionDocumenter(
AutoapiClassDocumenter, AutoapiDocumenter, autodoc.ExceptionDocumenter):
objtype = 'apiexception'
directivetype = 'exception'
AutoapiClassDocumenter, AutoapiDocumenter, autodoc.ExceptionDocumenter
):
objtype = "apiexception"
directivetype = "exception"
priority = autodoc.ExceptionDocumenter.priority * 100 + 100
@classmethod

@ -17,14 +17,18 @@ from docutils.parsers.rst import directives
from . import documenters
from . import utils
from .backends import default_file_mapping, default_ignore_patterns, default_backend_mapping
from .backends import (
default_file_mapping,
default_ignore_patterns,
default_backend_mapping,
)
from .directives import AutoapiSummary, NestedParse
from .settings import API_ROOT
from .toctree import add_domain_to_toctree
LOGGER = sphinx.util.logging.getLogger(__name__)
default_options = ['members', 'undoc-members', 'private-members', 'special-members']
default_options = ["members", "undoc-members", "private-members", "special-members"]
_viewcode_cache = {}
"""Caches a module's parse results for use in viewcode.
@ -38,7 +42,7 @@ def run_autoapi(app):
"""
if not app.config.autoapi_dirs:
raise ExtensionError('You must configure an autoapi_dirs setting')
raise ExtensionError("You must configure an autoapi_dirs setting")
# Make sure the paths are full
normalized_dirs = []
@ -49,25 +53,24 @@ def run_autoapi(app):
if os.path.isabs(path):
normalized_dirs.append(path)
else:
normalized_dirs.append(
os.path.normpath(os.path.join(app.confdir, path))
)
normalized_dirs.append(os.path.normpath(os.path.join(app.confdir, path)))
for _dir in normalized_dirs:
if not os.path.exists(_dir):
raise ExtensionError(
'AutoAPI Directory `{dir}` not found. '
'Please check your `autoapi_dirs` setting.'.format(
dir=_dir
)
"AutoAPI Directory `{dir}` not found. "
"Please check your `autoapi_dirs` setting.".format(dir=_dir)
)
normalized_root = os.path.normpath(os.path.join(app.confdir, app.config.autoapi_root))
url_root = os.path.join('/', app.config.autoapi_root)
normalized_root = os.path.normpath(
os.path.join(app.confdir, app.config.autoapi_root)
)
url_root = os.path.join("/", app.config.autoapi_root)
sphinx_mapper = default_backend_mapping[app.config.autoapi_type]
sphinx_mapper_obj = sphinx_mapper(app, template_dir=app.config.autoapi_template_dir,
url_root=url_root)
sphinx_mapper_obj = sphinx_mapper(
app, template_dir=app.config.autoapi_template_dir, url_root=url_root
)
app.env.autoapi_mapper = sphinx_mapper_obj
if app.config.autoapi_file_patterns:
@ -80,42 +83,39 @@ def run_autoapi(app):
else:
ignore_patterns = default_ignore_patterns.get(app.config.autoapi_type, [])
if '.rst' in app.config.source_suffix:
out_suffix = '.rst'
elif '.txt' in app.config.source_suffix:
out_suffix = '.txt'
if ".rst" in app.config.source_suffix:
out_suffix = ".rst"
elif ".txt" in app.config.source_suffix:
out_suffix = ".txt"
else:
# Fallback to first suffix listed
out_suffix = app.config.source_suffix[0]
# Actual meat of the run.
LOGGER.info(bold('[AutoAPI] ') + darkgreen('Loading Data'))
LOGGER.info(bold("[AutoAPI] ") + darkgreen("Loading Data"))
sphinx_mapper_obj.load(
patterns=file_patterns,
dirs=normalized_dirs,
ignore=ignore_patterns,
patterns=file_patterns, dirs=normalized_dirs, ignore=ignore_patterns
)
LOGGER.info(bold('[AutoAPI] ') + darkgreen('Mapping Data'))
LOGGER.info(bold("[AutoAPI] ") + darkgreen("Mapping Data"))
sphinx_mapper_obj.map(options=app.config.autoapi_options)
if app.config.autoapi_generate_api_docs:
LOGGER.info(bold('[AutoAPI] ') + darkgreen('Rendering Data'))
sphinx_mapper_obj.output_rst(
root=normalized_root,
source_suffix=out_suffix,
)
LOGGER.info(bold("[AutoAPI] ") + darkgreen("Rendering Data"))
sphinx_mapper_obj.output_rst(root=normalized_root, source_suffix=out_suffix)
def build_finished(app, exception):
if not app.config.autoapi_keep_files and app.config.autoapi_generate_api_docs:
normalized_root = os.path.normpath(os.path.join(app.confdir, app.config.autoapi_root))
normalized_root = os.path.normpath(
os.path.join(app.confdir, app.config.autoapi_root)
)
if app.verbosity > 1:
LOGGER.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .rst files'))
LOGGER.info(bold("[AutoAPI] ") + darkgreen("Cleaning generated .rst files"))
shutil.rmtree(normalized_root)
sphinx_mapper = default_backend_mapping[app.config.autoapi_type]
if hasattr(sphinx_mapper, 'build_finished'):
if hasattr(sphinx_mapper, "build_finished"):
sphinx_mapper.build_finished(app, exception)
@ -123,16 +123,16 @@ def doctree_read(app, doctree):
"""
Inject AutoAPI into the TOC Tree dynamically.
"""
if app.env.docname == 'index':
if app.env.docname == "index":
all_docs = set()
insert = True
nodes = doctree.traverse(toctree)
toc_entry = '%s/index' % app.config.autoapi_root
toc_entry = "%s/index" % app.config.autoapi_root
if not nodes:
return
# Capture all existing toctree entries
for node in nodes:
for entry in node['entries']:
for entry in node["entries"]:
all_docs.add(entry[1])
# Don't insert autoapi it's already present
for doc in all_docs:
@ -140,13 +140,11 @@ def doctree_read(app, doctree):
insert = False
if insert and app.config.autoapi_add_toctree_entry:
# Insert AutoAPI index
nodes[-1]['entries'].append(
(None, u'%s/index' % app.config.autoapi_root)
)
nodes[-1]['includefiles'].append(u'%s/index' % app.config.autoapi_root)
message_prefix = bold('[AutoAPI] ')
nodes[-1]["entries"].append((None, u"%s/index" % app.config.autoapi_root))
nodes[-1]["includefiles"].append(u"%s/index" % app.config.autoapi_root)
message_prefix = bold("[AutoAPI] ")
message = darkgreen(
'Adding AutoAPI TOCTree [{0}] to index.rst'.format(toc_entry)
"Adding AutoAPI TOCTree [{0}] to index.rst".format(toc_entry)
)
LOGGER.info(message_prefix + message)
@ -167,28 +165,30 @@ def viewcode_find(app, modname):
locations = {}
module = mapper.objects[modname]
for child in module.children:
stack = [('', child)]
stack = [("", child)]
while stack:
prefix, obj = stack.pop()
type_ = 'other'
if obj.type == 'class':
type_ = 'class'
elif obj.type in ('function', 'method'):
type_ = 'def'
type_ = "other"
if obj.type == "class":
type_ = "class"
elif obj.type in ("function", "method"):
type_ = "def"
full_name = prefix + obj.name
if 'from_line_no' in obj.obj:
if "from_line_no" in obj.obj:
locations[full_name] = (
type_, obj.obj['from_line_no'], obj.obj['to_line_no'],
type_,
obj.obj["from_line_no"],
obj.obj["to_line_no"],
)
children = getattr(obj, 'children', ())
stack.extend((full_name + '.', gchild) for gchild in children)
children = getattr(obj, "children", ())
stack.extend((full_name + ".", gchild) for gchild in children)
if module.obj['encoding']:
if module.obj["encoding"]:
source = codecs.open(
module.obj['file_path'], encoding=module.obj['encoding']
module.obj["file_path"], encoding=module.obj["encoding"]
).read()
else:
source = open(module.obj['file_path']).read()
source = open(module.obj["file_path"]).read()
result = (source, locations)
_viewcode_cache[modname] = result
@ -196,41 +196,41 @@ def viewcode_find(app, modname):
def viewcode_follow_imported(app, modname, attribute):
fullname = '{}.{}'.format(modname, attribute)
fullname = "{}.{}".format(modname, attribute)
mapper = app.env.autoapi_mapper
if fullname not in mapper.all_objects:
return None
orig_path = mapper.all_objects[fullname].obj.get('original_path', '')
orig_path = mapper.all_objects[fullname].obj.get("original_path", "")
if orig_path.endswith(attribute):
return orig_path[:-len(attribute) - 1]
return orig_path[: -len(attribute) - 1]
return modname
def setup(app):
app.connect('builder-inited', run_autoapi)
app.connect('doctree-read', doctree_read)
app.connect('doctree-resolved', add_domain_to_toctree)
app.connect('build-finished', build_finished)
app.connect('env-updated', clear_env)
app.connect("builder-inited", run_autoapi)
app.connect("doctree-read", doctree_read)
app.connect("doctree-resolved", add_domain_to_toctree)
app.connect("build-finished", build_finished)
app.connect("env-updated", clear_env)
if sphinx.version_info >= (1, 8):
if 'viewcode-find-source' in app.events.events:
app.connect('viewcode-find-source', viewcode_find)
if 'viewcode-follow-imported' in app.events.events:
app.connect('viewcode-follow-imported', viewcode_follow_imported)
app.add_config_value('autoapi_type', 'python', 'html')
app.add_config_value('autoapi_root', API_ROOT, 'html')
app.add_config_value('autoapi_ignore', [], 'html')
app.add_config_value('autoapi_options', default_options, 'html')
app.add_config_value('autoapi_file_patterns', None, 'html')
app.add_config_value('autoapi_dirs', [], 'html')
app.add_config_value('autoapi_keep_files', False, 'html')
app.add_config_value('autoapi_add_toctree_entry', True, 'html')
app.add_config_value('autoapi_template_dir', None, 'html')
app.add_config_value('autoapi_include_summaries', False, 'html')
app.add_config_value('autoapi_python_class_content', 'class', 'html')
app.add_config_value('autoapi_generate_api_docs', True, 'html')
if "viewcode-find-source" in app.events.events:
app.connect("viewcode-find-source", viewcode_find)
if "viewcode-follow-imported" in app.events.events:
app.connect("viewcode-follow-imported", viewcode_follow_imported)
app.add_config_value("autoapi_type", "python", "html")
app.add_config_value("autoapi_root", API_ROOT, "html")
app.add_config_value("autoapi_ignore", [], "html")
app.add_config_value("autoapi_options", default_options, "html")
app.add_config_value("autoapi_file_patterns", None, "html")
app.add_config_value("autoapi_dirs", [], "html")
app.add_config_value("autoapi_keep_files", False, "html")
app.add_config_value("autoapi_add_toctree_entry", True, "html")
app.add_config_value("autoapi_template_dir", None, "html")
app.add_config_value("autoapi_include_summaries", False, "html")
app.add_config_value("autoapi_python_class_content", "class", "html")
app.add_config_value("autoapi_generate_api_docs", True, "html")
app.add_autodocumenter(documenters.AutoapiFunctionDocumenter)
app.add_autodocumenter(documenters.AutoapiClassDocumenter)
app.add_autodocumenter(documenters.AutoapiMethodDocumenter)
@ -238,6 +238,6 @@ def setup(app):
app.add_autodocumenter(documenters.AutoapiAttributeDocumenter)
app.add_autodocumenter(documenters.AutoapiModuleDocumenter)
app.add_autodocumenter(documenters.AutoapiExceptionDocumenter)
directives.register_directive('autoapi-nested-parse', NestedParse)
directives.register_directive('autoapisummary', AutoapiSummary)
app.setup_extension('sphinx.ext.autosummary')
directives.register_directive("autoapi-nested-parse", NestedParse)
directives.register_directive("autoapisummary", AutoapiSummary)
app.setup_extension("sphinx.ext.autosummary")

@ -12,7 +12,7 @@ import astroid.nodes
if sys.version_info < (3,):
_EXCEPTIONS_MODULE = "exceptions"
# getattr to keep linter happy
_STRING_TYPES = getattr(builtins, 'basestring')
_STRING_TYPES = getattr(builtins, "basestring")
else:
_EXCEPTIONS_MODULE = "builtins"
_STRING_TYPES = str
@ -60,10 +60,10 @@ def get_full_import_name(import_from, name):
module = import_from.root()
assert isinstance(module, astroid.nodes.Module)
module_name = module.relative_to_absolute_name(
import_from.modname, level=import_from.level,
import_from.modname, level=import_from.level
)
return '{}.{}'.format(module_name, partial_basename)
return "{}.{}".format(module_name, partial_basename)
def get_full_basename(node, basename):
@ -79,9 +79,9 @@ def get_full_basename(node, basename):
"""
full_basename = basename
top_level_name = re.sub(r'\(.*\)', '', basename).split('.', 1)[0]
top_level_name = re.sub(r"\(.*\)", "", basename).split(".", 1)[0]
lookup_node = node
while not hasattr(lookup_node, 'lookup'):
while not hasattr(lookup_node, "lookup"):
lookup_node = lookup_node.parent
assigns = lookup_node.lookup(top_level_name)[1]
for assignment in assigns:
@ -94,20 +94,17 @@ def get_full_basename(node, basename):
full_basename = basename.replace(top_level_name, import_name, 1)
break
elif isinstance(assignment, astroid.nodes.ClassDef):
full_basename = '{}.{}'.format(
assignment.root().name,
assignment.name,
)
full_basename = "{}.{}".format(assignment.root().name, assignment.name)
break
if isinstance(node, astroid.nodes.Call):
full_basename = re.sub(r'\(.*\)', '()', full_basename)
full_basename = re.sub(r"\(.*\)", "()", full_basename)
if full_basename.startswith('builtins.'):
return full_basename[len('builtins.'):]
if full_basename.startswith("builtins."):
return full_basename[len("builtins.") :]
if full_basename.startswith('__builtin__.'):
return full_basename[len('__builtin__.'):]
if full_basename.startswith("__builtin__."):
return full_basename[len("__builtin__.") :]
return full_basename
@ -201,7 +198,7 @@ def is_decorated_with_property(node):
def _is_property_decorator(decorator):
def _is_property_class(class_node):
return (
class_node.name == 'property'
class_node.name == "property"
and class_node.root().name == builtins.__name__
)
@ -231,8 +228,10 @@ def is_decorated_with_property_setter(node):
return False
for decorator in node.decorators.nodes:
if (isinstance(decorator, astroid.nodes.Attribute)
and decorator.attrname == "setter"):
if (
isinstance(decorator, astroid.nodes.Attribute)
and decorator.attrname == "setter"
):
return True
return False
@ -250,7 +249,7 @@ def is_constructor(node):
return (
node.parent
and isinstance(node.parent.scope(), astroid.nodes.ClassDef)
and node.name == '__init__'
and node.name == "__init__"
)
@ -263,16 +262,16 @@ def is_exception(node):
:returns: True if the class is an exception, False otherwise.
:rtype: bool
"""
if (node.name in ('Exception', 'BaseException')
and node.root().name == _EXCEPTIONS_MODULE):
if (
node.name in ("Exception", "BaseException")
and node.root().name == _EXCEPTIONS_MODULE
):
return True
if not hasattr(node, 'ancestors'):
if not hasattr(node, "ancestors"):
return False
return any(
is_exception(parent) for parent in node.ancestors(recurs=True)
)
return any(is_exception(parent) for parent in node.ancestors(recurs=True))
def is_local_import_from(node, package_name):
@ -294,7 +293,7 @@ def is_local_import_from(node, package_name):
return (
node.level
or node.modname == package_name
or node.modname.startswith(package_name + '.')
or node.modname.startswith(package_name + ".")
)
@ -309,11 +308,11 @@ def get_module_all(node):
"""
all_ = None
if '__all__' in node.locals:
assigned = next(node.igetattr('__all__'))
if "__all__" in node.locals:
assigned = next(node.igetattr("__all__"))
if assigned is not astroid.Uninferable:
all_ = []
for elt in getattr(assigned, 'elts', ()):
for elt in getattr(assigned, "elts", ()):
try:
elt_name = next(elt.infer())
except astroid.InferenceError:
@ -322,8 +321,9 @@ def get_module_all(node):
if elt_name is astroid.Uninferable:
continue
if (isinstance(elt_name, astroid.Const)
and isinstance(elt_name.value, _STRING_TYPES)):
if isinstance(elt_name, astroid.Const) and isinstance(
elt_name.value, _STRING_TYPES
):
all_.append(elt_name.value)
return all_

@ -15,12 +15,12 @@ from ..settings import API_ROOT
LOGGER = sphinx.util.logging.getLogger(__name__)
Path = namedtuple('Path', ['absolute', 'relative'])
Path = namedtuple("Path", ["absolute", "relative"])
class PythonMapperBase(object):
'''
"""
Base object for JSON -> Python object mapping.
Subclasses of this object will handle their language specific JSON input,
@ -46,10 +46,10 @@ class PythonMapperBase(object):
:var list children: Children of this object
:var list parameters: Parameters to this object
:var list methods: Methods on this object
'''
"""
language = 'base'
type = 'base'
language = "base"
type = "base"
# Create a page in the output for this object.
top_level_object = False
@ -59,19 +59,19 @@ class PythonMapperBase(object):
if jinja_env:
self.jinja_env = jinja_env
if url_root is None:
url_root = os.path.join('/', API_ROOT)
url_root = os.path.join("/", API_ROOT)
self.url_root = url_root
def render(self, **kwargs):
ctx = {}
try:
template = self.jinja_env.get_template(
'{language}/{type}.rst'.format(language=self.language, type=self.type)
"{language}/{type}.rst".format(language=self.language, type=self.type)
)
except TemplateNotFound:
# Use a try/except here so we fallback to language specific defaults, over base defaults
template = self.jinja_env.get_template(
'base/{type}.rst'.format(type=self.type)
"base/{type}.rst".format(type=self.type)
)
ctx.update(**self.get_context_data())
@ -80,38 +80,32 @@ class PythonMapperBase(object):
@property
def rendered(self):
'Shortcut to render an object in templates.'
"Shortcut to render an object in templates."
return self.render()
def get_absolute_path(self):
return "/autoapi/{type}/{name}".format(
type=self.type,
name=self.name,
)
return "/autoapi/{type}/{name}".format(type=self.type, name=self.name)
def get_context_data(self):
return {
'obj': self
}
return {"obj": self}
def __lt__(self, other):
'''Object sorting comparison'''
"""Object sorting comparison"""
if isinstance(other, PythonMapperBase):
return self.id < other.id
return super(PythonMapperBase, self).__lt__(other)
def __str__(self):
return '<{cls} {id}>'.format(cls=self.__class__.__name__,
id=self.id)
return "<{cls} {id}>".format(cls=self.__class__.__name__, id=self.id)
@property
def short_name(self):
'''Shorten name property'''
return self.name.split('.')[-1]
"""Shorten name property"""
return self.name.split(".")[-1]
@property
def pathname(self):
'''Sluggified path for filenames
"""Sluggified path for filenames
Slugs to a filename using the follow steps
@ -119,18 +113,18 @@ class PythonMapperBase(object):
* Remove existing hypens
* Substitute hyphens for non-word characters
* Break up the string as paths
'''
"""
slug = self.name
slug = unidecode.unidecode(slug)
slug = slug.replace('-', '')
slug = re.sub(r'[^\w\.]+', '-', slug).strip('-')
return os.path.join(*slug.split('.'))
slug = slug.replace("-", "")
slug = re.sub(r"[^\w\.]+", "-", slug).strip("-")
return os.path.join(*slug.split("."))
def include_dir(self, root):
"""Return directory of file"""
parts = [root]
parts.extend(self.pathname.split(os.path.sep))
return '/'.join(parts)
return "/".join(parts)
@property
def include_path(self):
@ -140,8 +134,8 @@ class PythonMapperBase(object):
path separators
"""
parts = [self.include_dir(root=self.url_root)]
parts.append('index')
return '/'.join(parts)
parts.append("index")
return "/".join(parts)
@property
def ref_type(self):
@ -153,22 +147,23 @@ class PythonMapperBase(object):
@property
def namespace(self):
pieces = self.id.split('.')[:-1]
pieces = self.id.split(".")[:-1]
if pieces:
return '.'.join(pieces)
return ".".join(pieces)
return None
class SphinxMapperBase(object):
'''Base class for mapping `PythonMapperBase` objects to Sphinx.
"""Base class for mapping `PythonMapperBase` objects to Sphinx.
:param app: Sphinx application instance
'''
"""
def __init__(self, app, template_dir=None, url_root=None):
from ..settings import TEMPLATE_DIR
self.app = app
template_paths = [TEMPLATE_DIR]
@ -185,9 +180,9 @@ class SphinxMapperBase(object):
)
def _wrapped_prepare(value):
return '\n'.join(prepare_docstring(value))
return "\n".join(prepare_docstring(value))
self.jinja_env.filters['prepare_docstring'] = _wrapped_prepare
self.jinja_env.filters["prepare_docstring"] = _wrapped_prepare
self.url_root = url_root
@ -203,10 +198,10 @@ class SphinxMapperBase(object):
self.top_level_objects = OrderedDict()
def load(self, patterns, dirs, ignore=None):
'''
"""
Load objects from the filesystem into the ``paths`` dictionary.
'''
"""
for path in self.find_files(patterns=patterns, dirs=dirs, ignore=ignore):
data = self.read_file(path=path)
if data:
@ -225,10 +220,12 @@ class SphinxMapperBase(object):
# Skip ignored files
for ignore_pattern in ignore:
if fnmatch.fnmatch(os.path.join(root, filename), ignore_pattern):
if fnmatch.fnmatch(
os.path.join(root, filename), ignore_pattern
):
LOGGER.info(
bold('[AutoAPI] ') +
darkgreen("Ignoring %s/%s" % (root, filename))
bold("[AutoAPI] ")
+ darkgreen("Ignoring %s/%s" % (root, filename))
)
skip = True
@ -244,71 +241,69 @@ class SphinxMapperBase(object):
status_iterator = sphinx.util.status_iterator
for _path in status_iterator(
files_to_read,
'[AutoAPI] Reading files... ',
darkgreen,
len(files_to_read)):
files_to_read, "[AutoAPI] Reading files... ", darkgreen, len(files_to_read)
):
yield _path
def read_file(self, path, **kwargs):
'''Read file input into memory
"""Read file input into memory
:param path: Path of file to read
'''
"""
# TODO support JSON here
# TODO sphinx way of reporting errors in logs?
raise NotImplementedError
def add_object(self, obj):
'''
"""
Add object to local and app environment storage
:param obj: Instance of a AutoAPI object
'''
"""
self.objects[obj.id] = obj
self.all_objects[obj.id] = obj
child_stack = list(obj.children)
while child_stack:
child = child_stack.pop()
self.all_objects[child.id] = child
child_stack.extend(getattr(child, 'children', ()))
child_stack.extend(getattr(child, "children", ()))
def map(self, options=None):
'''Trigger find of serialized sources and build objects'''
"""Trigger find of serialized sources and build objects"""
for path, data in self.paths.items():
for obj in self.create_class(data, options=options):
self.add_object(obj)
def create_class(self, data, options=None, **kwargs):
'''
"""
Create class object.
:param data: Instance of a AutoAPI object
'''
"""
raise NotImplementedError
def output_rst(self, root, source_suffix):
for id, obj in self.objects.items():
rst = obj.render(
include_summaries=self.app.config.autoapi_include_summaries,
include_summaries=self.app.config.autoapi_include_summaries
)
if not rst:
continue
detail_dir = obj.include_dir(root=root)
ensuredir(detail_dir)
path = os.path.join(detail_dir, '%s%s' % ('index', source_suffix))
with open(path, 'wb+') as detail_file:
detail_file.write(rst.encode('utf-8'))
path = os.path.join(detail_dir, "%s%s" % ("index", source_suffix))
with open(path, "wb+") as detail_file:
detail_file.write(rst.encode("utf-8"))
if self.app.config.autoapi_add_toctree_entry:
self._output_top_rst(root)
def _output_top_rst(self, root):
# Render Top Index
top_level_index = os.path.join(root, 'index.rst')
top_level_index = os.path.join(root, "index.rst")
pages = self.objects.values()
with open(top_level_index, 'w+') as top_level_file:
content = self.jinja_env.get_template('index.rst')
with open(top_level_index, "w+") as top_level_file:
content = self.jinja_env.get_template("index.rst")
top_level_file.write(content.render(pages=pages))

@ -17,7 +17,7 @@ from .base import PythonMapperBase, SphinxMapperBase
LOGGER = sphinx.util.logging.getLogger(__name__)
# Doc comment patterns
DOC_COMMENT_PATTERN = r'''
DOC_COMMENT_PATTERN = r"""
\<%(tag)s
\s+%(attr)s="(?P<attr_value>[^"]*?)"
\s*?
@ -25,121 +25,127 @@ DOC_COMMENT_PATTERN = r'''
\/\>|
\>(?P<inner>[^\<]*?)\<\/%(tag)s\>
)
'''
"""
DOC_COMMENT_SEE_PATTERN = re.compile(
DOC_COMMENT_PATTERN % {'tag': '(?:see|seealso)',
'attr': 'cref'},
re.X)
DOC_COMMENT_PATTERN % {"tag": "(?:see|seealso)", "attr": "cref"}, re.X
)
DOC_COMMENT_PARAM_PATTERN = re.compile(
DOC_COMMENT_PATTERN % {'tag': '(?:paramref|typeparamref)',
'attr': 'name'},
re.X)
DOC_COMMENT_PATTERN % {"tag": "(?:paramref|typeparamref)", "attr": "name"}, re.X
)
# Comment member identities
# From: https://msdn.microsoft.com/en-us/library/vstudio/fsbx0t7x(v=VS.100).aspx
DOC_COMMENT_IDENTITIES = {
'N': 'dn:ns',
'T': 'any', # can be any type (class, delegate, enum, etc), so use any
'F': 'dn:field',
'P': 'dn:prop',
'M': 'dn:meth',
'E': 'dn:event',
"N": "dn:ns",
"T": "any", # can be any type (class, delegate, enum, etc), so use any
"F": "dn:field",
"P": "dn:prop",
"M": "dn:meth",
"E": "dn:event",
}
class DotNetSphinxMapper(SphinxMapperBase):
'''Auto API domain handler for .NET
"""Auto API domain handler for .NET
Searches for YAML files, and soon to be JSON files as well, for auto API
sources. If no pattern configuration was explicitly specified, then default
to looking up a ``docfx.json`` file.
:param app: Sphinx application passed in as part of the extension
'''
"""
top_namespaces = {}
DOCFX_OUTPUT_PATH = '_api'
DOCFX_OUTPUT_PATH = "_api"
# pylint: disable=arguments-differ
def load(self, patterns, dirs, ignore=None, **kwargs):
'''Load objects from the filesystem into the ``paths`` dictionary.
"""Load objects from the filesystem into the ``paths`` dictionary.
If the setting ``autoapi_patterns`` was not specified, look for a
``docfx.json`` file by default. A ``docfx.json`` should be treated as
the canonical source before the default patterns. Fallback to default
pattern matches if no ``docfx.json`` files are found.
'''
raise_error = kwargs.get('raise_error', True)
"""
raise_error = kwargs.get("raise_error", True)
all_files = set()
if not self.app.config.autoapi_file_patterns:
all_files = set()
for _file in self.find_files(patterns=['docfx.json'], dirs=dirs,
ignore=ignore):
for _file in self.find_files(
patterns=["docfx.json"], dirs=dirs, ignore=ignore
):
all_files.add(_file)
if not all_files:
for _file in self.find_files(patterns=patterns, dirs=dirs,
ignore=ignore):
for _file in self.find_files(patterns=patterns, dirs=dirs, ignore=ignore):
all_files.add(_file)
if all_files:
try:
command = ['docfx', 'metadata', '--raw', '--force']
command = ["docfx", "metadata", "--raw", "--force"]
command.extend(all_files)
proc = subprocess.Popen(
' '.join(command),
" ".join(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
env=dict((key, os.environ[key])
for key in ['PATH', 'HOME', 'SYSTEMROOT',
'USERPROFILE', 'WINDIR']
if key in os.environ),
env=dict(
(key, os.environ[key])
for key in [
"PATH",
"HOME",
"SYSTEMROOT",
"USERPROFILE",
"WINDIR",
]
if key in os.environ
),
)
_, error_output = proc.communicate()
if error_output:
LOGGER.warning(error_output)
except (OSError, subprocess.CalledProcessError) as e:
LOGGER.warning('Error generating metadata: {0}'.format(e))
LOGGER.warning("Error generating metadata: {0}".format(e))
if raise_error:
raise ExtensionError('Failure in docfx while generating AutoAPI output.')
raise ExtensionError(
"Failure in docfx while generating AutoAPI output."
)
# We now have yaml files
for xdoc_path in self.find_files(patterns=['*.yml'],
dirs=[self.DOCFX_OUTPUT_PATH],
ignore=ignore):
for xdoc_path in self.find_files(
patterns=["*.yml"], dirs=[self.DOCFX_OUTPUT_PATH], ignore=ignore
):
data = self.read_file(path=xdoc_path)
if data:
self.paths[xdoc_path] = data
def read_file(self, path, **kwargs):
'''Read file input into memory, returning deserialized objects
"""Read file input into memory, returning deserialized objects
:param path: Path of file to read
'''
"""
try:
with open(path, 'r') as handle:
with open(path, "r") as handle:
parsed_data = yaml.safe_load(handle)
return parsed_data
except IOError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
except TypeError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
return None
# Subclassed to iterate over items
def map(self, options=None):
'''Trigger find of serialized sources and build objects'''
"""Trigger find of serialized sources and build objects"""
for path, data in self.paths.items():
references = data.get('references', [])
for item in data['items']:
for obj in self.create_class(item, options,
references=references):
references = data.get("references", [])
for item in data["items"]:
for obj in self.create_class(item, options, references=references):
self.add_object(obj)
self.organize_objects()
def create_class(self, data, options=None, path=None, **kwargs):
'''
"""
Return instance of class based on Roslyn type property
Data keys handled here:
@ -152,19 +158,21 @@ class DotNetSphinxMapper(SphinxMapperBase):
instances
:param data: dictionary data from Roslyn output artifact
'''
"""
obj_map = dict(
(cls.type, cls) for cls
in ALL_CLASSES
)
obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
try:
cls = obj_map[data['type'].lower()]
cls = obj_map[data["type"].lower()]
except KeyError:
LOGGER.warning('Unknown type: %s' % data)
LOGGER.warning("Unknown type: %s" % data)
else:
obj = cls(data, jinja_env=self.jinja_env, options=options,
url_root=self.url_root, **kwargs)
obj = cls(
data,
jinja_env=self.jinja_env,
options=options,
url_root=self.url_root,
**kwargs
)
# Append child objects
# TODO this should recurse in the case we're getting back more
@ -173,17 +181,17 @@ class DotNetSphinxMapper(SphinxMapperBase):
yield obj
def add_object(self, obj):
'''Add object to local and app environment storage
"""Add object to local and app environment storage
:param obj: Instance of a .NET object
'''
"""
if obj.top_level_object:
if isinstance(obj, DotNetNamespace):
self.namespaces[obj.name] = obj
self.objects[obj.id] = obj
def organize_objects(self):
'''Organize objects and namespaces'''
"""Organize objects and namespaces"""
def _render_children(obj):
for child in obj.children_strings:
@ -202,8 +210,9 @@ class DotNetSphinxMapper(SphinxMapperBase):
if namespace is not None:
ns_obj = self.top_namespaces.get(namespace)
if ns_obj is None or not isinstance(ns_obj, DotNetNamespace):
for ns_obj in self.create_class({'uid': namespace,
'type': 'namespace'}):
for ns_obj in self.create_class(
{"uid": namespace, "type": "namespace"}
):
self.top_namespaces[ns_obj.id] = ns_obj
if obj not in ns_obj.children and namespace != obj.id:
ns_obj.children.append(obj)
@ -235,20 +244,22 @@ class DotNetSphinxMapper(SphinxMapperBase):
detail_dir = os.path.join(root, obj.pathname)
ensuredir(detail_dir)
path = os.path.join(detail_dir, '%s%s' % ('index', source_suffix))
with open(path, 'wb') as detail_file:
detail_file.write(rst.encode('utf-8'))
path = os.path.join(detail_dir, "%s%s" % ("index", source_suffix))
with open(path, "wb") as detail_file:
detail_file.write(rst.encode("utf-8"))
# Render Top Index
top_level_index = os.path.join(root, 'index.rst')
with open(top_level_index, 'wb') as top_level_file:
content = self.jinja_env.get_template('index.rst')
top_level_file.write(content.render(pages=self.namespaces.values()).encode('utf-8'))
top_level_index = os.path.join(root, "index.rst")
with open(top_level_index, "wb") as top_level_file:
content = self.jinja_env.get_template("index.rst")
top_level_file.write(
content.render(pages=self.namespaces.values()).encode("utf-8")
)
@staticmethod
def build_finished(app, exception):
if app.verbosity > 1:
LOGGER.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .yml files'))
LOGGER.info(bold("[AutoAPI] ") + darkgreen("Cleaning generated .yml files"))
if os.path.exists(DotNetSphinxMapper.DOCFX_OUTPUT_PATH):
shutil.rmtree(DotNetSphinxMapper.DOCFX_OUTPUT_PATH)
@ -261,69 +272,76 @@ class DotNetPythonMapper(PythonMapperBase):
:type references: list of dict objects
"""
language = 'dotnet'
language = "dotnet"
def __init__(self, obj, **kwargs):
self.references = dict((obj.get('uid'), obj)
for obj in kwargs.pop('references', [])
if 'uid' in obj)
self.references = dict(
(obj.get("uid"), obj)
for obj in kwargs.pop("references", [])
if "uid" in obj
)
super(DotNetPythonMapper, self).__init__(obj, **kwargs)
# Always exist
self.id = obj.get('uid', obj.get('id'))
self.definition = obj.get('definition', self.id)
self.name = obj.get('fullName', self.definition)
self.id = obj.get("uid", obj.get("id"))
self.definition = obj.get("definition", self.id)
self.name = obj.get("fullName", self.definition)
# Optional
self.fullname = obj.get('fullName')
self.summary = self.transform_doc_comments(obj.get('summary', ''))
self.fullname = obj.get("fullName")
self.summary = self.transform_doc_comments(obj.get("summary", ""))
self.parameters = []
self.items = obj.get('items', [])
self.children_strings = obj.get('children', [])
self.items = obj.get("items", [])
self.children_strings = obj.get("children", [])
self.children = []
self.item_map = defaultdict(list)
self.inheritance = []
self.assemblies = obj.get('assemblies', [])
self.assemblies = obj.get("assemblies", [])
# Syntax example and parameter list
syntax = obj.get('syntax', None)
self.example = ''
syntax = obj.get("syntax", None)
self.example = ""
if syntax is not None:
# Code example
try:
self.example = syntax['content']
self.example = syntax["content"]
except (KeyError, TypeError):
traceback.print_exc()
self.parameters = []
for param in syntax.get('parameters', []):
if 'id' in param:
self.parameters.append({
'name': param.get('id'),
'type': self.resolve_spec_identifier(param.get('type')),
'desc': self.transform_doc_comments(
param.get('description', ''))
})
for param in syntax.get("parameters", []):
if "id" in param:
self.parameters.append(
{
"name": param.get("id"),
"type": self.resolve_spec_identifier(param.get("type")),
"desc": self.transform_doc_comments(
param.get("description", "")
),
}
)
self.returns = {}
self.returns['type'] = self.resolve_spec_identifier(
syntax.get('return', {}).get('type')
self.returns["type"] = self.resolve_spec_identifier(
syntax.get("return", {}).get("type")
)
self.returns["description"] = self.transform_doc_comments(
syntax.get("return", {}).get("description")
)
self.returns['description'] = self.transform_doc_comments(
syntax.get('return', {}).get('description'))
# Inheritance
# TODO Support more than just a class type here, should support enum/etc
self.inheritance = [DotNetClass({'uid': name, 'name': name})
for name in obj.get('inheritance', [])]
self.inheritance = [
DotNetClass({"uid": name, "name": name})
for name in obj.get("inheritance", [])
]
def __str__(self):
return '<{cls} {id}>'.format(cls=self.__class__.__name__,
id=self.id)
return "<{cls} {id}>".format(cls=self.__class__.__name__, id=self.id)
@property
def pathname(self):
'''Sluggified path for filenames
"""Sluggified path for filenames
Slugs to a filename using the follow steps
@ -331,54 +349,51 @@ class DotNetPythonMapper(PythonMapperBase):
* Remove existing hypens
* Substitute hyphens for non-word characters
* Break up the string as paths
'''
"""
slug = self.name
try:
slug = self.name.split('(')[0]
slug = self.name.split("(")[0]
except IndexError:
pass
slug = unidecode.unidecode(slug)
slug = slug.replace('-', '')
slug = re.sub(r'[^\w\.]+', '-', slug).strip('-')
return os.path.join(*slug.split('.'))
slug = slug.replace("-", "")
slug = re.sub(r"[^\w\.]+", "-", slug).strip("-")
return os.path.join(*slug.split("."))
@property
def short_name(self):
'''Shorten name property'''
return self.name.split('.')[-1]
"""Shorten name property"""
return self.name.split(".")[-1]
@property
def edit_link(self):
try:
repo = self.source['remote']['repo'].replace('.git', '')
repo = self.source["remote"]["repo"].replace(".git", "")
path = self.path
return '{repo}/blob/master/{path}'.format(
repo=repo,
path=path,
)
return "{repo}/blob/master/{path}".format(repo=repo, path=path)
except Exception:
return ''
return ""
@property
def source(self):
return self.obj.get('source')
return self.obj.get("source")
@property
def path(self):
return self.source['path']
return self.source["path"]
@property
def namespace(self):
pieces = self.id.split('.')[:-1]
pieces = self.id.split(".")[:-1]
if pieces:
return '.'.join(pieces)
return ".".join(pieces)
return None
@property
def top_namespace(self):
pieces = self.id.split('.')[:2]
pieces = self.id.split(".")[:2]
if pieces:
return '.'.join(pieces)
return ".".join(pieces)
return None
@property
@ -391,7 +406,7 @@ class DotNetPythonMapper(PythonMapperBase):
@property
def ref_name(self):
'''Return object name suitable for use in references
"""Return object name suitable for use in references
Escapes several known strings that cause problems, including the
following reference syntax::
@ -402,15 +417,13 @@ class DotNetPythonMapper(PythonMapperBase):
the reference to Foo.Bar should be named T.
See: http://sphinx-doc.org/domains.html#role-cpp:any
'''
return (self.name
.replace('<', '\<')
.replace('`', '\`'))
"""
return self.name.replace("<", "\<").replace("`", "\`")
@property
def ref_short_name(self):
'''Same as above, return the truncated name instead'''
return self.ref_name.split('.')[-1]
"""Same as above, return the truncated name instead"""
return self.ref_name.split(".")[-1]
@staticmethod
def transform_doc_comments(text):
@ -431,44 +444,42 @@ class DotNetPythonMapper(PythonMapperBase):
found = DOC_COMMENT_SEE_PATTERN.search(text)
if found is None:
break
ref = (found.group('attr_value')
.replace('<', '\<')
.replace('`', '\`'))
ref = found.group("attr_value").replace("<", "\<").replace("`", "\`")
reftype = 'any'
replacement = ''
reftype = "any"
replacement = ""
# Given the pattern of `\w:\w+`, inspect first letter of
# reference for identity type
if ref[1] == ':' and ref[0] in DOC_COMMENT_IDENTITIES:
if ref[1] == ":" and ref[0] in DOC_COMMENT_IDENTITIES:
reftype = DOC_COMMENT_IDENTITIES[ref[:1]]
ref = ref[2:]
replacement = ':{reftype}:`{ref}`'.format(
reftype=reftype, ref=ref)
elif ref[:2] == '!:':
replacement = ":{reftype}:`{ref}`".format(reftype=reftype, ref=ref)
elif ref[:2] == "!:":
replacement = ref[2:]
else:
replacement = ':any:`{ref}`'.format(ref=ref)
replacement = ":any:`{ref}`".format(ref=ref)
# Escape following text
text_end = text[found.end():]
text_start = text[:found.start()]
text_end = re.sub(r'^(\S)', r'\\\1', text_end)
text_start = re.sub(r'(\S)$', r'\1 ', text_start)
text_end = text[found.end() :]
text_start = text[: found.start()]
text_end = re.sub(r"^(\S)", r"\\\1", text_end)
text_start = re.sub(r"(\S)$", r"\1 ", text_start)
text = ''.join([text_start, replacement, text_end])
text = "".join([text_start, replacement, text_end])
while True:
found = DOC_COMMENT_PARAM_PATTERN.search(text)
if found is None:
break
# Escape following text
text_end = text[found.end():]
text_start = text[:found.start()]
text_end = re.sub(r'^(\S)', r'\\\1', text_end)
text_start = re.sub(r'(\S)$', r'\1 ', text_start)
text_end = text[found.end() :]
text_start = text[: found.start()]
text_end = re.sub(r"^(\S)", r"\\\1", text_end)
text_start = re.sub(r"(\S)$", r"\1 ", text_start)
text = ''.join([text_start, '``', found.group('attr_value'),
'``', text_end])
text = "".join(
[text_start, "``", found.group("attr_value"), "``", text_end]
)
except TypeError:
pass
return text
@ -496,105 +507,114 @@ class DotNetPythonMapper(PythonMapperBase):
if ref is None:
return obj_name
resolved = ref.get('fullName', obj_name)
spec = ref.get('spec.csharp', [])
resolved = ref.get("fullName", obj_name)
spec = ref.get("spec.csharp", [])
parts = []
for part in spec:
if part.get('name') == '<':
parts.append('{')
elif part.get('name') == '>':
parts.append('}')
elif 'fullName' in part and 'uid' in part:
parts.append('{fullName}<{uid}>'.format(**part))
elif 'uid' in part:
parts.append(part['uid'])
elif 'fullName' in part:
parts.append(part['fullName'])
if part.get("name") == "<":
parts.append("{")
elif part.get("name") == ">":
parts.append("}")
elif "fullName" in part and "uid" in part:
parts.append("{fullName}<{uid}>".format(**part))
elif "uid" in part:
parts.append(part["uid"])
elif "fullName" in part:
parts.append(part["fullName"])
if parts:
resolved = ''.join(parts)
resolved = "".join(parts)
return resolved
class DotNetNamespace(DotNetPythonMapper):
    """Mapper for a .NET ``namespace`` member."""

    # docfx member type key and Sphinx reference metadata
    type = "namespace"
    ref_directive = "ns"
    plural = "namespaces"
    # Namespaces are rendered as top-level pages.
    top_level_object = True
class DotNetMethod(DotNetPythonMapper):
    """Mapper for a .NET ``method`` member."""

    type = "method"
    ref_directive = "meth"
    plural = "methods"
class DotNetOperator(DotNetPythonMapper):
    """Mapper for a .NET ``operator`` member."""

    type = "operator"
    ref_directive = "op"
    plural = "operators"
class DotNetProperty(DotNetPythonMapper):
    """Mapper for a .NET ``property`` member."""

    type = "property"
    ref_directive = "prop"
    plural = "properties"
class DotNetEnum(DotNetPythonMapper):
    """Mapper for a .NET ``enum`` member."""

    type = "enum"
    ref_type = "enumeration"
    ref_directive = "enum"
    plural = "enumerations"
    # Enums are rendered as top-level pages.
    top_level_object = True
class DotNetStruct(DotNetPythonMapper):
    """Mapper for a .NET ``struct`` member."""

    type = "struct"
    ref_type = "structure"
    ref_directive = "struct"
    plural = "structures"
    # Structs are rendered as top-level pages.
    top_level_object = True
class DotNetConstructor(DotNetPythonMapper):
    """Mapper for a .NET ``constructor`` member."""

    type = "constructor"
    ref_directive = "ctor"
    plural = "constructors"
class DotNetInterface(DotNetPythonMapper):
    """Mapper for a .NET ``interface`` member."""

    type = "interface"
    ref_directive = "iface"
    plural = "interfaces"
    # Interfaces are rendered as top-level pages.
    top_level_object = True
class DotNetDelegate(DotNetPythonMapper):
    """Mapper for a .NET ``delegate`` member."""

    type = "delegate"
    ref_directive = "del"
    plural = "delegates"
    # Delegates are rendered as top-level pages.
    top_level_object = True
class DotNetClass(DotNetPythonMapper):
    """Mapper for a .NET ``class`` member."""

    type = "class"
    ref_directive = "cls"
    plural = "classes"
    # Classes are rendered as top-level pages.
    top_level_object = True
class DotNetField(DotNetPythonMapper):
    """Mapper for a .NET ``field`` member."""

    type = "field"
    plural = "fields"
class DotNetEvent(DotNetPythonMapper):
    """Mapper for a .NET ``event`` member."""

    type = "event"
    plural = "events"
# Every .NET mapper class; create_class() builds its type-name -> class
# dispatch map from this list.
ALL_CLASSES = [
    DotNetNamespace,
    DotNetClass,
    DotNetEnum,
    DotNetStruct,
    DotNetInterface,
    DotNetDelegate,
    DotNetOperator,
    DotNetProperty,
    DotNetMethod,
    DotNetConstructor,
    DotNetField,
    DotNetEvent,
]

@ -10,42 +10,42 @@ LOGGER = sphinx.util.logging.getLogger(__name__)
class GoSphinxMapper(SphinxMapperBase):
'''Auto API domain handler for Go
"""Auto API domain handler for Go
Parses directly from Go files.
:param app: Sphinx application passed in as part of the extension
'''
"""
def load(self, patterns, dirs, ignore=None):
'''
"""
Load objects from the filesystem into the ``paths`` dictionary.
'''
"""
for _dir in dirs:
data = self.read_file(_dir)
if data:
self.paths[_dir] = data
def read_file(self, path, **kwargs):
'''Read file input into memory, returning deserialized objects
"""Read file input into memory, returning deserialized objects
:param path: Path of file to read
'''
"""
# TODO support JSON here
# TODO sphinx way of reporting errors in logs?
try:
parsed_data = json.loads(subprocess.check_output(['godocjson', path]))
parsed_data = json.loads(subprocess.check_output(["godocjson", path]))
return parsed_data
except IOError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
except TypeError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
return None
def create_class(self, data, options=None, **kwargs):
'''Return instance of class based on Go data
"""Return instance of class based on Go data
Data keys handled here:
@ -57,87 +57,83 @@ class GoSphinxMapper(SphinxMapperBase):
instances
:param data: dictionary data from godocjson output
'''
_type = kwargs.get('_type')
obj_map = dict(
(cls.type, cls) for cls
in ALL_CLASSES
)
"""
_type = kwargs.get("_type")
obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
try:
# Contextual type data from children recursion
if _type:
LOGGER.debug('Forcing Go Type %s' % _type)
LOGGER.debug("Forcing Go Type %s" % _type)
cls = obj_map[_type]
else:
cls = obj_map[data['type']]
cls = obj_map[data["type"]]
except KeyError:
LOGGER.warning('Unknown Type: %s' % data)
LOGGER.warning("Unknown Type: %s" % data)
else:
if cls.inverted_names and 'names' in data:
if cls.inverted_names and "names" in data:
# Handle types that have reversed names parameter
for name in data['names']:
for name in data["names"]:
data_inv = {}
data_inv.update(data)
data_inv['name'] = name
if 'names' in data_inv:
del data_inv['names']
data_inv["name"] = name
if "names" in data_inv:
del data_inv["names"]
for obj in self.create_class(data_inv):
yield obj
else:
# Recurse for children
obj = cls(data, jinja_env=self.jinja_env)
for child_type in ['consts', 'types', 'vars', 'funcs']:
for child_type in ["consts", "types", "vars", "funcs"]:
for child_data in data.get(child_type, []):
obj.children += list(self.create_class(
child_data,
_type=child_type.replace(
'consts', 'const').replace(
'types', 'type').replace(
'vars', 'variable').replace(
'funcs', 'func')
))
obj.children += list(
self.create_class(
child_data,
_type=child_type.replace("consts", "const")
.replace("types", "type")
.replace("vars", "variable")
.replace("funcs", "func"),
)
)
yield obj
class GoPythonMapper(PythonMapperBase):
language = 'go'
language = "go"
inverted_names = False
def __init__(self, obj, **kwargs):
super(GoPythonMapper, self).__init__(obj, **kwargs)
self.name = obj.get('name') or obj.get('packageName')
self.name = obj.get("name") or obj.get("packageName")
self.id = self.name
# Second level
self.imports = obj.get('imports', [])
self.imports = obj.get("imports", [])
self.children = []
self.parameters = map(
lambda n: {'name': n['name'],
'type': n['type'].lstrip('*')},
obj.get('parameters', [])
lambda n: {"name": n["name"], "type": n["type"].lstrip("*")},
obj.get("parameters", []),
)
self.docstring = obj.get('doc', '')
self.docstring = obj.get("doc", "")
# Go Specific
self.notes = obj.get('notes', {})
self.filenames = obj.get('filenames', [])
self.bugs = obj.get('bugs', [])
self.notes = obj.get("notes", {})
self.filenames = obj.get("filenames", [])
self.bugs = obj.get("bugs", [])
def __str__(self):
return '<{cls} {id}>'.format(cls=self.__class__.__name__,
id=self.id)
return "<{cls} {id}>".format(cls=self.__class__.__name__, id=self.id)
@property
def short_name(self):
'''Shorten name property'''
return self.name.split('.')[-1]
"""Shorten name property"""
return self.name.split(".")[-1]
@property
def namespace(self):
pieces = self.id.split('.')[:-1]
pieces = self.id.split(".")[:-1]
if pieces:
return '.'.join(pieces)
return ".".join(pieces)
return None
@property
@ -150,44 +146,37 @@ class GoPythonMapper(PythonMapperBase):
@property
def methods(self):
return self.obj.get('methods', [])
return self.obj.get("methods", [])
class GoVariable(GoPythonMapper):
    """Mapper for a Go ``var`` declaration."""

    type = "var"
    # godocjson emits a ``names`` list for vars; expand one object per name.
    inverted_names = True
class GoMethod(GoPythonMapper):
    """Mapper for a Go method."""

    type = "method"
    ref_directive = "meth"
class GoConstant(GoPythonMapper):
    """Mapper for a Go ``const`` declaration."""

    type = "const"
    # godocjson emits a ``names`` list for consts; expand one object per name.
    inverted_names = True
class GoFunction(GoPythonMapper):
    """Mapper for a Go ``func`` declaration."""

    type = "func"
    ref_type = "function"
class GoPackage(GoPythonMapper):
    """Mapper for a Go package."""

    type = "package"
    ref_directive = "pkg"
    # Packages are rendered as top-level pages.
    top_level_object = True
class GoType(GoPythonMapper):
    """Mapper for a Go ``type`` declaration."""

    type = "type"
# Every Go mapper class; create_class() builds its type-name -> class
# dispatch map from this list.
ALL_CLASSES = [GoConstant, GoFunction, GoPackage, GoVariable, GoType, GoMethod]

@ -11,36 +11,36 @@ LOGGER = sphinx.util.logging.getLogger(__name__)
class JavaScriptSphinxMapper(SphinxMapperBase):
'''Auto API domain handler for Javascript
"""Auto API domain handler for Javascript
Parses directly from Javascript files.
:param app: Sphinx application passed in as part of the extension
'''
"""
def read_file(self, path, **kwargs):
'''Read file input into memory, returning deserialized objects
"""Read file input into memory, returning deserialized objects
:param path: Path of file to read
'''
"""
# TODO support JSON here
# TODO sphinx way of reporting errors in logs?
subcmd = 'jsdoc'
if os.name == 'nt':
subcmd = '.'.join([subcmd, 'cmd'])
subcmd = "jsdoc"
if os.name == "nt":
subcmd = ".".join([subcmd, "cmd"])
try:
parsed_data = json.loads(subprocess.check_output([subcmd, '-X', path]))
parsed_data = json.loads(subprocess.check_output([subcmd, "-X", path]))
return parsed_data
except IOError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
except TypeError:
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
return None
# Subclassed to iterate over items
def map(self, options=None):
'''Trigger find of serialized sources and build objects'''
"""Trigger find of serialized sources and build objects"""
for path, data in self.paths.items():
for item in data:
for obj in self.create_class(item, options):
@ -48,7 +48,7 @@ class JavaScriptSphinxMapper(SphinxMapperBase):
self.add_object(obj)
def create_class(self, data, options=None, **kwargs):
'''Return instance of class based on Javascript data
"""Return instance of class based on Javascript data
Data keys handled here:
@ -60,20 +60,17 @@ class JavaScriptSphinxMapper(SphinxMapperBase):
instances
:param data: dictionary data from godocjson output
'''
obj_map = dict(
(cls.type, cls) for cls
in ALL_CLASSES
)
"""
obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
try:
cls = obj_map[data['kind']]
cls = obj_map[data["kind"]]
except (KeyError, TypeError):
LOGGER.warning('Unknown Type: %s' % data)
LOGGER.warning("Unknown Type: %s" % data)
else:
# Recurse for children
obj = cls(data, jinja_env=self.jinja_env)
if 'children' in data:
for child_data in data['children']:
if "children" in data:
for child_data in data["children"]:
for child_obj in self.create_class(child_data, options=options):
obj.children.append(child_obj)
yield obj
@ -81,30 +78,28 @@ class JavaScriptSphinxMapper(SphinxMapperBase):
class JavaScriptPythonMapper(PythonMapperBase):
language = 'javascript'
language = "javascript"
def __init__(self, obj, **kwargs):
'''
"""
Map JSON data into Python object.
This is the standard object that will be rendered into the templates,
so we try and keep standard naming to keep templates more re-usable.
'''
"""
super(JavaScriptPythonMapper, self).__init__(obj, **kwargs)
self.name = obj.get('name')
self.name = obj.get("name")
self.id = self.name
# Second level
self.docstring = obj.get('description', '')
self.docstring = obj.get("description", "")
# self.docstring = obj.get('comment', '')
self.imports = obj.get('imports', [])
self.imports = obj.get("imports", [])
self.children = []
self.parameters = map(
lambda n: {'name': n['name'],
'type': n['type'][0]},
obj.get('param', [])
lambda n: {"name": n["name"], "type": n["type"][0]}, obj.get("param", [])
)
# Language Specific
@ -112,29 +107,29 @@ class JavaScriptPythonMapper(PythonMapperBase):
class JavaScriptClass(JavaScriptPythonMapper):
    """Mapper for a JavaScript ``class``."""

    type = "class"
    ref_directive = "class"
    # Classes are rendered as top-level pages.
    top_level_object = True
class JavaScriptFunction(JavaScriptPythonMapper):
    """Mapper for a JavaScript ``function``."""

    type = "function"
    ref_type = "func"
class JavaScriptData(JavaScriptPythonMapper):
    """Mapper for a JavaScript ``data`` member."""

    type = "data"
    ref_directive = "data"
class JavaScriptMember(JavaScriptPythonMapper):
    """Mapper for a JavaScript ``member``."""

    type = "member"
    ref_directive = "member"
class JavaScriptAttribute(JavaScriptPythonMapper):
    """Mapper for a JavaScript ``attribute``."""

    type = "attribute"
    ref_directive = "attr"
ALL_CLASSES = [

@ -35,15 +35,15 @@ def _expand_wildcard_placeholder(original_module, originals_map, placeholder):
:rtype: list(dict)
"""
originals = originals_map.values()
if original_module['all'] is not None:
if original_module["all"] is not None:
originals = []
for name in original_module['all']:
if name == '__all__':
for name in original_module["all"]:
if name == "__all__":
continue
if name not in originals_map:
msg = 'Invalid __all__ entry {0} in {1}'.format(
name, original_module['name'],
msg = "Invalid __all__ entry {0} in {1}".format(
name, original_module["name"]
)
LOGGER.warning(msg)
continue
@ -52,17 +52,13 @@ def _expand_wildcard_placeholder(original_module, originals_map, placeholder):
placeholders = []
for original in originals:
new_full_name = placeholder['full_name'].replace(
'*', original['name'],
)
new_original_path = placeholder['original_path'].replace(
'*', original['name'],
)
if 'original_path' in original:
new_original_path = original['original_path']
new_full_name = placeholder["full_name"].replace("*", original["name"])
new_original_path = placeholder["original_path"].replace("*", original["name"])
if "original_path" in original:
new_original_path = original["original_path"]
new_placeholder = dict(
placeholder,
name=original['name'],
name=original["name"],
full_name=new_full_name,
original_path=new_original_path,
)
@ -91,54 +87,54 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved):
module, children = modules[module_name]
for child in list(children.values()):
if child['type'] != 'placeholder':
if child["type"] != "placeholder":
continue
imported_from, original_name = child['original_path'].rsplit('.', 1)
imported_from, original_name = child["original_path"].rsplit(".", 1)
if imported_from in visit_path:
msg = "Cannot resolve cyclic import: {0}, {1}".format(
', '.join(visit_path), imported_from,
", ".join(visit_path), imported_from
)
LOGGER.warning(msg)
module['children'].remove(child)
children.pop(child['name'])
module["children"].remove(child)
children.pop(child["name"])
continue
if imported_from not in modules:
msg = "Cannot resolve import of unknown module {0} in {1}".format(
imported_from, module_name,
imported_from, module_name
)
LOGGER.warning(msg)
module['children'].remove(child)
children.pop(child['name'])
module["children"].remove(child)
children.pop(child["name"])
continue
_resolve_module_placeholders(modules, imported_from, visit_path, resolved)
if original_name == '*':
if original_name == "*":
original_module, originals_map = modules[imported_from]
# Replace the wildcard placeholder
# with a list of named placeholders.
new_placeholders = _expand_wildcard_placeholder(
original_module, originals_map, child,
original_module, originals_map, child
)
child_index = module['children'].index(child)
module['children'][child_index:child_index+1] = new_placeholders
children.pop(child['name'])
child_index = module["children"].index(child)
module["children"][child_index : child_index + 1] = new_placeholders
children.pop(child["name"])
for new_placeholder in new_placeholders:
if new_placeholder['name'] not in children:
children[new_placeholder['name']] = new_placeholder
original = originals_map[new_placeholder['name']]
if new_placeholder["name"] not in children:
children[new_placeholder["name"]] = new_placeholder
original = originals_map[new_placeholder["name"]]
_resolve_placeholder(new_placeholder, original)
elif original_name not in modules[imported_from][1]:
msg = "Cannot resolve import of {0} in {1}".format(
child['original_path'], module_name,
child["original_path"], module_name
)
LOGGER.warning(msg)
module['children'].remove(child)
children.pop(child['name'])
module["children"].remove(child)
children.pop(child["name"])
continue
else:
original = modules[imported_from][1][original_name]
@ -158,29 +154,29 @@ def _resolve_placeholder(placeholder, original):
"""
new = copy.deepcopy(original)
# The name remains the same.
new['name'] = placeholder['name']
new['full_name'] = placeholder['full_name']
new["name"] = placeholder["name"]
new["full_name"] = placeholder["full_name"]
# Record where the placeholder originally came from.
new['original_path'] = original['full_name']
new["original_path"] = original["full_name"]
# The source lines for this placeholder do not exist in this file.
# The keys might not exist if original is a resolved placeholder.
new.pop('from_line_no', None)
new.pop('to_line_no', None)
new.pop("from_line_no", None)
new.pop("to_line_no", None)
# Resolve the children
stack = list(new.get('children', ()))
stack = list(new.get("children", ()))
while stack:
child = stack.pop()
# Relocate the child to the new location
assert child['full_name'].startswith(original['full_name'])
suffix = child['full_name'][len(original['full_name']):]
child['full_name'] = new['full_name'] + suffix
assert child["full_name"].startswith(original["full_name"])
suffix = child["full_name"][len(original["full_name"]) :]
child["full_name"] = new["full_name"] + suffix
# The source lines for this placeholder do not exist in this file.
# The keys might not exist if original is a resolved placeholder.
child.pop('from_line_no', None)
child.pop('to_line_no', None)
child.pop("from_line_no", None)
child.pop("to_line_no", None)
# Resolve the remaining children
stack.extend(child.get('children', ()))
stack.extend(child.get("children", ()))
placeholder.clear()
placeholder.update(new)
@ -203,13 +199,13 @@ class PythonSphinxMapper(SphinxMapperBase):
"""
for dir_ in dirs:
dir_root = dir_
if os.path.exists(os.path.join(dir_, '__init__.py')):
if os.path.exists(os.path.join(dir_, "__init__.py")):
dir_root = os.path.abspath(os.path.join(dir_, os.pardir))
for path in self.find_files(patterns=patterns, dirs=[dir_], ignore=ignore):
data = self.read_file(path=path)
if data:
data['relative_path'] = os.path.relpath(path, dir_root)
data["relative_path"] = os.path.relpath(path, dir_root)
self.paths[path] = data
def read_file(self, path, **kwargs):
@ -221,17 +217,15 @@ class PythonSphinxMapper(SphinxMapperBase):
parsed_data = Parser().parse_file(path)
return parsed_data
except (IOError, TypeError, ImportError):
LOGGER.warning('Error reading file: {0}'.format(path))
LOGGER.warning("Error reading file: {0}".format(path))
return None
def _resolve_placeholders(self):
"""Resolve objects that have been imported from elsewhere."""
modules = {}
for module in self.paths.values():
children = {
child['name']: child for child in module['children']
}
modules[module['name']] = (module, children)
children = {child["name"]: child for child in module["children"]}
modules[module["name"]] = (module, children)
resolved = set()
for module_name in modules:
@ -245,10 +239,10 @@ class PythonSphinxMapper(SphinxMapperBase):
parents = {obj.name: obj for obj in self.objects.values()}
for obj in self.objects.values():
parent_name = obj.name.rsplit('.', 1)[0]
parent_name = obj.name.rsplit(".", 1)[0]
if parent_name in parents and parent_name != obj.name:
parent = parents[parent_name]
attr = 'sub{}s'.format(obj.type)
attr = "sub{}s".format(obj.type)
getattr(parent, attr).append(obj)
for obj in self.objects.values():
@ -260,14 +254,23 @@ class PythonSphinxMapper(SphinxMapperBase):
:param data: dictionary data of parser output
"""
obj_map = dict((cls.type, cls) for cls
in [PythonClass, PythonFunction, PythonModule,
PythonMethod, PythonPackage, PythonAttribute,
PythonData, PythonException])
obj_map = dict(
(cls.type, cls)
for cls in [
PythonClass,
PythonFunction,
PythonModule,
PythonMethod,
PythonPackage,
PythonAttribute,
PythonData,
PythonException,
]
)
try:
cls = obj_map[data['type']]
cls = obj_map[data["type"]]
except KeyError:
LOGGER.warning("Unknown type: %s" % data['type'])
LOGGER.warning("Unknown type: %s" % data["type"])
else:
obj = cls(
data,
@ -279,39 +282,40 @@ class PythonSphinxMapper(SphinxMapperBase):
)
lines = sphinx.util.docstrings.prepare_docstring(obj.docstring)
if lines and 'autodoc-process-docstring' in self.app.events.events:
if lines and "autodoc-process-docstring" in self.app.events.events:
self.app.emit(
'autodoc-process-docstring',
"autodoc-process-docstring",
cls.type,
obj.name,
None, # object
None, # options
lines,
)
obj.docstring = '\n'.join(lines)
obj.docstring = "\n".join(lines)
for child_data in data.get('children', []):
for child_obj in self.create_class(child_data, options=options,
**kwargs):
for child_data in data.get("children", []):
for child_obj in self.create_class(
child_data, options=options, **kwargs
):
obj.children.append(child_obj)
yield obj
class PythonPythonMapper(PythonMapperBase):
language = 'python'
language = "python"
is_callable = False
def __init__(self, obj, class_content='class', **kwargs):
def __init__(self, obj, class_content="class", **kwargs):
super(PythonPythonMapper, self).__init__(obj, **kwargs)
self.name = obj['name']
self.id = obj.get('full_name', self.name)
self.name = obj["name"]
self.id = obj.get("full_name", self.name)
# Optional
self.children = []
self.args = obj.get('args')
self.docstring = obj['doc']
self.args = obj.get("args")
self.docstring = obj["doc"]
# For later
self.item_map = collections.defaultdict(list)
@ -339,25 +343,19 @@ class PythonPythonMapper(PythonMapperBase):
@property
def is_private_member(self):
return (
self.short_name.startswith('_')
and not self.short_name.endswith('__')
)
return self.short_name.startswith("_") and not self.short_name.endswith("__")
@property
def is_special_member(self):
return (
self.short_name.startswith('__')
and self.short_name.endswith('__')
)
return self.short_name.startswith("__") and self.short_name.endswith("__")
@property
def display(self):
if self.is_undoc_member and 'undoc-members' not in self.options:
if self.is_undoc_member and "undoc-members" not in self.options:
return False
if self.is_private_member and 'private-members' not in self.options:
if self.is_private_member and "private-members" not in self.options:
return False
if self.is_special_member and 'special-members' not in self.options:
if self.is_special_member and "special-members" not in self.options:
return False
return True
@ -368,31 +366,31 @@ class PythonPythonMapper(PythonMapperBase):
if line:
return line
return ''
return ""
def _children_of_type(self, type_):
return list(child for child in self.children if child.type == type_)
class PythonFunction(PythonPythonMapper):
type = 'function'
type = "function"
is_callable = True
ref_directive = 'func'
ref_directive = "func"
class PythonMethod(PythonPythonMapper):
type = 'method'
type = "method"
is_callable = True
ref_directive = 'meth'
ref_directive = "meth"
def __init__(self, obj, **kwargs):
super(PythonMethod, self).__init__(obj, **kwargs)
self.method_type = obj['method_type']
self.method_type = obj["method_type"]
@property
def display(self):
if self.short_name == '__init__':
if self.short_name == "__init__":
return False
return super(PythonMethod, self).display
@ -400,55 +398,57 @@ class PythonMethod(PythonPythonMapper):
class PythonData(PythonPythonMapper):
"""Global, module level data."""
type = 'data'
type = "data"
def __init__(self, obj, **kwargs):
super(PythonData, self).__init__(obj, **kwargs)
self.value = obj.get('value')
self.value = obj.get("value")
class PythonAttribute(PythonData):
"""An object/class level attribute."""
type = 'attribute'
type = "attribute"
class TopLevelPythonPythonMapper(PythonPythonMapper):
ref_directive = 'mod'
ref_directive = "mod"
def __init__(self, obj, **kwargs):
super(TopLevelPythonPythonMapper, self).__init__(obj, **kwargs)
self.top_level_object = '.' not in self.name
self.top_level_object = "." not in self.name
self.subpackages = []
self.submodules = []
self.all = obj['all']
self.all = obj["all"]
@property
def functions(self):
return self._children_of_type('function')
return self._children_of_type("function")
@property
def classes(self):
return self._children_of_type('class')
return self._children_of_type("class")
class PythonModule(TopLevelPythonPythonMapper):
type = 'module'
type = "module"
class PythonPackage(TopLevelPythonPythonMapper):
type = 'package'
type = "package"
class PythonClass(PythonPythonMapper):
type = 'class'
type = "class"
def __init__(self, obj, **kwargs):
super(PythonClass, self).__init__(obj, **kwargs)
self.bases = obj['bases']
self.bases = obj["bases"]
@PythonPythonMapper.args.getter
def args(self):
@ -458,8 +458,8 @@ class PythonClass(PythonPythonMapper):
if constructor:
args = constructor.args
if args.startswith('self'):
args = args[4:].lstrip(',').lstrip()
if args.startswith("self"):
args = args[4:].lstrip(",").lstrip()
return args
@ -467,13 +467,11 @@ class PythonClass(PythonPythonMapper):
def docstring(self):
docstring = super(PythonClass, self).docstring
if self._class_content in ('both', 'init'):
if self._class_content in ("both", "init"):
constructor_docstring = self.constructor_docstring
if constructor_docstring:
if self._class_content == 'both':
docstring = '{0}\n{1}'.format(
docstring, constructor_docstring,
)
if self._class_content == "both":
docstring = "{0}\n{1}".format(docstring, constructor_docstring)
else:
docstring = constructor_docstring
@ -481,34 +479,34 @@ class PythonClass(PythonPythonMapper):
@property
def methods(self):
return self._children_of_type('method')
return self._children_of_type("method")
@property
def attributes(self):
return self._children_of_type('attribute')
return self._children_of_type("attribute")
@property
def classes(self):
return self._children_of_type('class')
return self._children_of_type("class")
@property
def constructor(self):
for child in self.children:
if child.short_name == '__init__':
if child.short_name == "__init__":
return child
return None
@property
def constructor_docstring(self):
docstring = ''
docstring = ""
constructor = self.constructor
if constructor and constructor.docstring:
docstring = constructor.docstring
else:
for child in self.children:
if child.short_name == '__new__':
if child.short_name == "__new__":
docstring = child.docstring
break
@ -516,7 +514,7 @@ class PythonClass(PythonPythonMapper):
class PythonException(PythonClass):
type = 'exception'
type = "exception"
class Parser(object):
@ -525,7 +523,7 @@ class Parser(object):
self._encoding = None
def _get_full_name(self, name):
return '.'.join(self._name_stack + [name])
return ".".join(self._name_stack + [name])
def _encode(self, to_encode):
if self._encoding:
@ -540,30 +538,32 @@ class Parser(object):
def parse_file(self, file_path):
directory, filename = os.path.split(file_path)
module_parts = []
if filename != '__init__.py':
if filename != "__init__.py":
module_part = os.path.splitext(filename)[0]
module_parts = [module_part]
module_parts = collections.deque(module_parts)
while os.path.isfile(os.path.join(directory, '__init__.py')):
while os.path.isfile(os.path.join(directory, "__init__.py")):
directory, module_part = os.path.split(directory)
if module_part:
module_parts.appendleft(module_part)
module_name = '.'.join(module_parts)
module_name = ".".join(module_parts)
node = astroid.MANAGER.ast_from_file(file_path, module_name)
return self.parse(node)
def parse_assign(self, node):
doc = ''
doc = ""
doc_node = node.next_sibling()
if (isinstance(doc_node, astroid.nodes.Expr)
and isinstance(doc_node.value, astroid.nodes.Const)):
if isinstance(doc_node, astroid.nodes.Expr) and isinstance(
doc_node.value, astroid.nodes.Const
):
doc = doc_node.value.value
type_ = 'data'
if (isinstance(node.scope(), astroid.nodes.ClassDef)
or astroid_utils.is_constructor(node.scope())):
type_ = 'attribute'
type_ = "data"
if isinstance(
node.scope(), astroid.nodes.ClassDef
) or astroid_utils.is_constructor(node.scope()):
type_ = "attribute"
assign_value = astroid_utils.get_assign_value(node)
if not assign_value:
@ -571,25 +571,25 @@ class Parser(object):
target, value = assign_value
data = {
'type': type_,
'name': target,
'full_name': self._get_full_name(target),
'doc': self._encode(doc),
'value': value,
'from_line_no': node.fromlineno,
'to_line_no': node.tolineno,
"type": type_,
"name": target,
"full_name": self._get_full_name(target),
"doc": self._encode(doc),
"value": value,
"from_line_no": node.fromlineno,
"to_line_no": node.tolineno,
}
return [data]
def parse_classdef(self, node, data=None):
type_ = 'class'
type_ = "class"
if astroid_utils.is_exception(node):
type_ = 'exception'
type_ = "exception"
args = ''
args = ""
try:
constructor = node.lookup('__init__')[1]
constructor = node.lookup("__init__")[1]
except IndexError:
pass
else:
@ -599,34 +599,34 @@ class Parser(object):
basenames = list(astroid_utils.get_full_basenames(node.bases, node.basenames))
data = {
'type': type_,
'name': node.name,
'full_name': self._get_full_name(node.name),
'args': args,
'bases': basenames,
'doc': self._encode(node.doc or ''),
'from_line_no': node.fromlineno,
'to_line_no': node.tolineno,
'children': [],
"type": type_,
"name": node.name,
"full_name": self._get_full_name(node.name),
"args": args,
"bases": basenames,
"doc": self._encode(node.doc or ""),
"from_line_no": node.fromlineno,
"to_line_no": node.tolineno,
"children": [],
}
self._name_stack.append(node.name)
for child in node.get_children():
child_data = self.parse(child)
if child_data:
data['children'].extend(child_data)
data["children"].extend(child_data)
self._name_stack.pop()
return [data]
def _parse_property(self, node):
data = {
'type': 'attribute',
'name': node.name,
'full_name': self._get_full_name(node.name),
'doc': self._encode(node.doc or ''),
'from_line_no': node.fromlineno,
'to_line_no': node.tolineno,
"type": "attribute",
"name": node.name,
"full_name": self._get_full_name(node.name),
"doc": self._encode(node.doc or ""),
"from_line_no": node.fromlineno,
"to_line_no": node.tolineno,
}
return [data]
@ -637,28 +637,28 @@ class Parser(object):
if astroid_utils.is_decorated_with_property_setter(node):
return []
type_ = 'function' if node.type == 'function' else 'method'
type_ = "function" if node.type == "function" else "method"
data = {
'type': type_,
'name': node.name,
'full_name': self._get_full_name(node.name),
'args': node.args.as_string(),
'doc': self._encode(node.doc or ''),
'from_line_no': node.fromlineno,
'to_line_no': node.tolineno,
"type": type_,
"name": node.name,
"full_name": self._get_full_name(node.name),
"args": node.args.as_string(),
"doc": self._encode(node.doc or ""),
"from_line_no": node.fromlineno,
"to_line_no": node.tolineno,
}
if type_ == 'method':
data['method_type'] = node.type
if type_ == "method":
data["method_type"] = node.type
result = [data]
if node.name == '__init__':
if node.name == "__init__":
for child in node.get_children():
if isinstance(child, astroid.Assign):
child_data = self.parse_assign(child)
result.extend(data for data in child_data if data['doc'])
result.extend(data for data in child_data if data["doc"])
return result
@ -666,15 +666,15 @@ class Parser(object):
result = []
for name, alias in node.names:
is_wildcard = (alias or name) == '*'
is_wildcard = (alias or name) == "*"
full_name = self._get_full_name(alias or name)
original_path = astroid_utils.get_full_import_name(node, alias or name)
data = {
'type': 'placeholder',
'name': original_path if is_wildcard else (alias or name),
'full_name': full_name,
'original_path': original_path,
"type": "placeholder",
"name": original_path if is_wildcard else (alias or name),
"full_name": full_name,
"original_path": original_path,
}
result.append(data)
@ -685,25 +685,25 @@ class Parser(object):
if isinstance(node.path, list):
path = node.path[0] if node.path else None
type_ = 'module'
type_ = "module"
if node.package:
type_ = 'package'
type_ = "package"
self._name_stack = [node.name]
self._encoding = node.file_encoding
data = {
'type': type_,
'name': node.name,
'full_name': node.name,
'doc': self._encode(node.doc or ''),
'children': [],
'file_path': path,
'encoding': node.file_encoding,
'all': astroid_utils.get_module_all(node),
"type": type_,
"name": node.name,
"full_name": node.name,
"doc": self._encode(node.doc or ""),
"children": [],
"file_path": path,
"encoding": node.file_encoding,
"all": astroid_utils.get_module_all(node),
}
top_name = node.name.split('.', 1)[0]
top_name = node.name.split(".", 1)[0]
for child in node.get_children():
if node.package and astroid_utils.is_local_import_from(child, top_name):
child_data = self._parse_local_import_from(child)
@ -711,7 +711,7 @@ class Parser(object):
child_data = self.parse(child)
if child_data:
data['children'].extend(child_data)
data["children"].extend(child_data)
return data
@ -719,7 +719,7 @@ class Parser(object):
data = {}
node_type = node.__class__.__name__.lower()
parse_func = getattr(self, 'parse_' + node_type, None)
parse_func = getattr(self, "parse_" + node_type, None)
if parse_func:
data = parse_func(node)
else:

@ -8,6 +8,6 @@ import os
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
TEMPLATE_DIR = os.path.join(SITE_ROOT, 'templates')
TEMPLATE_DIR = os.path.join(SITE_ROOT, "templates")
API_ROOT = 'autoapi'
API_ROOT = "autoapi"

@ -13,18 +13,24 @@ import sphinx.util.logging
LOGGER = sphinx.util.logging.getLogger(__name__)
def _build_toc_node(docname, anchor='anchor', text='test text', bullet=False):
def _build_toc_node(docname, anchor="anchor", text="test text", bullet=False):
"""
Create the node structure that Sphinx expects for TOC Tree entries.
The ``bullet`` argument wraps it in a ``nodes.bullet_list``,
which is how you nest TOC Tree entries.
"""
reference = nodes.reference('', '', internal=True, refuri=docname,
anchorname='#' + anchor, *[nodes.Text(text, text)])
para = addnodes.compact_paragraph('', '', reference)
ret_list = nodes.list_item('', para)
return nodes.bullet_list('', ret_list) if bullet else ret_list
reference = nodes.reference(
"",
"",
internal=True,
refuri=docname,
anchorname="#" + anchor,
*[nodes.Text(text, text)]
)
para = addnodes.compact_paragraph("", "", reference)
ret_list = nodes.list_item("", para)
return nodes.bullet_list("", ret_list) if bullet else ret_list
def _traverse_parent(node, objtypes):
@ -51,11 +57,15 @@ def _find_toc_node(toc, ref_id, objtype):
* Desc - Just use the anchor name
"""
for check_node in toc.traverse(nodes.reference):
if objtype == nodes.section and \
(check_node.attributes['refuri'] == ref_id or
check_node.attributes['anchorname'] == '#' + ref_id):
if objtype == nodes.section and (
check_node.attributes["refuri"] == ref_id
or check_node.attributes["anchorname"] == "#" + ref_id
):
return check_node
if objtype == addnodes.desc and check_node.attributes['anchorname'] == '#' + ref_id:
if (
objtype == addnodes.desc
and check_node.attributes["anchorname"] == "#" + ref_id
):
return check_node
return None
@ -67,22 +77,21 @@ def _get_toc_reference(app, node, toc, docname):
It takes a specific incoming ``node``,
and returns the actual TOC Tree node that is said reference.
"""
if isinstance(node, nodes.section) and \
isinstance(node.parent, nodes.document):
if isinstance(node, nodes.section) and isinstance(node.parent, nodes.document):
# Top Level Section header
ref_id = docname
toc_reference = _find_toc_node(toc, ref_id, nodes.section)
elif isinstance(node, nodes.section):
# Nested Section header
ref_id = node.attributes['ids'][0]
ref_id = node.attributes["ids"][0]
toc_reference = _find_toc_node(toc, ref_id, nodes.section)
else:
# Desc node
try:
ref_id = node.children[0].attributes['ids'][0]
ref_id = node.children[0].attributes["ids"][0]
toc_reference = _find_toc_node(toc, ref_id, addnodes.desc)
except (KeyError, IndexError) as e:
LOGGER.warning('Invalid desc node: %s' % e)
LOGGER.warning("Invalid desc node: %s" % e)
toc_reference = None
return toc_reference
@ -109,32 +118,40 @@ def add_domain_to_toctree(app, doctree, docname):
toc = app.env.tocs[docname]
for desc_node in doctree.traverse(addnodes.desc):
try:
ref_id = desc_node.children[0].attributes['ids'][0]
ref_id = desc_node.children[0].attributes["ids"][0]
except (KeyError, IndexError) as e:
LOGGER.warning('Invalid desc node: %s' % e)
LOGGER.warning("Invalid desc node: %s" % e)
continue
try:
# Python domain object
ref_text = desc_node[0].attributes['fullname'].split('.')[-1].split('(')[0]
ref_text = desc_node[0].attributes["fullname"].split(".")[-1].split("(")[0]
except (KeyError, IndexError):
# TODO[eric]: Support other Domains and ways of accessing this data
# Use `astext` for other types of domain objects
ref_text = desc_node[0].astext().split('.')[-1].split('(')[0]
ref_text = desc_node[0].astext().split(".")[-1].split("(")[0]
# This is the actual object that will exist in the TOC Tree
# Sections by default, and other Desc nodes that we've previously placed.
parent_node = _traverse_parent(node=desc_node, objtypes=(addnodes.desc, nodes.section))
parent_node = _traverse_parent(
node=desc_node, objtypes=(addnodes.desc, nodes.section)
)
if parent_node:
toc_reference = _get_toc_reference(app, parent_node, toc, docname)
if toc_reference:
# Get the last child of our parent's bullet list, this is where "we" live.
toc_insertion_point = _traverse_parent(toc_reference, nodes.bullet_list)[-1]
toc_insertion_point = _traverse_parent(
toc_reference, nodes.bullet_list
)[-1]
# Ensure we're added another bullet list so that we nest inside the parent,
# not next to it
if toc_insertion_point and isinstance(toc_insertion_point[0], nodes.bullet_list):
if toc_insertion_point and isinstance(
toc_insertion_point[0], nodes.bullet_list
):
new_insert = toc_insertion_point[0]
to_add = _build_toc_node(docname, anchor=ref_id, text=ref_text)
new_insert.append(to_add)
else:
to_add = _build_toc_node(docname, anchor=ref_id, text=ref_text, bullet=True)
to_add = _build_toc_node(
docname, anchor=ref_id, text=ref_text, bullet=True
)
toc_insertion_point.append(to_add)

@ -4,11 +4,12 @@ import re
# From Django
def slugify(value):
"""
Converts to lowercase, removes non-word characters (alphanumerics and
underscores) and converts spaces to hyphens. Also strips leading and
trailing whitespace.
"""
value = re.sub(r'[^\w\s-]', '', value).strip()
return re.sub(r'[-\s]+', '-', value)
value = re.sub(r"[^\w\s-]", "", value).strip()
return re.sub(r"[-\s]+", "-", value)

@ -1,30 +1,33 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'dotnetexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"dotnetexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'sphinx_rtd_theme'
htmlhelp_basename = 'dotnetexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.dotnetdomain']
html_theme = "sphinx_rtd_theme"
htmlhelp_basename = "dotnetexampledoc"
extensions = ["autoapi.extension", "sphinxcontrib.dotnetdomain"]
autoapi_type = 'dotnet'
autoapi_type = "dotnet"
# Turn this on for debugging
# autoapi_keep_files = True
autoapi_dirs = ['example/Identity/src/']
autoapi_dirs = ["example/Identity/src/"]
import os
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
DIR = os.path.join(SITE_ROOT, autoapi_dirs[0])
if not os.path.exists(DIR):
os.system('git clone https://github.com/aspnet/Identity %s' % os.path.join(SITE_ROOT, 'example/Identity'))
os.system(
"git clone https://github.com/aspnet/Identity %s"
% os.path.join(SITE_ROOT, "example/Identity")
)

@ -1,22 +1,22 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'goexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"goexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'goexampledoc'
extensions = ['autoapi.extension', 'sphinxcontrib.golangdomain']
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "goexampledoc"
extensions = ["autoapi.extension", "sphinxcontrib.golangdomain"]
autoapi_type = 'go'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.go'
autoapi_type = "go"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.go"

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'jsexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"jsexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'jsexampledoc'
extensions = ['autoapi.extension']
autoapi_type = 'javascript'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.js'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "jsexampledoc"
extensions = ["autoapi.extension"]
autoapi_type = "javascript"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.js"

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'pyexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"pyexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'pyexampledoc'
extensions = ['sphinx.ext.autodoc', 'autoapi.extension']
autoapi_type = 'python'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.py'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "pyexampledoc"
extensions = ["sphinx.ext.autodoc", "autoapi.extension"]
autoapi_type = "python"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.py"

@ -38,8 +38,7 @@ class Foo(object):
"""This method should parse okay"""
return True
def method_multiline(self, foo=None, bar=None,
baz=None):
def method_multiline(self, foo=None, bar=None, baz=None):
"""This is on multiple lines, but should parse okay too
pydocstyle gives us lines of source. Test if this means that multiline

@ -1,6 +1,7 @@
from .subpackage import public_chain
from .subpackage.submodule import public_multiple_imports
def module_level_method(foo, bar):
"""A module level method"""
pass

@ -23,8 +23,7 @@ class Foo(object):
"""This method should parse okay"""
return True
def method_multiline(self, foo=None, bar=None,
baz=None):
def method_multiline(self, foo=None, bar=None, baz=None):
"""This is on multiple lines, but should parse okay too
pydocstyle gives us lines of source. Test if this means that multiline

@ -2,6 +2,7 @@ from .submodule import public_chain
from .submodule import _private_made_public as now_public_function
from .submodule import public_multiple_imports
def module_level_method(foo, bar):
"""A module level method"""
pass

@ -2,10 +2,12 @@ def public_chain():
"""Part of a public resolution chain."""
return 5
def _private_made_public():
"""A private function made public by import."""
return 5
def public_multiple_imports():
"""A public function imported in multiple places."""
return 5

@ -1,11 +1,11 @@
from ...subpackage import *
__all__ = [
'SimpleClass',
'simple_function',
'public_chain',
'module_level_method',
'does_not_exist',
"SimpleClass",
"simple_function",
"public_chain",
"module_level_method",
"does_not_exist",
]

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'pypackagecomplex'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"pypackagecomplex"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'pypackagecomplexdoc'
extensions = ['autoapi.extension']
autoapi_type = 'python'
autoapi_dirs = ['complex']
autoapi_file_pattern = '*.py'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "pypackagecomplexdoc"
extensions = ["autoapi.extension"]
autoapi_type = "python"
autoapi_dirs = ["complex"]
autoapi_file_pattern = "*.py"

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'pypackageexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"pypackageexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'pypackageexampledoc'
extensions = ['autoapi.extension']
autoapi_type = 'python'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.py'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "pypackageexampledoc"
extensions = ["autoapi.extension"]
autoapi_type = "python"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.py"

@ -23,8 +23,7 @@ class Foo(object):
"""This method should parse okay"""
return True
def method_multiline(self, foo=None, bar=None,
baz=None):
def method_multiline(self, foo=None, bar=None, baz=None):
"""This is on multiple lines, but should parse okay too
pydocstyle gives us lines of source. Test if this means that multiline

@ -18,7 +18,6 @@ else:
class PythonParserTests(unittest.TestCase):
def parse(self, source):
node = astroid.extract_node(source)
return Parser().parse(node)
@ -29,16 +28,16 @@ class PythonParserTests(unittest.TestCase):
pass
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], 'foo')
self.assertEqual(data['type'], 'function')
self.assertEqual(data["name"], "foo")
self.assertEqual(data["type"], "function")
def test_parses_all(self):
source = """
__all__ = ['Foo', 5.0]
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo', 5.0])
self.assertEqual(data["name"], "__all__")
self.assertEqual(data["value"], ["Foo", 5.0])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_list_addition(self):
@ -46,8 +45,8 @@ class PythonParserTests(unittest.TestCase):
__all__ = ['Foo'] + []
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo'])
self.assertEqual(data["name"], "__all__")
self.assertEqual(data["value"], ["Foo"])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_name_addtion(self):
@ -55,8 +54,8 @@ class PythonParserTests(unittest.TestCase):
__all__ = ['Foo'] + bar.__all__
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo'])
self.assertEqual(data["name"], "__all__")
self.assertEqual(data["value"], ["Foo"])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_multiple_name_addtions(self):
@ -66,15 +65,15 @@ class PythonParserTests(unittest.TestCase):
__all__ += ['foo']
"""
data = self.parse(source)
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['foo'])
self.assertEqual(data["name"], "__all__")
self.assertEqual(data["value"], ["foo"])
source = """
__all__ = ['foo']
__all__ = foo
"""
data = self.parse(source)
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], [])
self.assertEqual(data["name"], "__all__")
self.assertEqual(data["value"], [])
def test_parses_all_multiline(self):
source = """
@ -84,7 +83,7 @@ class PythonParserTests(unittest.TestCase):
]
"""
data = self.parse(source)[0]
self.assertEqual(data['value'], ['foo', 'bar'])
self.assertEqual(data["value"], ["foo", "bar"])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_generator(self):
@ -92,7 +91,7 @@ class PythonParserTests(unittest.TestCase):
__all__ = [x for x in dir(token) if x[0] != '_'] + ['foo', 'bar']
"""
data = self.parse(source)[0]
self.assertEqual(data['value'], ['foo', 'bar'])
self.assertEqual(data["value"], ["foo", "bar"])
def test_parses_name(self):
source = "foo.bar"
@ -103,58 +102,57 @@ class PythonParserTests(unittest.TestCase):
value = [1, 2, 3, 4]
source = "{} = {}".format(name, value)
data = self.parse(source)[0]
self.assertEqual(data['name'], name)
self.assertEqual(data['value'], value)
self.assertEqual(data["name"], name)
self.assertEqual(data["value"], value)
def test_parses_nested_list(self):
name = "__all__"
value = [[1, 2], [3, 4]]
source = "{} = {}".format(name, value)
data = self.parse(source)[0]
self.assertEqual(data['name'], name)
self.assertEqual(data['value'], value)
self.assertEqual(data["name"], name)
self.assertEqual(data["value"], value)
def test_arguments(self):
"""Argument parsing of source"""
source=(
'def foobar(self, bar, baz=42, foo=True,\n'
' *args, **kwargs):\n'
source = (
"def foobar(self, bar, baz=42, foo=True,\n"
" *args, **kwargs):\n"
' "This is a docstring"\n'
' return True\n'
" return True\n"
)
data = self.parse(source)[0]
self.assertEqual(
data['args'],
'self, bar, baz=42, foo=True, *args, **kwargs'
)
self.assertEqual(data["args"], "self, bar, baz=42, foo=True, *args, **kwargs")
def test_advanced_arguments(self):
"""Advanced argument parsing"""
source=(
source = (
'def foobar(self, a, b, c=42, d="string", e=(1,2),\n'
' f={"a": True}, g=None, h=[1,2,3,4],\n'
' i=dict(a=True), j=False, *args, **kwargs):\n'
" i=dict(a=True), j=False, *args, **kwargs):\n"
' "This is a docstring"\n'
' return True\n'
" return True\n"
)
data = self.parse(source)[0]
self.assertEqual(
data['args'],
', '.join([
'self',
'a',
'b',
'c=42',
'd=\'string\'',
'e=(1, 2)',
'f={\'a\': True}',
'g=None',
'h=[1, 2, 3, 4]',
'i=dict(a=True)',
'j=False',
'*args',
'**kwargs',
])
data["args"],
", ".join(
[
"self",
"a",
"b",
"c=42",
"d='string'",
"e=(1, 2)",
"f={'a': True}",
"g=None",
"h=[1, 2, 3, 4]",
"i=dict(a=True)",
"j=False",
"*args",
"**kwargs",
]
),
)
def test_dict_key_assignment(self):
@ -166,7 +164,7 @@ class PythonParserTests(unittest.TestCase):
MY_DICT['key2'] = 'value2'
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], 'MY_DICT')
self.assertEqual(data["name"], "MY_DICT")
def test_list_index_assignment(self):
"""Ignore assignment to indexes."""
@ -177,4 +175,4 @@ class PythonParserTests(unittest.TestCase):
COLOUR[2] = 255
"""
data = self.parse(source)[0]
self.assertEqual(data['name'], 'COLOUR')
self.assertEqual(data["name"], "COLOUR")

@ -7,18 +7,18 @@ import sphinx
from sphinx.application import Sphinx
@pytest.fixture(scope='class')
@pytest.fixture(scope="class")
def builder():
cwd = os.getcwd()
def build(test_dir, confoverrides=None):
os.chdir('tests/python/{0}'.format(test_dir))
os.chdir("tests/python/{0}".format(test_dir))
app = Sphinx(
srcdir='.',
confdir='.',
outdir='_build/text',
doctreedir='_build/.doctrees',
buildername='text',
srcdir=".",
confdir=".",
outdir="_build/text",
doctreedir="_build/.doctrees",
buildername="text",
confoverrides=confoverrides,
)
app.build(force_all=True)
@ -26,230 +26,215 @@ def builder():
yield build
try:
shutil.rmtree('_build')
shutil.rmtree("_build")
finally:
os.chdir(cwd)
class TestSimpleModule(object):
@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def built(self, builder):
builder('pyexample')
builder("pyexample")
def test_integration(self):
self.check_integration(
'_build/text/autoapi/example/index.txt',
)
self.check_integration("_build/text/autoapi/example/index.txt")
def test_manual_directives(self):
# The manual directives should contain the same information
self.check_integration(
'_build/text/manualapi.txt',
)
self.check_integration("_build/text/manualapi.txt")
def check_integration(self, example_path):
with io.open(example_path, encoding='utf8') as example_handle:
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
assert 'class example.Foo' in example_file
assert 'class Meta' in example_file
assert 'attr2' in example_file
assert 'This is the docstring of an instance attribute.' in example_file
assert 'method_okay(self, foo=None, bar=None)' in example_file
assert 'method_multiline(self, foo=None, bar=None, baz=None)' in example_file
assert 'method_tricky(self, foo=None, bar=dict(foo=1, bar=2))' in example_file
assert "class example.Foo" in example_file
assert "class Meta" in example_file
assert "attr2" in example_file
assert "This is the docstring of an instance attribute." in example_file
assert "method_okay(self, foo=None, bar=None)" in example_file
assert "method_multiline(self, foo=None, bar=None, baz=None)" in example_file
assert "method_tricky(self, foo=None, bar=dict(foo=1, bar=2))" in example_file
# Are constructor arguments from the class docstring parsed?
assert 'Set an attribute' in example_file
assert "Set an attribute" in example_file
# "self" should not be included in constructor arguments
assert 'Foo(self' not in example_file
assert "Foo(self" not in example_file
assert not os.path.exists('_build/text/autoapi/method_multiline')
assert not os.path.exists("_build/text/autoapi/method_multiline")
index_path = '_build/text/index.txt'
with io.open(index_path, encoding='utf8') as index_handle:
index_path = "_build/text/index.txt"
with io.open(index_path, encoding="utf8") as index_handle:
index_file = index_handle.read()
assert 'Sphinx AutoAPI Index' in index_file
assert "Sphinx AutoAPI Index" in index_file
assert 'Foo' in index_file
assert 'Meta' in index_file
assert "Foo" in index_file
assert "Meta" in index_file
def test_napoleon_integration_not_loaded(self, builder):
example_path = '_build/text/autoapi/example/index.txt'
with io.open(example_path, encoding='utf8') as example_handle:
example_path = "_build/text/autoapi/example/index.txt"
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
# Check that docstrings are not transformed without napoleon loaded
assert 'Args' in example_file
assert "Args" in example_file
assert 'Returns' in example_file
assert "Returns" in example_file
def test_napoleon_integration_loaded(builder):
confoverrides = {
'extensions': [
'autoapi.extension',
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
],
"extensions": ["autoapi.extension", "sphinx.ext.autodoc", "sphinx.ext.napoleon"]
}
builder('pyexample', confoverrides=confoverrides)
builder("pyexample", confoverrides=confoverrides)
example_path = '_build/text/autoapi/example/index.txt'
with io.open(example_path, encoding='utf8') as example_handle:
example_path = "_build/text/autoapi/example/index.txt"
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
assert 'Parameters' in example_file
assert "Parameters" in example_file
assert 'Return type' in example_file
assert "Return type" in example_file
assert 'Args' not in example_file
assert "Args" not in example_file
class TestSimplePackage(object):
@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def built(self, builder):
builder('pypackageexample')
builder("pypackageexample")
def test_integration_with_package(self):
example_path = '_build/text/autoapi/example/index.txt'
with io.open(example_path, encoding='utf8') as example_handle:
example_path = "_build/text/autoapi/example/index.txt"
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
assert 'example.foo' in example_file
assert 'example.module_level_method(foo, bar)' in example_file
assert "example.foo" in example_file
assert "example.module_level_method(foo, bar)" in example_file
example_foo_path = '_build/text/autoapi/example/foo/index.txt'
with io.open(example_foo_path, encoding='utf8') as example_foo_handle:
example_foo_path = "_build/text/autoapi/example/foo/index.txt"
with io.open(example_foo_path, encoding="utf8") as example_foo_handle:
example_foo_file = example_foo_handle.read()
assert 'class example.foo.Foo' in example_foo_file
assert 'method_okay(self, foo=None, bar=None)' in example_foo_file
assert "class example.foo.Foo" in example_foo_file
assert "method_okay(self, foo=None, bar=None)" in example_foo_file
index_path = '_build/text/index.txt'
with io.open(index_path, encoding='utf8') as index_handle:
index_path = "_build/text/index.txt"
with io.open(index_path, encoding="utf8") as index_handle:
index_file = index_handle.read()
assert 'Sphinx AutoAPI Index' in index_file
assert 'example.foo' in index_file
assert 'Foo' in index_file
assert 'module_level_method' in index_file
assert "Sphinx AutoAPI Index" in index_file
assert "example.foo" in index_file
assert "Foo" in index_file
assert "module_level_method" in index_file
def _test_class_content(builder, class_content):
confoverrides = {
'autoapi_python_class_content': class_content,
}
confoverrides = {"autoapi_python_class_content": class_content}
builder('pyexample', confoverrides=confoverrides)
builder("pyexample", confoverrides=confoverrides)
example_path = '_build/text/autoapi/example/index.txt'
with io.open(example_path, encoding='utf8') as example_handle:
example_path = "_build/text/autoapi/example/index.txt"
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
if class_content == 'init':
assert 'Can we parse arguments' not in example_file
if class_content == "init":
assert "Can we parse arguments" not in example_file
else:
assert 'Can we parse arguments' in example_file
assert "Can we parse arguments" in example_file
if class_content not in ('both', 'init'):
assert 'Constructor docstring' not in example_file
if class_content not in ("both", "init"):
assert "Constructor docstring" not in example_file
else:
assert 'Constructor docstring' in example_file
assert "Constructor docstring" in example_file
def test_class_class_content(builder):
_test_class_content(builder, 'class')
_test_class_content(builder, "class")
def test_both_class_content(builder):
_test_class_content(builder, 'both')
_test_class_content(builder, "both")
def test_init_class_content(builder):
_test_class_content(builder, 'init')
_test_class_content(builder, "init")
def test_hiding_private_members(builder):
confoverrides = {
'autoapi_options': ['members', 'undoc-members', 'special-members'],
}
builder('pypackageexample', confoverrides=confoverrides)
confoverrides = {"autoapi_options": ["members", "undoc-members", "special-members"]}
builder("pypackageexample", confoverrides=confoverrides)
example_path = '_build/text/autoapi/example/index.txt'
with io.open(example_path, encoding='utf8') as example_handle:
example_path = "_build/text/autoapi/example/index.txt"
with io.open(example_path, encoding="utf8") as example_handle:
example_file = example_handle.read()
assert 'private' not in example_file
assert "private" not in example_file
private_path = '_build/text/autoapi/example/_private_module/index.txt'
with io.open(private_path, encoding='utf8') as private_handle:
private_path = "_build/text/autoapi/example/_private_module/index.txt"
with io.open(private_path, encoding="utf8") as private_handle:
private_file = private_handle.read()
assert 'public_method' in private_file
assert "public_method" in private_file
class TestComplexPackage(object):
@pytest.fixture(autouse=True, scope='class')
@pytest.fixture(autouse=True, scope="class")
def built(self, builder):
builder('pypackagecomplex')
builder("pypackagecomplex")
def test_public_chain_resolves(self):
submodule_path = '_build/text/autoapi/complex/subpackage/submodule/index.txt'
with io.open(submodule_path, encoding='utf8') as submodule_handle:
submodule_path = "_build/text/autoapi/complex/subpackage/submodule/index.txt"
with io.open(submodule_path, encoding="utf8") as submodule_handle:
submodule_file = submodule_handle.read()
assert "Part of a public resolution chain." in submodule_file
subpackage_path = '_build/text/autoapi/complex/subpackage/index.txt'
with io.open(subpackage_path, encoding='utf8') as subpackage_handle:
subpackage_path = "_build/text/autoapi/complex/subpackage/index.txt"
with io.open(subpackage_path, encoding="utf8") as subpackage_handle:
subpackage_file = subpackage_handle.read()
assert "Part of a public resolution chain." in subpackage_file
package_path = '_build/text/autoapi/complex/index.txt'
with io.open(package_path, encoding='utf8') as package_handle:
package_path = "_build/text/autoapi/complex/index.txt"
with io.open(package_path, encoding="utf8") as package_handle:
package_file = package_handle.read()
assert "Part of a public resolution chain." in package_file
def test_private_made_public(self):
submodule_path = '_build/text/autoapi/complex/subpackage/submodule/index.txt'
with io.open(submodule_path, encoding='utf8') as submodule_handle:
submodule_path = "_build/text/autoapi/complex/subpackage/submodule/index.txt"
with io.open(submodule_path, encoding="utf8") as submodule_handle:
submodule_file = submodule_handle.read()
assert "A private function made public by import." in submodule_file
def test_multiple_import_locations(self):
submodule_path = '_build/text/autoapi/complex/subpackage/submodule/index.txt'
with io.open(submodule_path, encoding='utf8') as submodule_handle:
submodule_path = "_build/text/autoapi/complex/subpackage/submodule/index.txt"
with io.open(submodule_path, encoding="utf8") as submodule_handle:
submodule_file = submodule_handle.read()
assert "A public function imported in multiple places." in submodule_file
subpackage_path = '_build/text/autoapi/complex/subpackage/index.txt'
with io.open(subpackage_path, encoding='utf8') as subpackage_handle:
subpackage_path = "_build/text/autoapi/complex/subpackage/index.txt"
with io.open(subpackage_path, encoding="utf8") as subpackage_handle:
subpackage_file = subpackage_handle.read()
assert "A public function imported in multiple places." in subpackage_file
package_path = '_build/text/autoapi/complex/index.txt'
with io.open(package_path, encoding='utf8') as package_handle:
package_path = "_build/text/autoapi/complex/index.txt"
with io.open(package_path, encoding="utf8") as package_handle:
package_file = package_handle.read()
assert "A public function imported in multiple places." in package_file
def test_simple_wildcard_imports(self):
wildcard_path = '_build/text/autoapi/complex/wildcard/index.txt'
with io.open(wildcard_path, encoding='utf8') as wildcard_handle:
wildcard_path = "_build/text/autoapi/complex/wildcard/index.txt"
with io.open(wildcard_path, encoding="utf8") as wildcard_handle:
wildcard_file = wildcard_handle.read()
assert "public_chain" in wildcard_file
@ -258,16 +243,16 @@ class TestComplexPackage(object):
assert "module_level_method" in wildcard_file
def test_wildcard_chain(self):
wildcard_path = '_build/text/autoapi/complex/wildchain/index.txt'
with io.open(wildcard_path, encoding='utf8') as wildcard_handle:
wildcard_path = "_build/text/autoapi/complex/wildchain/index.txt"
with io.open(wildcard_path, encoding="utf8") as wildcard_handle:
wildcard_file = wildcard_handle.read()
assert "public_chain" in wildcard_file
assert "module_level_method" in wildcard_file
def test_wildcard_all_imports(self):
wildcard_path = '_build/text/autoapi/complex/wildall/index.txt'
with io.open(wildcard_path, encoding='utf8') as wildcard_handle:
wildcard_path = "_build/text/autoapi/complex/wildall/index.txt"
with io.open(wildcard_path, encoding="utf8") as wildcard_handle:
wildcard_file = wildcard_handle.read()
assert "not_all" not in wildcard_file

@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'pyexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"pyexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'pyexampledoc'
extensions = ['autoapi.extension']
autoapi_type = 'python'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.py'
autoapi_template_dir = 'template_overrides'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "pyexampledoc"
extensions = ["autoapi.extension"]
autoapi_type = "python"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.py"
autoapi_template_dir = "template_overrides"
exclude_patterns = [autoapi_template_dir]
exclude_patterns = [autoapi_template_dir]

@ -1,7 +1,8 @@
__author__ = 'swenson'
__author__ = "swenson"
import math
def example_function(x):
"""Compute the square root of x and return it."""
return math.sqrt(x)
return math.sqrt(x)

@ -5,93 +5,96 @@ import pytest
def generate_module_names():
for i in range(1, 5):
yield '.'.join('module{}'.format(j) for j in range(i))
yield ".".join("module{}".format(j) for j in range(i))
yield 'package.repeat.repeat'
yield "package.repeat.repeat"
def imported_basename_cases():
for module_name in generate_module_names():
import_ = 'import {}'.format(module_name)
basename = '{}.ImportedClass'.format(module_name)
import_ = "import {}".format(module_name)
basename = "{}.ImportedClass".format(module_name)
expected = basename
yield (import_, basename, expected)
import_ = 'import {} as aliased'.format(module_name)
basename = 'aliased.ImportedClass'
import_ = "import {} as aliased".format(module_name)
basename = "aliased.ImportedClass"
yield (import_, basename, expected)
if '.' in module_name:
from_name, attribute = module_name.rsplit('.', 1)
import_ = 'from {} import {}'.format(from_name, attribute)
basename = '{}.ImportedClass'.format(attribute)
if "." in module_name:
from_name, attribute = module_name.rsplit(".", 1)
import_ = "from {} import {}".format(from_name, attribute)
basename = "{}.ImportedClass".format(attribute)
yield (import_, basename, expected)
import_ += ' as aliased'
basename = 'aliased.ImportedClass'
import_ += " as aliased"
basename = "aliased.ImportedClass"
yield (import_, basename, expected)
import_ = 'from {} import ImportedClass'.format(module_name)
basename = 'ImportedClass'
import_ = "from {} import ImportedClass".format(module_name)
basename = "ImportedClass"
yield (import_, basename, expected)
import_ = 'from {} import ImportedClass as AliasedClass'.format(module_name)
basename = 'AliasedClass'
import_ = "from {} import ImportedClass as AliasedClass".format(module_name)
basename = "AliasedClass"
yield (import_, basename, expected)
def generate_args():
for i in range(5):
yield ', '.join('arg{}'.format(j) for j in range(i))
yield ", ".join("arg{}".format(j) for j in range(i))
def imported_call_cases():
for args in generate_args():
for import_, basename, expected in imported_basename_cases():
basename += '({})'.format(args)
expected += '()'
basename += "({})".format(args)
expected += "()"
yield import_, basename, expected
class TestAstroidUtils(object):
@pytest.mark.parametrize(
('import_', 'basename', 'expected'), list(imported_basename_cases()),
("import_", "basename", "expected"), list(imported_basename_cases())
)
def test_can_get_full_imported_basename(self, import_, basename, expected):
source = '''
source = """
{}
class ThisClass({}): #@
pass
'''.format(import_, basename)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(
node.bases[0], node.basenames[0],
""".format(
import_, basename
)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(node.bases[0], node.basenames[0])
assert basenames == expected
@pytest.mark.parametrize(
('import_', 'basename', 'expected'), list(imported_call_cases()),
("import_", "basename", "expected"), list(imported_call_cases())
)
def test_can_get_full_function_basename(self, import_, basename, expected):
source = '''
source = """
{}
class ThisClass({}): #@
pass
'''.format(import_, basename)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(
node.bases[0], node.basenames[0],
""".format(
import_, basename
)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(node.bases[0], node.basenames[0])
assert basenames == expected
@pytest.mark.parametrize(('source', 'expected'), [
('a = "a"', ('a', 'a')),
('a = 1', ('a', 1)),
('a, b, c = (1, 2, 3)', None),
('a = b = 1', None),
])
@pytest.mark.parametrize(
("source", "expected"),
[
('a = "a"', ("a", "a")),
("a = 1", ("a", 1)),
("a, b, c = (1, 2, 3)", None),
("a = b = 1", None),
],
)
def test_can_get_assign_values(self, source, expected):
node = astroid.extract_node(source)
value = astroid_utils.get_assign_value(node)

@ -1,4 +1,4 @@
'''Test .NET autoapi domain'''
"""Test .NET autoapi domain"""
import unittest
@ -8,14 +8,14 @@ from autoapi.mappers import dotnet
class DotNetSphinxMapperTests(unittest.TestCase):
def setUp(self):
'''Test setup'''
"""Test setup"""
class _config(object):
def __getattr__(self, key):
attrs = {
'autoapi_dirs': ['/tmp/autoapi/tmp'],
'autoapi_root': '/tmp/autoapi/root',
"autoapi_dirs": ["/tmp/autoapi/tmp"],
"autoapi_root": "/tmp/autoapi/root",
}
return attrs.get(key, None)
@ -28,32 +28,33 @@ class DotNetSphinxMapperTests(unittest.TestCase):
self.application = _application()
def test_create_class(self):
'''Test .NET class instance creation helper'''
"""Test .NET class instance creation helper"""
dom = dotnet.DotNetSphinxMapper(self.application)
def _create_class(data):
return list(dom.create_class(data))[0]
cls = _create_class({'id': 'Foo.Bar', 'type': 'Namespace'})
cls = _create_class({"id": "Foo.Bar", "type": "Namespace"})
self.assertIsInstance(cls, dotnet.DotNetNamespace)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Class'})
cls = _create_class({"id": "Foo.Bar", "type": "Class"})
self.assertIsInstance(cls, dotnet.DotNetClass)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Property'})
cls = _create_class({"id": "Foo.Bar", "type": "Property"})
self.assertIsInstance(cls, dotnet.DotNetProperty)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Method'})
cls = _create_class({"id": "Foo.Bar", "type": "Method"})
self.assertIsInstance(cls, dotnet.DotNetMethod)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Enum'})
cls = _create_class({"id": "Foo.Bar", "type": "Enum"})
self.assertIsInstance(cls, dotnet.DotNetEnum)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Constructor'})
cls = _create_class({"id": "Foo.Bar", "type": "Constructor"})
self.assertIsInstance(cls, dotnet.DotNetConstructor)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Struct'})
cls = _create_class({"id": "Foo.Bar", "type": "Struct"})
self.assertIsInstance(cls, dotnet.DotNetStruct)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Interface'})
cls = _create_class({"id": "Foo.Bar", "type": "Interface"})
self.assertIsInstance(cls, dotnet.DotNetInterface)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Delegate'})
cls = _create_class({"id": "Foo.Bar", "type": "Delegate"})
self.assertIsInstance(cls, dotnet.DotNetDelegate)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Field'})
cls = _create_class({"id": "Foo.Bar", "type": "Field"})
self.assertIsInstance(cls, dotnet.DotNetField)
cls = _create_class({'id': 'Foo.Bar', 'type': 'Event'})
cls = _create_class({"id": "Foo.Bar", "type": "Event"})
self.assertIsInstance(cls, dotnet.DotNetEvent)
def test_create_class_with_children(self):
@ -61,111 +62,135 @@ class DotNetSphinxMapperTests(unittest.TestCase):
def _create_class(data):
return list(dom.create_class(data))[0]
cls = _create_class({'id': 'Foo.Bar',
'type': 'Class',
'items': [
{'id': 'Foo.Bar.Baz',
'type': 'Method'}
]})
cls = _create_class(
{
"id": "Foo.Bar",
"type": "Class",
"items": [{"id": "Foo.Bar.Baz", "type": "Method"}],
}
)
self.assertIsInstance(cls, dotnet.DotNetClass)
self.assertDictEqual(cls.item_map, {})
@patch('subprocess.check_output', lambda foo: foo)
@patch("subprocess.check_output", lambda foo: foo)
def test_get_objects(self):
'''Test basic get objects'''
"""Test basic get objects"""
objs = []
def _mock_find(self, patterns, **kwargs):
return {'items': ['foo', 'bar']}
return {"items": ["foo", "bar"]}
def _mock_read(self, path):
return {'items': [{'id': 'Foo.Bar', 'name': 'Foo', 'type': 'property'},
{'id': 'Foo.Bar2', 'name': 'Bar', 'type': 'property'}],
'id': 'Foo.Bar', 'type': 'Class', 'summary': path}
return {
"items": [
{"id": "Foo.Bar", "name": "Foo", "type": "property"},
{"id": "Foo.Bar2", "name": "Bar", "type": "property"},
],
"id": "Foo.Bar",
"type": "Class",
"summary": path,
}
with patch('autoapi.mappers.dotnet.DotNetSphinxMapper.find_files', _mock_find):
with patch('autoapi.mappers.dotnet.DotNetSphinxMapper.read_file', _mock_read):
with patch("autoapi.mappers.dotnet.DotNetSphinxMapper.find_files", _mock_find):
with patch(
"autoapi.mappers.dotnet.DotNetSphinxMapper.read_file", _mock_read
):
dom = dotnet.DotNetSphinxMapper(self.application)
dom.load('', '', '', raise_error=False)
dom.load("", "", "", raise_error=False)
dom.map()
objs = dom.objects
self.assertEqual(len(objs), 2)
self.assertEqual(objs['Foo.Bar'].id, 'Foo.Bar')
self.assertEqual(objs['Foo.Bar'].name, 'Foo.Bar')
self.assertEqual(objs['Foo.Bar2'].id, 'Foo.Bar2')
self.assertEqual(objs['Foo.Bar2'].name, 'Foo.Bar2')
self.assertEqual(objs["Foo.Bar"].id, "Foo.Bar")
self.assertEqual(objs["Foo.Bar"].name, "Foo.Bar")
self.assertEqual(objs["Foo.Bar2"].id, "Foo.Bar2")
self.assertEqual(objs["Foo.Bar2"].name, "Foo.Bar2")
class DotNetPythonMapperTests(unittest.TestCase):
def test_xml_parse(self):
"""XML doc comment parsing"""
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'This is an example comment <see cref="FOO" />')
self.assertEqual(ret, 'This is an example comment :any:`FOO`')
'This is an example comment <see cref="FOO" />'
)
self.assertEqual(ret, "This is an example comment :any:`FOO`")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'This is an example comment <see cref="!:FOO" />')
self.assertEqual(ret, 'This is an example comment FOO')
'This is an example comment <see cref="!:FOO" />'
)
self.assertEqual(ret, "This is an example comment FOO")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'This is an example comment <see cref="N:FOO">inner foo</see>')
self.assertEqual(ret, 'This is an example comment :dn:ns:`FOO`')
'This is an example comment <see cref="N:FOO">inner foo</see>'
)
self.assertEqual(ret, "This is an example comment :dn:ns:`FOO`")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'Test <see cref="P:FOO" /> and <see cref="E:BAR">Blah</see>')
self.assertEqual(ret, 'Test :dn:prop:`FOO` and :dn:event:`BAR`')
'Test <see cref="P:FOO" /> and <see cref="E:BAR">Blah</see>'
)
self.assertEqual(ret, "Test :dn:prop:`FOO` and :dn:event:`BAR`")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'This is an example comment <paramref name="FOO" />')
self.assertEqual(ret, 'This is an example comment ``FOO``')
'This is an example comment <paramref name="FOO" />'
)
self.assertEqual(ret, "This is an example comment ``FOO``")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'This is an example comment <typeparamref name="FOO" />')
self.assertEqual(ret, 'This is an example comment ``FOO``')
'This is an example comment <typeparamref name="FOO" />'
)
self.assertEqual(ret, "This is an example comment ``FOO``")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'With surrounding characters s<see cref="FOO" />s')
self.assertEqual(ret, 'With surrounding characters s :any:`FOO`\s')
'With surrounding characters s<see cref="FOO" />s'
)
self.assertEqual(ret, "With surrounding characters s :any:`FOO`\s")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'With surrounding characters s<paramref name="FOO" />s')
self.assertEqual(ret, 'With surrounding characters s ``FOO``\s')
'With surrounding characters s<paramref name="FOO" />s'
)
self.assertEqual(ret, "With surrounding characters s ``FOO``\s")
def test_xml_transform_escape(self):
"""XML transform escaping"""
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'Foo <see cref="Foo`1" /> Bar')
self.assertEqual(ret, 'Foo :any:`Foo\\`1` Bar')
'Foo <see cref="Foo`1" /> Bar'
)
self.assertEqual(ret, "Foo :any:`Foo\\`1` Bar")
ret = dotnet.DotNetPythonMapper.transform_doc_comments(
'No space before<see cref="M:Foo`1" />or after')
self.assertEqual(ret, 'No space before :dn:meth:`Foo\\`1`\\or after')
'No space before<see cref="M:Foo`1" />or after'
)
self.assertEqual(ret, "No space before :dn:meth:`Foo\\`1`\\or after")
def test_parsing_obj(self):
"""Parse out object, test for transforms, etc"""
obj = {
'uid': 'Foo`1',
'name': 'Foo<TUser>',
'summary': 'Test parsing <see cref="Bar" />',
'syntax': {
'parameters': [
{'id': 'a', 'type': '{TUser}',
'description': 'Test <see cref="TUser" />'}
"uid": "Foo`1",
"name": "Foo<TUser>",
"summary": 'Test parsing <see cref="Bar" />',
"syntax": {
"parameters": [
{
"id": "a",
"type": "{TUser}",
"description": 'Test <see cref="TUser" />',
}
],
'return': {
'type': 'Bar',
'description': ('Test references <see cref="Bar" /> '
'and paramrefs <paramref name="a" />'),
}
}
"return": {
"type": "Bar",
"description": (
'Test references <see cref="Bar" /> '
'and paramrefs <paramref name="a" />'
),
},
},
}
mapped = dotnet.DotNetPythonMapper(obj)
self.assertEqual(
mapped.parameters[0],
{'name': 'a', 'type': '{TUser}', 'desc': 'Test :any:`TUser`'}
{"name": "a", "type": "{TUser}", "desc": "Test :any:`TUser`"},
)
self.assertEqual(
mapped.returns['description'],
'Test references :any:`Bar` and paramrefs ``a``'
mapped.returns["description"],
"Test references :any:`Bar` and paramrefs ``a``",
)

@ -15,110 +15,96 @@ from sphinx.application import Sphinx
@contextmanager
def sphinx_build(test_dir, confoverrides=None):
os.chdir('tests/{0}'.format(test_dir))
os.chdir("tests/{0}".format(test_dir))
try:
app = Sphinx(
srcdir='.',
confdir='.',
outdir='_build/text',
doctreedir='_build/.doctrees',
buildername='text',
srcdir=".",
confdir=".",
outdir="_build/text",
doctreedir="_build/.doctrees",
buildername="text",
confoverrides=confoverrides,
)
app.build(force_all=True)
yield
finally:
shutil.rmtree('_build')
os.chdir('../..')
shutil.rmtree("_build")
os.chdir("../..")
class LanguageIntegrationTests(unittest.TestCase):
def _run_test(self, test_dir, test_file, test_string):
with sphinx_build(test_dir):
with io.open(test_file, encoding='utf8') as fin:
with io.open(test_file, encoding="utf8") as fin:
text = fin.read().strip()
self.assertIn(test_string, text)
class JavaScriptTests(LanguageIntegrationTests):
def _js_read(self, path):
return json.load(open('../fixtures/javascript.json'))
return json.load(open("../fixtures/javascript.json"))
@patch('autoapi.mappers.javascript.JavaScriptSphinxMapper.read_file', _js_read)
@patch("autoapi.mappers.javascript.JavaScriptSphinxMapper.read_file", _js_read)
def test_integration(self):
self._run_test(
'jsexample',
'_build/text/autoapi/Circle/index.txt',
'Creates an instance of Circle'
"jsexample",
"_build/text/autoapi/Circle/index.txt",
"Creates an instance of Circle",
)
class GoTests(LanguageIntegrationTests):
def _go_read(self, path):
return json.load(open('../fixtures/go.json'))
return json.load(open("../fixtures/go.json"))
@patch('autoapi.mappers.go.GoSphinxMapper.read_file', _go_read)
@patch("autoapi.mappers.go.GoSphinxMapper.read_file", _go_read)
def test_integration(self):
self._run_test(
'goexample',
'_build/text/autoapi/main/index.txt',
'CopyFuncs produces a json-annotated array of Func objects'
"goexample",
"_build/text/autoapi/main/index.txt",
"CopyFuncs produces a json-annotated array of Func objects",
)
class DotNetTests(LanguageIntegrationTests):
def _dotnet_read(self, path):
return json.load(open('../fixtures/dotnet.json'))
return json.load(open("../fixtures/dotnet.json"))
# Mock this because it's slow otherwise
def _dotnet_load(self, patterns, dirs, ignore=[]):
data = self.read_file(path='inmem')
self.paths['inmem'] = data
data = self.read_file(path="inmem")
self.paths["inmem"] = data
@staticmethod
def _dotnet_finished(app, exception):
pass
@patch('autoapi.mappers.dotnet.DotNetSphinxMapper.load', _dotnet_load)
@patch('autoapi.mappers.dotnet.DotNetSphinxMapper.read_file', _dotnet_read)
@patch('autoapi.mappers.dotnet.DotNetSphinxMapper.build_finished', _dotnet_finished)
@patch("autoapi.mappers.dotnet.DotNetSphinxMapper.load", _dotnet_load)
@patch("autoapi.mappers.dotnet.DotNetSphinxMapper.read_file", _dotnet_read)
@patch("autoapi.mappers.dotnet.DotNetSphinxMapper.build_finished", _dotnet_finished)
def test_integration(self):
self._run_test(
'dotnetexample',
'_build/text/autoapi/Microsoft/AspNet/Identity/IUserStore-TUser/index.txt',
'Provides an abstraction for a store which manages user accounts.'
"dotnetexample",
"_build/text/autoapi/Microsoft/AspNet/Identity/IUserStore-TUser/index.txt",
"Provides an abstraction for a store which manages user accounts.",
)
class IntegrationTests(LanguageIntegrationTests):
def test_template_overrides(self):
self._run_test(
'templateexample',
'_build/text/autoapi/example/index.txt',
'This is a fuction template override'
"templateexample",
"_build/text/autoapi/example/index.txt",
"This is a fuction template override",
)
class TOCTreeTests(LanguageIntegrationTests):
def test_toctree_overrides(self):
self._run_test(
'toctreeexample',
'_build/text/index.txt',
'AutoAPI Index'
)
self._run_test("toctreeexample", "_build/text/index.txt", "AutoAPI Index")
def test_toctree_domain_insertion(self):
"""
Test that the example_function gets added to the TOC Tree
"""
self._run_test(
'toctreeexample',
'_build/text/index.txt',
'* example_function'
)
self._run_test("toctreeexample", "_build/text/index.txt", "* example_function")

@ -1,6 +1,6 @@
# coding=utf8
'''Test .NET autoapi objects'''
"""Test .NET autoapi objects"""
import os
import unittest
@ -14,126 +14,117 @@ from autoapi.settings import TEMPLATE_DIR
class DotNetObjectTests(unittest.TestCase):
def test_type(self):
'''Test types of some of the objects'''
obj = dotnet.DotNetNamespace({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'namespace')
self.assertEqual(obj.ref_type, 'namespace')
self.assertEqual(obj.ref_directive, 'ns')
obj = dotnet.DotNetMethod({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'method')
self.assertEqual(obj.ref_type, 'method')
self.assertEqual(obj.ref_directive, 'meth')
obj = dotnet.DotNetProperty({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'property')
self.assertEqual(obj.ref_type, 'property')
self.assertEqual(obj.ref_directive, 'prop')
obj = dotnet.DotNetEnum({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'enum')
self.assertEqual(obj.ref_type, 'enumeration')
self.assertEqual(obj.ref_directive, 'enum')
obj = dotnet.DotNetStruct({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'struct')
self.assertEqual(obj.ref_type, 'structure')
self.assertEqual(obj.ref_directive, 'struct')
obj = dotnet.DotNetConstructor({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'constructor')
self.assertEqual(obj.ref_type, 'constructor')
self.assertEqual(obj.ref_directive, 'ctor')
obj = dotnet.DotNetInterface({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'interface')
self.assertEqual(obj.ref_type, 'interface')
self.assertEqual(obj.ref_directive, 'iface')
obj = dotnet.DotNetDelegate({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'delegate')
self.assertEqual(obj.ref_type, 'delegate')
self.assertEqual(obj.ref_directive, 'del')
obj = dotnet.DotNetClass({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'class')
self.assertEqual(obj.ref_type, 'class')
self.assertEqual(obj.ref_directive, 'cls')
obj = dotnet.DotNetField({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'field')
self.assertEqual(obj.ref_type, 'field')
self.assertEqual(obj.ref_directive, 'field')
obj = dotnet.DotNetEvent({'id': 'Foo.Bar'})
self.assertEqual(obj.type, 'event')
self.assertEqual(obj.ref_type, 'event')
self.assertEqual(obj.ref_directive, 'event')
"""Test types of some of the objects"""
obj = dotnet.DotNetNamespace({"id": "Foo.Bar"})
self.assertEqual(obj.type, "namespace")
self.assertEqual(obj.ref_type, "namespace")
self.assertEqual(obj.ref_directive, "ns")
obj = dotnet.DotNetMethod({"id": "Foo.Bar"})
self.assertEqual(obj.type, "method")
self.assertEqual(obj.ref_type, "method")
self.assertEqual(obj.ref_directive, "meth")
obj = dotnet.DotNetProperty({"id": "Foo.Bar"})
self.assertEqual(obj.type, "property")
self.assertEqual(obj.ref_type, "property")
self.assertEqual(obj.ref_directive, "prop")
obj = dotnet.DotNetEnum({"id": "Foo.Bar"})
self.assertEqual(obj.type, "enum")
self.assertEqual(obj.ref_type, "enumeration")
self.assertEqual(obj.ref_directive, "enum")
obj = dotnet.DotNetStruct({"id": "Foo.Bar"})
self.assertEqual(obj.type, "struct")
self.assertEqual(obj.ref_type, "structure")
self.assertEqual(obj.ref_directive, "struct")
obj = dotnet.DotNetConstructor({"id": "Foo.Bar"})
self.assertEqual(obj.type, "constructor")
self.assertEqual(obj.ref_type, "constructor")
self.assertEqual(obj.ref_directive, "ctor")
obj = dotnet.DotNetInterface({"id": "Foo.Bar"})
self.assertEqual(obj.type, "interface")
self.assertEqual(obj.ref_type, "interface")
self.assertEqual(obj.ref_directive, "iface")
obj = dotnet.DotNetDelegate({"id": "Foo.Bar"})
self.assertEqual(obj.type, "delegate")
self.assertEqual(obj.ref_type, "delegate")
self.assertEqual(obj.ref_directive, "del")
obj = dotnet.DotNetClass({"id": "Foo.Bar"})
self.assertEqual(obj.type, "class")
self.assertEqual(obj.ref_type, "class")
self.assertEqual(obj.ref_directive, "cls")
obj = dotnet.DotNetField({"id": "Foo.Bar"})
self.assertEqual(obj.type, "field")
self.assertEqual(obj.ref_type, "field")
self.assertEqual(obj.ref_directive, "field")
obj = dotnet.DotNetEvent({"id": "Foo.Bar"})
self.assertEqual(obj.type, "event")
self.assertEqual(obj.ref_type, "event")
self.assertEqual(obj.ref_directive, "event")
def test_names(self):
'''Test names of objects'''
obj = dotnet.DotNetNamespace({'id': 'Foo.Bar'})
self.assertEqual(obj.name, 'Foo.Bar')
self.assertEqual(obj.short_name, 'Bar')
"""Test names of objects"""
obj = dotnet.DotNetNamespace({"id": "Foo.Bar"})
self.assertEqual(obj.name, "Foo.Bar")
self.assertEqual(obj.short_name, "Bar")
obj = dotnet.DotNetNamespace({'id': 'Foo.Bar.Something`1'})
self.assertEqual(obj.name, 'Foo.Bar.Something`1')
self.assertEqual(obj.short_name, 'Something`1')
obj = dotnet.DotNetNamespace({"id": "Foo.Bar.Something`1"})
self.assertEqual(obj.name, "Foo.Bar.Something`1")
self.assertEqual(obj.short_name, "Something`1")
def test_namespace_namespace(self):
'''Namespace parent resolution'''
ns = dotnet.DotNetNamespace({'id': 'Foo.Bar.Widgets'})
self.assertEqual(ns.namespace, 'Foo.Bar')
ns = dotnet.DotNetNamespace({'id': 'Foo.Bar'})
self.assertEqual(ns.namespace, 'Foo')
ns = dotnet.DotNetNamespace({'id': 'Foo'})
"""Namespace parent resolution"""
ns = dotnet.DotNetNamespace({"id": "Foo.Bar.Widgets"})
self.assertEqual(ns.namespace, "Foo.Bar")
ns = dotnet.DotNetNamespace({"id": "Foo.Bar"})
self.assertEqual(ns.namespace, "Foo")
ns = dotnet.DotNetNamespace({"id": "Foo"})
self.assertIsNone(ns.namespace)
def test_class_namespace(self):
'''Class parent resolution'''
cls = dotnet.DotNetClass(dict(id='Foo.Bar.Widget',
type='class'))
self.assertEqual(cls.namespace, 'Foo.Bar')
cls = dotnet.DotNetClass(dict(id='Foo.Bar',
type='class'))
self.assertEqual(cls.namespace, 'Foo')
cls = dotnet.DotNetClass(dict(id='Foo',
type='class'))
"""Class parent resolution"""
cls = dotnet.DotNetClass(dict(id="Foo.Bar.Widget", type="class"))
self.assertEqual(cls.namespace, "Foo.Bar")
cls = dotnet.DotNetClass(dict(id="Foo.Bar", type="class"))
self.assertEqual(cls.namespace, "Foo")
cls = dotnet.DotNetClass(dict(id="Foo", type="class"))
self.assertIsNone(cls.namespace)
def test_filename(self):
'''Object file name'''
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget'})
self.assertEqual(cls.pathname, os.path.join('Foo', 'Bar', 'Widget'))
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget<T>'})
self.assertEqual(cls.pathname, os.path.join('Foo', 'Bar', 'Widget-T'))
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget<T>(TFoo)'})
self.assertEqual(cls.pathname, os.path.join('Foo', 'Bar', 'Widget-T'))
cls = dotnet.DotNetClass({'id': 'Foo.Foo-Bar.Widget<T>(TFoo)'})
self.assertEqual(cls.pathname, os.path.join('Foo', 'FooBar', 'Widget-T'))
cls = dotnet.DotNetClass({'id': u'Foo.Bär'})
self.assertEqual(cls.pathname, os.path.join('Foo', 'Bar'))
cls = dotnet.DotNetClass({'id': u'Ащщ.юИфк'})
self.assertEqual(cls.pathname, os.path.join('Ashchshch', 'iuIfk'))
"""Object file name"""
cls = dotnet.DotNetClass({"id": "Foo.Bar.Widget"})
self.assertEqual(cls.pathname, os.path.join("Foo", "Bar", "Widget"))
cls = dotnet.DotNetClass({"id": "Foo.Bar.Widget<T>"})
self.assertEqual(cls.pathname, os.path.join("Foo", "Bar", "Widget-T"))
cls = dotnet.DotNetClass({"id": "Foo.Bar.Widget<T>(TFoo)"})
self.assertEqual(cls.pathname, os.path.join("Foo", "Bar", "Widget-T"))
cls = dotnet.DotNetClass({"id": "Foo.Foo-Bar.Widget<T>(TFoo)"})
self.assertEqual(cls.pathname, os.path.join("Foo", "FooBar", "Widget-T"))
cls = dotnet.DotNetClass({"id": u"Foo.Bär"})
self.assertEqual(cls.pathname, os.path.join("Foo", "Bar"))
cls = dotnet.DotNetClass({"id": u"Ащщ.юИфк"})
self.assertEqual(cls.pathname, os.path.join("Ashchshch", "iuIfk"))
def test_rendered_class_escaping(self):
"""Rendered class escaping"""
jinja_env = Environment(
loader=FileSystemLoader([TEMPLATE_DIR]),
)
jinja_env = Environment(loader=FileSystemLoader([TEMPLATE_DIR]))
cls = dotnet.DotNetClass(
{
'id': 'Foo.Bar`1',
'inheritance': ['Foo.Baz`1'],
},
jinja_env=jinja_env)
self.assertIn('* :dn:cls:`Foo.Baz\\`1`\n', cls.render())
{"id": "Foo.Bar`1", "inheritance": ["Foo.Baz`1"]}, jinja_env=jinja_env
)
self.assertIn("* :dn:cls:`Foo.Baz\\`1`\n", cls.render())
def test_include_path(self):
"""Include path"""
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget'})
self.assertEqual(cls.include_path, '/autoapi/Foo/Bar/Widget/index')
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget'}, url_root='/autofoo')
self.assertEqual(cls.include_path, '/autofoo/Foo/Bar/Widget/index')
cls = dotnet.DotNetClass({"id": "Foo.Bar.Widget"})
self.assertEqual(cls.include_path, "/autoapi/Foo/Bar/Widget/index")
cls = dotnet.DotNetClass({"id": "Foo.Bar.Widget"}, url_root="/autofoo")
self.assertEqual(cls.include_path, "/autofoo/Foo/Bar/Widget/index")

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'pyexample'
copyright = u'2015, rtfd'
author = u'rtfd'
version = '0.1'
release = '0.1'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = u"pyexample"
copyright = u"2015, rtfd"
author = u"rtfd"
version = "0.1"
release = "0.1"
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = 'alabaster'
html_static_path = ['_static']
htmlhelp_basename = 'pyexampledoc'
extensions = ['autoapi.extension']
autoapi_type = 'python'
autoapi_dirs = ['example']
autoapi_file_pattern = '*.py'
html_theme = "alabaster"
html_static_path = ["_static"]
htmlhelp_basename = "pyexampledoc"
extensions = ["autoapi.extension"]
autoapi_type = "python"
autoapi_dirs = ["example"]
autoapi_file_pattern = "*.py"

@ -1,7 +1,8 @@
__author__ = 'swenson'
__author__ = "swenson"
import math
def example_function(x):
"""Compute the square root of x and return it."""
return math.sqrt(x)
return math.sqrt(x)

@ -1,6 +1,7 @@
[tox]
envlist =
py{27,34,35,36,37}-sphinx{16,17,18}
formatting
lint
docs
@ -24,13 +25,12 @@ deps = -r{toxinidir}/requirements.txt
commands =
py.test {posargs}
[testenv:docs]
[testenv:formatting]
basepython = python3
deps =
Sphinx>=1.8,<=1.9
sphinx_rtd_theme
changedir = {toxinidir}/docs
black==18.9b0
commands =
sphinx-build -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
black --check autoapi tests
[testenv:lint]
deps =
@ -40,3 +40,11 @@ commands =
--profile-path={toxinidir} \
--profile=prospector \
--die-on-tool-error {posargs}
[testenv:docs]
deps =
Sphinx>=1.8,<=1.9
sphinx_rtd_theme
changedir = {toxinidir}/docs
commands =
sphinx-build -b html -d {envtmpdir}/doctrees . {envtmpdir}/html

Loading…
Cancel
Save