2016-11-02 23:29:28 +00:00
|
|
|
import sys
|
2016-08-25 23:24:14 +00:00
|
|
|
import os
|
|
|
|
import textwrap
|
2016-11-02 23:29:28 +00:00
|
|
|
import ast
|
2017-04-07 18:46:57 +00:00
|
|
|
import tokenize as tk
|
2016-10-25 23:26:30 +00:00
|
|
|
from collections import defaultdict
|
2017-04-07 18:46:57 +00:00
|
|
|
|
|
|
|
from pydocstyle import parser
|
2015-04-08 05:54:53 +00:00
|
|
|
|
2016-11-02 23:45:41 +00:00
|
|
|
from .base import PythonMapperBase, SphinxMapperBase
|
|
|
|
from ..utils import slugify
|
|
|
|
|
2016-11-02 23:29:28 +00:00
|
|
|
if sys.version_info < (3,):
|
|
|
|
from itertools import izip_longest as zip_longest
|
|
|
|
else:
|
|
|
|
from itertools import zip_longest
|
|
|
|
|
2015-04-21 05:54:32 +00:00
|
|
|
|
2015-06-10 21:23:50 +00:00
|
|
|
class PythonSphinxMapper(SphinxMapperBase):

    """Auto API domain handler for Python

    Parses directly from Python files.

    :param app: Sphinx application passed in as part of the extension
    """

    def load(self, patterns, dirs, ignore=None):
        """Load objects from the filesystem into the ``paths`` dictionary

        Also include an attribute on the object, ``relative_path`` which is the
        shortened, relative path the package/module

        :param patterns: file glob patterns to search for
        :param dirs: directories to search within
        :param ignore: optional patterns of paths to exclude
        """
        for dir_ in dirs:
            for path in self.find_files(patterns=patterns, dirs=[dir_],
                                        ignore=ignore):
                data = self.read_file(path=path)
                # ``read_file`` returns None on a parse/read failure; only
                # annotate and store objects that parsed successfully.
                # (Previously ``relative_path`` was assigned before this
                # check, raising AttributeError on None.)
                if data:
                    data.relative_path = os.path.relpath(path, dir_)
                    self.paths[path] = data

    def read_file(self, path, **kwargs):
        """Read file input into memory, returning deserialized objects

        :param path: Path of file to read
        :returns: parsed module object, or None on failure
        """
        try:
            # Close the file handle deterministically instead of leaking it
            # until garbage collection.
            with open(path) as source_file:
                parsed_data = ParserExtra()(source_file, path)
            return parsed_data
        except (IOError, TypeError, ImportError):
            self.app.warn('Error reading file: {0}'.format(path))
            return None

    def create_class(self, data, options=None, **kwargs):
        """Create a class from the passed in data

        Yields one mapper object per top-level definition; children are
        attached recursively.

        :param data: dictionary data of pydocstyle output
        """
        # Map pydocstyle "kind" strings to their mapper classes.
        obj_map = dict((cls.type, cls) for cls
                       in [PythonClass, PythonFunction, PythonModule,
                           PythonMethod, PythonPackage])
        try:
            cls = obj_map[data.kind]
        except KeyError:
            self.app.warn("Unknown type: %s" % data.kind)
        else:
            obj = cls(data, jinja_env=self.jinja_env,
                      options=self.app.config.autoapi_options, **kwargs)
            for child_data in data.children:
                for child_obj in self.create_class(child_data, options=options,
                                                   **kwargs):
                    obj.children.append(child_obj)
            yield obj
|
2015-04-21 05:54:32 +00:00
|
|
|
|
2015-04-08 05:54:53 +00:00
|
|
|
|
2015-06-10 21:23:50 +00:00
|
|
|
class PythonPythonMapper(PythonMapperBase):

    """Base mapper for objects produced by the pydocstyle parser.

    :param obj: pydocstyle object, as returned from Parser()
    """

    language = 'python'
    # Callable subclasses (functions/methods) set this True so that
    # signature arguments are extracted in ``__init__``.
    is_callable = False

    def __init__(self, obj, **kwargs):
        super(PythonPythonMapper, self).__init__(obj, **kwargs)

        self.name = self._get_full_name(obj)
        self.id = slugify(self.name)

        # Optional
        self.children = []
        self.args = []
        if self.is_callable:
            self.args = self._get_arguments(obj)
        self.docstring = obj.docstring
        # ``parent`` may be missing or None on top-level objects; use a
        # getattr default instead of letting AttributeError escape.
        if getattr(obj, 'parent', None):
            self.inheritance = [obj.parent.name]
        else:
            self.inheritance = []

        # For later
        self.item_map = defaultdict(list)

    @property
    def is_undoc_member(self):
        # An undocumented member is one *without* a docstring. The previous
        # ``bool(self.docstring)`` inverted the check, causing ``display`` to
        # hide documented members instead of undocumented ones.
        return not self.docstring

    @property
    def is_private_member(self):
        return not self.obj.is_public

    @property
    def is_special_member(self):
        # Dunder methods/attributes, e.g. ``__init__`` or ``__version__``.
        return (
            (isinstance(self.obj, parser.Method) and self.obj.is_magic) or
            (self.obj.name.startswith('__') and self.obj.name.endswith('__'))
        )

    @property
    def display(self):
        """Whether this object should be rendered, per autoapi_options."""
        if self.is_undoc_member and 'undoc-members' not in self.options:
            return False
        if self.is_private_member and 'private-members' not in self.options:
            return False
        if self.is_special_member and 'special-members' not in self.options:
            return False
        return True

    @staticmethod
    def _get_full_name(obj):
        """Recursively build the full name of the object from pydocstyle

        Uses an additional attribute added to the object, ``relative_path``.
        This is the shortened path of the object name, if the object is a
        package or module.

        :param obj: pydocstyle object, as returned from Parser()
        :returns: Dotted name of object
        :rtype: str
        """

        def _inner(obj, parts=None):
            if parts is None:
                parts = []
            obj_kind = obj.kind
            obj_name = obj.name
            if obj_kind == 'module':
                # Modules are named by file path: drop the ``.py`` suffix and
                # turn path separators into dots.
                obj_name = getattr(obj, 'relative_path', None) or obj.name
                obj_name = obj_name.replace('/', '.')
                ext = '.py'
                if obj_name.endswith(ext):
                    obj_name = obj_name[:-len(ext)]
            elif obj_kind == 'package':
                # Packages are named by directory: drop ``__init__.py`` and
                # keep only the last path component.
                obj_name = getattr(obj, 'relative_path', None) or obj.name
                exts = ['/__init__.py', '.py']
                for ext in exts:
                    if obj_name.endswith(ext):
                        obj_name = obj_name[:-len(ext)]
                obj_name = obj_name.split('/').pop()
            parts.insert(0, obj_name)
            try:
                return _inner(obj.parent, parts)
            except AttributeError:
                # No parent: reached the top of the object tree
                pass
            return parts

        return '.'.join(_inner(obj))

    @staticmethod
    def _get_arguments(obj):
        """Get arguments from a pydocstyle object

        :param obj: pydocstyle object, as returned from Parser()
        :returns: list of argument or argument and value pairs
        :rtype: list
        """
        arguments = []
        source = textwrap.dedent(obj.source)
        # Bare except here because AST parsing can throw any number of
        # exceptions, including SyntaxError
        try:
            parsed = ast.parse(source)
        except Exception as e:  # noqa
            print("Error parsing AST: %s" % str(e))
            return []
        parsed_args = parsed.body[0].args
        arg_names = [arg.id if sys.version_info < (3,) else arg.arg
                     for arg in parsed_args.args]

        # Get defaults for display based on AST node type. Values are either
        # an attribute name to read from the node, or a callable producing
        # the display string.
        arg_defaults = []
        pydocstyle_map = {
            ast.Name: 'id',
            ast.Num: 'n',
            ast.Str: lambda obj: '"{0}"'.format(obj.s),
            # Call function name can be an `Attribute` or `Name` node, make sure
            # we're using the correct attribute for the id
            ast.Call: lambda obj: (obj.func.id if isinstance(obj.func, ast.Name)
                                   else obj.func.attr),
            # TODO these require traversal into the AST nodes. Add this for more
            # complete argument parsing, or handle with a custom AST traversal.
            ast.List: lambda _: 'list',
            ast.Tuple: lambda _: 'tuple',
            ast.Set: lambda _: 'set',
            ast.Dict: lambda _: 'dict',
        }
        if sys.version_info >= (3,):
            pydocstyle_map.update({
                ast.NameConstant: 'value',
            })
        # Python 3.8+ parses literals as ``ast.Constant`` instead of
        # Num/Str/NameConstant; without this entry every default rendered as
        # 'None' on modern interpreters. Guarded so older versions are
        # unaffected.
        if hasattr(ast, 'Constant'):
            pydocstyle_map[ast.Constant] = lambda node: (
                '"{0}"'.format(node.value)
                if isinstance(node.value, str)
                else node.value
            )

        for value in parsed_args.defaults:
            default = None
            try:
                default = pydocstyle_map[type(value)](value)
            except TypeError:
                # Map entry was an attribute name, not a callable
                default = getattr(value, pydocstyle_map[type(value)])
            except KeyError:
                pass
            if default is None:
                default = 'None'
            arg_defaults.append(default)

        # Apply defaults padded to the end of the longest list. AST returns
        # argument defaults as a short array that applies to the end of the list
        # of arguments
        for (name, default) in zip_longest(reversed(arg_names),
                                           reversed(arg_defaults)):
            arg = name
            if default is not None:
                arg = '{0}={1}'.format(name, default)
            arguments.insert(0, arg)

        # Add *args and **kwargs. Before Python 3.3 these are plain strings;
        # afterwards they are ``ast.arg`` nodes.
        if parsed_args.vararg:
            arguments.append('*{0}'.format(
                parsed_args.vararg
                if sys.version_info < (3, 3)
                else parsed_args.vararg.arg
            ))
        if parsed_args.kwarg:
            arguments.append('**{0}'.format(
                parsed_args.kwarg
                if sys.version_info < (3, 3)
                else parsed_args.kwarg.arg
            ))

        return arguments
|
|
|
|
|
2015-04-08 05:54:53 +00:00
|
|
|
|
2015-06-10 21:23:50 +00:00
|
|
|
class PythonFunction(PythonPythonMapper):

    """Mapper for a module-level Python function."""

    is_callable = True
    type = 'function'
|
2015-04-08 05:54:53 +00:00
|
|
|
|
2015-04-21 05:54:32 +00:00
|
|
|
|
2016-06-09 22:43:55 +00:00
|
|
|
class PythonMethod(PythonPythonMapper):

    """Mapper for a method defined on a Python class."""

    is_callable = True
    type = 'method'
|
2016-06-09 22:43:55 +00:00
|
|
|
|
|
|
|
|
2015-06-10 21:23:50 +00:00
|
|
|
class PythonModule(PythonPythonMapper):

    """Mapper for a Python module; rendered as a top-level object."""

    top_level_object = True
    type = 'module'
|
2015-04-21 05:54:32 +00:00
|
|
|
|
|
|
|
|
2016-08-25 23:24:14 +00:00
|
|
|
class PythonPackage(PythonPythonMapper):

    """Mapper for a Python package (``__init__.py``); a top-level object."""

    top_level_object = True
    type = 'package'
|
|
|
|
|
|
|
|
|
2015-06-10 21:23:50 +00:00
|
|
|
class PythonClass(PythonPythonMapper):

    """Mapper for a Python class definition."""

    type = 'class'
|
2017-04-07 18:46:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Parser
|
|
|
|
class ParserExtra(parser.Parser):

    """Extend Parser object to provide customized return"""

    def parse_object_identifier(self):
        """Parse object identifier

        Consumes a possibly dotted name (e.g. ``foo.bar.baz``) from the
        token stream and returns it as a single string.
        """
        assert self.current.kind == tk.NAME
        identifier = ''
        while True:
            # An identifier continues across NAME tokens and the ``.``
            # operator joining them; any other token terminates it.
            is_identifier = (
                self.current.kind == tk.NAME or
                (
                    self.current.kind == tk.OP and
                    self.current.value == '.'
                )
            )
            if is_identifier:
                identifier += self.current.value
                self.stream.move()
            else:
                break
        return identifier

    def parse_string(self):
        """Clean up STRING nodes

        Returns the string token value with surrounding quote characters
        removed (lstrip/rstrip also collapses triple quotes).
        """
        val = self.current.value
        self.consume(tk.STRING)
        return val.lstrip('\'"').rstrip('\'"')

    def parse_number(self):
        """Parse a NUMBER node to either a ``float`` or ``int``"""
        val = self.current.value
        self.consume(tk.NUMBER)
        # Prefer int when the literal parses as one; otherwise keep float.
        normalized_val = float(val)
        try:
            normalized_val = int(val)
        except ValueError:
            pass
        return normalized_val

    def parse_iterable(self):
        """Recursively parse an iterable object

        This will return a local representation of the parsed data, except for
        NAME nodes. This does not currently attempt to perform lookup on the
        object names defined in an iterable.

        This is mostly a naive implementation and won't handle complex
        structures. This is only currently meant to parse simple iterables, such
        as ``__all__`` and class parent classes on class definition.
        """
        # NOTE(review): ``content`` is only initialized in the open-bracket
        # branch below — this assumes the first significant token is ``[`` or
        # ``(``; a STRING/NUMBER before any bracket would raise
        # AttributeError on None. Confirm callers always enter at a bracket.
        content = None
        is_list = True
        while self.current is not None:
            if self.current.kind == tk.STRING:
                content.append(self.parse_string())
            elif self.current.kind == tk.NUMBER:
                content.append(self.parse_number())
            elif self.current.kind == tk.NAME:
                # Handle generators
                if self.current.value == 'for' and not content:
                    is_list = False
                # TODO this is dropped for now, but can be handled with an
                # object lookup in the future, if we decide to track assignment.
                # content.append(self.parse_object_identifier())
                self.stream.move()
            elif self.current.kind == tk.OP and self.current.value in '[(':
                if content is None:
                    # First opening bracket starts this iterable
                    content = []
                    self.stream.move()
                else:
                    # Nested iterable: recurse; the recursive call consumes
                    # through its own closing bracket
                    content.append(self.parse_iterable())
                    continue
            elif self.current.kind == tk.OP and self.current.value in '])':
                self.stream.move()
                if is_list:
                    return content
                # Discard generator because we can't do anything with them
                return []
            else:
                self.stream.move()

    def parse_docstring(self):
        """Clean up object docstring

        Always returns a string: dedented, with triple-quote markers
        stripped, or '' when no docstring was found.
        """
        docstring = super(ParserExtra, self).parse_docstring()
        if not docstring:
            docstring = ''
        docstring = textwrap.dedent(docstring)
        docstring = docstring.replace("'''", '').replace('"""', '')
        return docstring

    def parse_all(self):
        """Parse __all__ assignment

        This differs from the default __all__ assignment processing by:

        * Accepting multiple __all__ assignments
        * Doesn't throw exceptions on edge cases
        * Parses NAME nodes (but throws them out for now)
        """
        assert self.current.value == '__all__'
        self.consume(tk.NAME)
        # Only plain and augmented assignment are handled; anything else
        # (e.g. a bare ``__all__`` reference) is ignored.
        if self.current.kind != tk.OP or self.current.value not in ['=', '+=']:
            return
        assign_op = self.current.value
        self.consume(tk.OP)

        if self.all is None:
            self.all = []

        all_content = []
        # Support [], [] + [], and [] + foo.__all__ by iterating over list
        # assignments
        while True:
            if self.current.kind == tk.OP and self.current.value in '([':
                content = self.parse_iterable()
                all_content.extend(content)
            elif self.current.kind == tk.NAME:
                name = self.parse_object_identifier()
                # TODO Skip these for now. In the future, this name should be
                # converted to an object that will be resolved after we've
                # parsed at a later stage in the mapping process.
                # all_content.append(name)
            if self.current.kind == tk.OP and self.current.value == '+':
                self.stream.move()
            else:
                break

        if assign_op == '=':
            self.all = all_content
        elif assign_op == '+=':
            self.all += all_content
|