Extended ast parsing with astroid

pull/131/head
Ashley Whetter 7 years ago committed by Ashley Whetter
parent ea3e8bd3a1
commit b19b9ea28b

@ -93,7 +93,7 @@ class AutoapiSummary(Directive):
template = ':{}:`{} <{}>`'
col1 = template.format(
'obj', obj.short_name, obj.name, escape(obj.signature),
'obj', obj.short_name, obj.name, escape('({})'.format(obj.args)),
)
col2 = obj.summary

@ -0,0 +1,232 @@
try:
import builtins
except ImportError:
import __builtin__ as builtins
import re
import astroid
import astroid.nodes
def resolve_import_alias(name, import_names):
"""Resolve a name from an aliased import to its original name.
:param name: The potentially aliased name to resolve.
:type name: str
:param import_names: The pairs of original names and aliases
from the import.
:type import_names: iterable(tuple(str, str or None))
:returns: The original name.
:rtype: str
"""
resolved_name = name
for import_name, imported_as in import_names:
if import_name == name:
break
if imported_as == name:
resolved_name = import_name
break
return resolved_name
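# Usage sketch (illustrative, not part of this change): ``import_names``
# mirrors astroid's ``(original, alias-or-None)`` pairs.
names = [('os', None), ('pathlib', 'pl')]
assert resolve_import_alias('os', names) == 'os'        # unaliased name is returned unchanged
assert resolve_import_alias('pl', names) == 'pathlib'   # alias resolves back to the original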
def get_full_import_name(import_from, name):
"""Get the full path of a name from an ``import x from y`` statement.
:param import_from: The astroid node to resolve the name of.
:type import_from: astroid.nodes.ImportFrom
:param name: The name to get the full path for.
:type name: str
:returns: The full import path of the name.
:rtype: str
"""
partial_basename = resolve_import_alias(name, import_from.names)
module_name = import_from.modname
if import_from.level:
module = import_from.root()
assert isinstance(module, astroid.nodes.Module)
module_name = module.relative_to_absolute_name(
import_from.modname, level=import_from.level,
)
return '{}.{}'.format(module_name, partial_basename)
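# Usage sketch (illustrative, not part of this change): an absolute
# ``from x import y`` resolves to the dotted path of the imported name.
import astroid
node = astroid.extract_node('from os import path')
assert get_full_import_name(node, 'path') == 'os.path'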
def get_full_basename(node, basename):
"""Resolve a partial base name to the full path.
:param node: The node representing the base name.
:type node: astroid.NodeNG
:param basename: The partial base name to resolve.
:type basename: str
:returns: The fully resolved base name.
:rtype: str
"""
full_basename = basename
top_level_name = re.sub(r'\(.*\)', '', basename).split('.', 1)[0]
lookup_node = node
while not hasattr(lookup_node, 'lookup'):
lookup_node = lookup_node.parent
assigns = lookup_node.lookup(top_level_name)[1]
for assignment in assigns:
if isinstance(assignment, astroid.nodes.ImportFrom):
import_name = get_full_import_name(assignment, top_level_name)
full_basename = basename.replace(top_level_name, import_name, 1)
break
elif isinstance(assignment, astroid.nodes.Import):
import_name = resolve_import_alias(top_level_name, assignment.names)
full_basename = basename.replace(top_level_name, import_name, 1)
break
if isinstance(node, astroid.nodes.Call):
full_basename = re.sub(r'\(.*\)', '()', full_basename)
return full_basename
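# Usage sketch (illustrative, not part of this change), mirroring the new
# unit tests: a base name referenced through an import resolves to its
# fully qualified path.
import astroid
node = astroid.extract_node('''
import collections
class MyMapping(collections.OrderedDict): #@
    pass
''')
assert get_full_basename(node.bases[0], node.basenames[0]) == 'collections.OrderedDict'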
def get_full_basenames(bases, basenames):
"""Resolve the base nodes and partial names of a class to full names.
:param bases: The astroid node representing something that a class
inherits from.
:type bases: iterable(astroid.NodeNG)
:param basenames: The partial name of something that a class inherits from.
:type basenames: iterable(str)
:returns: The full names.
:rtype: iterable(str)
"""
for base, basename in zip(bases, basenames):
yield get_full_basename(base, basename)
def _get_const_values(node):
value = None
if isinstance(node, (astroid.nodes.List, astroid.nodes.Tuple)):
new_value = []
for element in node.elts:
if isinstance(element, astroid.nodes.Const):
new_value.append(element.value)
elif isinstance(element, (astroid.nodes.List, astroid.nodes.Tuple)):
new_value.append(_get_const_values(element))
else:
break
else:
value = new_value
elif isinstance(node, astroid.nodes.Const):
value = node.value
return value
def get_assign_value(node):
"""Get the name and value of the assignment of the given node.
Assignments to multiple names are ignored, as per PEP 257.
:param node: The node to get the assignment value from.
:type node: astroid.nodes.Assign
:returns: The name that is assigned to,
and the value assigned to the name (if it can be converted).
:rtype: tuple(str, object or None) or None
"""
if len(node.targets) == 1:
target = node.targets[0]
if isinstance(target, astroid.nodes.AssignName):
name = target.name
elif isinstance(target, astroid.nodes.AssignAttr):
name = target.attrname
else:
return None
return (name, _get_const_values(node.value))
return None
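# Usage sketch (illustrative, not part of this change), mirroring the new
# unit tests for simple and multiple-target assignments.
import astroid
assert get_assign_value(astroid.extract_node('x = "a"')) == ('x', 'a')
assert get_assign_value(astroid.extract_node('x = [1, 2]')) == ('x', [1, 2])
assert get_assign_value(astroid.extract_node('x = y = 1')) is None  # multiple targets are skipped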
def is_decorated_with_property(node):
"""Check if the function is decorated as a property.
:param node: The node to check.
:type node: astroid.nodes.FunctionDef
:returns: True if the function is a property, False otherwise.
:rtype: bool
"""
if not node.decorators:
return False
for decorator in node.decorators.nodes:
if not isinstance(decorator, astroid.Name):
continue
try:
if _is_property_decorator(decorator):
return True
except astroid.InferenceError:
pass
return False
def _is_property_decorator(decorator):
def _is_property_class(class_node):
return (
class_node.name == 'property'
and class_node.root().name == builtins.__name__
)
for inferred in decorator.infer():
if not isinstance(inferred, astroid.nodes.ClassDef):
continue
if _is_property_class(inferred):
return True
if any(_is_property_class(ancestor) for ancestor in inferred.ancestors()):
return True
return False
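# Usage sketch (illustrative, not part of this change): the decorator is
# inferred back to the builtin ``property`` class.
import astroid
func = astroid.extract_node('''
class Widget(object):
    @property
    def size(self): #@
        return 1
''')
assert is_decorated_with_property(func)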
def is_decorated_with_property_setter(node):
"""Check if the function is decorated as a property setter.
:param node: The node to check.
:type node: astroid.nodes.FunctionDef
:returns: True if the function is a property setter, False otherwise.
:rtype: bool
"""
if not node.decorators:
return False
for decorator in node.decorators.nodes:
if (isinstance(decorator, astroid.nodes.Attribute)
and decorator.attrname == "setter"):
return True
return False
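# Usage sketch (illustrative, not part of this change): setters are
# recognised purely by the ``.setter`` attribute on the decorator.
import astroid
setter = astroid.extract_node('''
class Widget(object):
    @property
    def size(self):
        return self._size
    @size.setter
    def size(self, value): #@
        self._size = value
''')
assert is_decorated_with_property_setter(setter)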
def is_constructor(node):
"""Check if the function is a constructor.
:param node: The node to check.
:type node: astroid.nodes.FunctionDef
:returns: True if the function is a constructor, False otherwise.
:rtype: bool
"""
return (
node.parent
and isinstance(node.parent.scope(), astroid.nodes.ClassDef)
and node.name == '__init__'
)
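# Usage sketch (illustrative, not part of this change): a constructor is any
# ``__init__`` whose enclosing scope is a class.
import astroid
init = astroid.extract_node('''
class Widget(object):
    def __init__(self): #@
        pass
''')
assert is_constructor(init)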

@ -155,10 +155,6 @@ class PythonMapperBase(object):
return '.'.join(pieces)
return None
@property
def signature(self):
return '({})'.format(','.join(self.args))
class SphinxMapperBase(object):

@ -1,3 +1,4 @@
import re
import sys
import os
import textwrap
@ -5,11 +6,12 @@ import ast
import tokenize as tk
from collections import defaultdict
import astroid
import sphinx
import sphinx.util.docstrings
from pydocstyle import parser
from .base import PythonMapperBase, SphinxMapperBase
from . import astroid_utils
from ..utils import slugify
if sys.version_info < (3,):
@ -36,7 +38,7 @@ class PythonSphinxMapper(SphinxMapperBase):
for dir_ in dirs:
for path in self.find_files(patterns=patterns, dirs=[dir_], ignore=ignore):
data = self.read_file(path=path)
data.relative_path = os.path.relpath(path, dir_)
data['relative_path'] = os.path.relpath(path, dir_)
if data:
self.paths[path] = data
@ -46,7 +48,7 @@ class PythonSphinxMapper(SphinxMapperBase):
:param path: Path of file to read
"""
try:
parsed_data = ParserExtra()(open(path), path)
parsed_data = Parser().parse_file(path)
return parsed_data
except (IOError, TypeError, ImportError):
self.app.warn('Error reading file: {0}'.format(path))
@ -70,30 +72,31 @@ class PythonSphinxMapper(SphinxMapperBase):
def create_class(self, data, options=None, **kwargs):
"""Create a class from the passed in data
:param data: dictionary data of pydocstyle output
:param data: dictionary data of parser output
"""
obj_map = dict((cls.type, cls) for cls
in [PythonClass, PythonFunction, PythonModule,
PythonMethod, PythonPackage])
PythonMethod, PythonPackage, PythonAttribute,
PythonData])
try:
cls = obj_map[data.kind]
cls = obj_map[data['type']]
except KeyError:
self.app.warn("Unknown type: %s" % data.kind)
self.app.warn("Unknown type: %s" % data['type'])
else:
obj = cls(data, jinja_env=self.jinja_env,
options=self.app.config.autoapi_options, **kwargs)
type_ = cls.type if cls.type != 'package' else 'module'
lines = sphinx.util.docstrings.prepare_docstring(obj.docstring)
try:
self.app.emit(
'autodoc-process-docstring',
type_,
obj.name,
None, # object
None, # options
lines,
)
if lines:
self.app.emit(
'autodoc-process-docstring',
cls.type,
obj.name,
None, # object
None, # options
lines,
)
except KeyError:
if (sphinx.version_info >= (1, 6)
and 'autodoc-process-docstring' in self.app.events.events):
@ -101,7 +104,7 @@ class PythonSphinxMapper(SphinxMapperBase):
else:
obj.docstring = '\n'.join(lines)
for child_data in data.children:
for child_data in data.get('children', []):
for child_obj in self.create_class(child_data, options=options,
**kwargs):
obj.children.append(child_obj)
@ -124,19 +127,13 @@ class PythonPythonMapper(PythonMapperBase):
def __init__(self, obj, **kwargs):
super(PythonPythonMapper, self).__init__(obj, **kwargs)
self.name = self._get_full_name(obj)
self.name = obj['name']
self.id = slugify(self.name)
# Optional
self.children = []
self._args = []
if self.is_callable:
self.args = self._get_arguments(obj)
self.docstring = obj.docstring
if getattr(obj, 'parent'):
self.inheritance = [obj.parent.name]
else:
self.inheritance = []
self.args = obj.get('args')
self.docstring = obj['doc']
# For later
self.item_map = defaultdict(list)
@ -155,13 +152,16 @@ class PythonPythonMapper(PythonMapperBase):
@property
def is_private_member(self):
return not self.obj.is_public
return (
self.short_name.startswith('_')
and not self.short_name.endswith('__')
)
@property
def is_special_member(self):
return (
(isinstance(self.obj, parser.Method) and self.obj.is_magic) or
(self.obj.name.startswith('__') and self.obj.name.endswith('__'))
self.short_name.startswith('__')
and self.short_name.endswith('__')
)
@property
@ -174,127 +174,6 @@ class PythonPythonMapper(PythonMapperBase):
return False
return True
@staticmethod
def _get_full_name(obj):
"""Recursively build the full name of the object from pydocstyle
Uses an additional attribute added to the object, ``relative_path``.
This is the shortened path of the object name, if the object is a
package or module.
:param obj: pydocstyle object, as returned from Parser()
:returns: Dotted name of object
:rtype: str
"""
def _inner(obj, parts=None):
if parts is None:
parts = []
obj_kind = obj.kind
obj_name = obj.name
if obj_kind == 'module':
obj_name = getattr(obj, 'relative_path', None) or obj.name
obj_name = obj_name.replace('/', '.')
ext = '.py'
if obj_name.endswith(ext):
obj_name = obj_name[:-len(ext)]
elif obj_kind == 'package':
obj_name = getattr(obj, 'relative_path', None) or obj.name
exts = ['/__init__.py', '.py']
for ext in exts:
if obj_name.endswith(ext):
obj_name = obj_name[:-len(ext)]
obj_name = obj_name.replace('/', '.')
parts.insert(0, obj_name)
try:
return _inner(obj.parent, parts)
except AttributeError:
pass
return parts
return '.'.join(_inner(obj))
@staticmethod
def _get_arguments(obj):
"""Get arguments from a pydocstyle object
:param obj: pydocstyle object, as returned from Parser()
:returns: list of argument or argument and value pairs
:rtype: list
"""
arguments = []
source = textwrap.dedent(obj.source)
# Bare except here because AST parsing can throw any number of
# exceptions, including SyntaxError
try:
parsed = ast.parse(source)
except Exception as e: # noqa
print("Error parsing AST: %s" % str(e))
return []
parsed_args = parsed.body[0].args
arg_names = [arg.id if sys.version_info < (3,) else arg.arg
for arg in parsed_args.args]
# Get defaults for display based on AST node type
arg_defaults = []
pydocstyle_map = {
ast.Name: 'id',
ast.Num: 'n',
ast.Str: lambda obj: '"{0}"'.format(obj.s),
# Call function name can be an `Attribute` or `Name` node, make sure
# we're using the correct attribute for the id
ast.Call: lambda obj: (obj.func.id if isinstance(obj.func, ast.Name)
else obj.func.attr),
# TODO these require traversal into the AST nodes. Add this for more
# complete argument parsing, or handle with a custom AST traversal.
ast.List: lambda _: 'list',
ast.Tuple: lambda _: 'tuple',
ast.Set: lambda _: 'set',
ast.Dict: lambda _: 'dict',
}
if sys.version_info >= (3,):
pydocstyle_map.update({
ast.NameConstant: 'value',
})
for value in parsed_args.defaults:
default = None
try:
default = pydocstyle_map[type(value)](value)
except TypeError:
default = getattr(value, pydocstyle_map[type(value)])
except KeyError:
pass
if default is None:
default = 'None'
arg_defaults.append(default)
# Apply defaults padded to the end of the longest list. AST returns
# argument defaults as a short array that applies to the end of the list
# of arguments
for (name, default) in zip_longest(reversed(arg_names),
reversed(arg_defaults)):
arg = name
if default is not None:
arg = '{0}={1}'.format(name, default)
arguments.insert(0, arg)
# Add *args and **kwargs
if parsed_args.vararg:
arguments.append('*{0}'.format(
parsed_args.vararg
if sys.version_info < (3, 3)
else parsed_args.vararg.arg
))
if parsed_args.kwarg:
arguments.append('**{0}'.format(
parsed_args.kwarg
if sys.version_info < (3, 3)
else parsed_args.kwarg.arg
))
return arguments
@property
def summary(self):
for line in self.docstring.splitlines():
@ -304,6 +183,9 @@ class PythonPythonMapper(PythonMapperBase):
return ''
def _children_of_type(self, type_):
return list(child for child in self.children if child.type == type_)
class PythonFunction(PythonPythonMapper):
type = 'function'
@ -317,6 +199,21 @@ class PythonMethod(PythonPythonMapper):
ref_directive = 'meth'
class PythonData(PythonPythonMapper):
"""Global, module level data."""
type = 'data'
def __init__(self, obj, **kwargs):
super(PythonData, self).__init__(obj, **kwargs)
self.value = obj.get('value')
class PythonAttribute(PythonData):
"""An object/class level attribute."""
type = 'attribute'
class TopLevelPythonPythonMapper(PythonPythonMapper):
top_level_object = True
ref_directive = 'mod'
@ -324,20 +221,15 @@ class TopLevelPythonPythonMapper(PythonPythonMapper):
def __init__(self, obj, **kwargs):
super(TopLevelPythonPythonMapper, self).__init__(obj, **kwargs)
self._resolve_name()
self.subpackages = []
self.submodules = []
def _children_of_type(self, type_):
return list(child for child in self.children if child.type == type_)
@property
def functions(self):
return self._children_of_type('function')
@property
def methods(self):
return self._children_of_type('method')
@property
def classes(self):
return self._children_of_type('class')
@ -346,156 +238,187 @@ class TopLevelPythonPythonMapper(PythonPythonMapper):
class PythonModule(TopLevelPythonPythonMapper):
type = 'module'
def _resolve_name(self):
name = self.obj['relative_path']
name = name.replace('/', '.')
ext = '.py'
if name.endswith(ext):
name = name[:-len(ext)]
self.name = name
class PythonPackage(TopLevelPythonPythonMapper):
type = 'package'
def _resolve_name(self):
name = self.obj['relative_path']
exts = ['/__init__.py', '.py']
for ext in exts:
if name.endswith(ext):
name = name[:-len(ext)]
name = name.replace('/', '.')
self.name = name
class PythonClass(PythonPythonMapper):
type = 'class'
def __init__(self, obj, **kwargs):
super(PythonClass, self).__init__(obj, **kwargs)
self.bases = obj['bases']
@PythonPythonMapper.args.getter
def args(self):
if self._args:
return self._args
args = self._args
for child in self.children:
if child.short_name == '__init__':
return child.args
args = child.args
break
return self._args
if args.startswith('self'):
args = args[4:].lstrip(',').lstrip()
return args
# Parser
class ParserExtra(parser.Parser):
"""Extend Parser object to provide customized return"""
def parse_object_identifier(self):
"""Parse object identifier"""
assert self.current.kind == tk.NAME
identifier = ''
while True:
is_identifier = (
self.current.kind == tk.NAME or
(
self.current.kind == tk.OP and
self.current.value == '.'
)
)
if is_identifier:
identifier += self.current.value
self.stream.move()
else:
break
return identifier
def parse_string(self):
"""Clean up STRING nodes"""
val = self.current.value
self.consume(tk.STRING)
return val.lstrip('\'"').rstrip('\'"')
def parse_number(self):
"""Parse a NUMBER node to either a ``float`` or ``int``"""
val = self.current.value
self.consume(tk.NUMBER)
normalized_val = float(val)
@property
def methods(self):
return self._children_of_type('method')
@property
def attributes(self):
return self._children_of_type('attribute')
class Parser(object):
def parse_file(self, file_path):
node = astroid.MANAGER.ast_from_file(file_path)
return self.parse(node)
def parse_assign(self, node):
doc = ''
doc_node = node.next_sibling()
if (isinstance(doc_node, astroid.nodes.Expr)
and isinstance(doc_node.value, astroid.nodes.Const)):
doc = doc_node.value.value
type_ = 'data'
if (isinstance(node.scope(), astroid.nodes.ClassDef)
or astroid_utils.is_constructor(node.scope())):
type_ = 'attribute'
assign_value = astroid_utils.get_assign_value(node)
if not assign_value:
return []
target, value = assign_value
data = {
'type': type_,
'name': target,
'doc': doc,
'value': value,
}
return [data]
def parse_classdef(self, node, data=None):
args = ''
try:
normalized_val = int(val)
except ValueError:
constructor = node.lookup('__init__')[1]
except IndexError:
pass
return normalized_val
else:
if isinstance(constructor, astroid.nodes.FunctionDef):
args = constructor.args.as_string()
basenames = list(astroid_utils.get_full_basenames(node.bases, node.basenames))
data = {
'type': 'class',
'name': node.name,
'args': args,
'bases': basenames,
'doc': node.doc or '',
'children': [],
}
def parse_iterable(self):
"""Recursively parse an iterable object
for child in node.get_children():
child_data = self.parse(child)
if child_data:
data['children'].extend(child_data)
This will return a local representation of the parsed data, except for
NAME nodes. This does not currently attempt to perform lookup on the
object names defined in an iterable.
return [data]
This is mostly a naive implementation and won't handle complex
structures. It is currently meant only to parse simple iterables, such
as ``__all__`` and the list of base classes in a class definition.
"""
content = None
is_list = True
while self.current is not None:
if self.current.kind == tk.STRING:
content.append(self.parse_string())
elif self.current.kind == tk.NUMBER:
content.append(self.parse_number())
elif self.current.kind == tk.NAME:
# Handle generators
if self.current.value == 'for' and not content:
is_list = False
# TODO this is dropped for now, but can be handled with an
# object lookup in the future, if we decide to track assignment.
# content.append(self.parse_object_identifier())
self.stream.move()
elif self.current.kind == tk.OP and self.current.value in '[(':
if content is None:
content = []
self.stream.move()
else:
content.append(self.parse_iterable())
continue
elif self.current.kind == tk.OP and self.current.value in '])':
self.stream.move()
if is_list:
return content
# Discard generator because we can't do anything with them
return []
else:
self.stream.move()
def _parse_property(self, node):
data = {
'type': 'attribute',
'name': node.name,
'doc': node.doc or '',
}
def parse_docstring(self):
"""Clean up object docstring"""
docstring = super(ParserExtra, self).parse_docstring()
if not docstring:
docstring = ''
docstring = textwrap.dedent(docstring)
docstring = docstring.replace("'''", '').replace('"""', '')
return docstring
return [data]
def parse_all(self):
"""Parse __all__ assignment
def parse_functiondef(self, node):
if astroid_utils.is_decorated_with_property(node):
return self._parse_property(node)
elif astroid_utils.is_decorated_with_property_setter(node):
return []
This differs from the default __all__ assignment processing by:
type_ = 'function'
if isinstance(node.parent.scope(), astroid.nodes.ClassDef):
type_ = 'method'
* Accepting multiple __all__ assignments
* Doesn't throw exceptions on edge cases
* Parses NAME nodes (but throws them out for now)
"""
assert self.current.value == '__all__'
self.consume(tk.NAME)
if self.current.kind != tk.OP or self.current.value not in ['=', '+=']:
return
assign_op = self.current.value
self.consume(tk.OP)
if self.all is None:
self.all = []
all_content = []
# Support [], [] + [], and [] + foo.__all__ by iterating over list
# assignments
while True:
if self.current.kind == tk.OP and self.current.value in '([':
content = self.parse_iterable()
all_content.extend(content)
elif self.current.kind == tk.NAME:
name = self.parse_object_identifier()
# TODO Skip these for now. In the future, this name should be
# converted to an object that will be resolved after we've
# parsed at a later stage in the mapping process.
# all_content.append(name)
if self.current.kind == tk.OP and self.current.value == '+':
self.stream.move()
else:
break
data = {
'type': type_,
'name': node.name,
'args': node.args.as_string(),
'doc': node.doc or '',
}
result = [data]
if node.name == '__init__':
for child in node.get_children():
if isinstance(child, astroid.Assign):
child_data = self.parse_assign(child)
result.extend(child_data)
return result
def parse_module(self, node):
type_ = 'module'
if node.path.endswith('__init__.py'):
type_ = 'package'
data = {
'type': type_,
'name': node.name,
'doc': node.doc or '',
'children': [],
}
for child in node.get_children():
child_data = self.parse(child)
if child_data:
data['children'].extend(child_data)
return data
def parse(self, node):
data = {}
node_type = node.__class__.__name__.lower()
parse_func = getattr(self, 'parse_' + node_type, None)
if parse_func:
data = parse_func(node)
else:
for child in node.get_children():
data = self.parse(child)
if data:
break
if assign_op == '=':
self.all = all_content
elif assign_op == '+=':
self.all += all_content
return data
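# Usage sketch (illustrative, not part of this change), mirroring the new
# parser tests: ``parse`` dispatches on the lowercased node class name to the
# matching ``parse_*`` method.
import astroid
node = astroid.extract_node('def foo(bar): pass')
data = Parser().parse(node)[0]
assert data == {'type': 'function', 'name': 'foo', 'args': 'bar', 'doc': ''}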

@ -0,0 +1 @@
{% extends "python/data.rst" %}

@ -1,25 +1,25 @@
.. py:class:: {{ obj.short_name }}{% if obj.args %}({{ obj.args[1:]|join(',') }}){% endif %}
.. py:class:: {{ obj.short_name }}{% if obj.args %}({{ obj.args }}){% endif %}
{%- if obj.docstring %}
{%- if obj.bases %}
{{ obj.docstring|prepare_docstring|indent(3) }}
Bases: {%- for base in obj.bases %}:class:`{{ base }}`{%- if not loop.last %}, {% endif %}{% endfor %}
{% endif %}
{%- if obj.methods %}
{%- if obj.docstring %}
{%- for method in obj.methods %}
{{ obj.docstring|prepare_docstring|indent(3) }}
{{ method.rendered|indent(3) }}
{% endif %}
{%- endfor %}
{%- for attribute in obj.attributes %}
{% endif %}
{{ attribute.rendered|indent(3) }}
{% block content %}
{%- for obj_item in obj.children %}
{% endfor %}
{{ obj_item.rendered|indent(3) }}
{%- for method in obj.methods %}
{{ method.rendered|indent(3) }}
{%- endfor %}
{% endblock %}

@ -1,4 +1,5 @@
.. {{ obj.type }}:: {{ obj.name }}
{% if obj.value is not none %}:annotation: = {{ obj.value|pprint }} {% endif %}
{{ obj.docstring|prepare_docstring|indent(3) }}

@ -1,6 +1,6 @@
{%- if obj.display %}
.. function:: {{ obj.short_name }}({{ obj.args|join(',') }})
.. function:: {{ obj.short_name }}({{ obj.args }})
{% if obj.docstring %}
{{ obj.docstring|prepare_docstring|indent(3) }}

@ -1,6 +1,6 @@
{%- if obj.display %}
.. method:: {{ obj.short_name }}({{ obj.args[1:]|join(',') }})
.. method:: {{ obj.short_name }}({{ obj.args.split(',', 1)[1:]|join(',') }})
{% if obj.docstring %}
{{ obj.docstring|prepare_docstring|indent(3) }}

@ -3,8 +3,8 @@ try:
from setuptools import setup, find_packages
extra_setup = dict(
install_requires=[
'astroid',
'PyYAML',
'pydocstyle',
'wheel==0.24.0',
'sphinx',
'sphinxcontrib-golangdomain',
@ -18,8 +18,8 @@ except ImportError:
from distutils.core import setup
extra_setup = dict(
requires=[
'astroid',
'PyYAML',
'pydocstyle',
'wheel==0.24.0',
'sphinx'
'sphinxcontrib-golangdomain',

@ -5,6 +5,11 @@ This is a description
class Foo(object):
"""Can we parse arguments from the class docstring?
:param attr: Set an attribute.
:type attr: str
"""
class_var = 42 #: Class var docstring
@ -19,6 +24,14 @@ class Foo(object):
"""The foo class method"""
return True
def __init__(self, attr):
self.attr = attr
self.attr2 = attr
"""This is the docstring of an instance attribute.
:type: str
"""
def method_okay(self, foo=None, bar=None):
"""This method should parse okay"""
return True

@ -0,0 +1,98 @@
import astroid
from autoapi.mappers import astroid_utils
import pytest
def generate_module_names():
for i in range(1, 5):
yield '.'.join('module{}'.format(j) for j in range(i))
yield 'package.repeat.repeat'
def imported_basename_cases():
for module_name in generate_module_names():
import_ = 'import {}'.format(module_name)
basename = '{}.ImportedClass'.format(module_name)
expected = basename
yield (import_, basename, expected)
import_ = 'import {} as aliased'.format(module_name)
basename = 'aliased.ImportedClass'
yield (import_, basename, expected)
if '.' in module_name:
from_name, attribute = module_name.rsplit('.', 1)
import_ = 'from {} import {}'.format(from_name, attribute)
basename = '{}.ImportedClass'.format(attribute)
yield (import_, basename, expected)
import_ += ' as aliased'
basename = 'aliased.ImportedClass'
yield (import_, basename, expected)
import_ = 'from {} import ImportedClass'.format(module_name)
basename = 'ImportedClass'
yield (import_, basename, expected)
import_ = 'from {} import ImportedClass as AliasedClass'.format(module_name)
basename = 'AliasedClass'
yield (import_, basename, expected)
def generate_args():
for i in range(5):
yield ', '.join('arg{}'.format(j) for j in range(i))
def imported_call_cases():
for args in generate_args():
for import_, basename, expected in imported_basename_cases():
basename += '({})'.format(args)
expected += '()'
yield import_, basename, expected
class TestAstroidUtils(object):
@pytest.mark.parametrize(
('import_', 'basename', 'expected'), list(imported_basename_cases()),
)
def test_can_get_full_imported_basename(self, import_, basename, expected):
source = '''
{}
class ThisClass({}): #@
pass
'''.format(import_, basename)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(
node.bases[0], node.basenames[0],
)
assert basenames == expected
@pytest.mark.parametrize(
('import_', 'basename', 'expected'), list(imported_call_cases()),
)
def test_can_get_full_function_basename(self, import_, basename, expected):
source = '''
{}
class ThisClass({}): #@
pass
'''.format(import_, basename)
node = astroid.extract_node(source)
basenames = astroid_utils.get_full_basename(
node.bases[0], node.basenames[0],
)
assert basenames == expected
@pytest.mark.parametrize(('source', 'expected'), [
('a = "a"', ('a', 'a')),
('a = 1', ('a', 1)),
('a, b, c = (1, 2, 3)', None),
('a = b = 1', None),
])
def test_can_get_assign_values(self, source, expected):
node = astroid.extract_node(source)
value = astroid_utils.get_assign_value(node)
assert value == expected

@ -80,6 +80,14 @@ class PythonTests(LanguageIntegrationTests):
'class example.Foo',
example_file
)
self.assertIn(
'attr2',
example_file
)
self.assertIn(
'This is the docstring of an instance attribute.',
example_file
)
self.assertIn(
'method_okay(foo=None, bar=None)',
example_file
@ -89,7 +97,17 @@ class PythonTests(LanguageIntegrationTests):
example_file
)
self.assertIn(
'method_tricky(foo=None, bar=dict)',
'method_tricky(foo=None, bar=dict(foo=1, bar=2))',
example_file
)
# Are constructor arguments from the class docstring parsed?
self.assertIn(
'Set an attribute',
example_file
)
# "self" should not be included in constructor arguments
self.assertNotIn(
'self',
example_file
)
self.assertFalse(

@ -137,93 +137,3 @@ class DotNetObjectTests(unittest.TestCase):
self.assertEqual(cls.include_path, '/autoapi/Foo/Bar/Widget/index')
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget'}, url_root='/autofoo')
self.assertEqual(cls.include_path, '/autofoo/Foo/Bar/Widget/index')
class PythonObjectTests(unittest.TestCase):
def test_full_name(self):
"""Full name resolution on nested objects"""
Source = namedtuple('Source', ['kind', 'name', 'parent'])
obj_module = Source(kind='module', name='example/example.py', parent=None)
obj_class = Source(kind='class', name='Foo', parent=obj_module)
obj_method = Source(kind='method', name='bar', parent=obj_class)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_module),
'example.example'
)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_class),
'example.example.Foo'
)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_method),
'example.example.Foo.bar'
)
def test_arguments(self):
"""Argument parsing of source"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=('def foobar(self, bar, baz=42, foo=True,\n'
' *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
['self', 'bar', 'baz=42', 'foo=True', '*args', '**kwargs']
)
def test_advanced_arguments(self):
"""Advanced argument parsing"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=('def foobar(self, a, b, c=42, d="string", e=(1,2),\n'
' f={"a": True}, g=None, h=[1,2,3,4],\n'
' i=dict(a=True), j=False, *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
[
'self',
'a',
'b',
'c=42',
'd="string"',
'e=tuple',
'f=dict',
'g=None',
'h=list',
'i=dict',
'j=False',
'*args',
'**kwargs',
]
)
def test_bunk_whitespace(self):
"""Whitespace in definition throws off argument parsing"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=(' def method_foo(self, a, b,\n'
' c):\n'
' call_something()\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
['self', 'a', 'b', 'c']
)

@ -6,7 +6,10 @@ import sys
import unittest
from textwrap import dedent
from autoapi.mappers.python import ParserExtra
import astroid
import pytest
from autoapi.mappers.python import Parser
if sys.version_info < (3, 0):
from StringIO import StringIO
@ -17,46 +20,61 @@ else:
class PythonParserTests(unittest.TestCase):
def parse(self, source):
in_h = StringIO(dedent(source))
return ParserExtra()(in_h, '/dev/null')
node = astroid.extract_node(source)
return Parser().parse(node)
def test_parses_basic_file(self):
source = """
def foo(bar):
pass
"""
self.assertIsNone(self.parse(source).all)
data = self.parse(source)[0]
self.assertEqual(data['name'], 'foo')
self.assertEqual(data['type'], 'function')
def test_parses_all(self):
source = """
__all__ = ['Foo', 5.0]
"""
self.assertEqual(self.parse(source).all, ['Foo', 5.0])
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo', 5.0])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_list_addition(self):
source = """
__all__ = ['Foo'] + []
"""
self.assertEqual(self.parse(source).all, ['Foo'])
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo'])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_name_addtion(self):
source = """
__all__ = ['Foo'] + bar.__all__
"""
self.assertEqual(self.parse(source).all, ['Foo'])
data = self.parse(source)[0]
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['Foo'])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_with_multiple_name_addtions(self):
source = """
__all__ = foo + bar
__all__ += boop
__all__ += ['foo']
"""
self.assertEqual(self.parse(source).all, ['foo'])
data = self.parse(source)
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], ['foo'])
source = """
__all__ = ['foo']
__all__ = foo
"""
self.assertEqual(self.parse(source).all, [])
data = self.parse(source)
self.assertEqual(data['name'], '__all__')
self.assertEqual(data['value'], [])
def test_parses_all_multiline(self):
source = """
@ -65,19 +83,76 @@ class PythonParserTests(unittest.TestCase):
'bar',
]
"""
self.assertEqual(self.parse(source).all, ['foo', 'bar'])
data = self.parse(source)[0]
self.assertEqual(data['value'], ['foo', 'bar'])
@pytest.mark.xfail(reason="Cannot parse list additions")
def test_parses_all_generator(self):
source = """
__all__ = [x for x in dir(token) if x[0] != '_'] + ['foo', 'bar']
"""
out = self.parse(source)
self.assertEqual(self.parse(source).all, ['foo', 'bar'])
data = self.parse(source)[0]
self.assertEqual(data['value'], ['foo', 'bar'])
def test_parses_name(self):
source = "foo.bar"
self.assertEqual(self.parse(source).children, [])
self.assertEqual(self.parse(source), {})
def test_parses_list(self):
source = "__all__ = [[1, 2], [3, 4]]"
self.assertEqual(self.parse(source).all, [[1, 2], [3, 4]])
name = "__all__"
value = [1, 2, 3, 4]
source = "{} = {}".format(name, value)
data = self.parse(source)[0]
self.assertEqual(data['name'], name)
self.assertEqual(data['value'], value)
def test_parses_nested_list(self):
name = "__all__"
value = [[1, 2], [3, 4]]
source = "{} = {}".format(name, value)
data = self.parse(source)[0]
self.assertEqual(data['name'], name)
self.assertEqual(data['value'], value)
def test_arguments(self):
"""Argument parsing of source"""
source=(
'def foobar(self, bar, baz=42, foo=True,\n'
' *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'
)
data = self.parse(source)[0]
self.assertEqual(
data['args'],
'self, bar, baz=42, foo=True, *args, **kwargs'
)
def test_advanced_arguments(self):
"""Advanced argument parsing"""
source=(
'def foobar(self, a, b, c=42, d="string", e=(1,2),\n'
' f={"a": True}, g=None, h=[1,2,3,4],\n'
' i=dict(a=True), j=False, *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'
)
data = self.parse(source)[0]
self.assertEqual(
data['args'],
', '.join([
'self',
'a',
'b',
'c=42',
'd=\'string\'',
'e=(1, 2)',
'f={\'a\': True}',
'g=None',
'h=[1, 2, 3, 4]',
'i=dict(a=True)',
'j=False',
'*args',
'**kwargs',
])
)
