Merge pull request #78 from rtfd/python-upgrades

Start using pydocstyle for Python doc generation
pull/88/head
Anthony 8 years ago committed by GitHub
commit 76c458f85f

.gitignore vendored

@ -11,3 +11,4 @@ tests/dotnetexample/example/Identity/
_api_
.tox
.eggs
.ropeproject/

@ -0,0 +1,35 @@
"""AutoAPI directives"""
from docutils.parsers.rst import Directive
from docutils import nodes
from sphinx.util.nodes import nested_parse_with_titles
class NestedParse(Directive):
"""Nested parsing to remove the first heading of included rST
This is used to handle the case where we'd like to remove user-supplied
headings from module docstrings. This is required to reduce the number of
duplicate headings on sections.
"""
has_content = 1
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
node = nodes.paragraph()
node.document = self.state.document
nested_parse_with_titles(self.state, self.content, node)
try:
title_node = node[0][0]
if isinstance(title_node, nodes.title):
if isinstance(title_node[0], nodes.Text):
del node[0][0][0]
except IndexError:
pass
return [node]
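A rough standalone sketch (not part of this diff) of the title-stripping step the directive performs, using a hypothetical hand-built doctree in place of a real nested parse:

from docutils import nodes

# Hypothetical doctree: nested parsing of a docstring that starts with its
# own heading yields a section whose first child is a title node.
node = nodes.paragraph()
node += nodes.section(
    '',
    nodes.title(text='User supplied heading'),
    nodes.paragraph(text='Docstring body.'),
)

# Same check as run() above: drop the title text so the heading AutoAPI
# already emits is not duplicated.
title_node = node[0][0]
if isinstance(title_node, nodes.title) and isinstance(title_node[0], nodes.Text):
    del node[0][0][0]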

@ -10,8 +10,10 @@ import shutil
from sphinx.util.console import darkgreen, bold
from sphinx.addnodes import toctree
from sphinx.errors import ExtensionError
from docutils.parsers.rst import directives
from .backends import default_file_mapping, default_ignore_patterns, default_backend_mapping
from .directives import NestedParse
from .settings import API_ROOT
default_options = ['members', 'undoc-members', 'private-members', 'special-members']
@ -66,6 +68,15 @@ def run_autoapi(app):
else:
ignore_patterns = default_ignore_patterns.get(app.config.autoapi_type, [])
if '.rst' in app.config.source_suffix:
out_suffix = '.rst'
elif '.txt' in app.config.source_suffix:
out_suffix = '.txt'
else:
# Fallback to first suffix listed
out_suffix = app.config.source_suffix[0]
# Actual meat of the run.
app.info(bold('[AutoAPI] ') + darkgreen('Loading Data'))
domain_obj.load(
patterns=file_patterns,
@ -79,8 +90,7 @@ def run_autoapi(app):
app.info(bold('[AutoAPI] ') + darkgreen('Rendering Data'))
domain_obj.output_rst(
root=normalized_root,
# TODO: Better way to determine suffix?
source_suffix=app.config.source_suffix[0],
source_suffix=out_suffix,
)
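The out_suffix selection above assumes source_suffix is a list of suffixes (Sphinx also accepts a plain string). A standalone sketch of the same fallback with hypothetical config values, not part of this diff:

def pick_out_suffix(source_suffix):
    # Sphinx allows source_suffix to be a single string or a list.
    if isinstance(source_suffix, str):
        source_suffix = [source_suffix]
    for preferred in ('.rst', '.txt'):
        if preferred in source_suffix:
            return preferred
    # Fall back to the first suffix listed, as run_autoapi() does.
    return source_suffix[0]

assert pick_out_suffix(['.md', '.rst']) == '.rst'
assert pick_out_suffix('.md') == '.md'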
@ -131,3 +141,4 @@ def setup(app):
app.add_config_value('autoapi_add_toctree_entry', True, 'html')
app.add_config_value('autoapi_template_dir', [], 'html')
app.add_stylesheet('autoapi.css')
directives.register_directive('autoapi-nested-parse', NestedParse)

@ -1,7 +1,7 @@
import re
import os
import fnmatch
from collections import OrderedDict
from collections import OrderedDict, namedtuple
import unidecode
from jinja2 import Environment, FileSystemLoader, TemplateNotFound
@ -10,6 +10,8 @@ from sphinx.util.osutil import ensuredir
from ..settings import API_ROOT
Path = namedtuple('Path', ['absolute', 'relative'])
class PythonMapperBase(object):
@ -39,7 +41,6 @@ class PythonMapperBase(object):
:var list children: Children of this object
:var list parameters: Parameters to this object
:var list methods: Methods on this object
'''
language = 'base'
@ -115,14 +116,16 @@ class PythonMapperBase(object):
* Break up the string as paths
'''
slug = self.name
try:
slug = self.name.split('(')[0]
except IndexError:
pass
slug = unidecode.unidecode(slug)
slug = slug.replace('-', '')
slug = re.sub(r'[^\w\.]+', '-', slug).strip('-')
return os.path.join(*slug.split('.'))
return slug.split('.')[-1]
def include_dir(self, root):
"""Return directory of file"""
parts = [root]
parts.extend(self.pathname.split(os.path.sep))
return '/'.join(parts)
@property
def include_path(self):
@ -131,8 +134,7 @@ class PythonMapperBase(object):
This is used in ``toctree`` directives, as Sphinx always expects Unix
path separators
"""
parts = [self.url_root]
parts.extend(self.pathname.split(os.path.sep))
parts = [self.include_dir(root=self.url_root)]
parts.append('index')
return '/'.join(parts)
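For a hypothetical object whose pathname is Foo/Bar/Widget, these two helpers produce the paths below (a sketch mirroring the DotNet include_path test further down, not part of this diff):

import os

# Standalone versions of include_dir/include_path for illustration only.
def include_dir(pathname, root):
    parts = [root]
    parts.extend(pathname.split(os.path.sep))
    return '/'.join(parts)

def include_path(pathname, url_root='/autoapi'):
    # toctree entries always use Unix separators, hence the '/'.join().
    return '/'.join([include_dir(pathname, root=url_root), 'index'])

pathname = os.path.join('Foo', 'Bar', 'Widget')
assert include_dir(pathname, root='_build/autoapi') == '_build/autoapi/Foo/Bar/Widget'
assert include_path(pathname) == '/autoapi/Foo/Bar/Widget/index'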
@ -219,11 +221,12 @@ class SphinxMapperBase(object):
if skip:
continue
# Make sure the path is full
if os.path.isabs(filename):
if not os.path.isabs(filename):
filename = os.path.join(root, filename)
files_to_read.append(filename)
else:
files_to_read.append(os.path.join(root, filename))
for _path in self.app.status_iterator(
files_to_read,
@ -252,10 +255,10 @@ class SphinxMapperBase(object):
def map(self, options=None):
'''Trigger find of serialized sources and build objects'''
for path, data in self.paths.items():
for obj in self.create_class(data, options=options):
for obj in self.create_class(data, options=options, path=path):
self.add_object(obj)
def create_class(self, obj, options=None, **kwargs):
def create_class(self, obj, options=None, path=None, **kwargs):
'''
Create class object.
@ -266,19 +269,11 @@ class SphinxMapperBase(object):
def output_rst(self, root, source_suffix):
for id, obj in self.objects.items():
if not obj or not obj.top_level_object:
continue
rst = obj.render()
if not rst:
continue
try:
filename = id.split('(')[0]
except IndexError:
filename = id
filename = filename.replace('#', '-')
detail_dir = os.path.join(root, *filename.split('.'))
detail_dir = obj.include_dir(root=root)
ensuredir(detail_dir)
path = os.path.join(detail_dir, '%s%s' % ('index', source_suffix))
with open(path, 'wb+') as detail_file:

@ -4,6 +4,7 @@ import subprocess
import traceback
import shutil
from collections import defaultdict
import unidecode
import yaml
from sphinx.util.osutil import ensuredir
@ -317,6 +318,27 @@ class DotNetPythonMapper(PythonMapperBase):
return '<{cls} {id}>'.format(cls=self.__class__.__name__,
id=self.id)
@property
def pathname(self):
'''Slugified path for filenames
Slugs to a filename using the following steps
* Decode unicode to approximate ascii
* Remove existing hyphens
* Substitute hyphens for non-word characters
* Break up the string as paths
'''
slug = self.name
try:
slug = self.name.split('(')[0]
except IndexError:
pass
slug = unidecode.unidecode(slug)
slug = slug.replace('-', '')
slug = re.sub(r'[^\w\.]+', '-', slug).strip('-')
return os.path.join(*slug.split('.'))
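A rough illustration of what this slugging produces for a hypothetical .NET member name with an overload signature (not part of this diff):

import os
import re

import unidecode

name = 'Foo.Bar.Widget(System.String)'
slug = name.split('(')[0]                        # drop the overload signature
slug = unidecode.unidecode(slug)                 # approximate ASCII
slug = slug.replace('-', '')                     # remove existing hyphens
slug = re.sub(r'[^\w\.]+', '-', slug).strip('-')
assert os.path.join(*slug.split('.')) == os.path.join('Foo', 'Bar', 'Widget')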
@property
def short_name(self):
'''Shorten name property'''

@ -1,121 +1,251 @@
from collections import defaultdict
import sys
import os
import re
import textwrap
import ast
from collections import defaultdict
from pydocstyle.parser import Parser
from .base import PythonMapperBase, SphinxMapperBase
from ..utils import slugify
if sys.version_info < (3,):
from epyparse import parsed
from itertools import izip_longest as zip_longest
else:
# Don't raise exception on module level because it would
# break all backends on Python 3
def parsed(path):
raise Exception('Python 3 not supported')
from itertools import zip_longest
class PythonSphinxMapper(SphinxMapperBase):
'''Auto API domain handler for Python
"""Auto API domain handler for Python
Parses directly from Python files.
:param app: Sphinx application passed in as part of the extension
'''
"""
def load(self, patterns, dirs, **kwargs):
"""Load objects from the filesystem into the ``paths`` dictionary
Also include an attribute on the object, ``relative_path``, which is the
shortened, relative path to the package/module
"""
for dir_ in dirs:
for path in self.find_files(patterns=patterns, dirs=[dir_], **kwargs):
data = self.read_file(path=path)
data.relative_path = os.path.relpath(path, dir_)
if data:
self.paths[path] = data
def read_file(self, path, **kwargs):
'''Read file input into memory, returning deserialized objects
"""Read file input into memory, returning deserialized objects
:param path: Path of file to read
'''
"""
try:
parsed_data = parsed(path)
parsed_data = Parser()(open(path), path)
return parsed_data
except IOError:
self.app.warn('Error reading file: {0}'.format(path))
except TypeError:
self.app.warn('Error reading file: {0}'.format(path))
except ImportError:
except (IOError, TypeError, ImportError):
self.app.warn('Error reading file: {0}'.format(path))
return None
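pydocstyle's Parser returns a parsed module object exposing the attributes the mappers below rely on (kind, name, children, docstring, source, parent). A rough sketch of driving it by hand, with a hypothetical file name, not part of this diff:

from pydocstyle.parser import Parser

# Parse a file the same way read_file() does above.
with open('example.py') as source_file:
    module = Parser()(source_file, 'example.py')

print(module.kind)                        # 'module'
for definition in module.children:        # top-level classes and functions
    print(definition.kind, definition.name, bool(definition.docstring))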
def create_class(self, data, options=None, **kwargs):
'''Return instance of class based on Roslyn type property
Data keys handled here:
def create_class(self, data, options=None, path=None, **kwargs):
"""Create a class from the passed in data
type
Set the object class
items
Recurse into :py:meth:`create_class` to create child object
instances
:param data: dictionary data of epydoc output
'''
:param data: dictionary data of pydocstyle output
"""
obj_map = dict((cls.type, cls) for cls
in [PythonClass, PythonFunction, PythonModule])
in [PythonClass, PythonFunction, PythonModule,
PythonMethod, PythonPackage])
try:
cls = obj_map[data['type']]
cls = obj_map[data.kind]
except KeyError:
self.app.warn("Unknown Type: %s" % data['type'])
self.app.warn("Unknown type: %s" % data.kind)
else:
obj = cls(data, jinja_env=self.jinja_env, options=self.app.config.autoapi_options)
if 'children' in data:
for child_data in data['children']:
for child_obj in self.create_class(child_data, options=options):
obj = cls(data, jinja_env=self.jinja_env,
options=self.app.config.autoapi_options, **kwargs)
for child_data in data.children:
for child_obj in self.create_class(child_data, options=options,
**kwargs):
obj.children.append(child_obj)
self.add_object(child_obj)
yield obj
class PythonPythonMapper(PythonMapperBase):
language = 'python'
is_callable = False
def __init__(self, obj, **kwargs):
super(PythonPythonMapper, self).__init__(obj, **kwargs)
# Always exist
self.id = obj['fullname']
self.name = self.obj.get('fullname', self.id)
self.name = self._get_full_name(obj)
self.id = slugify(self.name)
# Optional
self.imports = obj.get('imports', [])
self.children = []
self.args = obj.get('args', [])
self.params = obj.get('params', [])
self.docstring = obj.get('docstring', '')
self.methods = obj.get('methods', [])
self.inheritance = obj.get('bases', [])
self.args = []
if self.is_callable:
self.args = self._get_arguments(obj)
self.docstring = obj.docstring or ''
self.docstring = textwrap.dedent(self.docstring)
self.docstring = self.docstring.replace("'''", '').replace('"""', '')
if getattr(obj, 'parent'):
self.inheritance = [obj.parent.name]
else:
self.inheritance = []
# For later
self.item_map = defaultdict(list)
@property
def undoc_member(self):
def is_undoc_member(self):
return self.docstring == ''
@property
def private_member(self):
def is_private_member(self):
return self.short_name[0] == '_'
@property
def special_member(self):
def is_special_member(self):
return self.short_name[0:2] == '__'
@property
def display(self):
if self.undoc_member and 'undoc-members' not in self.options:
if self.is_undoc_member and 'undoc-members' not in self.options:
return False
if self.private_member and 'private-members' not in self.options:
if self.is_private_member and 'private-members' not in self.options:
return False
if self.special_member and 'special-members' not in self.options:
if self.is_special_member and 'special-members' not in self.options:
return False
return True
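As a rough standalone illustration of the member filtering above (hypothetical values; the options list mirrors autoapi_options), not part of this diff:

def should_display(docstring, short_name, options):
    # Mirrors the display property: skip undocumented, private, and special
    # members unless the corresponding option is enabled.
    if docstring == '' and 'undoc-members' not in options:
        return False
    if short_name.startswith('_') and 'private-members' not in options:
        return False
    if short_name.startswith('__') and 'special-members' not in options:
        return False
    return True

assert should_display('Does things.', 'method_okay', ['members']) is True
assert should_display('', 'method_okay', ['members']) is False
assert should_display('', 'method_okay', ['members', 'undoc-members']) is True
assert should_display('Docs.', '_hidden', ['members']) is False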
@staticmethod
def _get_full_name(obj):
"""Recursively build the full name of the object from pydocstyle
Uses an additional attribute added to the object, ``relative_path``.
This is the shortened path of the object name, if the object is a
package or module.
:param obj: pydocstyle object, as returned from Parser()
:returns: Dotted name of object
:rtype: str
"""
def _inner(obj, parts=None):
if parts is None:
parts = []
obj_kind = obj.kind
obj_name = obj.name
if obj_kind == 'module':
obj_name = getattr(obj, 'relative_path', None) or obj.name
obj_name = obj_name.replace('/', '.')
ext = '.py'
if obj_name.endswith(ext):
obj_name = obj_name[:-len(ext)]
elif obj_kind == 'package':
obj_name = getattr(obj, 'relative_path', None) or obj.name
exts = ['/__init__.py', '.py']
for ext in exts:
if obj_name.endswith(ext):
obj_name = obj_name[:-len(ext)]
obj_name = obj_name.split('/').pop()
parts.insert(0, obj_name)
try:
return _inner(obj.parent, parts)
except AttributeError:
pass
return parts
return '.'.join(_inner(obj))
@staticmethod
def _get_arguments(obj):
"""Get arguments from a pydocstyle object
:param obj: pydocstyle object, as returned from Parser()
:returns: list of arguments, or argument and default value pairs
:rtype: list
"""
arguments = []
source = textwrap.dedent(obj.source)
# Bare except here because AST parsing can throw any number of
# exceptions, including SyntaxError
try:
parsed = ast.parse(source)
except: # noqa
return
parsed_args = parsed.body[0].args
arg_names = [arg.id if sys.version_info < (3,) else arg.arg
for arg in parsed_args.args]
# Get defaults for display based on AST node type
arg_defaults = []
pydocstyle_map = {
ast.Name: 'id',
ast.Num: 'n',
ast.Str: lambda obj: '"{0}"'.format(obj.s),
ast.Call: lambda obj: obj.func.id,
# TODO these require traversal into the AST nodes. Add this for more
# complete argument parsing, or handle with a custom AST traversal.
ast.List: lambda _: 'list',
ast.Tuple: lambda _: 'tuple',
ast.Set: lambda _: 'set',
ast.Dict: lambda _: 'dict',
}
if sys.version_info >= (3,):
pydocstyle_map.update({
ast.NameConstant: 'value',
})
for value in parsed_args.defaults:
default = None
try:
default = pydocstyle_map[type(value)](value)
except TypeError:
default = getattr(value, pydocstyle_map[type(value)])
except KeyError:
pass
if default is None:
default = 'None'
arg_defaults.append(default)
# Pair defaults with arguments from the end of the list: the AST stores
# defaults as a shorter list that applies only to the trailing arguments
for (name, default) in zip_longest(reversed(arg_names),
reversed(arg_defaults)):
arg = name
if default is not None:
arg = '{0}={1}'.format(name, default)
arguments.insert(0, arg)
# Add *args and **kwargs
if parsed_args.vararg:
arguments.append('*{0}'.format(
parsed_args.vararg
if sys.version_info < (3, 3)
else parsed_args.vararg.arg
))
if parsed_args.kwarg:
arguments.append('**{0}'.format(
parsed_args.kwarg
if sys.version_info < (3, 3)
else parsed_args.kwarg.arg
))
return arguments
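The zip_longest pairing works because the AST stores defaults only for the trailing parameters; reversing both lists lines each default up with its argument and pads the rest with None. A standalone sketch with hypothetical values, not part of this diff:

from itertools import zip_longest  # izip_longest on Python 2

# def method_okay(self, foo=None, bar=None) would yield:
arg_names = ['self', 'foo', 'bar']
arg_defaults = ['None', 'None']           # only trailing args have defaults

arguments = []
for name, default in zip_longest(reversed(arg_names), reversed(arg_defaults)):
    arguments.insert(0, name if default is None else '{0}={1}'.format(name, default))

assert arguments == ['self', 'foo=None', 'bar=None']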
class PythonFunction(PythonPythonMapper):
type = 'function'
is_callable = True
class PythonMethod(PythonPythonMapper):
type = 'method'
is_callable = True
class PythonModule(PythonPythonMapper):
@ -123,5 +253,10 @@ class PythonModule(PythonPythonMapper):
top_level_object = True
class PythonPackage(PythonPythonMapper):
type = 'package'
top_level_object = True
class PythonClass(PythonPythonMapper):
type = 'class'

@ -9,8 +9,10 @@ Below is a list of all items that are documented here.
:glob:
:maxdepth: 1
{%- for page in pages|sort %}
{% if page.top_level_object %}
/autoapi/{{ page.id.split('.')|join('/') }}/index
{% endif %}
{# Force whitespace #}
{%- for page in pages %}
{%- if page.top_level_object %}
{{ page.include_path }}
{%- endif %}
{%- endfor %}

@ -1,23 +1,13 @@
{{ obj.short_name }}
{{ "-" * obj.short_name|length }}
.. autoapi-hidden::
{{ obj.short_name }}
{{ "=" * obj.short_name|length }}
.. py:class:: {{ obj.short_name }}{% if obj.args %}({{ obj.args|join(',') }}){% endif %}
{%- if obj.inheritance %}
.. rubric:: Imports
{% for import in obj.inheritance %}
* {{ import }}
{% endfor %}
{% endif %}
{%- if obj.docstring %}
.. rubric:: Summary
{{ obj.docstring|indent(3) }}
.. autoapi-nested-parse::
{{ obj.docstring|indent(6) }}
{% endif %}

@ -1,13 +1,8 @@
{%- if obj.display %}
{%- if is_method %}
{# Slice self off #}
.. method:: {{ obj.name.split('.')[-1] }}({{ obj.args[1:]|join(',') }})
{% else %}
.. function:: {{ obj.name.split('.')[-1] }}({{ obj.args|join(',') }})
{% endif %}
.. function:: {{ obj.name }}({{ obj.args|join(',') }})
{%- if obj.docstring %}
{% if obj.docstring %}
{{ obj.docstring.strip()|indent(3) }}
{% endif %}

@ -2,4 +2,3 @@
{{ obj.docstring|indent(3) }}

@ -0,0 +1,9 @@
{%- if obj.display %}
.. method:: {{ obj.name }}({{ obj.args[1:]|join(',') }})
{% if obj.docstring %}
{{ obj.docstring|indent(3) }}
{% endif %}
{% endif %}

@ -1,16 +1,15 @@
{{ obj.name }}
{{ "~" * obj.name|length }}
{{ "=" * obj.name|length }}
{%- if obj.docstring %}
.. py:module:: {{ obj.name }}
.. rubric:: Summary
{%- if obj.docstring %}
{{ obj.docstring }}
.. autoapi-nested-parse::
{{ obj.docstring|indent(3) }}
{% endif %}
.. py:module:: {{ obj.name }}
{% block content %}
{%- for obj_item in obj.children %}
@ -18,4 +17,3 @@
{%- endfor %}
{% endblock %}

@ -0,0 +1 @@
{% extends "python/module.rst" %}

@ -0,0 +1,14 @@
import unicodedata
import re
# From Django
def slugify(value):
"""
Converts to lowercase, removes non-word characters (alphanumerics and
underscores) and converts spaces to hyphens. Also strips leading and
trailing whitespace.
"""
value = re.sub('[^\w\s-]', '', value).strip()
return re.sub('[-\s]+', '-', value)
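A quick illustration of the output (hypothetical inputs; the Python mapper uses this to build object IDs):

from autoapi.utils import slugify

assert slugify('Some heading text!') == 'Some-heading-text'
assert slugify('  already - slug-like  ') == 'already-slug-like'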

@ -51,9 +51,9 @@ author = u'Read the Docs, Inc'
# built documents.
#
# The short X.Y version.
version = '0.2.0'
version = '0.4.0'
# The full version, including alpha/beta/rc tags.
release = '0.2.0'
release = '0.4.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

@ -1,10 +1,2 @@
epydoc==3.0.1
epyparse==0.2.5
PyYAML==3.11
wheel==0.24.0
sphinx==1.3.1
sphinxcontrib-golangdomain
sphinxcontrib-dotnetdomain
pytest
mock
-e .
tox

@ -4,8 +4,8 @@ try:
extra_setup = dict(
install_requires=[
'PyYAML',
'epyparse',
'epydoc',
'pydocstyle',
'wheel==0.24.0',
'sphinx',
'sphinxcontrib-golangdomain',
'sphinxcontrib-dotnetdomain',
@ -19,8 +19,8 @@ except ImportError:
extra_setup = dict(
requires=[
'PyYAML',
'epyparse',
'epydoc',
'pydocstyle',
'wheel==0.24.0',
'sphinx'
'sphinxcontrib-golangdomain',
'sphinxcontrib-dotnetdomain',
@ -30,12 +30,12 @@ except ImportError:
setup(
name='sphinx-autoapi',
version='0.2.0',
version='0.4.0',
author='Eric Holscher',
author_email='eric@ericholscher.com',
url='http://github.com/ericholscher/sphinx-autoapi',
url='http://github.com/rtfd/sphinx-autoapi',
license='BSD',
description='',
description='Sphinx auto API documentation generator',
package_dir={'': '.'},
packages=find_packages('.'),
long_description=codecs.open("README.rst", "r", "utf-8").read(),

@ -1,7 +1,40 @@
__author__ = 'swenson'
"""Example module
This is a description
"""
import math
def example_function(x):
"""Compute the square root of x and return it."""
return math.sqrt(x)
class Foo(object):
class_var = 42 #: Class var docstring
another_class_var = 42
"""Another class var docstring"""
class Meta(object):
"""A nested class just to test things out"""
@classmethod
def foo():
"""The foo class method"""
return True
def method_okay(self, foo=None, bar=None):
"""This method should parse okay"""
return True
def method_multiline(self, foo=None, bar=None,
baz=None):
"""This is on multiple lines, but should parse okay too
pydocstyle gives us lines of source. Test if this means that multiline
definitions are covered in the way we're anticipating here
"""
return True
def method_tricky(self, foo=None, bar=dict(foo=1, bar=2)):
"""This will likely fail our argument testing
We parse naively on commas, so the nested dictionary will throw this off
"""
return True

@ -3,15 +3,15 @@ import os
import sys
import shutil
import unittest
from contextlib import contextmanager
from mock import patch
from sphinx.application import Sphinx
class LanguageIntegrationTests(unittest.TestCase):
def _run_test(self, test_dir, test_file, test_string):
@contextmanager
def sphinx_build(test_dir):
os.chdir('tests/{0}'.format(test_dir))
try:
app = Sphinx(
@ -22,14 +22,21 @@ class LanguageIntegrationTests(unittest.TestCase):
buildername='text',
)
app.build(force_all=True)
with open(test_file) as fin:
text = fin.read().strip()
self.assertIn(test_string, text)
yield
finally:
shutil.rmtree('_build')
os.chdir('../..')
class LanguageIntegrationTests(unittest.TestCase):
def _run_test(self, test_dir, test_file, test_string):
with sphinx_build(test_dir):
with open(test_file) as fin:
text = fin.read().strip()
self.assertIn(test_string, text)
class JavaScriptTests(LanguageIntegrationTests):
def _js_read(self, path):
@ -60,12 +67,27 @@ class GoTests(LanguageIntegrationTests):
class PythonTests(LanguageIntegrationTests):
@unittest.skipIf(sys.version_info > (3, 0), 'Epydoc does not support Python 3')
def test_integration(self):
self._run_test(
'pyexample',
'_build/text/autoapi/example/index.txt',
'Compute the square root of x and return it'
with sphinx_build('pyexample'):
example_file = open('_build/text/autoapi/example/index.txt').read()
self.assertIn(
'class example.Foo',
example_file
)
self.assertIn(
'example.Foo.method_okay(foo=None, bar=None)',
example_file
)
self.assertIn(
'example.Foo.method_multiline(foo=None, bar=None, baz=None)',
example_file
)
self.assertIn(
'example.Foo.method_tricky(foo=None, bar=dict)',
example_file
)
self.assertFalse(
os.path.exists('_build/text/autoapi/method_multiline')
)
@ -96,7 +118,6 @@ class DotNetTests(LanguageIntegrationTests):
class IntegrationTests(LanguageIntegrationTests):
@unittest.skipIf(sys.version_info > (3, 0), 'Epydoc does not support Python 3')
def test_template_overrides(self):
self._run_test(
'templateexample',
@ -107,7 +128,6 @@ class IntegrationTests(LanguageIntegrationTests):
class TOCTreeTests(LanguageIntegrationTests):
@unittest.skipIf(sys.version_info > (3, 0), 'Epydoc does not support Python 3')
def test_toctree_overrides(self):
self._run_test(
'toctreeexample',

@ -4,10 +4,12 @@
import os
import unittest
from collections import namedtuple
from jinja2 import Environment, FileSystemLoader
from autoapi.mappers import dotnet
from autoapi.mappers import python
from autoapi.settings import TEMPLATE_DIR
@ -135,3 +137,93 @@ class DotNetObjectTests(unittest.TestCase):
self.assertEqual(cls.include_path, '/autoapi/Foo/Bar/Widget/index')
cls = dotnet.DotNetClass({'id': 'Foo.Bar.Widget'}, url_root='/autofoo')
self.assertEqual(cls.include_path, '/autofoo/Foo/Bar/Widget/index')
class PythonObjectTests(unittest.TestCase):
def test_full_name(self):
"""Full name resolution on nested objects"""
Source = namedtuple('Source', ['kind', 'name', 'parent'])
obj_module = Source(kind='module', name='example/example.py', parent=None)
obj_class = Source(kind='class', name='Foo', parent=obj_module)
obj_method = Source(kind='method', name='bar', parent=obj_class)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_module),
'example.example'
)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_class),
'example.example.Foo'
)
self.assertEqual(
python.PythonPythonMapper._get_full_name(obj_method),
'example.example.Foo.bar'
)
def test_arguments(self):
"""Argument parsing of source"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=('def foobar(self, bar, baz=42, foo=True,\n'
' *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
['self', 'bar', 'baz=42', 'foo=True', '*args', '**kwargs']
)
def test_advanced_arguments(self):
"""Advanced argument parsing"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=('def foobar(self, a, b, c=42, d="string", e=(1,2),\n'
' f={"a": True}, g=None, h=[1,2,3,4],\n'
' i=dict(a=True), j=False, *args, **kwargs):\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
[
'self',
'a',
'b',
'c=42',
'd="string"',
'e=tuple',
'f=dict',
'g=None',
'h=list',
'i=dict',
'j=False',
'*args',
'**kwargs',
]
)
def test_bunk_whitespace(self):
"""Whitespace in definition throws off argument parsing"""
Source = namedtuple('Source', ['source', 'docstring'])
obj = Source(
source=(' def method_foo(self, a, b,\n'
' c):\n'
' call_something()\n'
' "This is a docstring"\n'
' return True\n'),
docstring='"This is a docstring"',
)
self.assertEqual(
python.PythonPythonMapper._get_arguments(obj),
['self', 'a', 'b', 'c']
)

@ -5,6 +5,8 @@ envlist = py27,py35,lint,docs
setenv =
LANG=C
deps = -r{toxinidir}/requirements.txt
pytest
mock
commands =
py.test {posargs}
