Fixed Sphinx logger deprecation warnings

pull/161/head
Ashley Whetter 6 years ago
parent dfe82ae94e
commit 2cf80b1569
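
The change applied in every file below is the same: the deprecated logging helpers on the Sphinx application object (app.info, app.warn, app.debug) are replaced by a module-level logger obtained from sphinx.util.logging, and in autoapi/mappers/python.py the now-unneeded app argument is dropped from the placeholder-resolution helpers. A minimal before/after sketch of the pattern, using an illustrative run() function rather than actual AutoAPI code:

    # Before: logging through the Sphinx application object, which newer
    # Sphinx releases flag as deprecated.
    def run(app):
        app.info('Loading Data')
        app.warn('something worth flagging')

    # After: a module-level logger from Sphinx's logging API.
    import sphinx.util.logging

    LOGGER = sphinx.util.logging.getLogger(__name__)

    def run(app):
        LOGGER.info('Loading Data')
        LOGGER.warning('something worth flagging')  # warn() becomes warning()

As far as I know, getLogger(__name__) returns Sphinx's logging adapter, so messages still flow through Sphinx's normal status output and warning handling.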

@@ -12,6 +12,7 @@ import sphinx
 from sphinx.util.console import darkgreen, bold
 from sphinx.addnodes import toctree
 from sphinx.errors import ExtensionError
+import sphinx.util.logging
 from docutils.parsers.rst import directives
 from . import documenters
@@ -21,6 +22,8 @@ from .directives import AutoapiSummary, NestedParse
 from .settings import API_ROOT
 from .toctree import add_domain_to_toctree
+LOGGER = sphinx.util.logging.getLogger(__name__)
 default_options = ['members', 'undoc-members', 'private-members', 'special-members']
 _viewcode_cache = {}
 """Caches a module's parse results for use in viewcode.
@@ -86,18 +89,18 @@ def run_autoapi(app):
 out_suffix = app.config.source_suffix[0]
 # Actual meat of the run.
-app.info(bold('[AutoAPI] ') + darkgreen('Loading Data'))
+LOGGER.info(bold('[AutoAPI] ') + darkgreen('Loading Data'))
 sphinx_mapper_obj.load(
 patterns=file_patterns,
 dirs=normalized_dirs,
 ignore=ignore_patterns,
 )
-app.info(bold('[AutoAPI] ') + darkgreen('Mapping Data'))
+LOGGER.info(bold('[AutoAPI] ') + darkgreen('Mapping Data'))
 sphinx_mapper_obj.map(options=app.config.autoapi_options)
 if app.config.autoapi_generate_api_docs:
-app.info(bold('[AutoAPI] ') + darkgreen('Rendering Data'))
+LOGGER.info(bold('[AutoAPI] ') + darkgreen('Rendering Data'))
 sphinx_mapper_obj.output_rst(
 root=normalized_root,
 source_suffix=out_suffix,
@@ -108,7 +111,7 @@ def build_finished(app, exception):
 if not app.config.autoapi_keep_files and app.config.autoapi_generate_api_docs:
 normalized_root = os.path.normpath(os.path.join(app.confdir, app.config.autoapi_root))
 if app.verbosity > 1:
-app.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .rst files'))
+LOGGER.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .rst files'))
 shutil.rmtree(normalized_root)
 sphinx_mapper = default_backend_mapping[app.config.autoapi_type]
@@ -145,7 +148,7 @@ def doctree_read(app, doctree):
 message = darkgreen(
 'Adding AutoAPI TOCTree [{0}] to index.rst'.format(toc_entry)
 )
-app.info(message_prefix + message)
+LOGGER.info(message_prefix + message)
 def clear_env(app, env):

@@ -9,9 +9,12 @@ import sphinx.util
 from sphinx.util.console import darkgreen, bold
 from sphinx.util.osutil import ensuredir
 from sphinx.util.docstrings import prepare_docstring
+import sphinx.util.logging
 from ..settings import API_ROOT
+LOGGER = sphinx.util.logging.getLogger(__name__)
 Path = namedtuple('Path', ['absolute', 'relative'])
@@ -223,7 +226,7 @@ class SphinxMapperBase(object):
 # Skip ignored files
 for ignore_pattern in ignore:
 if fnmatch.fnmatch(os.path.join(root, filename), ignore_pattern):
-self.app.info(
+LOGGER.info(
 bold('[AutoAPI] ') +
 darkgreen("Ignoring %s/%s" % (root, filename))
 )

@@ -9,10 +9,12 @@ import unidecode
 import yaml
 from sphinx.util.osutil import ensuredir
 from sphinx.util.console import darkgreen, bold
+import sphinx.util.logging
 from sphinx.errors import ExtensionError
 from .base import PythonMapperBase, SphinxMapperBase
+LOGGER = sphinx.util.logging.getLogger(__name__)
 # Doc comment patterns
 DOC_COMMENT_PATTERN = r'''
@@ -96,9 +98,9 @@ class DotNetSphinxMapper(SphinxMapperBase):
 )
 _, error_output = proc.communicate()
 if error_output:
-self.app.warn(error_output)
+LOGGER.warning(error_output)
 except (OSError, subprocess.CalledProcessError) as e:
-self.app.warn('Error generating metadata: {0}'.format(e))
+LOGGER.warning('Error generating metadata: {0}'.format(e))
 if raise_error:
 raise ExtensionError('Failure in docfx while generating AutoAPI output.')
 # We now have yaml files
@@ -119,9 +121,9 @@ class DotNetSphinxMapper(SphinxMapperBase):
 parsed_data = yaml.safe_load(handle)
 return parsed_data
 except IOError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 except TypeError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 return None
 # Subclassed to iterate over items
@@ -159,7 +161,7 @@ class DotNetSphinxMapper(SphinxMapperBase):
 try:
 cls = obj_map[data['type'].lower()]
 except KeyError:
-self.app.warn('Unknown type: %s' % data)
+LOGGER.warning('Unknown type: %s' % data)
 else:
 obj = cls(data, jinja_env=self.jinja_env, options=options,
 url_root=self.url_root, **kwargs)
@@ -246,7 +248,7 @@ class DotNetSphinxMapper(SphinxMapperBase):
 @staticmethod
 def build_finished(app, exception):
 if app.verbosity > 1:
-app.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .yml files'))
+LOGGER.info(bold('[AutoAPI] ') + darkgreen('Cleaning generated .yml files'))
 if os.path.exists(DotNetSphinxMapper.DOCFX_OUTPUT_PATH):
 shutil.rmtree(DotNetSphinxMapper.DOCFX_OUTPUT_PATH)

@@ -1,8 +1,12 @@
 import json
 import subprocess
+import sphinx.util.logging
 from .base import PythonMapperBase, SphinxMapperBase
+LOGGER = sphinx.util.logging.getLogger(__name__)
 class GoSphinxMapper(SphinxMapperBase):
@@ -35,9 +39,9 @@ class GoSphinxMapper(SphinxMapperBase):
 parsed_data = json.loads(subprocess.check_output(['godocjson', path]))
 return parsed_data
 except IOError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 except TypeError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 return None
 def create_class(self, data, options=None, **kwargs):
@@ -62,12 +66,12 @@ class GoSphinxMapper(SphinxMapperBase):
 try:
 # Contextual type data from children recursion
 if _type:
-self.app.debug('Forcing Go Type %s' % _type)
+LOGGER.debug('Forcing Go Type %s' % _type)
 cls = obj_map[_type]
 else:
 cls = obj_map[data['type']]
 except KeyError:
-self.app.warn('Unknown Type: %s' % data)
+LOGGER.warning('Unknown Type: %s' % data)
 else:
 if cls.inverted_names and 'names' in data:
 # Handle types that have reversed names parameter

@@ -2,8 +2,12 @@ import json
 import subprocess
 import os
+import sphinx.util.logging
 from .base import PythonMapperBase, SphinxMapperBase
+LOGGER = sphinx.util.logging.getLogger(__name__)
 class JavaScriptSphinxMapper(SphinxMapperBase):
@@ -29,9 +33,9 @@ class JavaScriptSphinxMapper(SphinxMapperBase):
 parsed_data = json.loads(subprocess.check_output([subcmd, '-X', path]))
 return parsed_data
 except IOError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 except TypeError:
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 return None
 # Subclassed to iterate over items
@@ -64,7 +68,7 @@ class JavaScriptSphinxMapper(SphinxMapperBase):
 try:
 cls = obj_map[data['kind']]
 except (KeyError, TypeError):
-self.app.warn('Unknown Type: %s' % data)
+LOGGER.warning('Unknown Type: %s' % data)
 else:
 # Recurse for children
 obj = cls(data, jinja_env=self.jinja_env)

@@ -5,6 +5,7 @@ import os
 import astroid
 import sphinx
 import sphinx.util.docstrings
+import sphinx.util.logging
 from .base import PythonMapperBase, SphinxMapperBase
 from . import astroid_utils
@@ -15,8 +16,10 @@ try:
 except NameError:
 _TEXT_TYPE = str
+LOGGER = sphinx.util.logging.getLogger(__name__)
-def _expand_wildcard_placeholder(original_module, originals_map, placeholder, app):
+def _expand_wildcard_placeholder(original_module, originals_map, placeholder):
 """Expand a wildcard placeholder to a sequence of named placeholders.
 :param original_module: The data dictionary of the module
@@ -27,8 +30,6 @@ def _expand_wildcard_placeholder(original_module, originals_map, placeholder, ap
 :type originals_map: dict(str, dict)
 :param placeholder: The wildcard placeholder to expand.
 :type placeholder: dict
-:param app: The Sphinx application to report errors with.
-:type app: sphinx.Application
 :returns: The placeholders that the wildcard placeholder represents.
 :rtype: list(dict)
@@ -44,7 +45,7 @@ def _expand_wildcard_placeholder(original_module, originals_map, placeholder, ap
 msg = 'Invalid __all__ entry {0} in {1}'.format(
 name, original_module['name'],
 )
-app.warn(msg)
+LOGGER.warning(msg)
 continue
 originals.append(originals_map[name])
@@ -70,7 +71,7 @@ def _expand_wildcard_placeholder(original_module, originals_map, placeholder, ap
 return placeholders
-def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app):
+def _resolve_module_placeholders(modules, module_name, visit_path, resolved):
 """Resolve all placeholder children under a module.
 :param modules: A mapping of module names to their data dictionary.
@@ -82,8 +83,6 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app
 :type visited: collections.OrderedDict
 :param resolved: A set of already resolved module names.
 :type resolved: set(str)
-:param app: The Sphinx application to report with.
-:type app: sphinx.Application
 """
 if module_name in resolved:
 return
@@ -100,7 +99,7 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app
 msg = "Cannot resolve cyclic import: {0}, {1}".format(
 ', '.join(visit_path), imported_from,
 )
-app.warn(msg)
+LOGGER.warning(msg)
 module['children'].remove(child)
 children.pop(child['name'])
 continue
@@ -109,12 +108,12 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app
 msg = "Cannot resolve import of unknown module {0} in {1}".format(
 imported_from, module_name,
 )
-app.warn(msg)
+LOGGER.warning(msg)
 module['children'].remove(child)
 children.pop(child['name'])
 continue
-_resolve_module_placeholders(modules, imported_from, visit_path, resolved, app)
+_resolve_module_placeholders(modules, imported_from, visit_path, resolved)
 if original_name == '*':
 original_module, originals_map = modules[imported_from]
@@ -122,7 +121,7 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app
 # Replace the wildcard placeholder
 # with a list of named placeholders.
 new_placeholders = _expand_wildcard_placeholder(
-original_module, originals_map, child, app,
+original_module, originals_map, child,
 )
 child_index = module['children'].index(child)
 module['children'][child_index:child_index+1] = new_placeholders
@@ -137,7 +136,7 @@ def _resolve_module_placeholders(modules, module_name, visit_path, resolved, app
 msg = "Cannot resolve import of {0} in {1}".format(
 child['original_path'], module_name,
 )
-app.warn(msg)
+LOGGER.warning(msg)
 module['children'].remove(child)
 children.pop(child['name'])
 continue
@@ -222,7 +221,7 @@ class PythonSphinxMapper(SphinxMapperBase):
 parsed_data = Parser().parse_file(path)
 return parsed_data
 except (IOError, TypeError, ImportError):
-self.app.warn('Error reading file: {0}'.format(path))
+LOGGER.warning('Error reading file: {0}'.format(path))
 return None
 def _resolve_placeholders(self):
@@ -237,7 +236,7 @@ class PythonSphinxMapper(SphinxMapperBase):
 resolved = set()
 for module_name in modules:
 visit_path = collections.OrderedDict()
-_resolve_module_placeholders(modules, module_name, visit_path, resolved, self.app)
+_resolve_module_placeholders(modules, module_name, visit_path, resolved)
 def map(self, options=None):
 self._resolve_placeholders()
@@ -268,7 +267,7 @@ class PythonSphinxMapper(SphinxMapperBase):
 try:
 cls = obj_map[data['type']]
 except KeyError:
-self.app.warn("Unknown type: %s" % data['type'])
+LOGGER.warning("Unknown type: %s" % data['type'])
 else:
 obj = cls(
 data,

@@ -8,6 +8,9 @@ we then nest our Domain references inside the already existing Sections.
 from docutils import nodes
 from sphinx import addnodes
+import sphinx.util.logging
+LOGGER = sphinx.util.logging.getLogger(__name__)
 def _build_toc_node(docname, anchor='anchor', text='test text', bullet=False):
@@ -79,7 +82,7 @@ def _get_toc_reference(app, node, toc, docname):
 ref_id = node.children[0].attributes['ids'][0]
 toc_reference = _find_toc_node(toc, ref_id, addnodes.desc)
 except (KeyError, IndexError) as e:
-app.warn('Invalid desc node: %s' % e)
+LOGGER.warning('Invalid desc node: %s' % e)
 toc_reference = None
 return toc_reference
@@ -108,7 +111,7 @@ def add_domain_to_toctree(app, doctree, docname):
 try:
 ref_id = desc_node.children[0].attributes['ids'][0]
 except (KeyError, IndexError) as e:
-app.warn('Invalid desc node: %s' % e)
+LOGGER.warning('Invalid desc node: %s' % e)
 continue
 try:
 # Python domain object