Run through black and isort
parent 0af66c6964
commit fdb126996a
8 changed files with 297 additions and 252 deletions
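Every hunk below is mechanical reformatting: black normalizes string quoting and line wrapping, and isort reorders imports. As a hedged illustration only (not part of this commit, and assuming the black and isort packages are installed), their public black.format_str and isort.code helpers reproduce the kind of rewrite the hunks show:

# Illustration only -- not from this repository. Assumes the "black" and
# "isort" packages are installed; format_str and code are their public APIs.
import black
import isort

# black rewrites string quoting, as in the namespace-package one-liners below.
src = "__import__('pkg_resources').declare_namespace(__name__)\n"
print(black.format_str(src, mode=black.Mode()), end="")
# -> __import__("pkg_resources").declare_namespace(__name__)

# isort alphabetizes imported names, as in the setup.py hunk.
print(isort.code("from setuptools import setup, find_packages\n"), end="")
# -> from setuptools import find_packages, setup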
@@ -1 +1 @@
-__import__('pkg_resources').declare_namespace(__name__)
+__import__("pkg_resources").declare_namespace(__name__)
@@ -1 +1 @@
-__import__('pkg_resources').declare_namespace(__name__)
+__import__("pkg_resources").declare_namespace(__name__)
@@ -11,11 +11,9 @@

 import re

-from ipcalc import Network
-
 from docutils import nodes
 from docutils.parsers.rst import Directive
-
+from ipcalc import Network
 from sphinx import addnodes
 from sphinx.domains import Domain, ObjType
 from sphinx.errors import NoUri
@@ -24,15 +22,15 @@ from sphinx.roles import XRefRole
 from sphinx.util import logging
 from sphinx.util.nodes import make_refnode

-__version__ = '0.3.0'
+__version__ = "0.4.0"


 logger = logging.getLogger(__name__)


 def ip_object_anchor(typ, path):
-    path = re.sub(r'[.:/]', '-', path)
-    return typ.lower() + '-' + path
+    path = re.sub(r"[.:/]", "-", path)
+    return typ.lower() + "-" + path


 class ip_node(nodes.Inline, nodes.TextElement):
@@ -47,17 +45,16 @@ class IPXRefRole(XRefRole):
     """
     Cross referencing role for the IP domain.
     """
+
     def __init__(self, method, index_type, **kwargs):
         self.method = method
         self.index_type = index_type
         innernodeclass = None
-        if method in ('v4', 'v6'):
+        if method in ("v4", "v6"):
             innernodeclass = ip_node
-        super(IPXRefRole, self).__init__(
-            innernodeclass=innernodeclass, **kwargs)
+        super(IPXRefRole, self).__init__(innernodeclass=innernodeclass, **kwargs)

-    def __cal__(self, typ, rawtext, text, lineno, inliner,
-                options=None, content=None):
+    def __cal__(self, typ, rawtext, text, lineno, inliner, options=None, content=None):
         if content is None:
             content = []
         if options is None:
@@ -66,28 +63,31 @@ class IPXRefRole(XRefRole):
             Network(text)
         except ValueError as e:
             env = inliner.document.settings.env
-            logger.warning("invalid ip address/range %s" % text, location=(env.docname, lineno))
+            logger.warning(
+                "invalid ip address/range %s" % text, location=(env.docname, lineno)
+            )
             return [nodes.literal(text, text), []]
         return super(IPXRefRole, self).__call__(
-            typ, rawtext, text, lineno, inliner, options, content)
+            typ, rawtext, text, lineno, inliner, options, content
+        )

     def process_link(self, env, refnode, has_explicit_title, title, target):
-        domaindata = env.domaindata['ip']
+        domaindata = env.domaindata["ip"]
         domaindata[self.method][target] = (target, refnode)
         return title, target

     def result_nodes(self, document, env, node, is_ref):
         try:
-            node['typ'] = self.method
+            node["typ"] = self.method
             indexnode = addnodes.index()
-            targetid = 'index-%s' % env.new_serialno('index')
-            targetnode = nodes.target('', '', ids=[targetid])
+            targetid = "index-%s" % env.new_serialno("index")
+            targetnode = nodes.target("", "", ids=[targetid])
             doctitle = list(document.traverse(nodes.title))[0].astext()
             idxtext = "%s; %s" % (node.astext(), doctitle)
             idxtext2 = "%s; %s" % (self.index_type, node.astext())
-            indexnode['entries'] = [
-                ('single', idxtext, targetid, '', None),
-                ('single', idxtext2, targetid, '', None),
+            indexnode["entries"] = [
+                ("single", idxtext, targetid, "", None),
+                ("single", idxtext2, targetid, "", None),
             ]
             return [indexnode, targetnode, node], []
         except KeyError as e:
@@ -104,20 +104,21 @@ class IPRange(Directive):
     def handle_rangespec(self, node):
         titlenode = nodes.title()
         node.append(titlenode)
-        titlenode.append(nodes.inline('', self.get_prefix_title()))
-        titlenode.append(nodes.literal('', self.rangespec))
+        titlenode.append(nodes.inline("", self.get_prefix_title()))
+        titlenode.append(nodes.literal("", self.rangespec))
         ids = ip_object_anchor(self.typ, self.rangespec)
-        node['ids'].append(ids)
+        node["ids"].append(ids)
         self.env.domaindata[self.domain][self.typ][ids] = (
             self.env.docname,
-            self.options.get('synopsis', ''))
+            self.options.get("synopsis", ""),
+        )
         return ids

     def run(self):
-        if ':' in self.name:
-            self.domain, self.objtype = self.name.split(':', 1)
+        if ":" in self.name:
+            self.domain, self.objtype = self.name.split(":", 1)
         else:
-            self.domain, self.objtype = '', self.name
+            self.domain, self.objtype = "", self.name
         self.env = self.state.document.settings.env
         self.rangespec = self.arguments[0]
         node = nodes.section()
@@ -127,97 +128,98 @@ class IPRange(Directive):
         else:
             doctitle = list(self.state.document.traverse(nodes.title))[0].astext()
             idx_text = "%s; %s" % (self.rangespec, doctitle)
-            self.indexnode = addnodes.index(entries=[
-                ('single', idx_text, name, '', None),
-                ('single', self.get_index_text(), name, '', None)
-            ])
+            self.indexnode = addnodes.index(
+                entries=[
+                    ("single", idx_text, name, "", None),
+                    ("single", self.get_index_text(), name, "", None),
+                ]
+            )

         if self.content:
-            contentnode = nodes.paragraph('')
+            contentnode = nodes.paragraph("")
             node.append(contentnode)
-            self.state.nested_parse(
-                self.content, self.content_offset, contentnode)
+            self.state.nested_parse(self.content, self.content_offset, contentnode)

         iprange = ip_range()
         node.append(iprange)
-        iprange['rangespec'] = self.rangespec
+        iprange["rangespec"] = self.rangespec
         return [self.indexnode, node]


 class IPv4Range(IPRange):
-    typ = 'v4range'
+    typ = "v4range"

     def get_prefix_title(self):
-        return _('IPv4 address range ')
+        return _("IPv4 address range ")

     def get_index_text(self):
-        return "%s; %s" % (_('IPv4 range'), self.rangespec)
+        return "%s; %s" % (_("IPv4 range"), self.rangespec)


 class IPv6Range(IPRange):
-    typ = 'v6range'
+    typ = "v6range"

     def get_prefix_title(self):
-        return _('IPv6 address range ')
+        return _("IPv6 address range ")

     def get_index_text(self):
-        return "%s; %s" % (_('IPv6 range'), self.rangespec)
+        return "%s; %s" % (_("IPv6 range"), self.rangespec)


 class IPDomain(Domain):
     """
     IP address and range domain.
     """
-    name = 'ip'
-    label = 'IP addresses and ranges.'
+
+    name = "ip"
+    label = "IP addresses and ranges."

     object_types = {
-        'v4': ObjType(_('v4'), 'v4', 'obj'),
-        'v6': ObjType(_('v6'), 'v6', 'obj'),
-        'v4range': ObjType(_('v4range'), 'v4range', 'obj'),
-        'v6range': ObjType(_('v6range'), 'v6range', 'obj'),
+        "v4": ObjType(_("v4"), "v4", "obj"),
+        "v6": ObjType(_("v6"), "v6", "obj"),
+        "v4range": ObjType(_("v4range"), "v4range", "obj"),
+        "v6range": ObjType(_("v6range"), "v6range", "obj"),
     }

     directives = {
-        'v4range': IPv4Range,
-        'v6range': IPv6Range,
+        "v4range": IPv4Range,
+        "v6range": IPv6Range,
     }

     roles = {
-        'v4': IPXRefRole('v4', _('IPv4 address')),
-        'v6': IPXRefRole('v6', _('IPv6 address')),
-        'v4range': IPXRefRole('v4range', _('IPv4 range')),
-        'v6range': IPXRefRole('v6range', _('IPv6 range')),
+        "v4": IPXRefRole("v4", _("IPv4 address")),
+        "v6": IPXRefRole("v6", _("IPv6 address")),
+        "v4range": IPXRefRole("v4range", _("IPv4 range")),
+        "v6range": IPXRefRole("v6range", _("IPv6 range")),
     }

     initial_data = {
-        'v4': {},
-        'v6': {},
-        'v4range': {},
-        'v6range': {},
-        'ips': [],
+        "v4": {},
+        "v6": {},
+        "v4range": {},
+        "v6range": {},
+        "ips": [],
     }

     def clear_doc(self, docname):
         to_remove = []
-        for key, value in self.data['v4range'].items():
+        for key, value in self.data["v4range"].items():
             if docname == value[0]:
                 to_remove.append(key)
         for key in to_remove:
-            del self.data['v4range'][key]
+            del self.data["v4range"][key]

         to_remove = []
-        for key, value in self.data['v6range'].items():
+        for key, value in self.data["v6range"].items():
             if docname == value[0]:
                 to_remove.append(key)
         for key in to_remove:
-            del self.data['v6range'][key]
-        self.data['ips'] = [
-            item for item in self.data['ips'] if item['docname'] != docname
+            del self.data["v6range"][key]
+        self.data["ips"] = [
+            item for item in self.data["ips"] if item["docname"] != docname
         ]

-    def resolve_xref(self, env, fromdocname, builder, typ, target, node,
-                     contnode):
+    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         key = ip_object_anchor(typ, target)
         try:
             info = self.data[typ][key]
@@ -226,19 +228,20 @@ class IPDomain(Domain):
             role = self.roles.get(typ)
             if role is None:
                 return None
-            resnode = role.result_nodes(env.get_doctree(fromdocname),
-                                        env, node, True)[0][2]
+            resnode = role.result_nodes(env.get_doctree(fromdocname), env, node, True)[
+                0
+            ][2]
             if isinstance(resnode, addnodes.pending_xref):
                 text = node[0][0]
                 reporter = env.get_doctree(fromdocname).reporter
-                reporter.warning('Cannot resolve reference to %r' % text,
-                                 line=node.line)
+                reporter.warning(
+                    "Cannot resolve reference to %r" % text, line=node.line
+                )
                 return node.children
             return resnode
         else:
-            title = typ.upper() + ' ' + target
-            return make_refnode(builder, fromdocname, info[0], key,
-                                contnode, title)
+            title = typ.upper() + " " + target
+            return make_refnode(builder, fromdocname, info[0], key, contnode, title)

     @property
     def items(self):
@@ -257,13 +260,15 @@ def process_ips(app, doctree):

     for node in doctree.traverse(ip_node):
         ip = node.astext()
-        domaindata['ips'].append({
-            'docname': env.docname,
-            'source': node.parent.source or env.doc2path(env.docname),
-            'lineno': node.parent.line,
-            'ip': ip,
-            'typ': node.parent['typ'],
-        })
+        domaindata["ips"].append(
+            {
+                "docname": env.docname,
+                "source": node.parent.source or env.doc2path(env.docname),
+                "lineno": node.parent.line,
+                "ip": ip,
+                "typ": node.parent["typ"],
+            }
+        )
         replacement = nodes.literal(ip, ip)
         node.replace_self(replacement)

@@ -285,17 +290,14 @@ def process_ip_nodes(app, doctree, fromdocname):
     env = app.builder.env
     domaindata = env.domaindata[IPDomain.name]

-    header = (_('IP address'), _('Used by'))
+    header = (_("IP address"), _("Used by"))
     colwidths = (1, 3)

     for node in doctree.traverse(ip_range):
         content = []
-        net = Network(node['rangespec'])
+        net = Network(node["rangespec"])
         ips = {}
-        for key, value in [
-            (ip_info['ip'], ip_info) for ip_info in
-            domaindata['ips']
-        ]:
+        for key, value in [(ip_info["ip"], ip_info) for ip_info in domaindata["ips"]]:
             try:
                 if not key in net:
                     continue
@@ -313,34 +315,32 @@ def process_ip_nodes(app, doctree, fromdocname):
                 tgroup += nodes.colspec(colwidth=colwidth)
             thead = nodes.thead()
             tgroup += thead
-            thead += create_table_row([
-                nodes.paragraph(text=label) for label in header])
+            thead += create_table_row([nodes.paragraph(text=label) for label in header])
             tbody = nodes.tbody()
             tgroup += tbody
-            for ip, ip_info in [
-                (ip, ips[ip]) for ip in sorted(ips, key=sort_ip)
-            ]:
+            for ip, ip_info in [(ip, ips[ip]) for ip in sorted(ips, key=sort_ip)]:
                 para = nodes.paragraph()
-                para += nodes.literal('', ip)
+                para += nodes.literal("", ip)
                 refnode = nodes.paragraph()
                 refuris = set()
                 refnodes = []
                 for item in ip_info:
-                    ids = ip_object_anchor(item['typ'], item['ip'])
-                    if ids not in para['ids']:
-                        para['ids'].append(ids)
+                    ids = ip_object_anchor(item["typ"], item["ip"])
+                    if ids not in para["ids"]:
+                        para["ids"].append(ids)

-                    domaindata[item['typ']][ids] = (fromdocname, '')
-                    newnode = nodes.reference('', '', internal=True)
+                    domaindata[item["typ"]][ids] = (fromdocname, "")
+                    newnode = nodes.reference("", "", internal=True)
                     try:
-                        newnode['refuri'] = app.builder.get_relative_uri(
-                            fromdocname, item['docname'])
-                        if newnode['refuri'] in refuris:
+                        newnode["refuri"] = app.builder.get_relative_uri(
+                            fromdocname, item["docname"]
+                        )
+                        if newnode["refuri"] in refuris:
                             continue
-                        refuris.add(newnode['refuri'])
+                        refuris.add(newnode["refuri"])
                     except NoUri:
                         pass
-                    title = env.titles[item['docname']]
+                    title = env.titles[item["docname"]]
                     innernode = nodes.Text(title.astext())
                     newnode.append(innernode)
                     refnodes.append(newnode)
@@ -351,13 +351,13 @@ def process_ip_nodes(app, doctree, fromdocname):
                 tbody += create_table_row([para, refnode])
             content.append(table)
         else:
-            para = nodes.paragraph(_('No IP addresses in this range'))
+            para = nodes.paragraph(_("No IP addresses in this range"))
             content.append(para)
         node.replace_self(content)


 def setup(app):
     app.add_domain(IPDomain)
-    app.connect('doctree-read', process_ips)
-    app.connect('doctree-resolved', process_ip_nodes)
-    return {'version': __version__}
+    app.connect("doctree-read", process_ips)
+    app.connect("doctree-resolved", process_ip_nodes)
+    return {"version": __version__}
setup.py (14 changed lines)
@@ -1,16 +1,16 @@
 #!/usr/bin/env python3

-from setuptools import setup, find_packages
+from setuptools import find_packages, setup

-version = '0.3.1'
+version = "0.3.1"

-with open('README.rst') as readme:
+with open("README.rst") as readme:
     description = readme.read() + "\n\n"

-with open('CHANGES.rst') as changes:
+with open("CHANGES.rst") as changes:
     description += changes.read()

-requires = ['Sphinx>=3', 'ipcalc>=1.99']
+requires = ["Sphinx>=3", "ipcalc>=1.99"]


 setup(
@@ -24,9 +24,9 @@ setup(
     license="GPLv3+",
     url="https://pypi.python.org/pypi/jandd.sphinxext.ip",
     name="jandd.sphinxext.ip",
-    namespace_packages=['jandd', 'jandd.sphinxext'],
+    namespace_packages=["jandd", "jandd.sphinxext"],
     packages=find_packages(),
-    platforms='any',
+    platforms="any",
     version=version,
     zip_safe=False,
     classifiers=[
@@ -29,35 +29,35 @@
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = ['jandd.sphinxext.ip']
+extensions = ["jandd.sphinxext.ip"]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 # source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
 # source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = 'Sphinxext IP Tests'
-copyright = '2016, Jan Dittberner'
-author = 'Jan Dittberner'
+project = "Sphinxext IP Tests"
+copyright = "2016, Jan Dittberner"
+author = "Jan Dittberner"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '0.1.0'
+version = "0.1.0"
 # The full version, including alpha/beta/rc tags.
-release = '0.1.0'
+release = "0.1.0"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -75,7 +75,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
@@ -93,7 +93,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
 # show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -109,7 +109,7 @@ todo_include_todos = False

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'alabaster'
+html_theme = "alabaster"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -138,7 +138,7 @@ html_theme = 'alabaster'
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
@@ -203,20 +203,17 @@ html_static_path = ['_static']
 # html_search_scorer = 'scorer.js'

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'SphinxextIPTestsdoc'
+htmlhelp_basename = "SphinxextIPTestsdoc"

 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #'preamble': '',
-
     # Latex figure (float) alignment
     #'figure_align': 'htbp',
 }
@@ -225,8 +222,13 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'SphinxextIPTests.tex', 'Sphinxext IP Tests Documentation',
-     'Jan Dittberner', 'manual'),
+    (
+        master_doc,
+        "SphinxextIPTests.tex",
+        "Sphinxext IP Tests Documentation",
+        "Jan Dittberner",
+        "manual",
+    ),
 ]

 # The name of an image file (relative to this directory) to place at the top of
@@ -255,8 +257,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'sphinxextiptests', 'Sphinxext IP Tests Documentation',
-     [author], 1)
+    (master_doc, "sphinxextiptests", "Sphinxext IP Tests Documentation", [author], 1)
 ]

 # If true, show URL addresses after external links.
@@ -269,9 +270,15 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'SphinxextIPTests', 'Sphinxext IP Tests Documentation',
-     author, 'SphinxextIPTests', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "SphinxextIPTests",
+        "Sphinxext IP Tests Documentation",
+        author,
+        "SphinxextIPTests",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]

 # Documents to append as an appendix to all manuals.
tests/run.py (18 changed lines)
@@ -11,23 +11,25 @@ This script runs the jandd.sphinxext.ip unit test suite.
 """

 import sys
-from os import path
 import unittest
+from os import path


 def run(extra_args=[]):
     sys.path.insert(0, path.join(path.dirname(__file__), path.pardir))
-    sys.path.insert(1, path.abspath(
-        path.join(path.dirname(__file__), path.pardir,
-                  'jandd', 'sphinxext', 'ip'
-        ))
-    )
+    sys.path.insert(
+        1,
+        path.abspath(
+            path.join(path.dirname(__file__), path.pardir, "jandd", "sphinxext", "ip")
+        ),
+    )

     try:
         import sphinx
     except ImportError:
-        print("The sphinx package is needed to run the jandd.sphinxext.ip "
-              "test suite.")
+        print(
+            "The sphinx package is needed to run the jandd.sphinxext.ip " "test suite."
+        )

     from .test_ip import TestIPExtension

@@ -37,5 +39,5 @@ def run(extra_args=[]):
     unittest.TextTestRunner(verbosity=2).run(suite)


-if __name__ == '__main__':
+if __name__ == "__main__":
     run()
@@ -1,40 +1,41 @@
 # -*- coding: utf-8 -*-

-from io import StringIO
-from .util import SphinxTestApplication, test_root
 import unittest
+from io import StringIO
+
+from .util import SphinxTestApplication, test_root

-IP4_ADDRESSES = ['127.0.0.1', '192.168.0.1']
-IP6_ADDRESSES = ['::1', '2001:dead:beef::1']
-IP4_RANGES = ['172.16.0.0/24', '192.168.0.0/24']
-IP6_RANGES = ['2001:dead:beef::/64', '2001:dada:b001::/64']
+IP4_ADDRESSES = ["127.0.0.1", "192.168.0.1"]
+IP6_ADDRESSES = ["::1", "2001:dead:beef::1"]
+IP4_RANGES = ["172.16.0.0/24", "192.168.0.0/24"]
+IP6_RANGES = ["2001:dead:beef::/64", "2001:dada:b001::/64"]


 class TestIPExtension(unittest.TestCase):
     def setUp(self):
-        if not (test_root / '_static').exists():
-            (test_root / '_static').mkdir()
+        if not (test_root / "_static").exists():
+            (test_root / "_static").mkdir()
         self.feed_warnfile = StringIO()
         self.app = SphinxTestApplication(
-            buildername='html', warning=self.feed_warnfile, cleanenv=True)
+            buildername="html", warning=self.feed_warnfile, cleanenv=True
+        )
         self.app.build(force_all=True, filenames=[])

     def tearDown(self):
         self.app.cleanup()
-        (test_root / '_build').rmtree(True)
+        (test_root / "_build").rmtree(True)

     def test_ip_domaindata(self):
-        self.assertIn('ip', self.app.env.domaindata)
-        ipdomdata = self.app.env.domaindata['ip']
-        self.assertIn('v4', ipdomdata)
-        self.assertIn('v6', ipdomdata)
-        self.assertIn('v4range', ipdomdata)
-        self.assertIn('v6range', ipdomdata)
-        self.assertIn('ips', ipdomdata)
+        self.assertIn("ip", self.app.env.domaindata)
+        ipdomdata = self.app.env.domaindata["ip"]
+        self.assertIn("v4", ipdomdata)
+        self.assertIn("v6", ipdomdata)
+        self.assertIn("v4range", ipdomdata)
+        self.assertIn("v6range", ipdomdata)
+        self.assertIn("ips", ipdomdata)

     def find_in_index(self, entry):
-        indexentries = self.app.env.get_domain('index').entries
+        indexentries = self.app.env.get_domain("index").entries
         for index in indexentries:
             for value in indexentries[index]:
                 if value[1] == entry:
@@ -42,19 +43,19 @@ class TestIPExtension(unittest.TestCase):
         self.fail("%s not found in index" % entry)

     def test_ip4_addresses(self):
-        ipv4 = self.app.env.domaindata['ip']['v4']
-        ips = self.app.env.domaindata['ip']['ips']
+        ipv4 = self.app.env.domaindata["ip"]["v4"]
+        ips = self.app.env.domaindata["ip"]["ips"]
         for ip in IP4_ADDRESSES:
             self.assertIn(ip, ipv4)
-            self.assertIn(ip, [item['ip'] for item in ips])
+            self.assertIn(ip, [item["ip"] for item in ips])
             self.find_in_index("IPv4 address; %s" % ip)
             self.find_in_index("%s; Test page 2" % ip)

     def test_ip6_addresses(self):
-        ipv6 = self.app.env.domaindata['ip']['v6']
-        ips = self.app.env.domaindata['ip']['ips']
+        ipv6 = self.app.env.domaindata["ip"]["v6"]
+        ips = self.app.env.domaindata["ip"]["ips"]
         for ip in IP6_ADDRESSES:
             self.assertIn(ip, ipv6)
-            self.assertIn(ip, [item['ip'] for item in ips])
+            self.assertIn(ip, [item["ip"] for item in ips])
             self.find_in_index("IPv6 address; %s" % ip)
             self.find_in_index("%s; Test page 2" % ip)
@@ -17,15 +17,22 @@ from path import Path
 from sphinx import application

 __all__ = [
-    'test_root',
-    'raises', 'raises_msg', 'Struct',
-    'ListOutput', 'SphinxTestApplication', 'with_app', 'gen_with_app',
-    'Path', 'with_tempdir', 'write_file',
-    'sprint',
+    "test_root",
+    "raises",
+    "raises_msg",
+    "Struct",
+    "ListOutput",
+    "SphinxTestApplication",
+    "with_app",
+    "gen_with_app",
+    "Path",
+    "with_tempdir",
+    "write_file",
+    "sprint",
 ]


-test_root = Path(__file__).parent.joinpath('root').abspath()
+test_root = Path(__file__).parent.joinpath("root").abspath()


 def _excstr(exc):
@@ -44,8 +51,7 @@ def raises(exc, func, *args, **kwds):
     except exc:
         pass
     else:
-        raise AssertionError('%s did not raise %s' %
-                             (func.__name__, _excstr(exc)))
+        raise AssertionError("%s did not raise %s" % (func.__name__, _excstr(exc)))


 def raises_msg(exc, msg, func, *args, **kwds):
@@ -56,10 +62,9 @@ def raises_msg(exc, msg, func, *args, **kwds):
     try:
         func(*args, **kwds)
     except exc as err:
-        assert msg in str(err), "\"%s\" not in \"%s\"" % (msg, err)
+        assert msg in str(err), '"%s" not in "%s"' % (msg, err)
     else:
-        raise AssertionError('%s did not raise %s' %
-                             (func.__name__, _excstr(exc)))
+        raise AssertionError("%s did not raise %s" % (func.__name__, _excstr(exc)))


 class Struct(object):
@@ -71,6 +76,7 @@ class ListOutput(object):
     """
     File-like object that collects written text in a list.
    """
+
     def __init__(self, name):
         self.name = name
         self.content = []
@@ -88,27 +94,38 @@ class SphinxTestApplication(application.Sphinx):
     better default values for the initialization parameters.
     """

-    def __init__(self, srcdir=None, confdir=None, outdir=None, doctreedir=None,
-                 buildername='html', confoverrides=None,
-                 status=None, warning=None, freshenv=None,
-                 warningiserror=None, tags=None,
-                 confname='conf.py', cleanenv=False):
+    def __init__(
+        self,
+        srcdir=None,
+        confdir=None,
+        outdir=None,
+        doctreedir=None,
+        buildername="html",
+        confoverrides=None,
+        status=None,
+        warning=None,
+        freshenv=None,
+        warningiserror=None,
+        tags=None,
+        confname="conf.py",
+        cleanenv=False,
+    ):

         application.CONFIG_FILENAME = confname

-        self.cleanup_trees = [test_root / 'generated']
+        self.cleanup_trees = [test_root / "generated"]

         if srcdir is None:
             srcdir = test_root
-        if srcdir == '(temp)':
+        if srcdir == "(temp)":
             tempdir = Path(tempfile.mkdtemp())
             self.cleanup_trees.append(tempdir)
-            temproot = tempdir / 'root'
+            temproot = tempdir / "root"
             test_root.copytree(temproot)
             srcdir = temproot
         else:
             srcdir = Path(srcdir)
-        self.builddir = srcdir.joinpath('_build')
+        self.builddir = srcdir.joinpath("_build")
         if confdir is None:
             confdir = srcdir
         if outdir is None:
@@ -117,7 +134,7 @@ class SphinxTestApplication(application.Sphinx):
             outdir.makedirs()
             self.cleanup_trees.insert(0, outdir)
         if doctreedir is None:
-            doctreedir = srcdir.joinpath(srcdir, self.builddir, 'doctrees')
+            doctreedir = srcdir.joinpath(srcdir, self.builddir, "doctrees")
             if cleanenv:
                 self.cleanup_trees.insert(0, doctreedir)
         if confoverrides is None:
@@ -125,15 +142,26 @@ class SphinxTestApplication(application.Sphinx):
         if status is None:
             status = io.StringIO()
         if warning is None:
-            warning = ListOutput('stderr')
+            warning = ListOutput("stderr")
         if freshenv is None:
             freshenv = False
         if warningiserror is None:
             warningiserror = False

-        application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir,
-                                    buildername, confoverrides, status, warning,
-                                    freshenv, warningiserror, tags)
+        application.Sphinx.__init__(
+            self,
+            srcdir,
+            confdir,
+            outdir,
+            doctreedir,
+            buildername,
+            confoverrides,
+            status,
+            warning,
+            freshenv,
+            warningiserror,
+            tags,
+        )

     def cleanup(self, doctrees=False):
         for tree in self.cleanup_trees:
@@ -145,6 +173,7 @@ def with_app(*args, **kwargs):
     Make a TestApp with args and kwargs, pass it to the test and clean up
     properly.
     """
+
     def generator(func):
         @wraps(func)
         def deco(*args2, **kwargs2):
@@ -152,7 +181,9 @@ def with_app(*args, **kwargs):
             func(app, *args2, **kwargs2)
             # don't execute cleanup if test failed
             app.cleanup()
+
         return deco
+
     return generator


@@ -161,6 +192,7 @@ def gen_with_app(*args, **kwargs):
     Make a TestApp with args and kwargs, pass it to the test and clean up
     properly.
     """
+
     def generator(func):
         @wraps(func)
         def deco(*args2, **kwargs2):
@@ -169,7 +201,9 @@ def gen_with_app(*args, **kwargs):
                 yield item
             # don't execute cleanup if test failed
             app.cleanup()
+
         return deco
+
     return generator


@@ -178,15 +212,16 @@ def with_tempdir(func):
         tempdir = Path(tempfile.mkdtemp())
         func(tempdir)
         tempdir.rmtree()
+
     new_func.__name__ = func.__name__
     return new_func


 def write_file(name, contents):
-    f = open(str(name), 'wb')
+    f = open(str(name), "wb")
     f.write(contents)
     f.close()


 def sprint(*args):
-    sys.stderr.write(' '.join(map(str, args)) + '\n')
+    sys.stderr.write(" ".join(map(str, args)) + "\n")