author     wbond <will@wbond.net>  2019-09-09 01:53:00 -0400
committer  wbond <will@wbond.net>  2019-09-13 06:41:24 -0400
commit     bba5d621c393b86e6fb0838e631ccea252852189 (patch)
tree       1e700c6b17079456926febf5f646911828d685c7 /dev
parent     35d686750d96fddcdef6c44d7514dde71dfb0ca0 (diff)
download   asn1crypto-bba5d621c393b86e6fb0838e631ccea252852189.tar.gz
Create asn1crypto_tests package, along with supporting tooling
Adds the following tasks:

 - python run.py build
 - python run.py version {pep440_version}

Tests may now be executed in a number of different ways and will
automatically ensure the local copy of asn1crypto is used when run from a
Git working copy or an archive of a working copy.

The versioning scheme was switched from SemVer to PEP 440, since that is
what the Python ecosystem tooling supports.
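Usage sketch of the new tasks from a working copy (the version string below is
only an illustrative PEP 440 value; run.py is assumed to dispatch these to
dev.build.run() and dev.version.run(), matching the modules added in this
commit):

    python run.py build
    python run.py version 1.0.1.dev1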
Diffstat (limited to 'dev')
-rw-r--r--   dev/__init__.py     3
-rw-r--r--   dev/_import.py     93
-rw-r--r--   dev/build.py       89
-rw-r--r--   dev/ci.py          23
-rw-r--r--   dev/deps.py        84
-rw-r--r--   dev/release.py     19
-rw-r--r--   dev/tests.py       58
-rw-r--r--   dev/version.py     80
8 files changed, 375 insertions, 74 deletions
diff --git a/dev/__init__.py b/dev/__init__.py
index 403922e..02e9c6c 100644
--- a/dev/__init__.py
+++ b/dev/__init__.py
@@ -15,6 +15,9 @@ other_packages = [
"ocspbuilder"
]
+requires_oscrypto = False
+has_tests_package = True
+
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
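The two new flags are read by dev/ci.py, dev/build.py, dev/release.py and
dev/tests.py below. A hypothetical sketch of how a sibling modularcrypto
project that depends on oscrypto would set them (values are assumptions, not
taken from this commit):

    # dev/__init__.py of a downstream project (hypothetical values)
    requires_oscrypto = True    # its tests need oscrypto preloaded via dev._import._preload()
    has_tests_package = False   # only True when a separate *_tests package is shipped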
diff --git a/dev/_import.py b/dev/_import.py
new file mode 100644
index 0000000..2599588
--- /dev/null
+++ b/dev/_import.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import imp
+import sys
+import os
+
+from . import build_root
+
+
+def _import_from(mod, path, mod_dir=None):
+ """
+ Imports a module from a specific path
+
+ :param mod:
+ A unicode string of the module name
+
+ :param path:
+ A unicode string to the directory containing the module
+
+ :param mod_dir:
+ If the sub directory of "path" is different than the "mod" name,
+ pass the sub directory as a unicode string
+
+ :return:
+ None if not loaded, otherwise the module
+ """
+
+ if mod_dir is None:
+ mod_dir = mod
+
+ if not os.path.exists(path):
+ return None
+
+ if not os.path.exists(os.path.join(path, mod_dir)):
+ return None
+
+ try:
+ mod_info = imp.find_module(mod_dir, [path])
+ return imp.load_module(mod, *mod_info)
+ except ImportError:
+ return None
+
+
+def _preload(require_oscrypto, print_info):
+ """
+ Preloads asn1crypto and optionally oscrypto from a local source checkout,
+ or from a normal install
+
+ :param require_oscrypto:
+ A bool if oscrypto needs to be preloaded
+
+ :param print_info:
+ A bool if info about asn1crypto and oscrypto should be printed
+ """
+
+ if print_info:
+ print('Python ' + sys.version.replace('\n', ''))
+
+ asn1crypto = None
+ oscrypto = None
+
+ if require_oscrypto:
+ oscrypto_dir = os.path.join(build_root, 'oscrypto')
+ oscrypto_tests = None
+ if os.path.exists(oscrypto_dir):
+ oscrypto_tests = _import_from('oscrypto_tests', oscrypto_dir, 'tests')
+ if oscrypto_tests is None:
+ import oscrypto_tests
+ asn1crypto, oscrypto = oscrypto_tests.local_oscrypto()
+
+ else:
+ asn1crypto_dir = os.path.join(build_root, 'asn1crypto')
+ if os.path.exists(asn1crypto_dir):
+ asn1crypto = _import_from('asn1crypto', asn1crypto_dir)
+ if asn1crypto is None:
+ import asn1crypto
+
+ if print_info:
+ print(
+ '\nasn1crypto: %s, %s' % (
+ asn1crypto.__version__,
+ os.path.dirname(asn1crypto.__file__)
+ )
+ )
+ if require_oscrypto:
+ print(
+ 'oscrypto: %s backend, %s, %s' % (
+ oscrypto.backend(),
+ oscrypto.__version__,
+ os.path.dirname(oscrypto.__file__)
+ )
+ )
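A minimal usage sketch of _import_from() (not part of the commit; it assumes
the dev package is importable from the working copy):

    import os

    from dev import build_root
    from dev._import import _import_from

    # Load the sibling checkout's asn1crypto package if present, otherwise
    # fall back to whatever copy is installed (mirrors _preload() above).
    asn1crypto = _import_from('asn1crypto', os.path.join(build_root, 'asn1crypto'))
    if asn1crypto is None:
        import asn1crypto

    print(asn1crypto.__version__, os.path.dirname(asn1crypto.__file__))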
diff --git a/dev/build.py b/dev/build.py
new file mode 100644
index 0000000..4899594
--- /dev/null
+++ b/dev/build.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import imp
+import os
+import tarfile
+import zipfile
+
+import setuptools.sandbox
+
+from . import package_root, package_name, has_tests_package
+
+
+def _list_zip(filename):
+ """
+ Prints all of the files in a .zip file
+ """
+
+ zf = zipfile.ZipFile(filename, 'r')
+ for name in zf.namelist():
+ print(' %s' % name)
+
+
+def _list_tgz(filename):
+ """
+ Prints all of the files in a .tar.gz file
+ """
+
+ tf = tarfile.open(filename, 'r:gz')
+ for name in tf.getnames():
+ print(' %s' % name)
+
+
+def run():
+ """
+ Creates an sdist .tar.gz and a bdist_wheel --universal .whl
+
+ :return:
+ A bool - if the packaging process was successful
+ """
+
+ setup = os.path.join(package_root, 'setup.py')
+ tests_root = os.path.join(package_root, 'tests')
+ tests_setup = os.path.join(tests_root, 'setup.py')
+
+ # Trying to call setuptools.sandbox.run_setup(setup, ['--version'])
+ # resulted in a segfault, so we do this instead
+ module_info = imp.find_module('version', [os.path.join(package_root, package_name)])
+ version_mod = imp.load_module('%s.version' % package_name, *module_info)
+
+ pkg_name_info = (package_name, version_mod.__version__)
+ print('Building %s-%s' % pkg_name_info)
+
+ sdist = '%s-%s.tar.gz' % pkg_name_info
+ whl = '%s-%s-py2.py3-none-any.whl' % pkg_name_info
+ setuptools.sandbox.run_setup(setup, ['-q', 'sdist'])
+ print(' - created %s' % sdist)
+ _list_tgz(os.path.join(package_root, 'dist', sdist))
+ setuptools.sandbox.run_setup(setup, ['-q', 'bdist_wheel', '--universal'])
+ print(' - created %s' % whl)
+ _list_zip(os.path.join(package_root, 'dist', whl))
+ setuptools.sandbox.run_setup(setup, ['-q', 'clean'])
+
+ if has_tests_package:
+ print('Building %s_tests-%s' % (package_name, version_mod.__version__))
+
+ tests_sdist = '%s_tests-%s.tar.gz' % pkg_name_info
+ tests_whl = '%s_tests-%s-py2.py3-none-any.whl' % pkg_name_info
+ setuptools.sandbox.run_setup(tests_setup, ['-q', 'sdist'])
+ print(' - created %s' % tests_sdist)
+ _list_tgz(os.path.join(tests_root, 'dist', tests_sdist))
+ setuptools.sandbox.run_setup(tests_setup, ['-q', 'bdist_wheel', '--universal'])
+ print(' - created %s' % tests_whl)
+ _list_zip(os.path.join(tests_root, 'dist', tests_whl))
+ setuptools.sandbox.run_setup(tests_setup, ['-q', 'clean'])
+
+ dist_dir = os.path.join(package_root, 'dist')
+ tests_dist_dir = os.path.join(tests_root, 'dist')
+ os.rename(
+ os.path.join(tests_dist_dir, tests_sdist),
+ os.path.join(dist_dir, tests_sdist)
+ )
+ os.rename(
+ os.path.join(tests_dist_dir, tests_whl),
+ os.path.join(dist_dir, tests_whl)
+ )
+ os.rmdir(tests_dist_dir)
+
+ return True
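The version lookup in run() uses the legacy imp module because
setuptools.sandbox segfaulted, per the comment above. For reference only, a
sketch of the same file-path lookup written against importlib (an assumption,
not code from this commit; Python 3.5+ only):

    import importlib.util
    import os

    from dev import package_root, package_name

    # Load <package_name>/version.py directly from disk without importing the
    # whole package or invoking setup.py
    version_path = os.path.join(package_root, package_name, 'version.py')
    spec = importlib.util.spec_from_file_location('%s.version' % package_name, version_path)
    version_mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(version_mod)
    print(version_mod.__version__)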
diff --git a/dev/ci.py b/dev/ci.py
index c819696..a3c91e9 100644
--- a/dev/ci.py
+++ b/dev/ci.py
@@ -3,9 +3,9 @@ from __future__ import unicode_literals, division, absolute_import, print_function
import sys
import os
-import imp
-from . import build_root
+from . import build_root, requires_oscrypto
+from ._import import _preload
deps_dir = os.path.join(build_root, 'modularcrypto-deps')
@@ -34,24 +34,7 @@ def run():
A bool - if the linter and tests ran successfully
"""
- print('Python ' + sys.version.replace('\n', ''))
-
- oscrypto_tests_module_info = imp.find_module('tests', [os.path.join(build_root, 'oscrypto')])
- oscrypto_tests = imp.load_module('oscrypto.tests', *oscrypto_tests_module_info)
- asn1crypto, oscrypto = oscrypto_tests.local_oscrypto()
- print(
- '\nasn1crypto: %s, %s' % (
- asn1crypto.__version__,
- os.path.dirname(asn1crypto.__file__)
- )
- )
- print(
- 'oscrypto: %s backend, %s, %s' % (
- oscrypto.backend(),
- oscrypto.__version__,
- os.path.dirname(oscrypto.__file__)
- )
- )
+ _preload(requires_oscrypto, True)
if run_lint:
print('')
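Since requires_oscrypto is False for this package, _preload(requires_oscrypto,
True) prints roughly the following during CI (version and path are
placeholders, not real output):

    Python 2.7.16 (default, ...)

    asn1crypto: 1.0.0, /build/asn1crypto/asn1crypto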
diff --git a/dev/deps.py b/dev/deps.py
index d995c55..d4865cf 100644
--- a/dev/deps.py
+++ b/dev/deps.py
@@ -205,7 +205,7 @@ def _extract_info(archive, info):
return None
-def _extract_package(deps_dir, pkg_path):
+def _extract_package(deps_dir, pkg_path, pkg_dir):
"""
Extract a .whl, .zip, .tar.gz or .tar.bz2 into a package path to
use when running CI tasks
@@ -215,6 +215,9 @@ def _extract_package(deps_dir, pkg_path):
:param pkg_path:
A unicode string of the path to the archive
+
+ :param pkg_dir:
+ If running setup.py, change to this dir first - a unicode string
"""
if pkg_path.endswith('.exe'):
@@ -249,51 +252,61 @@ def _extract_package(deps_dir, pkg_path):
zf.close()
return
- # Source archives may contain a bunch of other things.
- # The following code works for the packages coverage and
- # configparser, which are the two we currently require that
- # do not provide wheels
+ # Source archives may contain a bunch of other things, including multiple
+ # packages, so we must use setup.py/setuptools to install/extract it
+ ar = None
+ staging_dir = os.path.join(deps_dir, '_staging')
try:
- ar = None
ar = _open_archive(pkg_path)
- pkg_name = None
- base_path = _archive_single_dir(ar) or ''
- if len(base_path):
- if '-' in base_path:
- pkg_name, _ = base_path.split('-', 1)
- base_path += '/'
-
- base_pkg_path = None
- if pkg_name is not None:
- base_pkg_path = base_path + pkg_name + '/'
- src_path = base_path + 'src/'
+ common_root = _archive_single_dir(ar)
members = []
for info in _list_archive_members(ar):
- fn = _info_name(info)
- if base_pkg_path is not None and fn.startswith(base_pkg_path):
- dst_path = fn[len(base_pkg_path) - len(pkg_name) - 1:]
- members.append((info, dst_path))
- continue
- if fn.startswith(src_path):
- members.append((info, fn[len(src_path):]))
- continue
+ dst_rel_path = _info_name(info)
+ if common_root is not None:
+ dst_rel_path = dst_rel_path[len(common_root) + 1:]
+ members.append((info, dst_rel_path))
+
+ if not os.path.exists(staging_dir):
+ os.makedirs(staging_dir)
- for info, path in members:
+ for info, rel_path in members:
info_data = _extract_info(ar, info)
# Dirs won't return a file
if info_data is not None:
- dst_path = os.path.join(deps_dir, path)
+ dst_path = os.path.join(staging_dir, rel_path)
dst_dir = os.path.dirname(dst_path)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
with open(dst_path, 'wb') as f:
f.write(info_data)
+
+ setup_dir = staging_dir
+ if pkg_dir:
+ setup_dir = os.path.join(staging_dir, pkg_dir)
+
+ root = os.path.abspath(os.path.join(deps_dir, '..'))
+ install_lib = os.path.basename(deps_dir)
+
+ _execute(
+ [
+ 'python',
+ 'setup.py',
+ 'install',
+ '--root=%s' % root,
+ '--install-lib=%s' % install_lib,
+ '--no-compile'
+ ],
+ setup_dir
+ )
+
finally:
if ar:
ar.close()
+ if staging_dir and os.path.exists(staging_dir):
+ shutil.rmtree(staging_dir)
def _stage_requirements(deps_dir, path):
@@ -306,7 +319,7 @@ def _stage_requirements(deps_dir, path):
A unicode path to a temporary diretory to use for downloads
:param path:
- A unicoe filesystem path to a requirements file
+ A unicode filesystem path to a requirements file
"""
valid_tags = _pep425tags()
@@ -320,7 +333,20 @@ def _stage_requirements(deps_dir, path):
packages = _parse_requires(path)
for p in packages:
pkg = p['pkg']
+ pkg_sub_dir = None
if p['type'] == 'url':
+ anchor = None
+ if '#' in pkg:
+ pkg, anchor = pkg.split('#', 1)
+ if '&' in anchor:
+ parts = anchor.split('&')
+ else:
+ parts = [anchor]
+ for part in parts:
+ param, value = part.split('=')
+ if param == 'subdirectory':
+ pkg_sub_dir = value
+
if pkg.endswith('.zip') or pkg.endswith('.tar.gz') or pkg.endswith('.tar.bz2') or pkg.endswith('.whl'):
url = pkg
else:
@@ -383,7 +409,7 @@ def _stage_requirements(deps_dir, path):
local_path = _download(url, deps_dir)
- _extract_package(deps_dir, local_path)
+ _extract_package(deps_dir, local_path, pkg_sub_dir)
os.remove(local_path)
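Source archives are now staged under deps_dir/_staging and installed via
setup.py instead of being copied file-by-file. With the deps_dir that ci.py
passes in (build_root/modularcrypto-deps), the _execute() call above
corresponds roughly to running the following from the staging directory (or
from the #subdirectory= sub folder when the requirement URL names one); the
root path is illustrative:

    python setup.py install --root=/path/to/build_root --install-lib=modularcrypto-deps --no-compile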
diff --git a/dev/release.py b/dev/release.py
index d5a3c7e..a854196 100644
--- a/dev/release.py
+++ b/dev/release.py
@@ -1,14 +1,13 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
-import os
import subprocess
import sys
-import setuptools.sandbox
import twine.cli
-from . import package_name, package_root
+from . import package_name, package_root, has_tests_package
+from .build import run as build
def run():
@@ -20,8 +19,6 @@ def run():
A bool - if the packaging and upload process was successful
"""
- setup_file = os.path.join(package_root, 'setup.py')
-
git_wc_proc = subprocess.Popen(
['git', 'status', '--porcelain', '-uno'],
stdout=subprocess.PIPE,
@@ -54,14 +51,10 @@ def run():
tag = tag.decode('ascii').strip()
- setuptools.sandbox.run_setup(
- setup_file,
- ['sdist', 'bdist_wheel', '--universal']
- )
+ build()
twine.cli.dispatch(['upload', 'dist/%s-%s*' % (package_name, tag)])
+ if has_tests_package:
+ twine.cli.dispatch(['upload', 'dist/%s_tests-%s*' % (package_name, tag)])
- setuptools.sandbox.run_setup(
- setup_file,
- ['clean']
- )
+ return True
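The two twine.cli.dispatch() calls correspond to the following command lines
run from package_root (the tag value is illustrative):

    twine upload dist/asn1crypto-1.0.1*
    twine upload dist/asn1crypto_tests-1.0.1*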
diff --git a/dev/tests.py b/dev/tests.py
index e05d4a2..a065c38 100644
--- a/dev/tests.py
+++ b/dev/tests.py
@@ -1,16 +1,23 @@
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
-import os
import unittest
import re
import sys
+from . import requires_oscrypto
+from ._import import _preload
+
from tests import test_classes
-import asn1crypto
+
+if sys.version_info < (3,):
+ range = xrange # noqa
+ from cStringIO import StringIO
+else:
+ from io import StringIO
-def run(matcher=None, ci=False):
+def run(matcher=None, repeat=1, ci=False):
"""
Runs the tests
@@ -18,24 +25,51 @@ def run(matcher=None, ci=False):
A unicode string containing a regular expression to use to filter test
names by. A value of None will cause no filtering.
+ :param repeat:
+ An integer - the number of times to run the tests
+
+ :param ci:
+ A bool, indicating if the tests are being run as part of CI
+
:return:
A bool - if the tests succeeded
"""
- if not ci:
- print('Python ' + sys.version.replace('\n', ''))
- print('\nasn1crypto: %s, %s\n' % (asn1crypto.__version__, os.path.dirname(asn1crypto.__file__)))
+ _preload(requires_oscrypto, not ci)
- suite = unittest.TestSuite()
loader = unittest.TestLoader()
+ # We have to manually track the list of applicable tests because for
+ # some reason with Python 3.4 on Windows, the tests in a suite are replaced
+ # with None after being executed. This breaks the repeat functionality.
+ test_list = []
for test_class in test_classes():
if matcher:
names = loader.getTestCaseNames(test_class)
for name in names:
if re.search(matcher, name):
- suite.addTest(test_class(name))
+ test_list.append(test_class(name))
else:
- suite.addTest(loader.loadTestsFromTestCase(test_class))
- verbosity = 2 if matcher else 1
- result = unittest.TextTestRunner(stream=sys.stdout, verbosity=verbosity).run(suite)
- return result.wasSuccessful()
+ test_list.append(loader.loadTestsFromTestCase(test_class))
+
+ stream = sys.stdout
+ verbosity = 1
+ if matcher and repeat == 1:
+ verbosity = 2
+ elif repeat > 1:
+ stream = StringIO()
+
+ for _ in range(0, repeat):
+ suite = unittest.TestSuite()
+ for test in test_list:
+ suite.addTest(test)
+ result = unittest.TextTestRunner(stream=stream, verbosity=verbosity).run(suite)
+
+ if len(result.errors) > 0 or len(result.failures) > 0:
+ if repeat > 1:
+ print(stream.getvalue())
+ return False
+
+ if repeat > 1:
+ stream.truncate(0)
+
+ return True
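A usage sketch of the new repeat support (the matcher pattern and count are
illustrative, not part of the commit):

    from dev.tests import run

    # Run only tests whose names contain 'parse', 25 times in a row. Per the
    # code above, output is buffered in a StringIO and only printed on failure.
    success = run(matcher='parse', repeat=25)
    print('OK' if success else 'FAILED')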
diff --git a/dev/version.py b/dev/version.py
new file mode 100644
index 0000000..3027431
--- /dev/null
+++ b/dev/version.py
@@ -0,0 +1,80 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import codecs
+import os
+import re
+
+from . import package_root, package_name, has_tests_package
+
+
+def run(new_version):
+ """
+ Updates the package version in the various locations
+
+ :param new_version:
+ A unicode string of the new library version as a PEP 440 version
+
+ :return:
+ A bool - if the version number was successfully bumped
+ """
+
+ # We use a restricted form of PEP 440 versions
+ version_match = re.match(
+ r'(\d+)\.(\d+)\.(\d+)(?:\.((?:dev|a|b|rc)\d+))?$',
+ new_version
+ )
+ if not version_match:
+ raise ValueError('Invalid PEP 440 version: %s' % new_version)
+
+ new_version_info = (
+ int(version_match.group(1)),
+ int(version_match.group(2)),
+ int(version_match.group(3)),
+ )
+ if version_match.group(4):
+ new_version_info += (version_match.group(4),)
+
+ version_path = os.path.join(package_root, package_name, 'version.py')
+ setup_path = os.path.join(package_root, 'setup.py')
+ setup_tests_path = os.path.join(package_root, 'tests', 'setup.py')
+ tests_path = os.path.join(package_root, 'tests', '__init__.py')
+
+ file_paths = [version_path, setup_path]
+ if has_tests_package:
+ file_paths.extend([setup_tests_path, tests_path])
+
+ for file_path in file_paths:
+ orig_source = ''
+ with codecs.open(file_path, 'r', encoding='utf-8') as f:
+ orig_source = f.read()
+
+ found = 0
+ new_source = ''
+ for line in orig_source.splitlines(True):
+ if line.startswith('__version__ = '):
+ found += 1
+ new_source += '__version__ = %r\n' % new_version
+ elif line.startswith('__version_info__ = '):
+ found += 1
+ new_source += '__version_info__ = %r\n' % (new_version_info,)
+ elif line.startswith('PACKAGE_VERSION = '):
+ found += 1
+ new_source += 'PACKAGE_VERSION = %r\n' % new_version
+ else:
+ new_source += line
+
+ if found == 0:
+ raise ValueError('Did not find any versions in %s' % file_path)
+
+ s = 's' if found > 1 else ''
+ rel_path = file_path[len(package_root) + 1:]
+ was_were = 'was' if found == 1 else 'were'
+ if new_source != orig_source:
+ print('Updated %d version%s in %s' % (found, s, rel_path))
+ with codecs.open(file_path, 'w', encoding='utf-8') as f:
+ f.write(new_source)
+ else:
+ print('%d version%s in %s %s up-to-date' % (found, s, rel_path, was_were))
+
+ return True
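A quick check (not part of the commit) of which strings the restricted
PEP 440 pattern in run() accepts; note that post-releases, epochs and
hyphenated pre-release forms are intentionally rejected:

    import re

    pattern = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:\.((?:dev|a|b|rc)\d+))?$')
    for version in ('1.0.0', '1.0.1.dev2', '1.2.3.rc1', '1.0.0b1', '1.0.0.post1'):
        print(version, bool(pattern.match(version)))
    # 1.0.0 True, 1.0.1.dev2 True, 1.2.3.rc1 True, 1.0.0b1 False, 1.0.0.post1 False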