diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d1a8b1e --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +SOURCES/Python-3.6.8-noexe.tar.xz diff --git a/.python3.metadata b/.python3.metadata new file mode 100644 index 0000000..77864cd --- /dev/null +++ b/.python3.metadata @@ -0,0 +1 @@ +a39802ac8f0c61645c6a50fbdd32e3ca92862ff5 SOURCES/Python-3.6.8-noexe.tar.xz diff --git a/SOURCES/00001-rpath.patch b/SOURCES/00001-rpath.patch new file mode 100644 index 0000000..9fae54c --- /dev/null +++ b/SOURCES/00001-rpath.patch @@ -0,0 +1,19 @@ +diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py +--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400 ++++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400 +@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler): + if sys.platform == "cygwin": + exe_extension = ".exe" + ++ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): ++ """Remove standard library path from rpath""" ++ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args( ++ libraries, library_dirs, runtime_library_dirs) ++ libdir = sysconfig.get_config_var('LIBDIR') ++ if runtime_library_dirs and (libdir in runtime_library_dirs): ++ runtime_library_dirs.remove(libdir) ++ return libraries, library_dirs, runtime_library_dirs ++ + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) diff --git a/SOURCES/00102-lib64.patch b/SOURCES/00102-lib64.patch new file mode 100644 index 0000000..2b913ca --- /dev/null +++ b/SOURCES/00102-lib64.patch @@ -0,0 +1,202 @@ +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 9474e9c..c0ce4c6 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -30,14 +30,14 @@ 
WINDOWS_SCHEME = { + INSTALL_SCHEMES = { + 'unix_prefix': { + 'purelib': '$base/lib/python$py_version_short/site-packages', +- 'platlib': '$platbase/lib/python$py_version_short/site-packages', ++ 'platlib': '$platbase/lib64/python$py_version_short/site-packages', + 'headers': '$base/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'unix_home': { + 'purelib': '$base/lib/python', +- 'platlib': '$base/lib/python', ++ 'platlib': '$base/lib64/python', + 'headers': '$base/include/python/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 026cca7..6d3e077 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": ++ if plat_specific or standard_lib: ++ lib = "lib64" ++ else: ++ lib = "lib" + libpython = os.path.join(prefix, +- "lib", "python" + get_python_version()) ++ lib, "python" + get_python_version()) + if standard_lib: + return libpython + else: +diff a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py +--- a/Lib/distutils/tests/test_install.py ++++ b/Lib/distutils/tests/test_install.py +@@ -57,8 +57,9 @@ + self.assertEqual(got, expected) + + libdir = os.path.join(destination, "lib", "python") ++ platlibdir = os.path.join(destination, "lib64", "python") + check_path(cmd.install_lib, libdir) +- check_path(cmd.install_platlib, libdir) ++ check_path(cmd.install_platlib, platlibdir) + check_path(cmd.install_purelib, libdir) + check_path(cmd.install_headers, + os.path.join(destination, "include", "python", "foopkg")) +diff --git a/Lib/site.py b/Lib/site.py +index a84e3bb..ba0d3ea 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None): + seen.add(prefix) + + if os.sep == '/': ++ 
sitepackages.append(os.path.join(prefix, "lib64", ++ "python" + sys.version[:3], ++ "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", + "python%d.%d" % sys.version_info[:2], + "site-packages")) + else: + sitepackages.append(prefix) ++ sitepackages.append(os.path.join(prefix, "lib64", "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + if sys.platform == "darwin": + # for framework builds *only* we add the standard Apple +diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py +index b9bbfe5..2a5f29c 100644 +--- a/Lib/sysconfig.py ++++ b/Lib/sysconfig.py +@@ -20,10 +20,10 @@ __all__ = [ + + _INSTALL_SCHEMES = { + 'posix_prefix': { +- 'stdlib': '{installed_base}/lib/python{py_version_short}', +- 'platstdlib': '{platbase}/lib/python{py_version_short}', ++ 'stdlib': '{installed_base}/lib64/python{py_version_short}', ++ 'platstdlib': '{platbase}/lib64/python{py_version_short}', + 'purelib': '{base}/lib/python{py_version_short}/site-packages', +- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages', + 'include': + '{installed_base}/include/python{py_version_short}{abiflags}', + 'platinclude': +@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = { + 'data': '{userbase}', + }, + 'posix_user': { +- 'stdlib': '{userbase}/lib/python{py_version_short}', +- 'platstdlib': '{userbase}/lib/python{py_version_short}', ++ 'stdlib': '{userbase}/lib64/python{py_version_short}', ++ 'platstdlib': '{userbase}/lib64/python{py_version_short}', + 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', +- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages', + 'include': '{userbase}/include/python{py_version_short}', + 'scripts': '{userbase}/bin', + 'data': '{userbase}', +diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py +index f698927..bc977b5 100644 +--- 
a/Lib/test/test_site.py ++++ b/Lib/test/test_site.py +@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase): + self.assertEqual(dirs[1], wanted) + elif os.sep == '/': + # OS X non-framework builds, Linux, FreeBSD, etc +- self.assertEqual(len(dirs), 1) +- wanted = os.path.join('xoxo', 'lib', ++ self.assertEqual(len(dirs), 2) ++ wanted = os.path.join('xoxo', 'lib64', + 'python%d.%d' % sys.version_info[:2], + 'site-packages') + self.assertEqual(dirs[0], wanted) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 8fa7934..a693917 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -126,7 +126,7 @@ LIBDIR= @libdir@ + MANDIR= @mandir@ + INCLUDEDIR= @includedir@ + CONFINCLUDEDIR= $(exec_prefix)/include +-SCRIPTDIR= $(prefix)/lib ++SCRIPTDIR= $(prefix)/lib64 + ABIFLAGS= @ABIFLAGS@ + + # Detailed destination directories +diff --git a/Modules/getpath.c b/Modules/getpath.c +index 65b47a3..eaa756c 100644 +--- a/Modules/getpath.c ++++ b/Modules/getpath.c +@@ -494,7 +494,7 @@ calculate_path(void) + _pythonpath = Py_DecodeLocale(PYTHONPATH, NULL); + _prefix = Py_DecodeLocale(PREFIX, NULL); + _exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL); +- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL); ++ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL); + + if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) { + Py_FatalError( +@@ -683,7 +683,7 @@ calculate_path(void) + } + else + wcsncpy(zip_path, _prefix, MAXPATHLEN); +- joinpath(zip_path, L"lib/python00.zip"); ++ joinpath(zip_path, L"lib64/python00.zip"); + bufsz = wcslen(zip_path); /* Replace "00" with version */ + zip_path[bufsz - 6] = VERSION[0]; + zip_path[bufsz - 5] = VERSION[2]; +@@ -695,7 +695,7 @@ calculate_path(void) + fprintf(stderr, + "Could not find platform dependent libraries \n"); + wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN); +- joinpath(exec_prefix, L"lib/lib-dynload"); ++ joinpath(exec_prefix, L"lib64/lib-dynload"); + } + /* If we found EXEC_PREFIX do *not* reduce 
it! (Yet.) */ + +diff --git a/setup.py b/setup.py +index 0f2dfc4..da37896 100644 +--- a/setup.py ++++ b/setup.py +@@ -492,7 +492,7 @@ class PyBuildExt(build_ext): + # directories (i.e. '.' and 'Include') must be first. See issue + # 10520. + if not cross_compiling: +- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') ++ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + # only change this for cross builds for 3.3, issues on Mageia + if cross_compiling: +@@ -780,11 +780,11 @@ class PyBuildExt(build_ext): + elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + +- ['/usr/lib/termcap'], ++ ['/usr/lib64/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], +- library_dirs=['/usr/lib/termcap'], ++ library_dirs=['/usr/lib64/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: +@@ -821,8 +821,8 @@ class PyBuildExt(build_ext): + if krb5_h: + ssl_incs += krb5_h + ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, +- ['/usr/local/ssl/lib', +- '/usr/contrib/ssl/lib/' ++ ['/usr/local/ssl/lib64', ++ '/usr/contrib/ssl/lib64/' + ] ) + + if (ssl_incs is not None and diff --git a/SOURCES/00111-no-static-lib.patch b/SOURCES/00111-no-static-lib.patch new file mode 100644 index 0000000..aa8cdc0 --- /dev/null +++ b/SOURCES/00111-no-static-lib.patch @@ -0,0 +1,53 @@ +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 9cd482f..b074b26 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -549,7 +549,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c + $(PYTHON_FOR_REGEN) ./Tools/clinic/clinic.py --make + + # Build the interpreter +-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) 
-o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + platform: $(BUILDPYTHON) pybuilddir.txt +@@ -597,12 +597,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + +- +-# Build static library +-$(LIBRARY): $(LIBRARY_OBJS) +- -rm -f $@ +- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS) +- + libpython$(LDVERSION).so: $(LIBRARY_OBJS) + if test $(INSTSONAME) != $(LDLIBRARY); then \ + $(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ +@@ -692,7 +686,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist + echo "-----------------------------------------------"; \ + fi + +-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + ############################################################################ +@@ -1428,17 +1422,6 @@ libainstall: @DEF_MAKE_RULE@ python-config + else true; \ + fi; \ + done +- @if test -d $(LIBRARY); then :; else \ +- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ +- if test "$(SHLIB_SUFFIX)" = .dll; then \ +- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ +- else \ +- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- fi; \ +- else \ +- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ +- fi; \ +- fi + $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c + $(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o + $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in diff --git a/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch new file mode 100644 index 0000000..77dc6ec --- /dev/null 
+++ b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch @@ -0,0 +1,46 @@ +diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py +--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400 +@@ -3,6 +3,7 @@ + import sys + import functools + import difflib ++import os + import logging + import pprint + import re +@@ -101,5 +102,21 @@ def expectedFailure(func): + raise self.test_case.failureException(msg) + ++# Non-standard/downstream-only hooks for handling issues with specific test ++# cases: ++ ++def _skipInRpmBuild(reason): ++ """ ++ Non-standard/downstream-only decorator for marking a specific unit test ++ to be skipped when run within the %check of an rpmbuild. ++ ++ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within ++ the environment, and has no effect otherwise. ++ """ ++ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ: ++ return skip(reason) ++ else: ++ return _id ++ + class _AssertRaisesBaseContext(_BaseTestCaseContext): + + def __init__(self, expected, test_case, expected_regex=None): +diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py +--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400 +@@ -57,7 +57,8 @@ __unittest = True + + from .result import TestResult + from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf, +- skipUnless, expectedFailure) ++ skipUnless, expectedFailure, ++ _skipInRpmBuild) + from .suite import BaseTestSuite, TestSuite + from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames, + findTestCases) diff --git a/SOURCES/00155-avoid-ctypes-thunks.patch b/SOURCES/00155-avoid-ctypes-thunks.patch new file mode 100644 
index 0000000..f03890e --- /dev/null +++ b/SOURCES/00155-avoid-ctypes-thunks.patch @@ -0,0 +1,15 @@ +diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py +--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400 ++++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400 +@@ -275,11 +275,6 @@ def _reset_cache(): + # _SimpleCData.c_char_p_from_param + POINTER(c_char).from_param = c_char_p.from_param + _pointer_type_cache[None] = c_void_p +- # XXX for whatever reasons, creating the first instance of a callback +- # function is needed for the unittests on Win64 to succeed. This MAY +- # be a compiler bug, since the problem occurs only when _ctypes is +- # compiled with the MS SDK compiler. Or an uninitialized variable? +- CFUNCTYPE(c_int)(lambda: None) + + def create_unicode_buffer(init, size=None): + """create_unicode_buffer(aString) -> character array diff --git a/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch b/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch new file mode 100644 index 0000000..9fa91d5 --- /dev/null +++ b/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py +--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400 ++++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400 +@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase): + posix.RTLD_GLOBAL + posix.RTLD_LOCAL + ++ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot') + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") + def test_fs_holes(self): diff --git a/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch b/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch new file 
mode 100644 index 0000000..0e28036 --- /dev/null +++ b/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py +--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400 ++++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400 +@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG + # Tests which require a stream socket and can use either recvmsg() + # or recvmsg_into(). + ++ @unittest._skipInRpmBuild('fails intermittently when run within Koji') + def testRecvmsgEOF(self): + # Receive end-of-stream indicator (b"", peer socket closed). + msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024) diff --git a/SOURCES/00170-gc-assertions.patch b/SOURCES/00170-gc-assertions.patch new file mode 100644 index 0000000..3e20d43 --- /dev/null +++ b/SOURCES/00170-gc-assertions.patch @@ -0,0 +1,309 @@ +diff --git a/Include/object.h b/Include/object.h +index 63e37b8..613b26c 100644 +--- a/Include/object.h ++++ b/Include/object.h +@@ -1071,6 +1071,49 @@ PyAPI_FUNC(void) + _PyObject_DebugTypeStats(FILE *out); + #endif /* ifndef Py_LIMITED_API */ + ++/* ++ Define a pair of assertion macros. ++ ++ These work like the regular C assert(), in that they will abort the ++ process with a message on stderr if the given condition fails to hold, ++ but compile away to nothing if NDEBUG is defined. ++ ++ However, before aborting, Python will also try to call _PyObject_Dump() on ++ the given object. This may be of use when investigating bugs in which a ++ particular object is corrupt (e.g. buggy a tp_visit method in an extension ++ module breaking the garbage collector), to help locate the broken objects. ++ ++ The WITH_MSG variant allows you to supply an additional message that Python ++ will attempt to print to stderr, after the object dump. 
++*/ ++#ifdef NDEBUG ++/* No debugging: compile away the assertions: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0) ++#else ++/* With debugging: generate checks: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \ ++ ((expr) \ ++ ? (void)(0) \ ++ : _PyObject_AssertFailed((obj), \ ++ (msg), \ ++ (__STRING(expr)), \ ++ (__FILE__), \ ++ (__LINE__), \ ++ (__PRETTY_FUNCTION__))) ++#endif ++ ++#define PyObject_ASSERT(obj, expr) \ ++ PyObject_ASSERT_WITH_MSG(obj, expr, NULL) ++ ++/* ++ Declare and define the entrypoint even when NDEBUG is defined, to avoid ++ causing compiler/linker errors when building extensions without NDEBUG ++ against a Python built with NDEBUG defined ++*/ ++PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *, ++ const char *, const char *, int, ++ const char *); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py +index 7e82b24..8ecc3d9 100644 +--- a/Lib/test/test_gc.py ++++ b/Lib/test/test_gc.py +@@ -2,9 +2,11 @@ import unittest + from test.support import (verbose, refcount_test, run_unittest, + strip_python_stderr, cpython_only, start_threads, + temp_dir, requires_type_collecting, TESTFN, unlink) ++from test.support import import_module + from test.support.script_helper import assert_python_ok, make_script + + import sys ++import sysconfig + import time + import gc + import weakref +@@ -50,6 +52,8 @@ class GC_Detector(object): + # gc collects it. + self.wr = weakref.ref(C1055820(666), it_happened) + ++BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS']) ++ + @with_tp_del + class Uncollectable(object): + """Create a reference cycle with multiple __del__ methods. 
+@@ -877,6 +881,50 @@ class GCCallbackTests(unittest.TestCase): + self.assertEqual(len(gc.garbage), 0) + + ++ @unittest.skipIf(BUILD_WITH_NDEBUG, ++ 'built with -NDEBUG') ++ def test_refcount_errors(self): ++ self.preclean() ++ # Verify the "handling" of objects with broken refcounts ++ import_module("ctypes") #skip if not supported ++ ++ import subprocess ++ code = '''if 1: ++ a = [] ++ b = [a] ++ ++ # Simulate the refcount of "a" being too low (compared to the ++ # references held on it by live data), but keeping it above zero ++ # (to avoid deallocating it): ++ import ctypes ++ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a)) ++ ++ # The garbage collector should now have a fatal error when it reaches ++ # the broken object: ++ import gc ++ gc.collect() ++ ''' ++ p = subprocess.Popen([sys.executable, "-c", code], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ stdout, stderr = p.communicate() ++ p.stdout.close() ++ p.stderr.close() ++ # Verify that stderr has a useful error message: ++ self.assertRegex(stderr, ++ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.') ++ self.assertRegex(stderr, ++ b'refcount was too small') ++ self.assertRegex(stderr, ++ b'object : \[\]') ++ self.assertRegex(stderr, ++ b'type : list') ++ self.assertRegex(stderr, ++ b'refcount: 1') ++ self.assertRegex(stderr, ++ b'address : 0x[0-9a-f]+') ++ ++ + class GCTogglingTests(unittest.TestCase): + def setUp(self): + gc.enable() +diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c +index 3bddc40..0cc24f7 100644 +--- a/Modules/gcmodule.c ++++ b/Modules/gcmodule.c +@@ -342,7 +342,8 @@ update_refs(PyGC_Head *containers) + { + PyGC_Head *gc = containers->gc.gc_next; + for (; gc != containers; gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) == GC_REACHABLE); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) == GC_REACHABLE); + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); + /* Python's cyclic gc should never see an incoming 
refcount + * of 0: if something decref'ed to 0, it should have been +@@ -362,7 +363,8 @@ update_refs(PyGC_Head *containers) + * so serious that maybe this should be a release-build + * check instead of an assert? + */ +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0); + } + } + +@@ -377,7 +379,9 @@ visit_decref(PyObject *op, void *data) + * generation being collected, which can be recognized + * because only they have positive gc_refs. + */ +- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */ ++ PyObject_ASSERT_WITH_MSG(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0, ++ "refcount was too small"); /* else refcount was too small */ + if (_PyGCHead_REFS(gc) > 0) + _PyGCHead_DECREF(gc); + } +@@ -437,9 +441,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable) + * If gc_refs == GC_UNTRACKED, it must be ignored. + */ + else { +- assert(gc_refs > 0 +- || gc_refs == GC_REACHABLE +- || gc_refs == GC_UNTRACKED); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc_refs > 0 ++ || gc_refs == GC_REACHABLE ++ || gc_refs == GC_UNTRACKED); + } + } + return 0; +@@ -481,7 +486,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) + */ + PyObject *op = FROM_GC(gc); + traverseproc traverse = Py_TYPE(op)->tp_traverse; +- assert(_PyGCHead_REFS(gc) > 0); ++ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0); + _PyGCHead_SET_REFS(gc, GC_REACHABLE); + (void) traverse(op, + (visitproc)visit_reachable, +@@ -544,7 +549,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) + for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) { + PyObject *op = FROM_GC(gc); + +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (has_legacy_finalizer(op)) { +@@ -620,7 +625,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + PyWeakReference **wrlist; + + op = FROM_GC(gc); +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, 
IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) +@@ -641,9 +646,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + * the callback pointer intact. Obscure: it also + * changes *wrlist. + */ +- assert(wr->wr_object == op); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == op); + _PyWeakref_ClearRef(wr); +- assert(wr->wr_object == Py_None); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None); + if (wr->wr_callback == NULL) + continue; /* no callback */ + +@@ -677,7 +682,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + */ + if (IS_TENTATIVELY_UNREACHABLE(wr)) + continue; +- assert(IS_REACHABLE(wr)); ++ PyObject_ASSERT(op, IS_REACHABLE(wr)); + + /* Create a new reference so that wr can't go away + * before we can process it again. +@@ -686,7 +691,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + /* Move wr to wrcb_to_call, for the next pass. */ + wrasgc = AS_GC(wr); +- assert(wrasgc != next); /* wrasgc is reachable, but ++ PyObject_ASSERT(op, wrasgc != next); ++ /* wrasgc is reachable, but + next isn't, so they can't + be the same */ + gc_list_move(wrasgc, &wrcb_to_call); +@@ -702,11 +708,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + gc = wrcb_to_call.gc.gc_next; + op = FROM_GC(gc); +- assert(IS_REACHABLE(op)); +- assert(PyWeakref_Check(op)); ++ PyObject_ASSERT(op, IS_REACHABLE(op)); ++ PyObject_ASSERT(op, PyWeakref_Check(op)); + wr = (PyWeakReference *)op; + callback = wr->wr_callback; +- assert(callback != NULL); ++ PyObject_ASSERT(op, callback != NULL); + + /* copy-paste of weakrefobject.c's handle_callback() */ + temp = PyObject_CallFunctionObjArgs(callback, wr, NULL); +@@ -820,12 +826,14 @@ check_garbage(PyGC_Head *collectable) + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ 
_PyGCHead_REFS(gc) != 0); + } + subtract_refs(collectable); + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) >= 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) >= 0); + if (_PyGCHead_REFS(gc) != 0) + return -1; + } +diff --git a/Objects/object.c b/Objects/object.c +index fdd41a6..bfe806c 100644 +--- a/Objects/object.c ++++ b/Objects/object.c +@@ -2031,6 +2031,35 @@ _PyTrash_thread_destroy_chain(void) + } + } + ++PyAPI_FUNC(void) ++_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr, ++ const char *file, int line, const char *function) ++{ ++ fprintf(stderr, ++ "%s:%d: %s: Assertion \"%s\" failed.\n", ++ file, line, function, expr); ++ if (msg) { ++ fprintf(stderr, "%s\n", msg); ++ } ++ ++ fflush(stderr); ++ ++ if (obj) { ++ /* This might succeed or fail, but we're about to abort, so at least ++ try to provide any extra info we can: */ ++ _PyObject_Dump(obj); ++ } ++ else { ++ fprintf(stderr, "NULL object\n"); ++ } ++ ++ fflush(stdout); ++ fflush(stderr); ++ ++ /* Terminate the process: */ ++ abort(); ++} ++ + #ifndef Py_TRACE_REFS + /* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc. + Define this here, so we can undefine the macro. 
*/ diff --git a/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch b/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch new file mode 100644 index 0000000..fc49b30 --- /dev/null +++ b/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch @@ -0,0 +1,30 @@ +diff -r 39b9b05c3085 Lib/distutils/sysconfig.py +--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -362,7 +362,10 @@ + done[n] = item = "" + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: +diff -r 39b9b05c3085 Lib/sysconfig.py +--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -296,7 +296,10 @@ + + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: diff --git a/SOURCES/00189-use-rpm-wheels.patch b/SOURCES/00189-use-rpm-wheels.patch new file mode 100644 index 0000000..6e12814 --- /dev/null +++ b/SOURCES/00189-use-rpm-wheels.patch @@ -0,0 +1,70 @@ +diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py +index 09c572d..167d27b 100644 +--- a/Lib/ensurepip/__init__.py ++++ b/Lib/ensurepip/__init__.py +@@ -1,16 +1,27 @@ ++import distutils.version ++import glob + import os + import os.path +-import pkgutil + import sys + import tempfile + + + __all__ = ["version", "bootstrap"] + ++_WHEEL_DIR = "/usr/share/python{}-wheels/".format(sys.version_info[0]) + +-_SETUPTOOLS_VERSION = "40.6.2" ++def _get_most_recent_wheel_version(pkg): ++ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg)) ++ suffix = "-py2.py3-none-any.whl" ++ pattern = "{}*{}".format(prefix, suffix) ++ versions = (p[len(prefix):-len(suffix)] for p 
in glob.glob(pattern)) ++ return str(max(versions, key=distutils.version.LooseVersion)) ++ ++ ++_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools") ++ ++_PIP_VERSION = _get_most_recent_wheel_version("pip") + +-_PIP_VERSION = "18.1" + + _PROJECTS = [ + ("setuptools", _SETUPTOOLS_VERSION), +@@ -23,9 +34,15 @@ def _run_pip(args, additional_paths=None): + if additional_paths is not None: + sys.path = additional_paths + sys.path + +- # Install the bundled software +- import pip._internal +- return pip._internal.main(args) ++ try: ++ # pip 10 ++ from pip._internal import main ++ except ImportError: ++ # pip 9 ++ from pip import main ++ if args[0] in ["install", "list", "wheel"]: ++ args.append('--pre') ++ return main(args) + + + def version(): +@@ -94,12 +111,9 @@ def _bootstrap(*, root=None, upgrade=False, user=False, + additional_paths = [] + for project, version in _PROJECTS: + wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) +- whl = pkgutil.get_data( +- "ensurepip", +- "_bundled/{}".format(wheel_name), +- ) +- with open(os.path.join(tmpdir, wheel_name), "wb") as fp: +- fp.write(whl) ++ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp: ++ with open(os.path.join(tmpdir, wheel_name), "wb") as fp: ++ fp.write(sfp.read()) + + additional_paths.append(os.path.join(tmpdir, wheel_name)) + diff --git a/SOURCES/00205-make-libpl-respect-lib64.patch b/SOURCES/00205-make-libpl-respect-lib64.patch new file mode 100644 index 0000000..3e7c797 --- /dev/null +++ b/SOURCES/00205-make-libpl-respect-lib64.patch @@ -0,0 +1,12 @@ +diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in +--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200 ++++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200 +@@ -1340,7 +1340,7 @@ inclinstall: + + # Install the library and miscellaneous stuff needed for extending/embedding + # This goes into $(exec_prefix) +-LIBPL= @LIBPL@ ++LIBPL= 
$(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH) + + # pkgconfig directory + LIBPC= $(LIBDIR)/pkgconfig diff --git a/SOURCES/00251-change-user-install-location.patch b/SOURCES/00251-change-user-install-location.patch new file mode 100644 index 0000000..4104449 --- /dev/null +++ b/SOURCES/00251-change-user-install-location.patch @@ -0,0 +1,46 @@ +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 0258d3d..4ebf50a 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -418,8 +418,19 @@ class install(Command): + raise DistutilsOptionError( + "must not supply exec-prefix without prefix") + +- self.prefix = os.path.normpath(sys.prefix) +- self.exec_prefix = os.path.normpath(sys.exec_prefix) ++ # self.prefix is set to sys.prefix + /local/ ++ # if neither RPM build nor virtual environment is ++ # detected to make pip and distutils install packages ++ # into the separate location. ++ if (not (hasattr(sys, 'real_prefix') or ++ sys.prefix != sys.base_prefix) and ++ 'RPM_BUILD_ROOT' not in os.environ): ++ addition = "/local" ++ else: ++ addition = "" ++ ++ self.prefix = os.path.normpath(sys.prefix) + addition ++ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition + + else: + if self.exec_prefix is None: +diff --git a/Lib/site.py b/Lib/site.py +index 0fc9200..c95202e 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -322,7 +322,14 @@ def getsitepackages(prefixes=None): + return sitepackages + + def addsitepackages(known_paths, prefixes=None): +- """Add site-packages to sys.path""" ++ """Add site-packages to sys.path ++ ++ '/usr/local' is included in PREFIXES if RPM build is not detected ++ to make packages installed into this location visible. 
++ ++ """ ++ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ: ++ PREFIXES.insert(0, "/usr/local") + for sitedir in getsitepackages(prefixes): + if os.path.isdir(sitedir): + addsitedir(sitedir, known_paths) diff --git a/SOURCES/00262-pep538_coerce_legacy_c_locale.patch b/SOURCES/00262-pep538_coerce_legacy_c_locale.patch new file mode 100644 index 0000000..0e452e7 --- /dev/null +++ b/SOURCES/00262-pep538_coerce_legacy_c_locale.patch @@ -0,0 +1,901 @@ +diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst +index d14793a..65aa3ad 100644 +--- a/Doc/using/cmdline.rst ++++ b/Doc/using/cmdline.rst +@@ -728,6 +728,45 @@ conflict. + + .. versionadded:: 3.6 + ++ ++.. envvar:: PYTHONCOERCECLOCALE ++ ++ If set to the value ``0``, causes the main Python command line application ++ to skip coercing the legacy ASCII-based C locale to a more capable UTF-8 ++ based alternative. Note that this setting is checked even when the ++ :option:`-E` or :option:`-I` options are used, as it is handled prior to ++ the processing of command line options. ++ ++ If this variable is *not* set, or is set to a value other than ``0``, and ++ the current locale reported for the ``LC_CTYPE`` category is the default ++ ``C`` locale, then the Python CLI will attempt to configure one of the ++ following locales for the given locale categories before loading the ++ interpreter runtime: ++ ++ * ``C.UTF-8`` (``LC_ALL``) ++ * ``C.utf8`` (``LC_ALL``) ++ * ``UTF-8`` (``LC_CTYPE``) ++ ++ If setting one of these locale categories succeeds, then the matching ++ environment variables will be set (both ``LC_ALL`` and ``LANG`` for the ++ ``LC_ALL`` category, and ``LC_CTYPE`` for the ``LC_CTYPE`` category) in ++ the current process environment before the Python runtime is initialized. ++ ++ Configuring one of these locales (either explicitly or via the above ++ implicit locale coercion) will automatically set the error handler for ++ :data:`sys.stdin` and :data:`sys.stdout` to ``surrogateescape``. 
This ++ behavior can be overridden using :envvar:`PYTHONIOENCODING` as usual. ++ ++ For debugging purposes, setting ``PYTHONCOERCECLOCALE=warn`` will cause ++ Python to emit warning messages on ``stderr`` if either the locale coercion ++ activates, or else if a locale that *would* have triggered coercion is ++ still active when the Python runtime is initialized. ++ ++ Availability: \*nix ++ ++ .. versionadded:: 3.7 ++ See :pep:`538` for more details. ++ + Debug-mode variables + ~~~~~~~~~~~~~~~~~~~~ + +diff --git a/Lib/test/support/script_helper.py b/Lib/test/support/script_helper.py +index 507dc48..c3cb720 100644 +--- a/Lib/test/support/script_helper.py ++++ b/Lib/test/support/script_helper.py +@@ -56,8 +56,35 @@ def interpreter_requires_environment(): + return __cached_interp_requires_environment + + +-_PythonRunResult = collections.namedtuple("_PythonRunResult", +- ("rc", "out", "err")) ++class _PythonRunResult(collections.namedtuple("_PythonRunResult", ++ ("rc", "out", "err"))): ++ """Helper for reporting Python subprocess run results""" ++ def fail(self, cmd_line): ++ """Provide helpful details about failed subcommand runs""" ++ # Limit to 80 lines to ASCII characters ++ maxlen = 80 * 100 ++ out, err = self.out, self.err ++ if len(out) > maxlen: ++ out = b'(... truncated stdout ...)' + out[-maxlen:] ++ if len(err) > maxlen: ++ err = b'(... 
truncated stderr ...)' + err[-maxlen:] ++ out = out.decode('ascii', 'replace').rstrip() ++ err = err.decode('ascii', 'replace').rstrip() ++ raise AssertionError("Process return code is %d\n" ++ "command line: %r\n" ++ "\n" ++ "stdout:\n" ++ "---\n" ++ "%s\n" ++ "---\n" ++ "\n" ++ "stderr:\n" ++ "---\n" ++ "%s\n" ++ "---" ++ % (self.rc, cmd_line, ++ out, ++ err)) + + + # Executing the interpreter in a subprocess +@@ -115,30 +142,7 @@ def run_python_until_end(*args, **env_vars): + def _assert_python(expected_success, *args, **env_vars): + res, cmd_line = run_python_until_end(*args, **env_vars) + if (res.rc and expected_success) or (not res.rc and not expected_success): +- # Limit to 80 lines to ASCII characters +- maxlen = 80 * 100 +- out, err = res.out, res.err +- if len(out) > maxlen: +- out = b'(... truncated stdout ...)' + out[-maxlen:] +- if len(err) > maxlen: +- err = b'(... truncated stderr ...)' + err[-maxlen:] +- out = out.decode('ascii', 'replace').rstrip() +- err = err.decode('ascii', 'replace').rstrip() +- raise AssertionError("Process return code is %d\n" +- "command line: %r\n" +- "\n" +- "stdout:\n" +- "---\n" +- "%s\n" +- "---\n" +- "\n" +- "stderr:\n" +- "---\n" +- "%s\n" +- "---" +- % (res.rc, cmd_line, +- out, +- err)) ++ res.fail(cmd_line) + return res + + def assert_python_ok(*args, **env_vars): +diff --git a/Lib/test/test_c_locale_coercion.py b/Lib/test/test_c_locale_coercion.py +new file mode 100644 +index 0000000..635c98f +--- /dev/null ++++ b/Lib/test/test_c_locale_coercion.py +@@ -0,0 +1,371 @@ ++# Tests the attempted automatic coercion of the C locale to a UTF-8 locale ++ ++import unittest ++import locale ++import os ++import sys ++import sysconfig ++import shutil ++import subprocess ++from collections import namedtuple ++ ++import test.support ++from test.support.script_helper import ( ++ run_python_until_end, ++ interpreter_requires_environment, ++) ++ ++# Set our expectation for the default encoding used in the C locale ++# for the 
filesystem encoding and the standard streams ++ ++# AIX uses iso8859-1 in the C locale, other *nix platforms use ASCII ++if sys.platform.startswith("aix"): ++ C_LOCALE_STREAM_ENCODING = "iso8859-1" ++else: ++ C_LOCALE_STREAM_ENCODING = "ascii" ++ ++# FS encoding is UTF-8 on macOS, other *nix platforms use the locale encoding ++if sys.platform == "darwin": ++ C_LOCALE_FS_ENCODING = "utf-8" ++else: ++ C_LOCALE_FS_ENCODING = C_LOCALE_STREAM_ENCODING ++ ++# Note that the above is probably still wrong in some cases, such as: ++# * Windows when PYTHONLEGACYWINDOWSFSENCODING is set ++# * AIX and any other platforms that use latin-1 in the C locale ++# ++# Options for dealing with this: ++# * Don't set PYTHON_COERCE_C_LOCALE on such platforms (e.g. Windows doesn't) ++# * Fix the test expectations to match the actual platform behaviour ++ ++# In order to get the warning messages to match up as expected, the candidate ++# order here must much the target locale order in Python/pylifecycle.c ++_C_UTF8_LOCALES = ("C.UTF-8", "C.utf8", "UTF-8") ++ ++# There's no reliable cross-platform way of checking locale alias ++# lists, so the only way of knowing which of these locales will work ++# is to try them with locale.setlocale(). We do that in a subprocess ++# to avoid altering the locale of the test runner. 
++# ++# If the relevant locale module attributes exist, and we're not on a platform ++# where we expect it to always succeed, we also check that ++# `locale.nl_langinfo(locale.CODESET)` works, as if it fails, the interpreter ++# will skip locale coercion for that particular target locale ++_check_nl_langinfo_CODESET = bool( ++ sys.platform not in ("darwin", "linux") and ++ hasattr(locale, "nl_langinfo") and ++ hasattr(locale, "CODESET") ++) ++ ++def _set_locale_in_subprocess(locale_name): ++ cmd_fmt = "import locale; print(locale.setlocale(locale.LC_CTYPE, '{}'))" ++ if _check_nl_langinfo_CODESET: ++ # If there's no valid CODESET, we expect coercion to be skipped ++ cmd_fmt += "; import sys; sys.exit(not locale.nl_langinfo(locale.CODESET))" ++ cmd = cmd_fmt.format(locale_name) ++ result, py_cmd = run_python_until_end("-c", cmd, __isolated=True) ++ return result.rc == 0 ++ ++ ++ ++_fields = "fsencoding stdin_info stdout_info stderr_info lang lc_ctype lc_all" ++_EncodingDetails = namedtuple("EncodingDetails", _fields) ++ ++class EncodingDetails(_EncodingDetails): ++ # XXX (ncoghlan): Using JSON for child state reporting may be less fragile ++ CHILD_PROCESS_SCRIPT = ";".join([ ++ "import sys, os", ++ "print(sys.getfilesystemencoding())", ++ "print(sys.stdin.encoding + ':' + sys.stdin.errors)", ++ "print(sys.stdout.encoding + ':' + sys.stdout.errors)", ++ "print(sys.stderr.encoding + ':' + sys.stderr.errors)", ++ "print(os.environ.get('LANG', 'not set'))", ++ "print(os.environ.get('LC_CTYPE', 'not set'))", ++ "print(os.environ.get('LC_ALL', 'not set'))", ++ ]) ++ ++ @classmethod ++ def get_expected_details(cls, coercion_expected, fs_encoding, stream_encoding, env_vars): ++ """Returns expected child process details for a given encoding""" ++ _stream = stream_encoding + ":{}" ++ # stdin and stdout should use surrogateescape either because the ++ # coercion triggered, or because the C locale was detected ++ stream_info = 2*[_stream.format("surrogateescape")] ++ # stderr 
should always use backslashreplace ++ stream_info.append(_stream.format("backslashreplace")) ++ expected_lang = env_vars.get("LANG", "not set").lower() ++ if coercion_expected: ++ expected_lc_ctype = CLI_COERCION_TARGET.lower() ++ else: ++ expected_lc_ctype = env_vars.get("LC_CTYPE", "not set").lower() ++ expected_lc_all = env_vars.get("LC_ALL", "not set").lower() ++ env_info = expected_lang, expected_lc_ctype, expected_lc_all ++ return dict(cls(fs_encoding, *stream_info, *env_info)._asdict()) ++ ++ @staticmethod ++ def _handle_output_variations(data): ++ """Adjust the output to handle platform specific idiosyncrasies ++ ++ * Some platforms report ASCII as ANSI_X3.4-1968 ++ * Some platforms report ASCII as US-ASCII ++ * Some platforms report UTF-8 instead of utf-8 ++ """ ++ data = data.replace(b"ANSI_X3.4-1968", b"ascii") ++ data = data.replace(b"US-ASCII", b"ascii") ++ data = data.lower() ++ return data ++ ++ @classmethod ++ def get_child_details(cls, env_vars): ++ """Retrieves fsencoding and standard stream details from a child process ++ ++ Returns (encoding_details, stderr_lines): ++ ++ - encoding_details: EncodingDetails for eager decoding ++ - stderr_lines: result of calling splitlines() on the stderr output ++ ++ The child is run in isolated mode if the current interpreter supports ++ that. 
++ """ ++ result, py_cmd = run_python_until_end( ++ "-c", cls.CHILD_PROCESS_SCRIPT, ++ __isolated=True, ++ **env_vars ++ ) ++ if not result.rc == 0: ++ result.fail(py_cmd) ++ # All subprocess outputs in this test case should be pure ASCII ++ adjusted_output = cls._handle_output_variations(result.out) ++ stdout_lines = adjusted_output.decode("ascii").splitlines() ++ child_encoding_details = dict(cls(*stdout_lines)._asdict()) ++ stderr_lines = result.err.decode("ascii").rstrip().splitlines() ++ return child_encoding_details, stderr_lines ++ ++ ++# Details of the shared library warning emitted at runtime ++LEGACY_LOCALE_WARNING = ( ++ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII " ++ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, " ++ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible " ++ "locales is recommended." ++) ++ ++# Details of the CLI locale coercion warning emitted at runtime ++CLI_COERCION_WARNING_FMT = ( ++ "Python detected LC_CTYPE=C: LC_CTYPE coerced to {} (set another locale " ++ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior)." 
++) ++ ++ ++AVAILABLE_TARGETS = None ++CLI_COERCION_TARGET = None ++CLI_COERCION_WARNING = None ++ ++def setUpModule(): ++ global AVAILABLE_TARGETS ++ global CLI_COERCION_TARGET ++ global CLI_COERCION_WARNING ++ ++ if AVAILABLE_TARGETS is not None: ++ # initialization already done ++ return ++ AVAILABLE_TARGETS = [] ++ ++ # Find the target locales available in the current system ++ for target_locale in _C_UTF8_LOCALES: ++ if _set_locale_in_subprocess(target_locale): ++ AVAILABLE_TARGETS.append(target_locale) ++ ++ if AVAILABLE_TARGETS: ++ # Coercion is expected to use the first available target locale ++ CLI_COERCION_TARGET = AVAILABLE_TARGETS[0] ++ CLI_COERCION_WARNING = CLI_COERCION_WARNING_FMT.format(CLI_COERCION_TARGET) ++ ++ ++class _LocaleHandlingTestCase(unittest.TestCase): ++ # Base class to check expected locale handling behaviour ++ ++ def _check_child_encoding_details(self, ++ env_vars, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ expected_warnings, ++ coercion_expected): ++ """Check the C locale handling for the given process environment ++ ++ Parameters: ++ expected_fs_encoding: expected sys.getfilesystemencoding() result ++ expected_stream_encoding: expected encoding for standard streams ++ expected_warning: stderr output to expect (if any) ++ """ ++ result = EncodingDetails.get_child_details(env_vars) ++ encoding_details, stderr_lines = result ++ expected_details = EncodingDetails.get_expected_details( ++ coercion_expected, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ env_vars ++ ) ++ self.assertEqual(encoding_details, expected_details) ++ if expected_warnings is None: ++ expected_warnings = [] ++ self.assertEqual(stderr_lines, expected_warnings) ++ ++ ++class LocaleConfigurationTests(_LocaleHandlingTestCase): ++ # Test explicit external configuration via the process environment ++ ++ def setUpClass(): ++ # This relies on setupModule() having been run, so it can't be ++ # handled via the @unittest.skipUnless decorator ++ if 
not AVAILABLE_TARGETS: ++ raise unittest.SkipTest("No C-with-UTF-8 locale available") ++ ++ def test_external_target_locale_configuration(self): ++ ++ # Explicitly setting a target locale should give the same behaviour as ++ # is seen when implicitly coercing to that target locale ++ self.maxDiff = None ++ ++ expected_fs_encoding = "utf-8" ++ expected_stream_encoding = "utf-8" ++ ++ base_var_dict = { ++ "LANG": "", ++ "LC_CTYPE": "", ++ "LC_ALL": "", ++ } ++ for env_var in ("LANG", "LC_CTYPE"): ++ for locale_to_set in AVAILABLE_TARGETS: ++ # XXX (ncoghlan): LANG=UTF-8 doesn't appear to work as ++ # expected, so skip that combination for now ++ # See https://bugs.python.org/issue30672 for discussion ++ if env_var == "LANG" and locale_to_set == "UTF-8": ++ continue ++ ++ with self.subTest(env_var=env_var, ++ configured_locale=locale_to_set): ++ var_dict = base_var_dict.copy() ++ var_dict[env_var] = locale_to_set ++ self._check_child_encoding_details(var_dict, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ expected_warnings=None, ++ coercion_expected=False) ++ ++ ++ ++@test.support.cpython_only ++@unittest.skipUnless(sysconfig.get_config_var("PY_COERCE_C_LOCALE"), ++ "C locale coercion disabled at build time") ++class LocaleCoercionTests(_LocaleHandlingTestCase): ++ # Test implicit reconfiguration of the environment during CLI startup ++ ++ def _check_c_locale_coercion(self, ++ fs_encoding, stream_encoding, ++ coerce_c_locale, ++ expected_warnings=None, ++ coercion_expected=True, ++ **extra_vars): ++ """Check the C locale handling for various configurations ++ ++ Parameters: ++ fs_encoding: expected sys.getfilesystemencoding() result ++ stream_encoding: expected encoding for standard streams ++ coerce_c_locale: setting to use for PYTHONCOERCECLOCALE ++ None: don't set the variable at all ++ str: the value set in the child's environment ++ expected_warnings: expected warning lines on stderr ++ extra_vars: additional environment variables to set in subprocess 
++ """ ++ self.maxDiff = None ++ ++ if not AVAILABLE_TARGETS: ++ # Locale coercion is disabled when there aren't any target locales ++ fs_encoding = C_LOCALE_FS_ENCODING ++ stream_encoding = C_LOCALE_STREAM_ENCODING ++ coercion_expected = False ++ if expected_warnings: ++ expected_warnings = [LEGACY_LOCALE_WARNING] ++ ++ base_var_dict = { ++ "LANG": "", ++ "LC_CTYPE": "", ++ "LC_ALL": "", ++ } ++ base_var_dict.update(extra_vars) ++ for env_var in ("LANG", "LC_CTYPE"): ++ for locale_to_set in ("", "C", "POSIX", "invalid.ascii"): ++ # XXX (ncoghlan): *BSD platforms don't behave as expected in the ++ # POSIX locale, so we skip that for now ++ # See https://bugs.python.org/issue30672 for discussion ++ if locale_to_set == "POSIX": ++ continue ++ with self.subTest(env_var=env_var, ++ nominal_locale=locale_to_set, ++ PYTHONCOERCECLOCALE=coerce_c_locale): ++ var_dict = base_var_dict.copy() ++ var_dict[env_var] = locale_to_set ++ if coerce_c_locale is not None: ++ var_dict["PYTHONCOERCECLOCALE"] = coerce_c_locale ++ # Check behaviour on successful coercion ++ self._check_child_encoding_details(var_dict, ++ fs_encoding, ++ stream_encoding, ++ expected_warnings, ++ coercion_expected) ++ ++ def test_test_PYTHONCOERCECLOCALE_not_set(self): ++ # This should coerce to the first available target locale by default ++ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=None) ++ ++ def test_PYTHONCOERCECLOCALE_not_zero(self): ++ # *Any* string other than "0" is considered "set" for our purposes ++ # and hence should result in the locale coercion being enabled ++ for setting in ("", "1", "true", "false"): ++ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=setting) ++ ++ def test_PYTHONCOERCECLOCALE_set_to_warn(self): ++ # PYTHONCOERCECLOCALE=warn enables runtime warnings for legacy locales ++ self._check_c_locale_coercion("utf-8", "utf-8", ++ coerce_c_locale="warn", ++ expected_warnings=[CLI_COERCION_WARNING]) ++ ++ ++ def 
test_PYTHONCOERCECLOCALE_set_to_zero(self): ++ # The setting "0" should result in the locale coercion being disabled ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="0", ++ coercion_expected=False) ++ # Setting LC_ALL=C shouldn't make any difference to the behaviour ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="0", ++ LC_ALL="C", ++ coercion_expected=False) ++ ++ def test_LC_ALL_set_to_C(self): ++ # Setting LC_ALL should render the locale coercion ineffective ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale=None, ++ LC_ALL="C", ++ coercion_expected=False) ++ # And result in a warning about a lack of locale compatibility ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="warn", ++ LC_ALL="C", ++ expected_warnings=[LEGACY_LOCALE_WARNING], ++ coercion_expected=False) ++ ++def test_main(): ++ test.support.run_unittest( ++ LocaleConfigurationTests, ++ LocaleCoercionTests ++ ) ++ test.support.reap_children() ++ ++if __name__ == "__main__": ++ test_main() +diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py +index 38156b4..5922ed9 100644 +--- a/Lib/test/test_cmd_line.py ++++ b/Lib/test/test_cmd_line.py +@@ -153,6 +153,7 @@ class CmdLineTest(unittest.TestCase): + env = os.environ.copy() + # Use C locale to get ascii for the locale encoding + env['LC_ALL'] = 'C' ++ env['PYTHONCOERCECLOCALE'] = '0' + code = ( + b'import locale; ' + b'print(ascii("' + undecodable + b'"), ' +diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py +index 7866a5c..b41239a 100644 +--- a/Lib/test/test_sys.py ++++ b/Lib/test/test_sys.py +@@ -680,6 +680,7 @@ class SysModuleTest(unittest.TestCase): + # Force the POSIX locale + env = os.environ.copy() + env["LC_ALL"] = "C" ++ env["PYTHONCOERCECLOCALE"] = "0" + code = '\n'.join(( + 'import sys', + 'def dump(name):', 
+diff --git a/Modules/main.c b/Modules/main.c +index 585d696..96d8be4 100644 +--- a/Modules/main.c ++++ b/Modules/main.c +@@ -107,7 +107,11 @@ static const char usage_6[] = + " predictable seed.\n" + "PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n" + " on Python memory allocators. Use PYTHONMALLOC=debug to install debug\n" +-" hooks.\n"; ++" hooks.\n" ++ ++"PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n" ++" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n" ++" locale coercion and locale compatibility warnings on stderr.\n"; + + static int + usage(int exitcode, const wchar_t* program) +diff --git a/Programs/_testembed.c b/Programs/_testembed.c +index 813cf30..2a64092 100644 +--- a/Programs/_testembed.c ++++ b/Programs/_testembed.c +@@ -1,4 +1,5 @@ + #include ++#include "pyconfig.h" + #include "pythread.h" + #include + +diff --git a/Programs/python.c b/Programs/python.c +index a7afbc7..03f8295 100644 +--- a/Programs/python.c ++++ b/Programs/python.c +@@ -15,6 +15,21 @@ wmain(int argc, wchar_t **argv) + } + #else + ++/* Access private pylifecycle helper API to better handle the legacy C locale ++ * ++ * The legacy C locale assumes ASCII as the default text encoding, which ++ * causes problems not only for the CPython runtime, but also other ++ * components like GNU readline. ++ * ++ * Accordingly, when the CLI detects it, it attempts to coerce it to a ++ * more capable UTF-8 based alternative. ++ * ++ * See the documentation of the PYTHONCOERCECLOCALE setting for more details. 
++ * ++ */ ++extern int _Py_LegacyLocaleDetected(void); ++extern void _Py_CoerceLegacyLocale(void); ++ + int + main(int argc, char **argv) + { +@@ -25,7 +40,11 @@ main(int argc, char **argv) + char *oldloc; + + /* Force malloc() allocator to bootstrap Python */ ++#ifdef Py_DEBUG ++ (void)_PyMem_SetupAllocators("malloc_debug"); ++# else + (void)_PyMem_SetupAllocators("malloc"); ++# endif + + argv_copy = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1)); + argv_copy2 = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1)); +@@ -49,7 +68,21 @@ main(int argc, char **argv) + return 1; + } + ++#ifdef __ANDROID__ ++ /* Passing "" to setlocale() on Android requests the C locale rather ++ * than checking environment variables, so request C.UTF-8 explicitly ++ */ ++ setlocale(LC_ALL, "C.UTF-8"); ++#else ++ /* Reconfigure the locale to the default for this process */ + setlocale(LC_ALL, ""); ++#endif ++ ++ if (_Py_LegacyLocaleDetected()) { ++ _Py_CoerceLegacyLocale(); ++ } ++ ++ /* Convert from char to wchar_t based on the locale settings */ + for (i = 0; i < argc; i++) { + argv_copy[i] = Py_DecodeLocale(argv[i], NULL); + if (!argv_copy[i]) { +@@ -70,7 +103,11 @@ main(int argc, char **argv) + + /* Force again malloc() allocator to release memory blocks allocated + before Py_Main() */ ++#ifdef Py_DEBUG ++ (void)_PyMem_SetupAllocators("malloc_debug"); ++# else + (void)_PyMem_SetupAllocators("malloc"); ++# endif + + for (i = 0; i < argc; i++) { + PyMem_RawFree(argv_copy2[i]); +diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c +index ecfdfee..4fee178 100644 +--- a/Python/pylifecycle.c ++++ b/Python/pylifecycle.c +@@ -167,6 +167,7 @@ Py_SetStandardStreamEncoding(const char *encoding, const char *errors) + return 0; + } + ++ + /* Global initializations. Can be undone by Py_FinalizeEx(). Don't + call this twice without an intervening Py_FinalizeEx() call. 
When + initializations fail, a fatal error is issued and the function does +@@ -301,6 +302,183 @@ import_init(PyInterpreterState *interp, PyObject *sysmod) + } + + ++/* Helper functions to better handle the legacy C locale ++ * ++ * The legacy C locale assumes ASCII as the default text encoding, which ++ * causes problems not only for the CPython runtime, but also other ++ * components like GNU readline. ++ * ++ * Accordingly, when the CLI detects it, it attempts to coerce it to a ++ * more capable UTF-8 based alternative as follows: ++ * ++ * if (_Py_LegacyLocaleDetected()) { ++ * _Py_CoerceLegacyLocale(); ++ * } ++ * ++ * See the documentation of the PYTHONCOERCECLOCALE setting for more details. ++ * ++ * Locale coercion also impacts the default error handler for the standard ++ * streams: while the usual default is "strict", the default for the legacy ++ * C locale and for any of the coercion target locales is "surrogateescape". ++ */ ++ ++int ++_Py_LegacyLocaleDetected(void) ++{ ++#ifndef MS_WINDOWS ++ /* On non-Windows systems, the C locale is considered a legacy locale */ ++ /* XXX (ncoghlan): some platforms (notably Mac OS X) don't appear to treat ++ * the POSIX locale as a simple alias for the C locale, so ++ * we may also want to check for that explicitly. ++ */ ++ const char *ctype_loc = setlocale(LC_CTYPE, NULL); ++ return ctype_loc != NULL && strcmp(ctype_loc, "C") == 0; ++#else ++ /* Windows uses code pages instead of locales, so no locale is legacy */ ++ return 0; ++#endif ++} ++ ++ ++static const char *_C_LOCALE_WARNING = ++ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII " ++ "encoding), which may cause Unicode compatibility problems. 
Using C.UTF-8, " ++ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible " ++ "locales is recommended.\n"; ++ ++static int ++_legacy_locale_warnings_enabled(void) ++{ ++ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE"); ++ return (coerce_c_locale != NULL && ++ strncmp(coerce_c_locale, "warn", 5) == 0); ++} ++ ++static void ++_emit_stderr_warning_for_legacy_locale(void) ++{ ++ if (_legacy_locale_warnings_enabled()) { ++ if (_Py_LegacyLocaleDetected()) { ++ fprintf(stderr, "%s", _C_LOCALE_WARNING); ++ } ++ } ++} ++ ++typedef struct _CandidateLocale { ++ const char *locale_name; /* The locale to try as a coercion target */ ++} _LocaleCoercionTarget; ++ ++static _LocaleCoercionTarget _TARGET_LOCALES[] = { ++ {"C.UTF-8"}, ++ {"C.utf8"}, ++ {"UTF-8"}, ++ {NULL} ++}; ++ ++static char * ++get_default_standard_stream_error_handler(void) ++{ ++ const char *ctype_loc = setlocale(LC_CTYPE, NULL); ++ if (ctype_loc != NULL) { ++ /* "surrogateescape" is the default in the legacy C locale */ ++ if (strcmp(ctype_loc, "C") == 0) { ++ return "surrogateescape"; ++ } ++ ++#ifdef PY_COERCE_C_LOCALE ++ /* "surrogateescape" is the default in locale coercion target locales */ ++ const _LocaleCoercionTarget *target = NULL; ++ for (target = _TARGET_LOCALES; target->locale_name; target++) { ++ if (strcmp(ctype_loc, target->locale_name) == 0) { ++ return "surrogateescape"; ++ } ++ } ++#endif ++ } ++ ++ /* Otherwise return NULL to request the typical default error handler */ ++ return NULL; ++} ++ ++#ifdef PY_COERCE_C_LOCALE ++static const char *_C_LOCALE_COERCION_WARNING = ++ "Python detected LC_CTYPE=C: LC_CTYPE coerced to %.20s (set another locale " ++ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior).\n"; ++ ++static void ++_coerce_default_locale_settings(const _LocaleCoercionTarget *target) ++{ ++ ++ const char *newloc = target->locale_name; ++ ++ /* Reset locale back to currently configured defaults */ ++ setlocale(LC_ALL, ""); ++ ++ /* Set the 
relevant locale environment variable */ ++ if (setenv("LC_CTYPE", newloc, 1)) { ++ fprintf(stderr, ++ "Error setting LC_CTYPE, skipping C locale coercion\n"); ++ return; ++ } ++ if (_legacy_locale_warnings_enabled()) { ++ fprintf(stderr, _C_LOCALE_COERCION_WARNING, newloc); ++ } ++ ++ /* Reconfigure with the overridden environment variables */ ++ setlocale(LC_ALL, ""); ++} ++#endif ++ ++ ++void ++_Py_CoerceLegacyLocale(void) ++{ ++#ifdef PY_COERCE_C_LOCALE ++ /* We ignore the Python -E and -I flags here, as the CLI needs to sort out ++ * the locale settings *before* we try to do anything with the command ++ * line arguments. For cross-platform debugging purposes, we also need ++ * to give end users a way to force even scripts that are otherwise ++ * isolated from their environment to use the legacy ASCII-centric C ++ * locale. ++ * ++ * Ignoring -E and -I is safe from a security perspective, as we only use ++ * the setting to turn *off* the implicit locale coercion, and anyone with ++ * access to the process environment already has the ability to set ++ * `LC_ALL=C` to override the C level locale settings anyway. 
++ */ ++ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE"); ++ if (coerce_c_locale == NULL || strncmp(coerce_c_locale, "0", 2) != 0) { ++ /* PYTHONCOERCECLOCALE is not set, or is set to something other than "0" */ ++ const char *locale_override = getenv("LC_ALL"); ++ if (locale_override == NULL || *locale_override == '\0') { ++ /* LC_ALL is also not set (or is set to an empty string) */ ++ const _LocaleCoercionTarget *target = NULL; ++ for (target = _TARGET_LOCALES; target->locale_name; target++) { ++ const char *new_locale = setlocale(LC_CTYPE, ++ target->locale_name); ++ if (new_locale != NULL) { ++#if !defined(__APPLE__) && defined(HAVE_LANGINFO_H) && defined(CODESET) ++ /* Also ensure that nl_langinfo works in this locale */ ++ char *codeset = nl_langinfo(CODESET); ++ if (!codeset || *codeset == '\0') { ++ /* CODESET is not set or empty, so skip coercion */ ++ new_locale = NULL; ++ setlocale(LC_CTYPE, ""); ++ continue; ++ } ++#endif ++ /* Successfully configured locale, so make it the default */ ++ _coerce_default_locale_settings(target); ++ return; ++ } ++ } ++ } ++ } ++ /* No C locale warning here, as Py_Initialize will emit one later */ ++#endif ++} ++ ++ + void + _Py_InitializeEx_Private(int install_sigs, int install_importlib) + { +@@ -315,11 +493,19 @@ _Py_InitializeEx_Private(int install_sigs, int install_importlib) + initialized = 1; + _Py_Finalizing = NULL; + +-#ifdef HAVE_SETLOCALE ++#ifdef __ANDROID__ ++ /* Passing "" to setlocale() on Android requests the C locale rather ++ * than checking environment variables, so request C.UTF-8 explicitly ++ */ ++ setlocale(LC_CTYPE, "C.UTF-8"); ++#else ++#ifndef MS_WINDOWS + /* Set up the LC_CTYPE locale, so we can obtain + the locale's charset without having to switch + locales. 
*/ + setlocale(LC_CTYPE, ""); ++ _emit_stderr_warning_for_legacy_locale(); ++#endif + #endif + + if ((p = Py_GETENV("PYTHONDEBUG")) && *p != '\0') +@@ -1247,12 +1433,8 @@ initstdio(void) + } + } + if (!errors && !(pythonioencoding && *pythonioencoding)) { +- /* When the LC_CTYPE locale is the POSIX locale ("C locale"), +- stdin and stdout use the surrogateescape error handler by +- default, instead of the strict error handler. */ +- char *loc = setlocale(LC_CTYPE, NULL); +- if (loc != NULL && strcmp(loc, "C") == 0) +- errors = "surrogateescape"; ++ /* Choose the default error handler based on the current locale */ ++ errors = get_default_standard_stream_error_handler(); + } + } + +diff --git a/configure.ac b/configure.ac +index 3f2459a..7444486 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -3360,6 +3360,40 @@ then + fi + AC_MSG_RESULT($with_pymalloc) + ++# Check for --with-c-locale-coercion ++AC_MSG_CHECKING(for --with-c-locale-coercion) ++AC_ARG_WITH(c-locale-coercion, ++ AS_HELP_STRING([--with(out)-c-locale-coercion], ++ [disable/enable C locale coercion to a UTF-8 based locale])) ++ ++if test -z "$with_c_locale_coercion" ++then ++ with_c_locale_coercion="yes" ++fi ++if test "$with_c_locale_coercion" != "no" ++then ++ AC_DEFINE(PY_COERCE_C_LOCALE, 1, ++ [Define if you want to coerce the C locale to a UTF-8 based locale]) ++fi ++AC_MSG_RESULT($with_c_locale_coercion) ++ ++# Check for --with-c-locale-warning ++AC_MSG_CHECKING(for --with-c-locale-warning) ++AC_ARG_WITH(c-locale-warning, ++ AS_HELP_STRING([--with(out)-c-locale-warning], ++ [disable/enable locale compatibility warning in the C locale])) ++ ++if test -z "$with_c_locale_warning" ++then ++ with_c_locale_warning="yes" ++fi ++if test "$with_c_locale_warning" != "no" ++then ++ AC_DEFINE(PY_WARN_ON_C_LOCALE, 1, ++ [Define to emit a locale compatibility warning in the C locale]) ++fi ++AC_MSG_RESULT($with_c_locale_warning) ++ + # Check for Valgrind support + AC_MSG_CHECKING([for --with-valgrind]) + 
AC_ARG_WITH([valgrind], diff --git a/SOURCES/00274-fix-arch-names.patch b/SOURCES/00274-fix-arch-names.patch new file mode 100644 index 0000000..9d69223 --- /dev/null +++ b/SOURCES/00274-fix-arch-names.patch @@ -0,0 +1,58 @@ +diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac +--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500 ++++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500 +@@ -788,9 +788,9 @@ cat >> conftest.c <> conftest.c <> conftest.c <> conftest.c <= (1, 1, 0) +- ++PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS') + + def data_file(*name): + return os.path.join(os.path.dirname(__file__), *name) +@@ -889,6 +890,19 @@ class ContextTests(unittest.TestCase): + with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"): + ctx.set_ciphers("^$:,;?*'dorothyx") + ++ @unittest.skipUnless(PY_SSL_DEFAULT_CIPHERS == 1, ++ "Test applies only to Python default ciphers") ++ def test_python_ciphers(self): ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ ciphers = ctx.get_ciphers() ++ for suite in ciphers: ++ name = suite['name'] ++ self.assertNotIn("PSK", name) ++ self.assertNotIn("SRP", name) ++ self.assertNotIn("MD5", name) ++ self.assertNotIn("RC4", name) ++ self.assertNotIn("3DES", name) ++ + @unittest.skipIf(ssl.OPENSSL_VERSION_INFO < (1, 0, 2, 0, 0), 'OpenSSL too old') + def test_get_ciphers(self): + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index 5e007da..130f006 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -237,6 +237,31 @@ SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *s) + + #endif /* OpenSSL < 1.1.0 or LibreSSL < 2.7.0 */ + ++/* Default cipher suites */ ++#ifndef PY_SSL_DEFAULT_CIPHERS ++#define PY_SSL_DEFAULT_CIPHERS 1 ++#endif ++ ++#if PY_SSL_DEFAULT_CIPHERS == 0 ++ #ifndef PY_SSL_DEFAULT_CIPHER_STRING ++ #error "Py_SSL_DEFAULT_CIPHERS 0 needs Py_SSL_DEFAULT_CIPHER_STRING" ++ #endif ++#elif 
PY_SSL_DEFAULT_CIPHERS == 1 ++/* Python custom selection of sensible ciper suites ++ * DEFAULT: OpenSSL's default cipher list. Since 1.0.2 the list is in sensible order. ++ * !aNULL:!eNULL: really no NULL ciphers ++ * !MD5:!3DES:!DES:!RC4:!IDEA:!SEED: no weak or broken algorithms on old OpenSSL versions. ++ * !aDSS: no authentication with discrete logarithm DSA algorithm ++ * !SRP:!PSK: no secure remote password or pre-shared key authentication ++ */ ++ #define PY_SSL_DEFAULT_CIPHER_STRING "DEFAULT:!aNULL:!eNULL:!MD5:!3DES:!DES:!RC4:!IDEA:!SEED:!aDSS:!SRP:!PSK" ++#elif PY_SSL_DEFAULT_CIPHERS == 2 ++/* Ignored in SSLContext constructor, only used to as _ssl.DEFAULT_CIPHER_STRING */ ++ #define PY_SSL_DEFAULT_CIPHER_STRING SSL_DEFAULT_CIPHER_LIST ++#else ++ #error "Unsupported PY_SSL_DEFAULT_CIPHERS" ++#endif ++ + + enum py_ssl_error { + /* these mirror ssl.h */ +@@ -2803,7 +2828,12 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) + /* A bare minimum cipher list without completely broken cipher suites. + * It's far from perfect but gives users a better head start. 
*/ + if (proto_version != PY_SSL_VERSION_SSL2) { +- result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL:!MD5"); ++#if PY_SSL_DEFAULT_CIPHERS == 2 ++ /* stick to OpenSSL's default settings */ ++ result = 1; ++#else ++ result = SSL_CTX_set_cipher_list(ctx, PY_SSL_DEFAULT_CIPHER_STRING); ++#endif + } else { + /* SSLv2 needs MD5 */ + result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL"); +@@ -5343,6 +5373,9 @@ PyInit__ssl(void) + (PyObject *)&PySSLSession_Type) != 0) + return NULL; + ++ PyModule_AddStringConstant(m, "_DEFAULT_CIPHERS", ++ PY_SSL_DEFAULT_CIPHER_STRING); ++ + PyModule_AddIntConstant(m, "SSL_ERROR_ZERO_RETURN", + PY_SSL_ERROR_ZERO_RETURN); + PyModule_AddIntConstant(m, "SSL_ERROR_WANT_READ", +diff --git a/configure.ac b/configure.ac +index 3703701..2eff514 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -5598,6 +5598,42 @@ if test "$have_getrandom" = yes; then + [Define to 1 if the getrandom() function is available]) + fi + ++# ssl module default cipher suite string ++AH_TEMPLATE(PY_SSL_DEFAULT_CIPHERS, ++ [Default cipher suites list for ssl module. 
++ 1: Python's preferred selection, 2: leave OpenSSL defaults untouched, 0: custom string]) ++AH_TEMPLATE(PY_SSL_DEFAULT_CIPHER_STRING, ++ [Cipher suite string for PY_SSL_DEFAULT_CIPHERS=0] ++) ++AC_MSG_CHECKING(for --with-ssl-default-suites) ++AC_ARG_WITH(ssl-default-suites, ++ AS_HELP_STRING([--with-ssl-default-suites=@<:@python|openssl|STRING@:>@], ++ [Override default cipher suites string, ++ python: use Python's preferred selection (default), ++ openssl: leave OpenSSL's defaults untouched, ++ STRING: use a custom string, ++ PROTOCOL_SSLv2 ignores the setting]), ++[ ++AC_MSG_RESULT($withval) ++case "$withval" in ++ python) ++ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ++ ;; ++ openssl) ++ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 2) ++ ;; ++ *) ++ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 0) ++ AC_DEFINE_UNQUOTED(PY_SSL_DEFAULT_CIPHER_STRING, "$withval") ++ ;; ++esac ++], ++[ ++AC_MSG_RESULT(python) ++AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ++]) ++ ++ + # generate output files + AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh) + AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix]) diff --git a/SOURCES/00316-mark-bdist_wininst-unsupported.patch b/SOURCES/00316-mark-bdist_wininst-unsupported.patch new file mode 100644 index 0000000..c091be0 --- /dev/null +++ b/SOURCES/00316-mark-bdist_wininst-unsupported.patch @@ -0,0 +1,14 @@ +diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py +index fde5675..15434c3 100644 +--- a/Lib/distutils/command/bdist_wininst.py ++++ b/Lib/distutils/command/bdist_wininst.py +@@ -55,6 +55,9 @@ class bdist_wininst(Command): + boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', + 'skip-build'] + ++ # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows ++ _unsupported = (sys.platform != "win32") ++ + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None diff --git a/SOURCES/00317-CVE-2019-5010.patch 
b/SOURCES/00317-CVE-2019-5010.patch new file mode 100644 index 0000000..62e931e --- /dev/null +++ b/SOURCES/00317-CVE-2019-5010.patch @@ -0,0 +1,111 @@ +From c660debb97f4f422255a82fef2d77804552c043a Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Tue, 15 Jan 2019 18:16:30 +0100 +Subject: [PATCH] bpo-35746: Fix segfault in ssl's cert parser + +CVE-2019-5010, Fix a NULL pointer deref in ssl module. The cert parser did +not handle CRL distribution points with empty DP or URI correctly. A +malicious or buggy certificate can result into segfault. + +Signed-off-by: Christian Heimes +--- + Lib/test/talos-2019-0758.pem | 22 +++++++++++++++++++ + Lib/test/test_ssl.py | 22 +++++++++++++++++++ + .../2019-01-15-18-16-05.bpo-35746.nMSd0j.rst | 3 +++ + Modules/_ssl.c | 4 ++++ + 4 files changed, 51 insertions(+) + create mode 100644 Lib/test/talos-2019-0758.pem + create mode 100644 Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst + +diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem +new file mode 100644 +index 000000000000..13b95a77fd8a +--- /dev/null ++++ b/Lib/test/talos-2019-0758.pem +@@ -0,0 +1,22 @@ ++-----BEGIN CERTIFICATE----- ++MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO ++BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7 ++MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh ++bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB ++J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES ++V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD ++5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C ++1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP ++WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF ++j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp ++HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io ++UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7 
++2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu ++bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG ++SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p ++t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr ++t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit ++p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV ++Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+ ++10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA== ++-----END CERTIFICATE----- +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 7f6b93148f45..1fc657f4d867 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -115,6 +115,7 @@ def data_file(*name): + BADKEY = data_file("badkey.pem") + NOKIACERT = data_file("nokia.pem") + NULLBYTECERT = data_file("nullbytecert.pem") ++TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") + + DHFILE = data_file("ffdh3072.pem") + BYTES_DHFILE = os.fsencode(DHFILE) +@@ -348,6 +349,27 @@ def test_parse_cert(self): + self.assertEqual(p['crlDistributionPoints'], + ('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',)) + ++ def test_parse_cert_CVE_2019_5010(self): ++ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) ++ if support.verbose: ++ sys.stdout.write("\n" + pprint.pformat(p) + "\n") ++ self.assertEqual( ++ p, ++ { ++ 'issuer': ( ++ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), ++ 'notAfter': 'Jun 14 18:00:58 2028 GMT', ++ 'notBefore': 'Jun 18 18:00:58 2018 GMT', ++ 'serialNumber': '02', ++ 'subject': ((('countryName', 'UK'),), ++ (('commonName', ++ 'codenomicon-vm-2.test.lal.cisco.com'),)), ++ 'subjectAltName': ( ++ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), ++ 'version': 3 ++ } ++ ) ++ + def test_parse_cert_CVE_2013_4238(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) + if support.verbose: +diff --git a/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst 
b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst +new file mode 100644 +index 000000000000..dffe347eec84 +--- /dev/null ++++ b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst +@@ -0,0 +1,3 @@ ++[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did ++not handle CRL distribution points with empty DP or URI correctly. A ++malicious or buggy certificate can result into segfault. +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index 4e3352d9e661..0e720e268d93 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -1515,6 +1515,10 @@ _get_crl_dp(X509 *certificate) { + STACK_OF(GENERAL_NAME) *gns; + + dp = sk_DIST_POINT_value(dps, i); ++ if (dp->distpoint == NULL) { ++ /* Ignore empty DP value, CVE-2019-5010 */ ++ continue; ++ } + gns = dp->distpoint->name.fullname; + + for (j=0; j < sk_GENERAL_NAME_num(gns); j++) { diff --git a/SOURCES/00318-fixes-for-tls-13.patch b/SOURCES/00318-fixes-for-tls-13.patch new file mode 100644 index 0000000..8f4a1b0 --- /dev/null +++ b/SOURCES/00318-fixes-for-tls-13.patch @@ -0,0 +1,949 @@ +From 412ccf4c6f8c417006c0a93392a8274a425074c0 Mon Sep 17 00:00:00 2001 +From: Victor Stinner +Date: Wed, 29 May 2019 04:04:54 +0200 +Subject: [PATCH 1/5] bpo-32947: test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 + (GH-11612) + +Backport partially commit 529525fb5a8fd9b96ab4021311a598c77588b918: +complete the previous partial backport (commit +2a4ee8aa01d61b6a9c8e9c65c211e61bdb471826. 
+ +Co-Authored-By: Christian Heimes +--- + Lib/test/test_ssl.py | 15 +++++++++++++++ + .../2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst | 1 + + 2 files changed, 16 insertions(+) + create mode 100644 Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index cb0acda..639109f 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -2043,6 +2043,16 @@ if _have_threads: + sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n" + % (msg, ctype, msg.lower(), ctype)) + self.write(msg.lower()) ++ except ConnectionResetError: ++ # XXX: OpenSSL 1.1.1 sometimes raises ConnectionResetError ++ # when connection is not shut down gracefully. ++ if self.server.chatty and support.verbose: ++ sys.stdout.write( ++ " Connection reset by peer: {}\n".format( ++ self.addr) ++ ) ++ self.close() ++ self.running = False + except OSError: + if self.server.chatty: + handle_error("Test server failure:\n") +@@ -2122,6 +2132,11 @@ if _have_threads: + pass + except KeyboardInterrupt: + self.stop() ++ except BaseException as e: ++ if support.verbose and self.chatty: ++ sys.stdout.write( ++ ' connection handling failed: ' + repr(e) + '\n') ++ + self.sock.close() + + def stop(self): +diff --git a/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst +new file mode 100644 +index 0000000..f508504 +--- /dev/null ++++ b/Misc/NEWS.d/next/Tests/2019-01-18-17-46-10.bpo-32947.Hk0KnM.rst +@@ -0,0 +1 @@ ++test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1. +-- +2.21.0 + + +From 6b728ec778067849dd1f0d9b73cf1ac47dafa270 Mon Sep 17 00:00:00 2001 +From: "Miss Islington (bot)" + <31488909+miss-islington@users.noreply.github.com> +Date: Wed, 25 Sep 2019 09:12:59 -0700 +Subject: [PATCH 2/5] bpo-38271: encrypt private key test files with AES256 + (GH-16385) + +The private keys for test_ssl were encrypted with 3DES in traditional +PKCSGH-5 format. 
3DES and the digest algorithm of PKCSGH-5 are blocked by +some strict crypto policies. Use PKCSGH-8 format with AES256 encryption +instead. + +Signed-off-by: Christian Heimes + +https://bugs.python.org/issue38271 + +Automerge-Triggered-By: @tiran +(cherry picked from commit bfd0c963d88f3df69489ee250655e2b8f3d235bd) + +Co-authored-by: Christian Heimes +--- + Lib/test/keycert.passwd.pem | 85 ++++++++++--------- + Lib/test/make_ssl_certs.py | 4 +- + Lib/test/ssl_key.passwd.pem | 84 +++++++++--------- + .../2019-09-25-13-11-29.bpo-38271.iHXNIg.rst | 4 + + 4 files changed, 91 insertions(+), 86 deletions(-) + create mode 100644 Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst + +diff --git a/Lib/test/keycert.passwd.pem b/Lib/test/keycert.passwd.pem +index cbb3c3b..c330c36 100644 +--- a/Lib/test/keycert.passwd.pem ++++ b/Lib/test/keycert.passwd.pem +@@ -1,45 +1,45 @@ +------BEGIN RSA PRIVATE KEY----- +-Proc-Type: 4,ENCRYPTED +-DEK-Info: DES-EDE3-CBC,D134E931C96D9DEC +- +-nuGFEej7vIjkYWSMz5OJeVTNntDRQi6ZM4DBm3g8T7i/0odr3WFqGMMKZcIhLYQf +-rgRq7RSKtrJ1y5taVucMV+EuCjyfzDo0TsYt+ZrXv/D08eZhjRmkhoHnGVF0TqQm +-nQEXM/ERT4J2RM78dnG+homMkI76qOqxgGbRqQqJo6AiVRcAZ45y8s96bru2TAB8 +-+pWjO/v0Je7AFVdwSU52N8OOY6uoSAygW+0UY1WVxbVGJF2XfRsNpPX+YQHYl6e+ +-3xM5XBVCgr6kmdAyub5qUJ38X3TpdVGoR0i+CVS9GTr2pSRib1zURAeeHnlqiUZM +-4m0Gn9s72nJevU1wxED8pwOhR8fnHEmMKGD2HPhKoOCbzDhwwBZO27TNa1uWeM3f +-M5oixKDi2PqMn3y2cDx1NjJtP661688EcJ5a2Ih9BgO9xpnhSyzBWEKcAn0tJB0H +-/56M0FW6cdOOIzMveGGL7sHW5E+iOdI1n5e7C6KJUzew78Y9qJnhS53EdI6qTz9R +-wsIsj1i070Fk6RbPo6zpLlF6w7Zj8GlZaZA7OZZv9wo5VEV/0ST8gmiiBOBc4C6Y +-u9hyLIIu4dFEBKyQHRvBnQSLNpKx6or1OGFDVBay2In9Yh2BHh1+vOj/OIz/wq48 +-EHOIV27fRJxLu4jeK5LIGDhuPnMJ8AJYQ0bQOUP6fd7p+TxWkAQZPB/Dx/cs3hxr +-nFEdzx+eO+IAsObx/b1EGZyEJyETBslu4GwYX7/KK3HsJhDJ1bdZ//28jOCaoir6 +-ZOMT72GRwmVoQTJ0XpccfjHfKJDRLT7C1xvzo4Eibth0hpTZkA75IUYUp6qK/PuJ +-kH/qdiC7QIkRKtsrawW4vEDna3YtxIYhQqz9+KwO6u/0gzooZtv1RU4U3ifMDB5u +-5P5GAzACRqlY8QYBkM869lvWqzQPHvybC4ak9Yx6/heMO9ddjdIW9BaK8BLxvN/6 
+-UCD936Y4fWltt09jHZIoxWFykouBwmd7bXooNYXmDRNmjTdVhKJuOEOQw8hDzx7e +-pWFJ9Z/V4Qm1tvXbCD7QFqMCDoY3qFvVG8DBqXpmxe1yPfz21FWrT7IuqDXAD3ns +-vxfN/2a+Cy04U9FBNVCvWqWIs5AgNpdCMJC2FlXKTy+H3/7rIjNyFyvbX0vxIXtK +-liOVNXiyVM++KZXqktqMUDlsJENmIHV9B046luqbgW018fHkyEYlL3iRZGbYegwr +-XO9VVIKVPw1BEvJ8VNdGFGuZGepd8qX2ezfYADrNR+4t85HDm8inbjTobSjWuljs +-ftUNkOeCHqAvWCFQTLCfdykvV08EJfVY79y7yFPtfRV2gxYokXFifjo3su9sVQr1 +-UiIS5ZAsIC1hBXWeXoBN7QVTkFi7Yto6E1q2k10LiT3obpUUUQ/oclhrJOCJVjrS +-oRcj2QBy8OT4T9slJr5maTWdgd7Lt6+I6cGQXPaDvjGOJl0eBYM14vhx4rRQWytJ +-k07hhHFO4+9CGCuHS8AAy2gR6acYFWt2ZiiNZ0z/iPIHNK4YEyy9aLf6uZH/KQjE +-jmHToo7XD6QvCAEC5qTHby3o3LfHIhyZi/4L+AhS4FKUHF6M0peeyYt4z3HaK2d2 +-N6mHLPdjwNjra7GOmcns4gzcrdfoF+R293KpPal4PjknvR3dZL4kKP/ougTAM5zv +-qDIvRbkHzjP8ChTpoLcJsNVXykNcNkjcSi0GHtIpYjh6QX6P2uvR/S4+Bbb9p9rn +-hIy/ovu9tWN2hiPxGPe6torF6BulAxsTYlDercC204AyzsrdA0pr6HBgJH9C6ML1 +-TchwodbFJqn9rSv91i1liusAGoOvE81AGBdrXY7LxfSNhYY1IK6yR/POJPTd53sA +-uX2/j6Rtoksd/2BHPM6AUnI/2B9slhuzWX2aCtWLeuwvXDS6rYuTigaQmLkzTRfM +-dlMI3s9KLXxgi5YVumUZleJWXwBNP7KiKajd+VTSD+7WAhyhM5FIG5wVOaxmy4G2 +-TyqZ/Ax9d2VEjTQHWvQlLPQ4Mp0EIz0aEl94K/S8CK8bJRH6+PRkar+dJi1xqlL+ +-BYb42At9mEJ8odLlFikvNi1+t7jqXk5jRi5C0xFKx3nTtzoH2zNUeuA3R6vSocVK +-45jnze9IkKmxMlJ4loR5sgszdpDCD3kXqjtCcbMTmcrGyzJek3HSOTpiEORoTFOe +-Rhg6jH5lm+QcC263oipojS0qEQcnsWJP2CylNYMYHR9O/9NQxT3o2lsRHqZTMELV +-uQa/SFH+paQNbZOj8MRwPSqqiIxJFuLswKte1R+W7LKn1yBSM7Pp39lNbzGvJD2E +-YRfnCwFpJ54voVAuQ4jXJvigCW2qeCjXlxeD6K2j4eGJEEOmIjIW1wjubyBY6OI3 +------END RSA PRIVATE KEY----- ++-----BEGIN ENCRYPTED PRIVATE KEY----- ++MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIhD+rJdxqb6ECAggA ++MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBDTdyjCP3riOSUfxix4aXEvBIIH ++ECGkbsFabrcFMZcplw5jHMaOlG7rYjUzwDJ80JM8uzbv2Jb8SvNlns2+xmnEvH/M ++mNvRmnXmplbVjH3XBMK8o2Psnr2V/a0j7/pgqpRxHykG+koOY4gzdt3MAg8JPbS2 ++hymSl+Y5EpciO3xLfz4aFL1ZNqspQbO/TD13Ij7DUIy7xIRBMp4taoZCrP0cEBAZ +++wgu9m23I4dh3E8RUBzWyFFNic2MVVHrui6JbHc4dIHfyKLtXJDhUcS0vIC9PvcV 
++jhorh3UZC4lM+/jjXV5AhzQ0VrJ2tXAUX2dA144XHzkSH2QmwfnajPsci7BL2CGC ++rjyTy4NfB/lDwU+55dqJZQSKXMxAapJMrtgw7LD5CKQcN6zmfhXGssJ7HQUXKkaX ++I1YOFzuUD7oo56BVCnVswv0jX9RxrE5QYNreMlOP9cS+kIYH65N+PAhlURuQC14K ++PgDkHn5knSa2UQA5tc5f7zdHOZhGRUfcjLP+KAWA3nh+/2OKw/X3zuPx75YT/FKe ++tACPw5hjEpl62m9Xa0eWepZXwqkIOkzHMmCyNCsbC0mmRoEjmvfnslfsmnh4Dg/c ++4YsTYMOLLIeCa+WIc38aA5W2lNO9lW0LwLhX1rP+GRVPv+TVHXlfoyaI+jp0iXrJ ++t3xxT0gaiIR/VznyS7Py68QV/zB7VdqbsNzS7LdquHK1k8+7OYiWjY3gqyU40Iu2 ++d1eSnIoDvQJwyYp7XYXbOlXNLY+s1Qb7yxcW3vXm0Bg3gKT8r1XHWJ9rj+CxAn5r ++ysfkPs1JsesxzzQjwTiDNvHnBnZnwxuxfBr26ektEHmuAXSl8V6dzLN/aaPjpTj4 ++CkE7KyqX3U9bLkp+ztl4xWKEmW44nskzm0+iqrtrxMyTfvvID4QrABjZL4zmWIqc ++e3ZfA3AYk9VDIegk/YKGC5VZ8YS7ZXQ0ASK652XqJ7QlMKTxxV7zda6Fp4uW6/qN ++ezt5wgbGGhZQXj2wDQmWNQYyG/juIgYTpCUA54U5XBIjuR6pg+Ytm0UrvNjsUoAC ++wGelyqaLDq8U8jdIFYVTJy9aJjQOYXjsUJ0dZN2aGHSlju0ZGIZc49cTIVQ9BTC5 ++Yc0Vlwzpl+LuA25DzKZNSb/ci0lO/cQGJ2uXQQgaNgdsHlu8nukENGJhnIzx4fzK ++wEh3yHxhTRCzPPwDfXmx0IHXrPqJhSpAgaXBVIm8OjvmMxO+W75W4uLfNY/B7e2H ++3cjklGuvkofOf7sEOrGUYf4cb6Obg8FpvHgpKo5Twwmoh/qvEKckBFqNhZXDDl88 ++GbGlSEgyaAV1Ig8s1NJKBolWFa0juyPAwJ8vT1T4iwW7kQ7KXKt2UNn96K/HxkLu ++pikvukz8oRHMlfVHa0R48UB1fFHwZLzPmwkpu6ancIxk3uO3yfhf6iDk3bmnyMlz ++g3k/b6MrLYaOVByRxay85jH3Vvgqfgn6wa6BJ7xQ81eZ8B45gFuTH0J5JtLL7SH8 ++darRPLCYfA+Ums9/H6pU5EXfd3yfjMIbvhCXHkJrrljkZ+th3p8dyto6wmYqIY6I ++qR9sU+o6DhRaiP8tCICuhHxQpXylUM6WeJkJwduTJ8KWIvzsj4mReIKOl/oC2jSd ++gIdKhb9Q3zj9ce4N5m6v66tyvjxGZ+xf3BvUPDD+LwZeXgf7OBsNVbXzQbzto594 ++nbCzPocFi3gERE50ru4K70eQCy08TPG5NpOz+DDdO5vpAuMLYEuI7O3L+3GjW40Q ++G5bu7H5/i7o/RWR67qhG/7p9kPw3nkUtYgnvnWaPMIuTfb4c2d069kjlfgWjIbbI ++tpSKmm5DHlqTE4/ECAbIEDtSaw9dXHCdL3nh5+n428xDdGbjN4lT86tfu17EYKzl ++ydH1RJ1LX3o3TEj9UkmDPt7LnftvwybMFEcP7hM2xD4lC++wKQs7Alg6dTkBnJV4 ++5xU78WRntJkJTU7kFkpPKA0QfyCuSF1fAMoukDBkqUdOj6jE0BlJQlHk5iwgnJlt ++uEdkTjHZEjIUxWC6llPcAzaPNlmnD45AgfEW+Jn21IvutmJiQAz5lm9Z9PXaR0C8 ++hXB6owRY67C0YKQwXhoNf6xQun2xGBGYy5rPEEezX1S1tUH5GR/KW1Lh+FzFqHXI 
++ZEb5avfDqHKehGAjPON+Br7akuQ125M9LLjKuSyPaQzeeCAy356Xd7XzVwbPddbm ++9S9WSPqzaPgh10chIHoNoC8HMd33dB5j9/Q6jrbU/oPlptu/GlorWblvJdcTuBGI ++IVn45RFnkG8hCz0GJSNzW7+70YdESQbfJW79vssWMaiSjFE0pMyFXrFR5lBywBTx ++PiGEUWtvrKG94X1TMlGUzDzDJOQNZ9dT94bonNe9pVmP5BP4/DzwwiWh6qrzWk6p ++j8OE4cfCSh2WvHnhJbH7/N0v+JKjtxeIeJ16jx/K2oK5 ++-----END ENCRYPTED PRIVATE KEY----- + -----BEGIN CERTIFICATE----- + MIIEWTCCAsGgAwIBAgIJAJinz4jHSjLtMA0GCSqGSIb3DQEBCwUAMF8xCzAJBgNV + BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +@@ -66,3 +66,4 @@ jMqTFlmO7kpf/jpCSmamp3/JSEE1BJKHwQ6Ql4nzRA2N1mnvWH7Zxcv043gkHeAu + 9Wc2uXpw9xF8itV4Uvcdr3dwqByvIqn7iI/gB+4l41e0u8OmH2MKOx4Nxlly5TNW + HcVKQHyOeyvnINuBAQ== + -----END CERTIFICATE----- ++ +diff --git a/Lib/test/make_ssl_certs.py b/Lib/test/make_ssl_certs.py +index 3622765..41b5f46 100644 +--- a/Lib/test/make_ssl_certs.py ++++ b/Lib/test/make_ssl_certs.py +@@ -206,8 +206,8 @@ if __name__ == '__main__': + with open('ssl_key.pem', 'w') as f: + f.write(key) + print("password protecting ssl_key.pem in ssl_key.passwd.pem") +- check_call(['openssl','rsa','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-des3','-passout','pass:somepass']) +- check_call(['openssl','rsa','-in','ssl_key.pem','-out','keycert.passwd.pem','-des3','-passout','pass:somepass']) ++ check_call(['openssl','pkey','-in','ssl_key.pem','-out','ssl_key.passwd.pem','-aes256','-passout','pass:somepass']) ++ check_call(['openssl','pkey','-in','ssl_key.pem','-out','keycert.passwd.pem','-aes256','-passout','pass:somepass']) + + with open('keycert.pem', 'w') as f: + f.write(key) +diff --git a/Lib/test/ssl_key.passwd.pem b/Lib/test/ssl_key.passwd.pem +index e4f1370..46de61a 100644 +--- a/Lib/test/ssl_key.passwd.pem ++++ b/Lib/test/ssl_key.passwd.pem +@@ -1,42 +1,42 @@ +------BEGIN RSA PRIVATE KEY----- +-Proc-Type: 4,ENCRYPTED +-DEK-Info: DES-EDE3-CBC,8064BE1494B24B13 +- +-KJrffOMbo8M0I3PzcYxRZGMpKD1yB3Ii4+bT5XoanxjIJ+4fdx6LfZ0Rsx+riyzs 
+-tymsQu/iYY9j+4rCvN9+eetsL1X6iZpiimKsLexcid9M3fb0vxED5Sgw0dvunCUA +-xhqjLIKR92MKbODHf6KrDKCpsiPbjq4gZ7P+uCGXAMHL3MXIJSC0hW9rK7Ce6oyO +-CjpIcgB8x+GUWZZZhAFdlzIHMZrteNP2P5HK6QcaT71P034Dz1hhqoj4Q0t+Fta2 +-4tfsM/bnTR/l6hwlhPa1e3Uj322tDTDWBScgWANn5+sEWldLmozMaWhZsn22pfk2 +-KjRMGXG024JVheV882nbdOBvG7oq+lxkZ/ZP+vvqJqnvYtf7WtM8UivzYpe5Hz5b +-kVvWzPjBLUSZ9whM9rDLqSSqMPyPvDTuEmLkuq+xm7pYJmsLqIMP2klZLqRxLX6K +-uqwplb8UG440qauxgnQ905PId1l2fJEnRtV+7vXprA0L0QotgXLVHBhLmTFM+3PH +-9H3onf31dionUAPrn3nfVE36HhvVgRyvDBnBzJSIMighgq21Qx/d1dk0DRYi1hUI +-nCHl0YJPXheVcXR7JiSF2XQCAaFuS1Mr7NCXfWZOZQC/0dkvmHnl9DUAhuqq9BNZ +-1cKhZXcKHadg2/r0Zup/oDzmHPUEfTAXT0xbqoWlhkdwbF2veWQ96A/ncx3ISTb4 +-PkXBlX9rdia8nmtyQDQRn4NuvchbaGkj4WKFC8pF8Hn7naHqwjpHaDUimBc0CoQW +-edNJqruKWwtSVLuwKHCC2gZFX9AXSKJXJz/QRSUlhFGOhuF/J6yKaXj6n5lxWNiQ +-54J+OP/hz2aS95CD2+Zf1SKpxdWiLZSIQqESpmmUrXROixNJZ/Z7gI74Dd9dSJOH +-W+3AU03vrrFZVrJVZhjcINHoH1Skh6JKscH18L6x4U868nSr4SrRLX8BhHllOQyD +-bmU+PZAjF8ZBIaCtTGulDXD29F73MeAZeTSsgQjFu0iKLj1wPiphbx8i/SUtR4YP +-X6PVA04g66r1NBw+3RQASVorZ3g1MSFvITHXcbKkBDeJH2z1+c6t/VVyTONnQhM5 +-lLgRSk6HCbetvT9PKxWrWutA12pdBYEHdZhMHVf2+xclky7l09w8hg2/qqcdGRGe +-oAOZ72t0l5ObNyaruDKUS6f4AjOyWq/Xj5xuFtf1n3tQHyslSyCTPcAbQhDfTHUx +-vixb/V9qvYPt7OCn8py7v1M69NH42QVFAvwveDIFjZdqfIKBoJK2V4qPoevJI6uj +-Q5ByMt8OXOjSXNpHXpYQWUiWeCwOEBXJX8rzCHdMtg37jJ0zCmeErR1NTdg+EujM +-TWYgd06jlT67tURST0aB2kg4ijKgUJefD313LW1zC6gVsTbjSZxYyRbPfSP6flQB +-yCi1C19E2OsgleqbkBVC5GlYUzaJT7SGjCRmGx1eqtbrALu+LVH24Wceexlpjydl +-+s2nf/DZlKun/tlPh6YioifPCJjByZMQOCEfIox6BkemZETz8uYA4TTWimG13Z03 +-gyDGC2jdpEW414J2qcQDvrdUgJ+HlhrAAHaWpMQDbXYxBGoZ+3+ORvQV4kAsCwL8 +-k3EIrVpePdik+1xgOWsyLj6QxFXlTMvL6Wc5pnArFPORsgHEolJvxSPTf9aAHNPn +-V2WBvxiLBtYpGrujAUM40Syx/aN2RPtcXYPAusHUBw+S8/p+/8Kg8GZmnIXG3F89 +-45Eepl2quZYIrou7a1fwIpIIZ0hFiBQ1mlHVMFtxwVHS1bQb3SU2GeO+JcGjdVXc +-04qeGuQ5M164eQ5C0T7ZQ1ULiUlFWKD30m+cjqmZzt3d7Q0mKpMKuESIuZJo/wpD +-Nas432aLKUhcNx/pOYLkKJRpGZKOupQoD5iUj/j44o8JoFkDK33v2S57XB5QGz28 
+-9Zuhx49b3W8mbM6EBanlQKLWJGCxXqc/jhYhFWn+b0MhidynFgA0oeWvf6ZDyt6H +-Yi5Etxsar09xp0Do3NxtQXLuSUu0ji2pQzSIKuoqQWKqldm6VrpwojiqJhy4WQBQ +-aVVyFeWBC7G3Zj76dO+yp2sfJ0itJUQ8AIB9Cg0f34rEZu+r9luPmqBoUeL95Tk7 +-YvCOU3Jl8Iqysv8aNpVXT8sa8rrSbruWCByEePZ37RIdHLMVBwVY0eVaFQjrjU7E +-mXmM9eaoYLfXOllsQ+M2+qPFUITr/GU3Qig13DhK/+yC1R6V2a0l0WRhMltIPYKW +-Ztvvr4hK5LcYCeS113BLiMbDIMMZZYGDZGMdC8DnnVbT2loF0Rfmp80Af31KmMQ4 +-6XvMatW9UDjBoY5a/YMpdm7SRwm+MgV2KNPpc2kST87/yi9oprGAb8qiarHiHTM0 +------END RSA PRIVATE KEY----- ++-----BEGIN ENCRYPTED PRIVATE KEY----- ++MIIHbTBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQI072N7W+PDDMCAggA ++MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBA/AuaRNi4vE4KGqI4In+70BIIH ++ENGS5Vex5NID873frmd1UZEHZ+O/Bd0wDb+NUpIqesHkRYf7kKi6Gnr+nKQ/oVVn ++Lm3JjE7c8ECP0OkOOXmiXuWL1SkzBBWqCI4stSGUPvBiHsGwNnvJAaGjUffgMlcC ++aJOA2+dnejLkzblq4CB2LQdm06N3Xoe9tyqtQaUHxfzJAf5Ydd8uj7vpKN2MMhY7 ++icIPJwSyh0N7S6XWVtHEokr9Kp4y2hS5a+BgCWV1/1z0aF7agnSVndmT1VR+nWmc ++lM14k+lethmHMB+fsNSjnqeJ7XOPlOTHqhiZ9bBSTgF/xr5Bck/NiKRzHjdovBox ++TKg+xchaBhpRh7wBPBIlNJeHmIjv+8obOKjKU98Ig/7R9+IryZaNcKAH0PuOT+Sw ++QHXiCGQbOiYHB9UyhDTWiB7YVjd8KHefOFxfHzOQb/iBhbv1x3bTl3DgepvRN6VO ++dIsPLoIZe42sdf9GeMsk8mGJyZUQ6AzsfhWk3grb/XscizPSvrNsJ2VL1R7YTyT3 ++3WA4ZXR1EqvXnWL7N/raemQjy62iOG6t7fcF5IdP9CMbWP+Plpsz4cQW7FtesCTq ++a5ZXraochQz361ODFNIeBEGU+0qqXUtZDlmos/EySkZykSeU/L0bImS62VGE3afo ++YXBmznTTT9kkFkqv7H0MerfJsrE/wF8puP3GM01DW2JRgXRpSWlvbPV/2LnMtRuD ++II7iH4rWDtTjCN6BWKAgDOnPkc9sZ4XulqT32lcUeV6LTdMBfq8kMEc8eDij1vUT ++maVCRpuwaq8EIT3lVgNLufHiG96ojlyYtj3orzw22IjkgC/9ee8UDik9CqbMVmFf ++fVHhsw8LNSg8Q4bmwm5Eg2w2it2gtI68+mwr75oCxuJ/8OMjW21Prj8XDh5reie2 ++c0lDKQOFZ9UnLU1bXR/6qUM+JFKR4DMq+fOCuoQSVoyVUEOsJpvBOYnYZN9cxsZm ++vh9dKafMEcKZ8flsbr+gOmOw7+Py2ifSlf25E/Frb1W4gtbTb0LQVHb6+drutrZj ++8HEu4CnHYFCD4ZnOJb26XlZCb8GFBddW86yJYyUqMMV6Q1aJfAOAglsTo1LjIMOZ ++byo0BTAmwUevU/iuOXQ4qRBXXcoidDcTCrxfUSPG9wdt9l+m5SdQpWqfQ+fx5O7m ++SLlrHyZCiPSFMtC9DxqjIklHjf5W3wslGLgaD30YXa4VDYkRihf3CNsxGQ+tVvef 
++l0ZjoAitF7Gaua06IESmKnpHe23dkr1cjYq+u2IV+xGH8LeExdwsQ9kpuTeXPnQs ++JOA99SsFx1ct32RrwjxnDDsiNkaViTKo9GDkV3jQTfoFgAVqfSgg9wGXpqUqhNG7 ++TiSIHCowllLny2zn4XrXCy2niD3VDt0skb3l/PaegHE2z7S5YY85nQtYwpLiwB9M ++SQ08DYKxPBZYKtS2iZ/fsA1gjSRQDPg/SIxMhUC3M3qH8iWny1Lzl25F2Uq7VVEX ++LdTUtaby49jRTT3CQGr5n6z7bMbUegiY7h8WmOekuThGDH+4xZp6+rDP4GFk4FeK ++JcF70vMQYIjQZhadic6olv+9VtUP42ltGG/yP9a3eWRkzfAf2eCh6B1rYdgEWwE8 ++rlcZzwM+y6eUmeNF2FVWB8iWtTMQHy+dYNPM+Jtus1KQKxiiq/yCRs7nWvzWRFWA ++HRyqV0J6/lqgm4FvfktFt1T0W+mDoLJOR2/zIwMy2lgL5zeHuR3SaMJnCikJbqKS ++HB3UvrhAWUcZqdH29+FhVWeM7ybyF1Wccmf+IIC/ePLa6gjtqPV8lG/5kbpcpnB6 ++UQY8WWaKMxyr3jJ9bAX5QKshchp04cDecOLZrpFGNNQngR8RxSEkiIgAqNxWunIu ++KrdBDrupv/XAgEOclmgToY3iywLJSV5gHAyHWDUhRH4cFCLiGPl4XIcnXOuTze3H ++3j+EYSiS3v3DhHjp33YU2pXlJDjiYsKzAXejEh66++Y8qaQdCAad3ruWRCzW3kgk ++Md0A1VGzntTnQsewvExQEMZH2LtYIsPv3KCYGeSAuLabX4tbGk79PswjnjLLEOr0 ++Ghf6RF6qf5/iFyJoG4vrbKT8kx6ywh0InILCdjUunuDskIBxX6tEcr9XwajoIvb2 ++kcmGdjam5kKLS7QOWQTl8/r/cuFes0dj34cX5Qpq+Gd7tRq/D+b0207926Cxvftv ++qQ1cVn8HiLxKkZzd3tpf2xnoV1zkTL0oHrNg+qzxoxXUTUcwtIf1d/HRbYEAhi/d ++bBBoFeftEHWNq+sJgS9bH+XNzo/yK4u04B5miOq8v4CSkJdzu+ZdF22d4cjiGmtQ ++8BTmcn0Unzm+u5H0+QSZe54QBHJGNXXOIKMTkgnOdW27g4DbI1y7fCqJiSMbRW6L ++oHmMfbdB3GWqGbsUkhY8i6h9op0MU6WOX7ea2Rxyt4t6 ++-----END ENCRYPTED PRIVATE KEY----- +diff --git a/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst +new file mode 100644 +index 0000000..8f43d32 +--- /dev/null ++++ b/Misc/NEWS.d/next/Tests/2019-09-25-13-11-29.bpo-38271.iHXNIg.rst +@@ -0,0 +1,4 @@ ++The private keys for test_ssl were encrypted with 3DES in traditional ++PKCS#5 format. 3DES and the digest algorithm of PKCS#5 are blocked by ++some strict crypto policies. Use PKCS#8 format with AES256 encryption ++instead. 
+-- +2.21.0 + + +From d8584f9bb3fb841a1b21ed25abc2237ea8bbc206 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Tue, 26 Nov 2019 23:57:21 +0100 +Subject: [PATCH 3/5] Use PROTOCOL_TLS_CLIENT/SERVER + +Replaces PROTOCOL_TLSv* and PROTOCOL_SSLv23 with PROTOCOL_TLS_CLIENT and +PROTOCOL_TLS_SERVER. + +Partially backports a170fa162dc03f0a014373349e548954fff2e567 +--- + Lib/ssl.py | 7 +- + Lib/test/test_logging.py | 2 +- + Lib/test/test_ssl.py | 169 +++++++++++++++++++-------------------- + 3 files changed, 87 insertions(+), 91 deletions(-) + +diff --git a/Lib/ssl.py b/Lib/ssl.py +index 0114387..c5c5529 100644 +--- a/Lib/ssl.py ++++ b/Lib/ssl.py +@@ -473,7 +473,7 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None, + context.load_default_certs(purpose) + return context + +-def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None, ++def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE, + check_hostname=False, purpose=Purpose.SERVER_AUTH, + certfile=None, keyfile=None, + cafile=None, capath=None, cadata=None): +@@ -492,9 +492,12 @@ def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=None, + # by default. 
+ context = SSLContext(protocol) + ++ if not check_hostname: ++ context.check_hostname = False + if cert_reqs is not None: + context.verify_mode = cert_reqs +- context.check_hostname = check_hostname ++ if check_hostname: ++ context.check_hostname = True + + if keyfile and not certfile: + raise ValueError("certfile must be specified") +diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py +index 763a5d1..d5c63b4 100644 +--- a/Lib/test/test_logging.py ++++ b/Lib/test/test_logging.py +@@ -1830,7 +1830,7 @@ class HTTPHandlerTest(BaseTest): + else: + here = os.path.dirname(__file__) + localhost_cert = os.path.join(here, "keycert.pem") +- sslctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ++ sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + sslctx.load_cert_chain(localhost_cert) + + context = ssl.create_default_context(cafile=localhost_cert) +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 639109f..a7bf2f7 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -155,6 +155,8 @@ def test_wrap_socket(sock, ssl_version=ssl.PROTOCOL_TLS, *, + **kwargs): + context = ssl.SSLContext(ssl_version) + if cert_reqs is not None: ++ if cert_reqs == ssl.CERT_NONE: ++ context.check_hostname = False + context.verify_mode = cert_reqs + if ca_certs is not None: + context.load_verify_locations(ca_certs) +@@ -1377,7 +1379,7 @@ class ContextTests(unittest.TestCase): + self._assert_context_options(ctx) + + def test_check_hostname(self): +- ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS) + self.assertFalse(ctx.check_hostname) + + # Requires CERT_REQUIRED or CERT_OPTIONAL +@@ -2386,17 +2388,13 @@ if _have_threads: + server_params_test(context, context, + chatty=True, connectionchatty=True) + +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) +- client_context.load_verify_locations(SIGNING_CA) +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) +- # server_context.load_verify_locations(SIGNING_CA) +- 
server_context.load_cert_chain(SIGNED_CERTFILE2) ++ client_context, server_context, hostname = testing_context() + + with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_SERVER): + server_params_test(client_context=client_context, + server_context=server_context, + chatty=True, connectionchatty=True, +- sni_name='fakehostname') ++ sni_name='localhost') + + client_context.check_hostname = False + with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_CLIENT): +@@ -2404,7 +2402,7 @@ if _have_threads: + server_params_test(client_context=server_context, + server_context=client_context, + chatty=True, connectionchatty=True, +- sni_name='fakehostname') ++ sni_name='localhost') + self.assertIn('called a function you should not call', + str(e.exception)) + +@@ -2469,39 +2467,38 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) ++ client_context, server_context, hostname = testing_context() + +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.verify_mode = ssl.CERT_REQUIRED +- context.load_verify_locations(SIGNING_CA) + tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0) +- self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT | tf) ++ self.assertEqual(client_context.verify_flags, ssl.VERIFY_DEFAULT | tf) + + # VERIFY_DEFAULT should pass + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") + + # VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails +- context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF ++ client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF + + server = ThreadedEchoServer(context=server_context, chatty=True) 
+ with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + with self.assertRaisesRegex(ssl.SSLError, + "certificate verify failed"): + s.connect((HOST, server.port)) + + # now load a CRL file. The CRL file is signed by the CA. +- context.load_verify_locations(CRLFILE) ++ client_context.load_verify_locations(CRLFILE) + + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket()) as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") +@@ -2510,19 +2507,13 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.verify_mode = ssl.CERT_REQUIRED +- context.check_hostname = True +- context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() + + # correct hostname should verify + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket(), +- server_hostname="localhost") as s: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: + s.connect((HOST, server.port)) + cert = s.getpeercert() + self.assertTrue(cert, "Can't get peer certificate.") +@@ -2530,7 +2521,7 @@ if _have_threads: + # incorrect hostname should raise an exception + server = ThreadedEchoServer(context=server_context, chatty=True) + with server: +- with context.wrap_socket(socket.socket(), ++ with client_context.wrap_socket(socket.socket(), + server_hostname="invalid") as s: + with self.assertRaisesRegex(ssl.CertificateError, + "hostname 'invalid' doesn't match 'localhost'"): +@@ 
-2542,7 +2533,7 @@ if _have_threads: + with socket.socket() as s: + with self.assertRaisesRegex(ValueError, + "check_hostname requires server_hostname"): +- context.wrap_socket(s) ++ client_context.wrap_socket(s) + + def test_wrong_cert(self): + """Connecting when the server rejects the client's certificate +@@ -2767,7 +2758,6 @@ if _have_threads: + msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6") + + server = ThreadedEchoServer(CERTFILE, +- ssl_version=ssl.PROTOCOL_TLSv1, + starttls_server=True, + chatty=True, + connectionchatty=True) +@@ -2795,7 +2785,7 @@ if _have_threads: + sys.stdout.write( + " client: read %r from server, starting TLS...\n" + % msg) +- conn = test_wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1) ++ conn = test_wrap_socket(s) + wrapped = True + elif indata == b"ENDTLS" and msg.startswith(b"ok"): + # ENDTLS ok, switch back to clear text +@@ -2882,7 +2872,7 @@ if _have_threads: + + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -2892,7 +2882,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + # helper methods for standardising recv* method signatures + def _recv_into(): +@@ -3034,7 +3024,7 @@ if _have_threads: + def test_nonblocking_send(self): + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -3044,7 +3034,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + s.setblocking(False) + +@@ -3190,9 +3180,11 @@ 
if _have_threads: + Basic tests for SSLSocket.version(). + More tests are done in the test_protocol_*() methods. + """ +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ context.check_hostname = False ++ context.verify_mode = ssl.CERT_NONE + with ThreadedEchoServer(CERTFILE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + chatty=False) as server: + with context.wrap_socket(socket.socket()) as s: + self.assertIs(s.version(), None) +@@ -3247,7 +3239,7 @@ if _have_threads: + + server = ThreadedEchoServer(CERTFILE, + certreqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1, ++ ssl_version=ssl.PROTOCOL_TLS_SERVER, + cacerts=CERTFILE, + chatty=True, + connectionchatty=False) +@@ -3257,7 +3249,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + # get the data + cb_data = s.get_channel_binding("tls-unique") +@@ -3282,7 +3274,7 @@ if _have_threads: + certfile=CERTFILE, + ca_certs=CERTFILE, + cert_reqs=ssl.CERT_NONE, +- ssl_version=ssl.PROTOCOL_TLSv1) ++ ssl_version=ssl.PROTOCOL_TLS_CLIENT) + s.connect((HOST, server.port)) + new_cb_data = s.get_channel_binding("tls-unique") + if support.verbose: +@@ -3299,32 +3291,35 @@ if _have_threads: + s.close() + + def test_compression(self): +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + if support.verbose: + sys.stdout.write(" got compression: {!r}\n".format(stats['compression'])) + self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' }) + ++ + @unittest.skipUnless(hasattr(ssl, 
'OP_NO_COMPRESSION'), + "ssl.OP_NO_COMPRESSION needed for this test") + def test_compression_disabled(self): +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- context.options |= ssl.OP_NO_COMPRESSION +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ client_context.options |= ssl.OP_NO_COMPRESSION ++ server_context.options |= ssl.OP_NO_COMPRESSION ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['compression'], None) + + def test_dh_params(self): + # Check we can get a connection with ephemeral Diffie-Hellman +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- context.load_dh_params(DHFILE) +- context.set_ciphers("kEDH") +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ server_context.load_dh_params(DHFILE) ++ server_context.set_ciphers("kEDH") ++ server_context.options |= ssl.OP_NO_TLSv1_3 ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + cipher = stats["cipher"][0] + parts = cipher.split("-") + if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts: +@@ -3332,22 +3327,20 @@ if _have_threads: + + def test_selected_alpn_protocol(self): + # selected_alpn_protocol() is None unless ALPN is used. 
+- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_alpn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_ALPN, "ALPN support required") + def test_selected_alpn_protocol_if_server_uses_alpn(self): + # selected_alpn_protocol() is None unless ALPN is used by the client. +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.load_verify_locations(CERTFILE) +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(CERTFILE) ++ client_context, server_context, hostname = testing_context() + server_context.set_alpn_protocols(['foo', 'bar']) + stats = server_params_test(client_context, server_context, +- chatty=True, connectionchatty=True) ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_alpn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_ALPN, "ALPN support needed for this test") +@@ -3394,10 +3387,10 @@ if _have_threads: + + def test_selected_npn_protocol(self): + # selected_npn_protocol() is None unless NPN is used +- context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- context.load_cert_chain(CERTFILE) +- stats = server_params_test(context, context, +- chatty=True, connectionchatty=True) ++ client_context, server_context, hostname = testing_context() ++ stats = server_params_test(client_context, server_context, ++ chatty=True, connectionchatty=True, ++ sni_name=hostname) + self.assertIs(stats['client_npn_protocol'], None) + + @unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test") +@@ -3430,12 +3423,11 @@ if _have_threads: + self.assertEqual(server_result, expected, msg % (server_result, "server")) + + def sni_contexts(self): +- 
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + server_context.load_cert_chain(SIGNED_CERTFILE) +- other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ++ other_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + other_context.load_cert_chain(SIGNED_CERTFILE2) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED ++ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + client_context.load_verify_locations(SIGNING_CA) + return server_context, other_context, client_context + +@@ -3448,6 +3440,8 @@ if _have_threads: + calls = [] + server_context, other_context, client_context = self.sni_contexts() + ++ client_context.check_hostname = False ++ + def servername_cb(ssl_sock, server_name, initial_context): + calls.append((server_name, initial_context)) + if server_name is not None: +@@ -3533,11 +3527,7 @@ if _have_threads: + self.assertIn("TypeError", stderr.getvalue()) + + def test_shared_ciphers(self): +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED +- client_context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() + if ssl.OPENSSL_VERSION_INFO >= (1, 0, 2): + client_context.set_ciphers("AES128:AES256") + server_context.set_ciphers("AES256") +@@ -3555,7 +3545,8 @@ if _have_threads: + # TLS 1.3 ciphers are always enabled + expected_algs.extend(["TLS_CHACHA20", "TLS_AES"]) + +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + ciphers = stats['server_shared_ciphers'][0] + self.assertGreater(len(ciphers), 0) + for name, tls_version, bits in ciphers: +@@ -3595,14 +3586,13 @@ if _have_threads: + self.assertEqual(s.recv(1024), TEST_DATA) + + def 
test_session(self): +- server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- server_context.load_cert_chain(SIGNED_CERTFILE) +- client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) +- client_context.verify_mode = ssl.CERT_REQUIRED +- client_context.load_verify_locations(SIGNING_CA) ++ client_context, server_context, hostname = testing_context() ++ # TODO: sessions aren't compatible with TLSv1.3 yet ++ client_context.options |= ssl.OP_NO_TLSv1_3 + + # first connection without session +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + session = stats['session'] + self.assertTrue(session.id) + self.assertGreater(session.time, 0) +@@ -3616,7 +3606,8 @@ if _have_threads: + self.assertEqual(sess_stat['hits'], 0) + + # reuse session +- stats = server_params_test(client_context, server_context, session=session) ++ stats = server_params_test(client_context, server_context, ++ session=session, sni_name=hostname) + sess_stat = server_context.session_stats() + self.assertEqual(sess_stat['accept'], 2) + self.assertEqual(sess_stat['hits'], 1) +@@ -3629,7 +3620,8 @@ if _have_threads: + self.assertGreaterEqual(session2.timeout, session.timeout) + + # another one without session +- stats = server_params_test(client_context, server_context) ++ stats = server_params_test(client_context, server_context, ++ sni_name=hostname) + self.assertFalse(stats['session_reused']) + session3 = stats['session'] + self.assertNotEqual(session3.id, session.id) +@@ -3639,7 +3631,8 @@ if _have_threads: + self.assertEqual(sess_stat['hits'], 1) + + # reuse session again +- stats = server_params_test(client_context, server_context, session=session) ++ stats = server_params_test(client_context, server_context, ++ session=session, sni_name=hostname) + self.assertTrue(stats['session_reused']) + session4 = stats['session'] + self.assertEqual(session4.id, session.id) +-- +2.21.0 + + +From 
743c3e09b485092b51a982ab9859ffc79cbb7791 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 27 Nov 2019 00:01:17 +0100 +Subject: [PATCH 4/5] Adjust some tests for TLS 1.3 compatibility + +Partially backports some changes from 529525fb5a8fd9b96ab4021311a598c77588b918 +and 2614ed4c6e4b32eafb683f2378ed20e87d42976d +--- + Lib/test/test_ssl.py | 17 ++++++++++++++--- + 1 file changed, 14 insertions(+), 3 deletions(-) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index a7bf2f7..43c2dbc 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -3189,7 +3189,12 @@ if _have_threads: + with context.wrap_socket(socket.socket()) as s: + self.assertIs(s.version(), None) + s.connect((HOST, server.port)) +- self.assertEqual(s.version(), 'TLSv1') ++ if IS_OPENSSL_1_1: ++ self.assertEqual(s.version(), 'TLSv1.3') ++ elif ssl.OPENSSL_VERSION_INFO >= (1, 0, 2): ++ self.assertEqual(s.version(), 'TLSv1.2') ++ else: # 0.9.8 to 1.0.1 ++ self.assertIn(s.version(), ('TLSv1', 'TLSv1.2')) + self.assertIs(s.version(), None) + + @unittest.skipUnless(ssl.HAS_TLSv1_3, +@@ -3259,7 +3264,10 @@ if _have_threads: + + # check if it is sane + self.assertIsNotNone(cb_data) +- self.assertEqual(len(cb_data), 12) # True for TLSv1 ++ if s.version() == 'TLSv1.3': ++ self.assertEqual(len(cb_data), 48) ++ else: ++ self.assertEqual(len(cb_data), 12) # True for TLSv1 + + # and compare with the peers version + s.write(b"CB tls-unique\n") +@@ -3283,7 +3291,10 @@ if _have_threads: + # is it really unique + self.assertNotEqual(cb_data, new_cb_data) + self.assertIsNotNone(cb_data) +- self.assertEqual(len(cb_data), 12) # True for TLSv1 ++ if s.version() == 'TLSv1.3': ++ self.assertEqual(len(cb_data), 48) ++ else: ++ self.assertEqual(len(cb_data), 12) # True for TLSv1 + s.write(b"CB tls-unique\n") + peer_data_repr = s.read().strip() + self.assertEqual(peer_data_repr, +-- +2.21.0 + + +From cd250c8a782f36c7a6f5ffabc922cb75744fa9c0 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis 
+Date: Tue, 26 Nov 2019 23:18:10 +0100 +Subject: [PATCH 5/5] Skip the ssl tests that rely on TLSv1 and TLSv1.1 + availability + +--- + Lib/test/test_ssl.py | 32 +++++++++++++++++++++++--------- + 1 file changed, 23 insertions(+), 9 deletions(-) + +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 43c2dbc..b35db25 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -39,6 +39,13 @@ IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL') + IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0) + PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS') + ++# On RHEL8 openssl disables TLSv1 and TLSv1.1 on runtime. ++# Since we don't have a good way to detect runtime changes ++# on the allowed protocols, we hardcode the default config ++# with those flags. ++TLSv1_enabled = False ++TLSv1_1_enabled = False ++ + def data_file(*name): + return os.path.join(os.path.dirname(__file__), *name) + +@@ -2380,7 +2387,8 @@ if _have_threads: + if support.verbose: + sys.stdout.write("\n") + for protocol in PROTOCOLS: +- if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER}: ++ if protocol in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER, ++ ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1}: + continue + with self.subTest(protocol=ssl._PROTOCOL_NAMES[protocol]): + context = ssl.SSLContext(protocol) +@@ -2650,17 +2658,20 @@ if _have_threads: + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1') ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1') + + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL) +- 
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) + + if hasattr(ssl, 'PROTOCOL_SSLv3'): + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED) + try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED) +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + + # Server with specific SSL options + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2698,9 +2709,10 @@ if _have_threads: + """Connecting to a TLSv1 server with various client options""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1') +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) +- try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) ++ if TLSv1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1') ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL) ++ try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED) + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False) + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2716,7 +2728,8 @@ if _have_threads: + Testing against older TLS versions.""" + if support.verbose: + sys.stdout.write("\n") +- try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') ++ if TLSv1_1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + if hasattr(ssl, 'PROTOCOL_SSLv2'): + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False) + if hasattr(ssl, 'PROTOCOL_SSLv3'): +@@ -2724,7 +2737,8 @@ if 
_have_threads: + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False, + client_options=ssl.OP_NO_TLSv1_1) + +- try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') ++ if TLSv1_1_enabled: ++ try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1') + try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False) + try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False) + +-- +2.21.0 + diff --git a/SOURCES/00319-test_tarfile_ppc64.patch b/SOURCES/00319-test_tarfile_ppc64.patch new file mode 100644 index 0000000..132b9b3 --- /dev/null +++ b/SOURCES/00319-test_tarfile_ppc64.patch @@ -0,0 +1,41 @@ +commit 86ed41792d394f804d2c9e695ac8b257220fbdee +Author: Victor Stinner +Date: Tue Mar 12 17:17:13 2019 +0100 + + Fix test_tarfile on ppc64 + + Fix sparse file tests of test_tarfile on ppc64le with the tmpfs + filesystem. + + * https://bugzilla.redhat.com/show_bug.cgi?id=1639490 + * https://bugs.python.org/issue35772 + * https://github.com/python/cpython/commit/d1dd6be613381b996b9071443ef081de8e5f3aff + +diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py +index 4cd7d53..bd8b05f 100644 +--- a/Lib/test/test_tarfile.py ++++ b/Lib/test/test_tarfile.py +@@ -973,16 +973,21 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase): + def _fs_supports_holes(): + # Return True if the platform knows the st_blocks stat attribute and + # uses st_blocks units of 512 bytes, and if the filesystem is able to +- # store holes in files. ++ # store holes of 4 KiB in files. ++ # ++ # The function returns False if page size is larger than 4 KiB. ++ # For example, ppc64 uses pages of 64 KiB. + if sys.platform.startswith("linux"): + # Linux evidentially has 512 byte st_blocks units. 
+ name = os.path.join(TEMPDIR, "sparse-test") + with open(name, "wb") as fobj: ++ # Seek to "punch a hole" of 4 KiB + fobj.seek(4096) ++ fobj.write(b'x' * 4096) + fobj.truncate() + s = os.stat(name) + support.unlink(name) +- return s.st_blocks == 0 ++ return (s.st_blocks * 512 < s.st_size) + else: + return False + diff --git a/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch b/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch new file mode 100644 index 0000000..9d5c63a --- /dev/null +++ b/SOURCES/00320-CVE-2019-9636-and-CVE-2019-10160.patch @@ -0,0 +1,137 @@ +diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst +index d991254..647af61 100644 +--- a/Doc/library/urllib.parse.rst ++++ b/Doc/library/urllib.parse.rst +@@ -121,6 +121,11 @@ or on combining URL components into a URL string. + Unmatched square brackets in the :attr:`netloc` attribute will raise a + :exc:`ValueError`. + ++ Characters in the :attr:`netloc` attribute that decompose under NFKC ++ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, ++ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is ++ decomposed before parsing, no error will be raised. ++ + .. versionchanged:: 3.2 + Added IPv6 URL parsing capabilities. + +@@ -133,6 +138,10 @@ or on combining URL components into a URL string. + Out-of-range port numbers now raise :exc:`ValueError`, instead of + returning :const:`None`. + ++ .. versionchanged:: 3.6.9 ++ Characters that affect netloc parsing under NFKC normalization will ++ now raise :exc:`ValueError`. ++ + + .. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None) + +@@ -256,10 +265,19 @@ or on combining URL components into a URL string. + Unmatched square brackets in the :attr:`netloc` attribute will raise a + :exc:`ValueError`. 
+ ++ Characters in the :attr:`netloc` attribute that decompose under NFKC ++ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, ++ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is ++ decomposed before parsing, no error will be raised. ++ + .. versionchanged:: 3.6 + Out-of-range port numbers now raise :exc:`ValueError`, instead of + returning :const:`None`. + ++ .. versionchanged:: 3.6.9 ++ Characters that affect netloc parsing under NFKC normalization will ++ now raise :exc:`ValueError`. ++ + + .. function:: urlunsplit(parts) + +diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py +index be50b47..68f633c 100644 +--- a/Lib/test/test_urlparse.py ++++ b/Lib/test/test_urlparse.py +@@ -1,3 +1,5 @@ ++import sys ++import unicodedata + import unittest + import urllib.parse + +@@ -984,6 +986,34 @@ class UrlParseTestCase(unittest.TestCase): + expected.append(name) + self.assertCountEqual(urllib.parse.__all__, expected) + ++ def test_urlsplit_normalization(self): ++ # Certain characters should never occur in the netloc, ++ # including under normalization. 
++ # Ensure that ALL of them are detected and cause an error ++ illegal_chars = '/:#?@' ++ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars} ++ denorm_chars = [ ++ c for c in map(chr, range(128, sys.maxunicode)) ++ if (hex_chars & set(unicodedata.decomposition(c).split())) ++ and c not in illegal_chars ++ ] ++ # Sanity check that we found at least one such character ++ self.assertIn('\u2100', denorm_chars) ++ self.assertIn('\uFF03', denorm_chars) ++ ++ # bpo-36742: Verify port separators are ignored when they ++ # existed prior to decomposition ++ urllib.parse.urlsplit('http://\u30d5\u309a:80') ++ with self.assertRaises(ValueError): ++ urllib.parse.urlsplit('http://\u30d5\u309a\ufe1380') ++ ++ for scheme in ["http", "https", "ftp"]: ++ for netloc in ["netloc{}false.netloc", "n{}user@netloc"]: ++ for c in denorm_chars: ++ url = "{}://{}/path".format(scheme, netloc.format(c)) ++ with self.subTest(url=url, char='{:04X}'.format(ord(c))): ++ with self.assertRaises(ValueError): ++ urllib.parse.urlsplit(url) + + class Utility_Tests(unittest.TestCase): + """Testcase to test the various utility functions in the urllib.""" +diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py +index 85e68c8..fa8827a 100644 +--- a/Lib/urllib/parse.py ++++ b/Lib/urllib/parse.py +@@ -391,6 +391,24 @@ def _splitnetloc(url, start=0): + delim = min(delim, wdelim) # use earliest delim position + return url[start:delim], url[delim:] # return (domain, rest) + ++def _checknetloc(netloc): ++ if not netloc or not any(ord(c) > 127 for c in netloc): ++ return ++ # looking for characters like \u2100 that expand to 'a/c' ++ # IDNA uses NFKC equivalence, so normalize for this check ++ import unicodedata ++ n = netloc.replace('@', '') # ignore characters already included ++ n = n.replace(':', '') # but not the surrounding text ++ n = n.replace('#', '') ++ n = n.replace('?', '') ++ netloc2 = unicodedata.normalize('NFKC', n) ++ if n == netloc2: ++ return ++ for c in '/?#@:': ++ if c in netloc2: ++ 
raise ValueError("netloc '" + netloc + "' contains invalid " + ++ "characters under NFKC normalization") ++ + def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL into 5 components: + :///?# +@@ -420,6 +438,7 @@ def urlsplit(url, scheme='', allow_fragments=True): + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) ++ _checknetloc(netloc) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) +@@ -443,6 +462,7 @@ def urlsplit(url, scheme='', allow_fragments=True): + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) ++ _checknetloc(netloc) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) diff --git a/SOURCES/00324-disallow-control-chars-in-http-urls.patch b/SOURCES/00324-disallow-control-chars-in-http-urls.patch new file mode 100644 index 0000000..9e4317a --- /dev/null +++ b/SOURCES/00324-disallow-control-chars-in-http-urls.patch @@ -0,0 +1,150 @@ +From 7e200e0763f5b71c199aaf98bd5588f291585619 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= +Date: Tue, 7 May 2019 17:28:47 +0200 +Subject: [PATCH] bpo-30458: Disallow control chars in http URLs. (GH-12755) + (GH-13154) +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +Disallow control chars in http URLs in urllib.urlopen. This addresses a potential security problem for applications that do not sanity check their URLs where http request headers could be injected. + +Disable https related urllib tests on a build without ssl (GH-13032) +These tests require an SSL enabled build. Skip these tests when python is built without SSL to fix test failures. + +Use http.client.InvalidURL instead of ValueError as the new error case's exception. 
(GH-13044) + +Backport Co-Authored-By: Miro Hrončok +--- + Lib/http/client.py | 15 ++++++ + Lib/test/test_urllib.py | 53 +++++++++++++++++++ + Lib/test/test_xmlrpc.py | 7 ++- + .../2019-04-10-08-53-30.bpo-30458.51E-DA.rst | 1 + + 4 files changed, 75 insertions(+), 1 deletion(-) + create mode 100644 Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst + +diff --git a/Lib/http/client.py b/Lib/http/client.py +index 1de151c38e..2afd452fe3 100644 +--- a/Lib/http/client.py ++++ b/Lib/http/client.py +@@ -140,6 +140,16 @@ _MAXHEADERS = 100 + _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch + _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search + ++# These characters are not allowed within HTTP URL paths. ++# See https://tools.ietf.org/html/rfc3986#section-3.3 and the ++# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. ++# Prevents CVE-2019-9740. Includes control characters such as \r\n. ++# We don't restrict chars above \x7f as putrequest() limits us to ASCII. ++_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]') ++# Arguably only these _should_ allowed: ++# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") ++# We are more lenient for assumed real world compatibility purposes. ++ + # We always set the Content-Length header for these methods because some + # servers will otherwise respond with a 411 + _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} +@@ -1101,6 +1111,11 @@ class HTTPConnection: + self._method = method + if not url: + url = '/' ++ # Prevent CVE-2019-9740. ++ match = _contains_disallowed_url_pchar_re.search(url) ++ if match: ++ raise InvalidURL(f"URL can't contain control characters. 
{url!r} " ++ f"(found at least {match.group()!r})") + request = '%s %s %s' % (method, url, self._http_vsn_str) + + # Non-ASCII characters should have been eliminated earlier +diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py +index 2ac73b58d8..7214492eca 100644 +--- a/Lib/test/test_urllib.py ++++ b/Lib/test/test_urllib.py +@@ -329,6 +329,59 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin, FakeFTPMixin): + finally: + self.unfakehttp() + ++ @unittest.skipUnless(ssl, "ssl module required") ++ def test_url_with_control_char_rejected(self): ++ for char_no in list(range(0, 0x21)) + [0x7f]: ++ char = chr(char_no) ++ schemeless_url = f"//localhost:7777/test{char}/" ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ try: ++ # We explicitly test urllib.request.urlopen() instead of the top ++ # level 'def urlopen()' function defined in this... (quite ugly) ++ # test suite. They use different url opening codepaths. Plain ++ # urlopen uses FancyURLOpener which goes via a codepath that ++ # calls urllib.parse.quote() on the URL which makes all of the ++ # above attempts at injection within the url _path_ safe. ++ escaped_char_repr = repr(char).replace('\\', r'\\') ++ InvalidURL = http.client.InvalidURL ++ with self.assertRaisesRegex( ++ InvalidURL, f"contain control.*{escaped_char_repr}"): ++ urllib.request.urlopen(f"http:{schemeless_url}") ++ with self.assertRaisesRegex( ++ InvalidURL, f"contain control.*{escaped_char_repr}"): ++ urllib.request.urlopen(f"https:{schemeless_url}") ++ # This code path quotes the URL so there is no injection. 
++ resp = urlopen(f"http:{schemeless_url}") ++ self.assertNotIn(char, resp.geturl()) ++ finally: ++ self.unfakehttp() ++ ++ @unittest.skipUnless(ssl, "ssl module required") ++ def test_url_with_newline_header_injection_rejected(self): ++ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") ++ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" ++ schemeless_url = "//" + host + ":8080/test/?test=a" ++ try: ++ # We explicitly test urllib.request.urlopen() instead of the top ++ # level 'def urlopen()' function defined in this... (quite ugly) ++ # test suite. They use different url opening codepaths. Plain ++ # urlopen uses FancyURLOpener which goes via a codepath that ++ # calls urllib.parse.quote() on the URL which makes all of the ++ # above attempts at injection within the url _path_ safe. ++ InvalidURL = http.client.InvalidURL ++ with self.assertRaisesRegex( ++ InvalidURL, r"contain control.*\\r.*(found at least . .)"): ++ urllib.request.urlopen(f"http:{schemeless_url}") ++ with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): ++ urllib.request.urlopen(f"https:{schemeless_url}") ++ # This code path quotes the URL so there is no injection. ++ resp = urlopen(f"http:{schemeless_url}") ++ self.assertNotIn(' ', resp.geturl()) ++ self.assertNotIn('\r', resp.geturl()) ++ self.assertNotIn('\n', resp.geturl()) ++ finally: ++ self.unfakehttp() ++ + def test_read_0_9(self): + # "0.9" response accepted (but not "simple responses" without + # a status line) +diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py +index 32263f7f0b..0e002ec4ef 100644 +--- a/Lib/test/test_xmlrpc.py ++++ b/Lib/test/test_xmlrpc.py +@@ -945,7 +945,12 @@ class SimpleServerTestCase(BaseServerTestCase): + def test_partial_post(self): + # Check that a partial POST doesn't make the server loop: issue #14001. 
+ conn = http.client.HTTPConnection(ADDR, PORT) +- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye') ++ conn.send('POST /RPC2 HTTP/1.0\r\n' ++ 'Content-Length: 100\r\n\r\n' ++ 'bye HTTP/1.1\r\n' ++ f'Host: {ADDR}:{PORT}\r\n' ++ 'Accept-Encoding: identity\r\n' ++ 'Content-Length: 0\r\n\r\n'.encode('ascii')) + conn.close() + + def test_context_manager(self): +diff --git a/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst +new file mode 100644 +index 0000000000..ed8027fb4d +--- /dev/null ++++ b/Misc/NEWS.d/next/Security/2019-04-10-08-53-30.bpo-30458.51E-DA.rst +@@ -0,0 +1 @@ ++Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or control characters through into the underlying http client request. Such potentially malicious header injection URLs now cause an http.client.InvalidURL exception to be raised. +-- +2.21.0 + diff --git a/SOURCES/00325-CVE-2019-9948.patch b/SOURCES/00325-CVE-2019-9948.patch new file mode 100644 index 0000000..4bf81ca --- /dev/null +++ b/SOURCES/00325-CVE-2019-9948.patch @@ -0,0 +1,49 @@ +diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py +index 649a5b8..0061a52 100644 +--- a/Lib/test/test_urllib.py ++++ b/Lib/test/test_urllib.py +@@ -16,6 +16,7 @@ except ImportError: + ssl = None + import sys + import tempfile ++import warnings + from nturl2path import url2pathname, pathname2url + + from base64 import b64encode +@@ -1463,6 +1464,23 @@ class URLopener_Tests(unittest.TestCase): + "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), + "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") + ++ def test_local_file_open(self): ++ # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme ++ class DummyURLopener(urllib.request.URLopener): ++ def open_local_file(self, url): ++ return url ++ ++ with warnings.catch_warnings(record=True): ++ warnings.simplefilter("ignore", DeprecationWarning) ++ ++ for url in 
('local_file://example', 'local-file://example'): ++ self.assertRaises(OSError, urllib.request.urlopen, url) ++ self.assertRaises(OSError, urllib.request.URLopener().open, url) ++ self.assertRaises(OSError, urllib.request.URLopener().retrieve, url) ++ self.assertRaises(OSError, DummyURLopener().open, url) ++ self.assertRaises(OSError, DummyURLopener().retrieve, url) ++ ++ + # Just commented them out. + # Can't really tell why keep failing in windows and sparc. + # Everywhere else they work ok, but on those machines, sometimes +diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py +index d28f2f8..c9945d9 100644 +--- a/Lib/urllib/request.py ++++ b/Lib/urllib/request.py +@@ -1747,7 +1747,7 @@ class URLopener: + name = 'open_' + urltype + self.type = urltype + name = name.replace('-', '_') +- if not hasattr(self, name): ++ if not hasattr(self, name) or name == 'open_local_file': + if proxy: + return self.open_unknown_proxy(proxy, fullurl, data) + else: diff --git a/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch b/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch new file mode 100644 index 0000000..4584ce1 --- /dev/null +++ b/SOURCES/00326-do-not-set-PHA-verify-flag-on-client-side.patch @@ -0,0 +1,117 @@ +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index 883201f..cf4d84d 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -3891,6 +3891,37 @@ class TestPostHandshakeAuth(unittest.TestCase): + s.write(b'PHA') + self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024)) + ++ def test_bpo37428_pha_cert_none(self): ++ # verify that post_handshake_auth does not implicitly enable cert ++ # validation. 
++ hostname = 'localhost' ++ client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ client_context.post_handshake_auth = True ++ client_context.load_cert_chain(SIGNED_CERTFILE) ++ # no cert validation and CA on client side ++ client_context.check_hostname = False ++ client_context.verify_mode = ssl.CERT_NONE ++ ++ server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ++ server_context.load_cert_chain(SIGNED_CERTFILE) ++ server_context.load_verify_locations(SIGNING_CA) ++ server_context.post_handshake_auth = True ++ server_context.verify_mode = ssl.CERT_REQUIRED ++ ++ server = ThreadedEchoServer(context=server_context, chatty=False) ++ with server: ++ with client_context.wrap_socket(socket.socket(), ++ server_hostname=hostname) as s: ++ s.connect((HOST, server.port)) ++ s.write(b'HASCERT') ++ self.assertEqual(s.recv(1024), b'FALSE\n') ++ s.write(b'PHA') ++ self.assertEqual(s.recv(1024), b'OK\n') ++ s.write(b'HASCERT') ++ self.assertEqual(s.recv(1024), b'TRUE\n') ++ # server cert has not been validated ++ self.assertEqual(s.getpeercert(), {}) ++ + + def test_main(verbose=False): + if support.verbose: +diff --git a/Modules/_ssl.c b/Modules/_ssl.c +index ec366f0..9bf1cde 100644 +--- a/Modules/_ssl.c ++++ b/Modules/_ssl.c +@@ -732,6 +732,26 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, + #endif + SSL_set_mode(self->ssl, mode); + ++#ifdef TLS1_3_VERSION ++ if (sslctx->post_handshake_auth == 1) { ++ if (socket_type == PY_SSL_SERVER) { ++ /* bpo-37428: OpenSSL does not ignore SSL_VERIFY_POST_HANDSHAKE. ++ * Set SSL_VERIFY_POST_HANDSHAKE flag only for server sockets and ++ * only in combination with SSL_VERIFY_PEER flag. 
*/ ++ int mode = SSL_get_verify_mode(self->ssl); ++ if (mode & SSL_VERIFY_PEER) { ++ int (*verify_cb)(int, X509_STORE_CTX *) = NULL; ++ verify_cb = SSL_get_verify_callback(self->ssl); ++ mode |= SSL_VERIFY_POST_HANDSHAKE; ++ SSL_set_verify(self->ssl, mode, verify_cb); ++ } ++ } else { ++ /* client socket */ ++ SSL_set_post_handshake_auth(self->ssl, 1); ++ } ++ } ++#endif ++ + #if HAVE_SNI + if (server_hostname != NULL) { + /* Don't send SNI for IP addresses. We cannot simply use inet_aton() and +@@ -2765,10 +2785,10 @@ _set_verify_mode(PySSLContext *self, enum py_ssl_cert_requirements n) + "invalid value for verify_mode"); + return -1; + } +-#ifdef TLS1_3_VERSION +- if (self->post_handshake_auth) +- mode |= SSL_VERIFY_POST_HANDSHAKE; +-#endif ++ ++ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for ++ * server sockets and SSL_set_post_handshake_auth() for client. */ ++ + /* keep current verify cb */ + verify_cb = SSL_CTX_get_verify_callback(self->ctx); + SSL_CTX_set_verify(self->ctx, mode, verify_cb); +@@ -3346,8 +3366,6 @@ get_post_handshake_auth(PySSLContext *self, void *c) { + #if TLS1_3_VERSION + static int + set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) { +- int (*verify_cb)(int, X509_STORE_CTX *) = NULL; +- int mode = SSL_CTX_get_verify_mode(self->ctx); + int pha = PyObject_IsTrue(arg); + + if (pha == -1) { +@@ -3355,17 +3373,8 @@ set_post_handshake_auth(PySSLContext *self, PyObject *arg, void *c) { + } + self->post_handshake_auth = pha; + +- /* client-side socket setting, ignored by server-side */ +- SSL_CTX_set_post_handshake_auth(self->ctx, pha); +- +- /* server-side socket setting, ignored by client-side */ +- verify_cb = SSL_CTX_get_verify_callback(self->ctx); +- if (pha) { +- mode |= SSL_VERIFY_POST_HANDSHAKE; +- } else { +- mode ^= SSL_VERIFY_POST_HANDSHAKE; +- } +- SSL_CTX_set_verify(self->ctx, mode, verify_cb); ++ /* bpo-37428: newPySSLSocket() sets SSL_VERIFY_POST_HANDSHAKE flag for ++ * server sockets and 
SSL_set_post_handshake_auth() for client. */ + + return 0; + } diff --git a/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch b/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch new file mode 100644 index 0000000..bf92e84 --- /dev/null +++ b/SOURCES/00327-enable-tls-1.3-PHA-in-http.client.patch @@ -0,0 +1,70 @@ +diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst +index 2f59ece..d756916 100644 +--- a/Doc/library/http.client.rst ++++ b/Doc/library/http.client.rst +@@ -88,6 +88,11 @@ The module provides the following classes: + :func:`ssl._create_unverified_context` can be passed to the *context* + parameter. + ++ .. versionchanged:: 3.7.4 ++ This class now enables TLS 1.3 ++ :attr:`ssl.SSLContext.post_handshake_auth` for the default *context* or ++ when *cert_file* is passed with a custom *context*. ++ + .. deprecated:: 3.6 + + *key_file* and *cert_file* are deprecated in favor of *context*. +diff --git a/Lib/http/client.py b/Lib/http/client.py +index 1a6bd8a..f0d2642 100644 +--- a/Lib/http/client.py ++++ b/Lib/http/client.py +@@ -1390,6 +1390,9 @@ else: + self.cert_file = cert_file + if context is None: + context = ssl._create_default_https_context() ++ # enable PHA for TLS 1.3 connections if available ++ if context.post_handshake_auth is not None: ++ context.post_handshake_auth = True + will_verify = context.verify_mode != ssl.CERT_NONE + if check_hostname is None: + check_hostname = context.check_hostname +@@ -1398,6 +1401,10 @@ else: + "either CERT_OPTIONAL or CERT_REQUIRED") + if key_file or cert_file: + context.load_cert_chain(cert_file, key_file) ++ # cert and key file means the user wants to authenticate. ++ # enable TLS 1.3 PHA implicitly even for custom contexts. 
++ if context.post_handshake_auth is not None: ++ context.post_handshake_auth = True + self._context = context + self._check_hostname = check_hostname + +diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py +index 714d521..5795b7a 100644 +--- a/Lib/test/test_httplib.py ++++ b/Lib/test/test_httplib.py +@@ -1709,6 +1709,24 @@ class HTTPSTest(TestCase): + self.assertEqual(h, c.host) + self.assertEqual(p, c.port) + ++ def test_tls13_pha(self): ++ import ssl ++ if not ssl.HAS_TLSv1_3: ++ self.skipTest('TLS 1.3 support required') ++ # just check status of PHA flag ++ h = client.HTTPSConnection('localhost', 443) ++ self.assertTrue(h._context.post_handshake_auth) ++ ++ context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ++ self.assertFalse(context.post_handshake_auth) ++ h = client.HTTPSConnection('localhost', 443, context=context) ++ self.assertIs(h._context, context) ++ self.assertFalse(h._context.post_handshake_auth) ++ ++ h = client.HTTPSConnection('localhost', 443, context=context, ++ cert_file=CERT_localhost) ++ self.assertTrue(h._context.post_handshake_auth) ++ + + class RequestBodyTest(TestCase): + """Test cases where a request includes a message body.""" diff --git a/SOURCES/00329-fips.patch b/SOURCES/00329-fips.patch new file mode 100644 index 0000000..e250caa --- /dev/null +++ b/SOURCES/00329-fips.patch @@ -0,0 +1,5571 @@ +From 53d108c5103c99079f1f231b0e731f3f47a142fc Mon Sep 17 00:00:00 2001 +From: "Miss Islington (bot)" + <31488909+miss-islington@users.noreply.github.com> +Date: Wed, 25 Sep 2019 08:50:31 -0700 +Subject: [PATCH 01/36] [3.8] bpo-38270: Check for hash digest algorithms and + avoid MD5 (GH-16382) (GH-16393) + +Make it easier to run and test Python on systems with restrict crypto policies: + +* add requires_hashdigest to test.support to check if a hash digest algorithm is available and working +* avoid MD5 in test_hmac +* replace MD5 with SHA256 in test_tarfile +* mark network tests that require MD5 for MD5-based digest auth or CRAM-MD5 + 
+https://bugs.python.org/issue38270 +(cherry picked from commit c64a1a61e6fc542cada40eb069a239317e1af36e) + +Co-authored-by: Christian Heimes + +https://bugs.python.org/issue38270 + +Automerge-Triggered-By: @tiran +--- + Lib/test/support/__init__.py | 22 ++++++++++ + Lib/test/test_hmac.py | 67 +++++++++++++++++++------------ + Lib/test/test_imaplib.py | 6 ++- + Lib/test/test_poplib.py | 2 + + Lib/test/test_smtplib.py | 11 ++++- + Lib/test/test_tarfile.py | 56 ++++++++++++++------------ + Lib/test/test_urllib2_localnet.py | 1 + + 7 files changed, 112 insertions(+), 53 deletions(-) + +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index 66c0fed8411..a43819904e0 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -11,6 +11,7 @@ import faulthandler + import fnmatch + import functools + import gc ++import hashlib + import importlib + import importlib.util + import io +@@ -627,6 +628,27 @@ def requires_mac_ver(*min_version): + return wrapper + return decorator + ++def requires_hashdigest(digestname): ++ """Decorator raising SkipTest if a hashing algorithm is not available ++ ++ The hashing algorithm could be missing or blocked by a strict crypto ++ policy. ++ ++ ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS ++ ValueError: unsupported hash type md4 ++ """ ++ def decorator(func): ++ @functools.wraps(func) ++ def wrapper(*args, **kwargs): ++ try: ++ hashlib.new(digestname) ++ except ValueError: ++ raise unittest.SkipTest( ++ f"hash digest '{digestname}' is not available." ++ ) ++ return func(*args, **kwargs) ++ return wrapper ++ return decorator + + # Don't use "localhost", since resolving it uses the DNS under recent + # Windows versions (see issue #18792). 
+diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 067e13f1079..81c3485f761 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -4,6 +4,8 @@ import hashlib + import unittest + import warnings + ++from test.support import requires_hashdigest ++ + + def ignore_warning(func): + @functools.wraps(func) +@@ -17,6 +19,7 @@ def ignore_warning(func): + + class TestVectorsTestCase(unittest.TestCase): + ++ @requires_hashdigest('md5') + def test_md5_vectors(self): + # Test the HMAC module against test vectors from the RFC. + +@@ -63,6 +66,7 @@ class TestVectorsTestCase(unittest.TestCase): + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + ++ @requires_hashdigest('sha1') + def test_sha_vectors(self): + def shatest(key, data, digest): + h = hmac.HMAC(key, data, digestmod=hashlib.sha1) +@@ -230,23 +234,28 @@ class TestVectorsTestCase(unittest.TestCase): + '134676fb6de0446065c97440fa8c6a58', + }) + ++ @requires_hashdigest('sha224') + def test_sha224_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha224, 'sha224', 28, 64) + ++ @requires_hashdigest('sha256') + def test_sha256_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha256, 'sha256', 32, 64) + ++ @requires_hashdigest('sha384') + def test_sha384_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha384, 'sha384', 48, 128) + ++ @requires_hashdigest('sha512') + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + ++ @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): + """Ain't no block_size attribute here.""" + def __init__(self, *args): +- self._x = hashlib.sha1(*args) ++ self._x = hashlib.sha256(*args) + self.digest_size = self._x.digest_size + def update(self, v): + self._x.update(v) +@@ -273,76 +282,80 @@ class TestVectorsTestCase(unittest.TestCase): + self.assertEqual(h.hexdigest().upper(), digest) + + ++ + class ConstructorTestCase(unittest.TestCase): + ++ 
expected = ( ++ "6c845b47f52b3b47f6590c502db7825aad757bf4fadc8fa972f7cd2e76a5bdeb" ++ ) + @ignore_warning ++ @requires_hashdigest('sha256') + def test_normal(self): + # Standard constructor call. +- failed = 0 + try: +- h = hmac.HMAC(b"key") ++ hmac.HMAC(b"key", digestmod='sha256') + except Exception: + self.fail("Standard constructor call raised exception.") + + @ignore_warning ++ @requires_hashdigest('sha256') + def test_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key + # of type bytes + with self.assertRaises(TypeError): +- h = hmac.HMAC("key") ++ h = hmac.HMAC("key", digestmod='sha256') + +- @ignore_warning ++ @requires_hashdigest('sha256') + def test_dot_new_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key + # of type bytes + with self.assertRaises(TypeError): +- h = hmac.new("key") ++ h = hmac.HMAC("key", digestmod='sha256') + + @ignore_warning ++ @requires_hashdigest('sha256') + def test_withtext(self): + # Constructor call with text. 
+ try: +- h = hmac.HMAC(b"key", b"hash this!") ++ h = hmac.HMAC(b"key", b"hash this!", digestmod='sha256') + except Exception: + self.fail("Constructor call with text argument raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_with_bytearray(self): + try: + h = hmac.HMAC(bytearray(b"key"), bytearray(b"hash this!"), +- digestmod="md5") ++ digestmod="sha256") + except Exception: + self.fail("Constructor call with bytearray arguments raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_with_memoryview_msg(self): + try: +- h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="md5") ++ h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="sha256") + except Exception: + self.fail("Constructor call with memoryview msg raised exception.") +- self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864') ++ self.assertEqual(h.hexdigest(), self.expected) + ++ @requires_hashdigest('sha256') + def test_withmodule(self): + # Constructor call with text and digest module. + try: +- h = hmac.HMAC(b"key", b"", hashlib.sha1) ++ h = hmac.HMAC(b"key", b"", hashlib.sha256) + except Exception: +- self.fail("Constructor call with hashlib.sha1 raised exception.") ++ self.fail("Constructor call with hashlib.sha256 raised exception.") + +-class SanityTestCase(unittest.TestCase): + +- @ignore_warning +- def test_default_is_md5(self): +- # Testing if HMAC defaults to MD5 algorithm. +- # NOTE: this whitebox test depends on the hmac class internals +- h = hmac.HMAC(b"key") +- self.assertEqual(h.digest_cons, hashlib.md5) ++class SanityTestCase(unittest.TestCase): + ++ @requires_hashdigest('sha256') + def test_exercise_all_methods(self): + # Exercising all methods once. 
+ # This must not raise any exceptions + try: +- h = hmac.HMAC(b"my secret key", digestmod="md5") ++ h = hmac.HMAC(b"my secret key", digestmod="sha256") + h.update(b"compute the hash of this text!") + dig = h.digest() + dig = h.hexdigest() +@@ -350,11 +363,13 @@ class SanityTestCase(unittest.TestCase): + except Exception: + self.fail("Exception raised during normal usage of HMAC class.") + ++ + class CopyTestCase(unittest.TestCase): + ++ @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. +- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h2 = h1.copy() + self.assertTrue(h1.digest_cons == h2.digest_cons, + "digest constructors don't match.") +@@ -363,9 +378,10 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + ++ @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. +- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h2 = h1.copy() + # Using id() in case somebody has overridden __eq__/__ne__. + self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.") +@@ -374,9 +390,10 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + ++ @requires_hashdigest('sha256') + def test_equality(self): + # Testing if the copy has the same digests. 
+- h1 = hmac.HMAC(b"key", digestmod="md5") ++ h1 = hmac.HMAC(b"key", digestmod="sha256") + h1.update(b"some random text") + h2 = h1.copy() + self.assertEqual(h1.digest(), h2.digest(), +diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py +index 0593a3756b0..086cc0ac4e0 100644 +--- a/Lib/test/test_imaplib.py ++++ b/Lib/test/test_imaplib.py +@@ -14,7 +14,8 @@ import calendar + import inspect + + from test.support import (reap_threads, verbose, transient_internet, +- run_with_tz, run_with_locale, cpython_only) ++ run_with_tz, run_with_locale, cpython_only, ++ requires_hashdigest) + import unittest + from unittest import mock + from datetime import datetime, timezone, timedelta +@@ -369,6 +370,7 @@ class NewIMAPTestsMixin(): + self.assertEqual(code, 'OK') + self.assertEqual(server.response, b'ZmFrZQ==\r\n') # b64 encoded 'fake' + ++ @requires_hashdigest('md5') + def test_login_cram_md5_bytes(self): + class AuthHandler(SimpleIMAPHandler): + capabilities = 'LOGINDISABLED AUTH=CRAM-MD5' +@@ -386,6 +388,7 @@ class NewIMAPTestsMixin(): + ret, _ = client.login_cram_md5("tim", b"tanstaaftanstaaf") + self.assertEqual(ret, "OK") + ++ @requires_hashdigest('md5') + def test_login_cram_md5_plain_text(self): + class AuthHandler(SimpleIMAPHandler): + capabilities = 'LOGINDISABLED AUTH=CRAM-MD5' +@@ -797,6 +800,7 @@ class ThreadedNetworkedTests(unittest.TestCase): + b'ZmFrZQ==\r\n') # b64 encoded 'fake' + + @reap_threads ++ @requires_hashdigest('md5') + def test_login_cram_md5(self): + + class AuthHandler(SimpleIMAPHandler): +diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py +index 234c855545c..b8146be3a8c 100644 +--- a/Lib/test/test_poplib.py ++++ b/Lib/test/test_poplib.py +@@ -304,9 +304,11 @@ class TestPOP3Class(TestCase): + def test_rpop(self): + self.assertOK(self.client.rpop('foo')) + ++ @test_support.requires_hashdigest('md5') + def test_apop_normal(self): + self.assertOK(self.client.apop('foo', 'dummypassword')) + ++ 
@test_support.requires_hashdigest('md5') + def test_apop_REDOS(self): + # Replace welcome with very long evil welcome. + # NB The upper bound on welcome length is currently 2048. +diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +index 87047514e7a..64b3201254a 100644 +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -4,6 +4,7 @@ import email.mime.text + from email.message import EmailMessage + from email.base64mime import body_encode as encode_base64 + import email.utils ++import hashlib + import hmac + import socket + import smtpd +@@ -18,6 +19,7 @@ import textwrap + + import unittest + from test import support, mock_socket ++from test.support import requires_hashdigest + from unittest.mock import Mock + + HOST = "localhost" +@@ -968,6 +970,7 @@ class SMTPSimTests(unittest.TestCase): + self.assertEqual(resp, (235, b'Authentication Succeeded')) + smtp.close() + ++ @requires_hashdigest('md5') + def testAUTH_CRAM_MD5(self): + self.serv.add_feature("AUTH CRAM-MD5") + smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) +@@ -984,7 +987,13 @@ class SMTPSimTests(unittest.TestCase): + smtp.close() + + def test_auth_function(self): +- supported = {'CRAM-MD5', 'PLAIN', 'LOGIN'} ++ supported = {'PLAIN', 'LOGIN'} ++ try: ++ hashlib.md5() ++ except ValueError: ++ pass ++ else: ++ supported.add('CRAM-MD5') + for mechanism in supported: + self.serv.add_feature("AUTH {}".format(mechanism)) + for mechanism in supported: +diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py +index 4cd7d5370f5..b5e855e0d7e 100644 +--- a/Lib/test/test_tarfile.py ++++ b/Lib/test/test_tarfile.py +@@ -1,7 +1,7 @@ + import sys + import os + import io +-from hashlib import md5 ++from hashlib import sha256 + from contextlib import contextmanager + from random import Random + import pathlib +@@ -11,7 +11,7 @@ import unittest.mock + import tarfile + + from test import support +-from test.support import script_helper ++from test.support 
import script_helper, requires_hashdigest + + # Check for our compression modules. + try: +@@ -27,8 +27,8 @@ try: + except ImportError: + lzma = None + +-def md5sum(data): +- return md5(data).hexdigest() ++def sha256sum(data): ++ return sha256(data).hexdigest() + + TEMPDIR = os.path.abspath(support.TESTFN) + "-tardir" + tarextdir = TEMPDIR + '-extract-test' +@@ -39,8 +39,12 @@ xzname = os.path.join(TEMPDIR, "testtar.tar.xz") + tmpname = os.path.join(TEMPDIR, "tmp.tar") + dotlessname = os.path.join(TEMPDIR, "testtar") + +-md5_regtype = "65f477c818ad9e15f7feab0c6d37742f" +-md5_sparse = "a54fbc4ca4f4399a90e1b27164012fc6" ++sha256_regtype = ( ++ "e09e4bc8b3c9d9177e77256353b36c159f5f040531bbd4b024a8f9b9196c71ce" ++) ++sha256_sparse = ( ++ "4f05a776071146756345ceee937b33fc5644f5a96b9780d1c7d6a32cdf164d7b" ++) + + + class TarTest: +@@ -95,7 +99,7 @@ class UstarReadTest(ReadTest, unittest.TestCase): + data = fobj.read() + self.assertEqual(len(data), tarinfo.size, + "regular file extraction failed") +- self.assertEqual(md5sum(data), md5_regtype, ++ self.assertEqual(sha256sum(data), sha256_regtype, + "regular file extraction failed") + + def test_fileobj_readlines(self): +@@ -180,7 +184,7 @@ class UstarReadTest(ReadTest, unittest.TestCase): + with self.tar.extractfile("ustar/regtype") as fobj: + fobj = io.TextIOWrapper(fobj) + data = fobj.read().encode("iso8859-1") +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + try: + fobj.seek(100) + except AttributeError: +@@ -546,13 +550,13 @@ class MiscReadTestBase(CommonReadTest): + self.addCleanup(support.unlink, os.path.join(TEMPDIR, "ustar/lnktype")) + with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + tar.extract("ustar/symtype", TEMPDIR) + self.addCleanup(support.unlink, os.path.join(TEMPDIR, "ustar/symtype")) + with open(os.path.join(TEMPDIR, 
"ustar/symtype"), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + def test_extractall(self): + # Test if extractall() correctly restores directory permissions +@@ -687,7 +691,7 @@ class StreamReadTest(CommonReadTest, unittest.TestCase): + data = fobj.read() + self.assertEqual(len(data), tarinfo.size, + "regular file extraction failed") +- self.assertEqual(md5sum(data), md5_regtype, ++ self.assertEqual(sha256sum(data), sha256_regtype, + "regular file extraction failed") + + def test_provoke_stream_error(self): +@@ -799,8 +803,8 @@ class MemberReadTest(ReadTest, unittest.TestCase): + def _test_member(self, tarinfo, chksum=None, **kwargs): + if chksum is not None: + with self.tar.extractfile(tarinfo) as f: +- self.assertEqual(md5sum(f.read()), chksum, +- "wrong md5sum for %s" % tarinfo.name) ++ self.assertEqual(sha256sum(f.read()), chksum, ++ "wrong sha256sum for %s" % tarinfo.name) + + kwargs["mtime"] = 0o7606136617 + kwargs["uid"] = 1000 +@@ -815,11 +819,11 @@ class MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_regtype(self): + tarinfo = self.tar.getmember("ustar/regtype") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_conttype(self): + tarinfo = self.tar.getmember("ustar/conttype") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_dirtype(self): + tarinfo = self.tar.getmember("ustar/dirtype") +@@ -851,28 +855,28 @@ class MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_sparse(self): + tarinfo = self.tar.getmember("ustar/sparse") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse(self): + tarinfo = self.tar.getmember("gnu/sparse") +- self._test_member(tarinfo, 
size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_00(self): + tarinfo = self.tar.getmember("gnu/sparse-0.0") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_01(self): + tarinfo = self.tar.getmember("gnu/sparse-0.1") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_gnusparse_10(self): + tarinfo = self.tar.getmember("gnu/sparse-1.0") +- self._test_member(tarinfo, size=86016, chksum=md5_sparse) ++ self._test_member(tarinfo, size=86016, chksum=sha256_sparse) + + def test_find_umlauts(self): + tarinfo = self.tar.getmember("ustar/umlauts-" + "\xc4\xd6\xdc\xe4\xf6\xfc\xdf") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_ustar_longname(self): + name = "ustar/" + "12345/" * 39 + "1234567/longname" +@@ -880,7 +884,7 @@ class MemberReadTest(ReadTest, unittest.TestCase): + + def test_find_regtype_oldv7(self): + tarinfo = self.tar.getmember("misc/regtype-old-v7") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + def test_find_pax_umlauts(self): + self.tar.close() +@@ -888,7 +892,7 @@ class MemberReadTest(ReadTest, unittest.TestCase): + encoding="iso8859-1") + tarinfo = self.tar.getmember("pax/umlauts-" + "\xc4\xd6\xdc\xe4\xf6\xfc\xdf") +- self._test_member(tarinfo, size=7011, chksum=md5_regtype) ++ self._test_member(tarinfo, size=7011, chksum=sha256_regtype) + + + class LongnameTest: +@@ -950,8 +954,8 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase): + filename = os.path.join(TEMPDIR, name) + with open(filename, "rb") as fobj: + data = fobj.read() +- self.assertEqual(md5sum(data), md5_sparse, +- "wrong md5sum for %s" % name) 
++ self.assertEqual(sha256sum(data), sha256_sparse, ++ "wrong sha256sum for %s" % name) + + if self._fs_supports_holes(): + s = os.stat(filename) +@@ -2431,7 +2435,7 @@ class LinkEmulationTest(ReadTest, unittest.TestCase): + self.tar.extract(name, TEMPDIR) + with open(os.path.join(TEMPDIR, name), "rb") as f: + data = f.read() +- self.assertEqual(md5sum(data), md5_regtype) ++ self.assertEqual(sha256sum(data), sha256_regtype) + + # See issues #1578269, #8879, and #17689 for some history on these skips + @unittest.skipIf(hasattr(os.path, "islink"), +diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py +index ef0091c4930..895f97cc09a 100644 +--- a/Lib/test/test_urllib2_localnet.py ++++ b/Lib/test/test_urllib2_localnet.py +@@ -325,6 +325,7 @@ class ProxyAuthTests(unittest.TestCase): + PASSWD = "test123" + REALM = "TestRealm" + ++ @support.requires_hashdigest("md5") + def setUp(self): + super(ProxyAuthTests, self).setUp() + # Ignore proxy bypass settings in the environment. +-- +2.21.0 + + +From 75d9613294e1cbd5aab9363cf3c435871a25f065 Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Mon, 30 Sep 2019 09:10:38 +0200 +Subject: [PATCH 02/36] [3.8] bpo-38270: More fixes for strict crypto policy + (GH-16418) (#16437) + +test_hmac and test_hashlib test built-in hashing implementations and +OpenSSL-based hashing implementations. Add more checks to skip OpenSSL +implementations when a strict crypto policy is active. + +Use EVP_DigestInit_ex() instead of EVP_DigestInit() to initialize the +EVP context. The EVP_DigestInit() function clears alls flags and breaks +usedforsecurity flag again. + +Signed-off-by: Christian Heimes + +https://bugs.python.org/issue38270. 
+(cherry picked from commit 90558158093c0ad893102158fd3c2dd9f864e82e) + +Co-authored-by: Christian Heimes +--- + Lib/test/support/__init__.py | 19 ++++++++++++++++--- + Lib/test/test_hashlib.py | 3 +++ + Lib/test/test_hmac.py | 12 ++++++------ + 3 files changed, 25 insertions(+), 9 deletions(-) + +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index a43819904e0..3342218cf0a 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -71,6 +71,11 @@ try: + except ImportError: + resource = None + ++try: ++ import _hashlib ++except ImportError: ++ _hashlib = None ++ + __all__ = [ + # globals + "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast", +@@ -88,7 +93,8 @@ __all__ = [ + "create_empty_file", "can_symlink", "fs_is_case_insensitive", + # unittest + "is_resource_enabled", "requires", "requires_freebsd_version", +- "requires_linux_version", "requires_mac_ver", "check_syntax_error", ++ "requires_linux_version", "requires_mac_ver", "requires_hashdigest", ++ "check_syntax_error", + "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", + "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest", + "skip_unless_symlink", "requires_gzip", "requires_bz2", "requires_lzma", +@@ -628,12 +634,16 @@ def requires_mac_ver(*min_version): + return wrapper + return decorator + +-def requires_hashdigest(digestname): ++def requires_hashdigest(digestname, openssl=None): + """Decorator raising SkipTest if a hashing algorithm is not available + + The hashing algorithm could be missing or blocked by a strict crypto + policy. + ++ If 'openssl' is True, then the decorator checks that OpenSSL provides ++ the algorithm. Otherwise the check falls back to built-in ++ implementations. 
++ + ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS + ValueError: unsupported hash type md4 + """ +@@ -641,7 +651,10 @@ def requires_hashdigest(digestname): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: +- hashlib.new(digestname) ++ if openssl and _hashlib is not None: ++ _hashlib.new(digestname) ++ else: ++ hashlib.new(digestname) + except ValueError: + raise unittest.SkipTest( + f"hash digest '{digestname}' is not available." +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 9711856853d..1b1b4d54112 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -8,6 +8,7 @@ + + import array + from binascii import unhexlify ++import functools + import hashlib + import importlib + import itertools +@@ -21,6 +22,7 @@ import unittest + import warnings + from test import support + from test.support import _4G, bigmemtest, import_fresh_module ++from test.support import requires_hashdigest + from http.client import HTTPException + + # Were we compiled --with-pydebug or with #define Py_DEBUG? +@@ -117,6 +119,7 @@ class HashLibTestCase(unittest.TestCase): + constructors.add(_test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') ++ self._hashlib = _hashlib + if _hashlib: + # These two algorithms should always be present when this module + # is compiled. If not, something was compiled wrong. +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 81c3485f761..2a5a0d3d061 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -19,7 +19,7 @@ def ignore_warning(func): + + class TestVectorsTestCase(unittest.TestCase): + +- @requires_hashdigest('md5') ++ @requires_hashdigest('md5', openssl=True) + def test_md5_vectors(self): + # Test the HMAC module against test vectors from the RFC. 
+ +@@ -66,7 +66,7 @@ class TestVectorsTestCase(unittest.TestCase): + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + +- @requires_hashdigest('sha1') ++ @requires_hashdigest('sha1', openssl=True) + def test_sha_vectors(self): + def shatest(key, data, digest): + h = hmac.HMAC(key, data, digestmod=hashlib.sha1) +@@ -234,19 +234,19 @@ class TestVectorsTestCase(unittest.TestCase): + '134676fb6de0446065c97440fa8c6a58', + }) + +- @requires_hashdigest('sha224') ++ @requires_hashdigest('sha224', openssl=True) + def test_sha224_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha224, 'sha224', 28, 64) + +- @requires_hashdigest('sha256') ++ @requires_hashdigest('sha256', openssl=True) + def test_sha256_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha256, 'sha256', 32, 64) + +- @requires_hashdigest('sha384') ++ @requires_hashdigest('sha384', openssl=True) + def test_sha384_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha384, 'sha384', 48, 128) + +- @requires_hashdigest('sha512') ++ @requires_hashdigest('sha512', openssl=True) + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + +-- +2.21.0 + + +From 19a40f7e820bb991c03e03c05dc832539b676983 Mon Sep 17 00:00:00 2001 +From: stratakis +Date: Tue, 3 Dec 2019 16:35:54 +0100 +Subject: [PATCH 03/36] bpo-38270: Fix indentation of test_hmac assertions + (GH-17446) + +Since https://github.com/python/cpython/commit/c64a1a61e6fc542cada40eb069a239317e1af36e two assertions were indented and thus ignored when running test_hmac. + +This PR fixes it. As the change is quite trivial I didn't add a NEWS entry. 
+ + +https://bugs.python.org/issue38270 +--- + Lib/test/test_hmac.py | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 2a5a0d3d061..338e0215a41 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -329,7 +329,7 @@ class ConstructorTestCase(unittest.TestCase): + digestmod="sha256") + except Exception: + self.fail("Constructor call with bytearray arguments raised exception.") +- self.assertEqual(h.hexdigest(), self.expected) ++ self.assertEqual(h.hexdigest(), self.expected) + + @requires_hashdigest('sha256') + def test_with_memoryview_msg(self): +@@ -337,7 +337,7 @@ class ConstructorTestCase(unittest.TestCase): + h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="sha256") + except Exception: + self.fail("Constructor call with memoryview msg raised exception.") +- self.assertEqual(h.hexdigest(), self.expected) ++ self.assertEqual(h.hexdigest(), self.expected) + + @requires_hashdigest('sha256') + def test_withmodule(self): +-- +2.21.0 + + +From 5fa96205b380f2cb555ac346e6b7039746cb51ca Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 16:19:52 +0200 +Subject: [PATCH 04/36] Expose OpenSSL FIPS_mode() as hashlib.get_fips_mode() + +--- + Lib/hashlib.py | 5 +++++ + Modules/_hashopenssl.c | 36 +++++++++++++++++++++++++++++++++ + Modules/clinic/_hashopenssl.c.h | 25 ++++++++++++++++++++++- + 3 files changed, 65 insertions(+), 1 deletion(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 98d2d7981a3..ae17c585110 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -236,6 +236,11 @@ try: + except ImportError: + pass + ++try: ++ from _hashlib import get_fips_mode ++except ImportError: ++ pass ++ + + for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 84edd72c85a..4876a7f7aa7 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c 
+@@ -25,6 +25,9 @@ + #include + #include "openssl/err.h" + ++/* Expose FIPS_mode */ ++#include ++ + #include "clinic/_hashopenssl.c.h" + /*[clinic input] + module _hashlib +@@ -987,6 +990,38 @@ GEN_CONSTRUCTOR(sha256) + GEN_CONSTRUCTOR(sha384) + GEN_CONSTRUCTOR(sha512) + ++/*[clinic input] ++_hashlib.get_fips_mode ++ ++Determine the OpenSSL FIPS mode of operation. ++ ++Effectively any non-zero return value indicates FIPS mode; ++values other than 1 may have additional significance. ++ ++See OpenSSL documentation for the FIPS_mode() function for details. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hashlib_get_fips_mode_impl(PyObject *module) ++/*[clinic end generated code: output=ad8a7793310d3f98 input=f42a2135df2a5e11]*/ ++{ ++ int result = FIPS_mode(); ++ if (result == 0) { ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // But 0 is also a valid result value. 
++ ++ unsigned long errcode = ERR_peek_last_error(); ++ if (errcode) { ++ _setException(PyExc_ValueError); ++ return NULL; ++ } ++ } ++ return PyLong_FromLong(result); ++} ++ ++ + /* List of functions exported by this module */ + + static struct PyMethodDef EVP_functions[] = { +@@ -996,6 +1031,7 @@ static struct PyMethodDef EVP_functions[] = { + pbkdf2_hmac__doc__}, + #endif + _HASHLIB_SCRYPT_METHODDEF ++ _HASHLIB_GET_FIPS_MODE_METHODDEF + CONSTRUCTOR_METH_DEF(md5), + CONSTRUCTOR_METH_DEF(sha1), + CONSTRUCTOR_METH_DEF(sha224), +diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h +index 04453526040..8828e2776e9 100644 +--- a/Modules/clinic/_hashopenssl.c.h ++++ b/Modules/clinic/_hashopenssl.c.h +@@ -54,7 +54,30 @@ exit: + + #endif /* (OPENSSL_VERSION_NUMBER > 0x10100000L && !defined(OPENSSL_NO_SCRYPT) && !defined(LIBRESSL_VERSION_NUMBER)) */ + ++PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, ++"get_fips_mode($module, /)\n" ++"--\n" ++"\n" ++"Determine the OpenSSL FIPS mode of operation.\n" ++"\n" ++"Effectively any non-zero return value indicates FIPS mode;\n" ++"values other than 1 may have additional significance.\n" ++"\n" ++"See OpenSSL documentation for the FIPS_mode() function for details."); ++ ++#define _HASHLIB_GET_FIPS_MODE_METHODDEF \ ++ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__}, ++ ++static PyObject * ++_hashlib_get_fips_mode_impl(PyObject *module); ++ ++static PyObject * ++_hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hashlib_get_fips_mode_impl(module); ++} ++ + #ifndef _HASHLIB_SCRYPT_METHODDEF + #define _HASHLIB_SCRYPT_METHODDEF + #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ +-/*[clinic end generated code: output=118cd7036fa0fb52 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=7d683c930bbb7c36 input=a9049054013a1b77]*/ +-- +2.21.0 + + +From e60cc93037b913ca2a410d73b52d8301ab0d76cd Mon Sep 17 00:00:00 2001 +From: 
Charalampos Stratakis +Date: Thu, 25 Jul 2019 17:04:06 +0200 +Subject: [PATCH 05/36] Use python's fall backs for the crypto it implements + only if we are not in FIPS mode + +--- + Lib/hashlib.py | 209 +++++++++++++++------------------------ + Lib/test/test_hashlib.py | 1 + + 2 files changed, 81 insertions(+), 129 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index ae17c585110..7db1e02601f 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -67,56 +67,64 @@ algorithms_available = set(__always_supported) + __all__ = __always_supported + ('new', 'algorithms_guaranteed', + 'algorithms_available', 'pbkdf2_hmac') + +- +-__builtin_constructor_cache = {} +- +-def __get_builtin_constructor(name): +- cache = __builtin_constructor_cache +- constructor = cache.get(name) +- if constructor is not None: +- return constructor +- try: +- if name in ('SHA1', 'sha1'): +- import _sha1 +- cache['SHA1'] = cache['sha1'] = _sha1.sha1 +- elif name in ('MD5', 'md5'): +- import _md5 +- cache['MD5'] = cache['md5'] = _md5.md5 +- elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): +- import _sha256 +- cache['SHA224'] = cache['sha224'] = _sha256.sha224 +- cache['SHA256'] = cache['sha256'] = _sha256.sha256 +- elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): +- import _sha512 +- cache['SHA384'] = cache['sha384'] = _sha512.sha384 +- cache['SHA512'] = cache['sha512'] = _sha512.sha512 +- elif name in ('blake2b', 'blake2s'): +- import _blake2 +- cache['blake2b'] = _blake2.blake2b +- cache['blake2s'] = _blake2.blake2s +- elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', +- 'shake_128', 'shake_256'}: +- import _sha3 +- cache['sha3_224'] = _sha3.sha3_224 +- cache['sha3_256'] = _sha3.sha3_256 +- cache['sha3_384'] = _sha3.sha3_384 +- cache['sha3_512'] = _sha3.sha3_512 +- cache['shake_128'] = _sha3.shake_128 +- cache['shake_256'] = _sha3.shake_256 +- except ImportError: +- pass # no extension module, this hash is unsupported. 
+- +- constructor = cache.get(name) +- if constructor is not None: +- return constructor +- +- raise ValueError('unsupported hash type ' + name) ++try: ++ from _hashlib import get_fips_mode ++except ImportError: ++ def get_fips_mode(): ++ return 0 ++ ++ ++if not get_fips_mode(): ++ __builtin_constructor_cache = {} ++ ++ def __get_builtin_constructor(name): ++ cache = __builtin_constructor_cache ++ constructor = cache.get(name) ++ if constructor is not None: ++ return constructor ++ try: ++ if name in ('SHA1', 'sha1'): ++ import _sha1 ++ cache['SHA1'] = cache['sha1'] = _sha1.sha1 ++ elif name in ('MD5', 'md5'): ++ import _md5 ++ cache['MD5'] = cache['md5'] = _md5.md5 ++ elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): ++ import _sha256 ++ cache['SHA224'] = cache['sha224'] = _sha256.sha224 ++ cache['SHA256'] = cache['sha256'] = _sha256.sha256 ++ elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): ++ import _sha512 ++ cache['SHA384'] = cache['sha384'] = _sha512.sha384 ++ cache['SHA512'] = cache['sha512'] = _sha512.sha512 ++ elif name in ('blake2b', 'blake2s'): ++ import _blake2 ++ cache['blake2b'] = _blake2.blake2b ++ cache['blake2s'] = _blake2.blake2s ++ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', ++ 'shake_128', 'shake_256'}: ++ import _sha3 ++ cache['sha3_224'] = _sha3.sha3_224 ++ cache['sha3_256'] = _sha3.sha3_256 ++ cache['sha3_384'] = _sha3.sha3_384 ++ cache['sha3_512'] = _sha3.sha3_512 ++ cache['shake_128'] = _sha3.shake_128 ++ cache['shake_256'] = _sha3.shake_256 ++ except ImportError: ++ pass # no extension module, this hash is unsupported. ++ ++ constructor = cache.get(name) ++ if constructor is not None: ++ return constructor ++ ++ raise ValueError('unsupported hash type ' + name) + + + def __get_openssl_constructor(name): +- if name in {'blake2b', 'blake2s'}: +- # Prefer our blake2 implementation. 
+- return __get_builtin_constructor(name) ++ if not get_fips_mode(): ++ if name in {'blake2b', 'blake2s'}: ++ # Prefer our blake2 implementation. ++ return __get_builtin_constructor(name) + try: + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be +@@ -125,27 +133,30 @@ def __get_openssl_constructor(name): + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): ++ if get_fips_mode(): ++ raise + return __get_builtin_constructor(name) + +- +-def __py_new(name, data=b'', **kwargs): +- """new(name, data=b'', **kwargs) - Return a new hashing object using the +- named algorithm; optionally initialized with data (which must be +- a bytes-like object). +- """ +- return __get_builtin_constructor(name)(data, **kwargs) ++if not get_fips_mode(): ++ def __py_new(name, data=b'', **kwargs): ++ """new(name, data=b'', **kwargs) - Return a new hashing object using the ++ named algorithm; optionally initialized with data (which must be ++ a bytes-like object). ++ """ ++ return __get_builtin_constructor(name)(data, **kwargs) + + + def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must be a bytes-like object). + """ +- if name in {'blake2b', 'blake2s'}: +- # Prefer our blake2 implementation. +- # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. +- # It does neither support keyed blake2 nor advanced features like +- # salt, personal, tree hashing or SSE. +- return __get_builtin_constructor(name)(data, **kwargs) ++ if not get_fips_mode(): ++ if name in {'blake2b', 'blake2s'}: ++ # Prefer our blake2 implementation. ++ # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. ++ # It does neither support keyed blake2 nor advanced features like ++ # salt, personal, tree hashing or SSE. 
++ return __get_builtin_constructor(name)(data, **kwargs) + try: + return _hashlib.new(name, data) + except ValueError: +@@ -153,6 +164,8 @@ def __hash_new(name, data=b'', **kwargs): + # hash, try using our builtin implementations. + # This allows for SHA224/256 and SHA384/512 support even though + # the OpenSSL library prior to 0.9.8 doesn't provide them. ++ if get_fips_mode(): ++ raise + return __get_builtin_constructor(name)(data) + + +@@ -163,72 +176,14 @@ try: + algorithms_available = algorithms_available.union( + _hashlib.openssl_md_meth_names) + except ImportError: ++ if get_fips_mode(): ++ raise + new = __py_new + __get_hash = __get_builtin_constructor + +-try: +- # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA +- from _hashlib import pbkdf2_hmac +-except ImportError: +- _trans_5C = bytes((x ^ 0x5C) for x in range(256)) +- _trans_36 = bytes((x ^ 0x36) for x in range(256)) +- +- def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None): +- """Password based key derivation function 2 (PKCS #5 v2.0) + +- This Python implementations based on the hmac module about as fast +- as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster +- for long passwords. +- """ +- if not isinstance(hash_name, str): +- raise TypeError(hash_name) +- +- if not isinstance(password, (bytes, bytearray)): +- password = bytes(memoryview(password)) +- if not isinstance(salt, (bytes, bytearray)): +- salt = bytes(memoryview(salt)) +- +- # Fast inline HMAC implementation +- inner = new(hash_name) +- outer = new(hash_name) +- blocksize = getattr(inner, 'block_size', 64) +- if len(password) > blocksize: +- password = new(hash_name, password).digest() +- password = password + b'\x00' * (blocksize - len(password)) +- inner.update(password.translate(_trans_36)) +- outer.update(password.translate(_trans_5C)) +- +- def prf(msg, inner=inner, outer=outer): +- # PBKDF2_HMAC uses the password as key. 
We can re-use the same +- # digest objects and just update copies to skip initialization. +- icpy = inner.copy() +- ocpy = outer.copy() +- icpy.update(msg) +- ocpy.update(icpy.digest()) +- return ocpy.digest() +- +- if iterations < 1: +- raise ValueError(iterations) +- if dklen is None: +- dklen = outer.digest_size +- if dklen < 1: +- raise ValueError(dklen) +- +- dkey = b'' +- loop = 1 +- from_bytes = int.from_bytes +- while len(dkey) < dklen: +- prev = prf(salt + loop.to_bytes(4, 'big')) +- # endianess doesn't matter here as long to / from use the same +- rkey = int.from_bytes(prev, 'big') +- for i in range(iterations - 1): +- prev = prf(prev) +- # rkey = rkey ^ prev +- rkey ^= from_bytes(prev, 'big') +- loop += 1 +- dkey += rkey.to_bytes(inner.digest_size, 'big') +- +- return dkey[:dklen] ++# OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA ++from _hashlib import pbkdf2_hmac + + try: + # OpenSSL's scrypt requires OpenSSL 1.1+ +@@ -236,12 +191,6 @@ try: + except ImportError: + pass + +-try: +- from _hashlib import get_fips_mode +-except ImportError: +- pass +- +- + for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL + # version not supporting that algorithm. 
+@@ -254,4 +203,6 @@ for __func_name in __always_supported: + + # Cleanup locals() + del __always_supported, __func_name, __get_hash +-del __py_new, __hash_new, __get_openssl_constructor ++del __hash_new, __get_openssl_constructor ++if not get_fips_mode(): ++ del __py_new +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 1b1b4d54112..4e6b5b607a9 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -933,6 +933,7 @@ class KDFTests(unittest.TestCase): + iterations=1, dklen=None) + self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) + ++ @unittest.skip("The python implementation of pbkdf2_hmac has been removed") + def test_pbkdf2_hmac_py(self): + self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac) + +-- +2.21.0 + + +From 4803386d980b9ad1a9bdde829e8642676acd8bf4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 17:19:06 +0200 +Subject: [PATCH 06/36] Disable Python's hash implementations in FIPS mode, + forcing OpenSSL + +--- + Include/_hashopenssl.h | 66 ++++++++++++++++++++++++++++++++++ + Modules/_blake2/blake2b_impl.c | 5 +++ + Modules/_blake2/blake2module.c | 3 ++ + Modules/_blake2/blake2s_impl.c | 5 +++ + Modules/_hashopenssl.c | 36 +------------------ + Modules/_sha3/sha3module.c | 5 +++ + setup.py | 31 ++++++++-------- + 7 files changed, 101 insertions(+), 50 deletions(-) + create mode 100644 Include/_hashopenssl.h + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +new file mode 100644 +index 00000000000..a726c0d3fbf +--- /dev/null ++++ b/Include/_hashopenssl.h +@@ -0,0 +1,66 @@ ++#ifndef Py_HASHOPENSSL_H ++#define Py_HASHOPENSSL_H ++ ++#include "Python.h" ++#include ++#include ++ ++/* LCOV_EXCL_START */ ++static PyObject * ++_setException(PyObject *exc) ++{ ++ unsigned long errcode; ++ const char *lib, *func, *reason; ++ ++ errcode = ERR_peek_last_error(); ++ if (!errcode) { ++ PyErr_SetString(exc, "unknown reasons"); ++ return NULL; ++ } ++ ERR_clear_error(); ++ ++ lib = 
ERR_lib_error_string(errcode); ++ func = ERR_func_error_string(errcode); ++ reason = ERR_reason_error_string(errcode); ++ ++ if (lib && func) { ++ PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); ++ } ++ else if (lib) { ++ PyErr_Format(exc, "[%s] %s", lib, reason); ++ } ++ else { ++ PyErr_SetString(exc, reason); ++ } ++ return NULL; ++} ++/* LCOV_EXCL_STOP */ ++ ++ ++__attribute__((__unused__)) ++static int ++_Py_hashlib_fips_error(char *name) { ++ int result = FIPS_mode(); ++ if (result == 0) { ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // But 0 is also a valid result value. ++ ++ unsigned long errcode = ERR_peek_last_error(); ++ if (errcode) { ++ _setException(PyExc_ValueError); ++ return 1; ++ } ++ return 0; ++ } ++ PyErr_Format(PyExc_ValueError, "%s is not available in FIPS mode", ++ name); ++ return 1; ++} ++ ++#define FAIL_RETURN_IN_FIPS_MODE(name) do { \ ++ if (_Py_hashlib_fips_error(name)) return NULL; \ ++} while (0) ++ ++#endif // !Py_HASHOPENSSL_H +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index 418c0184006..341e67a8fcc 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -14,6 +14,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + #include "pystrhex.h" + #ifdef WITH_THREAD + #include "pythread.h" +@@ -103,6 +104,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + self = new_BLAKE2bObject(type); + if (self == NULL) { + goto error; +@@ -293,6 +296,8 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + #ifdef WITH_THREAD +diff --git a/Modules/_blake2/blake2module.c 
b/Modules/_blake2/blake2module.c +index e2a3d420d4e..817b7165684 100644 +--- a/Modules/_blake2/blake2module.c ++++ b/Modules/_blake2/blake2module.c +@@ -9,6 +9,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + + #include "impl/blake2.h" + +@@ -57,6 +58,8 @@ PyInit__blake2(void) + PyObject *m; + PyObject *d; + ++ FAIL_RETURN_IN_FIPS_MODE("blake2"); ++ + m = PyModule_Create(&blake2_module); + if (m == NULL) + return NULL; +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 24e529b6596..0dfe0586b57 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -14,6 +14,7 @@ + */ + + #include "Python.h" ++#include "_hashopenssl.h" + #include "pystrhex.h" + #ifdef WITH_THREAD + #include "pythread.h" +@@ -103,6 +104,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + self = new_BLAKE2sObject(type); + if (self == NULL) { + goto error; +@@ -293,6 +296,8 @@ _blake2_blake2s_update(BLAKE2sObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + #ifdef WITH_THREAD +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 4876a7f7aa7..02cf6087a2e 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -17,16 +17,13 @@ + #include "structmember.h" + #include "hashlib.h" + #include "pystrhex.h" ++#include "_hashopenssl.h" + + + /* EVP is the preferred interface to hashing in OpenSSL */ + #include + /* We use the object interface to discover what hashes OpenSSL supports. 
*/ + #include +-#include "openssl/err.h" +- +-/* Expose FIPS_mode */ +-#include + + #include "clinic/_hashopenssl.c.h" + /*[clinic input] +@@ -77,37 +74,6 @@ DEFINE_CONSTS_FOR_NEW(sha384) + DEFINE_CONSTS_FOR_NEW(sha512) + + +-/* LCOV_EXCL_START */ +-static PyObject * +-_setException(PyObject *exc) +-{ +- unsigned long errcode; +- const char *lib, *func, *reason; +- +- errcode = ERR_peek_last_error(); +- if (!errcode) { +- PyErr_SetString(exc, "unknown reasons"); +- return NULL; +- } +- ERR_clear_error(); +- +- lib = ERR_lib_error_string(errcode); +- func = ERR_func_error_string(errcode); +- reason = ERR_reason_error_string(errcode); +- +- if (lib && func) { +- PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); +- } +- else if (lib) { +- PyErr_Format(exc, "[%s] %s", lib, reason); +- } +- else { +- PyErr_SetString(exc, reason); +- } +- return NULL; +-} +-/* LCOV_EXCL_STOP */ +- + static EVPobject * + newEVPobject(PyObject *name) + { +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 2c2b2dbc5c7..624f7f247d4 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -18,6 +18,7 @@ + #include "Python.h" + #include "pystrhex.h" + #include "../hashlib.h" ++#include "_hashopenssl.h" + + /* ************************************************************************** + * SHA-3 (Keccak) and SHAKE +@@ -162,6 +163,7 @@ static PyTypeObject SHAKE256type; + static SHA3object * + newSHA3object(PyTypeObject *type) + { ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); + SHA3object *newobj; + newobj = (SHA3object *)PyObject_New(SHA3object, type); + if (newobj == NULL) { +@@ -177,6 +179,7 @@ newSHA3object(PyTypeObject *type) + static PyObject * + py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); + SHA3object *self = NULL; + Py_buffer buf = {NULL, NULL}; + HashReturn res; +@@ -724,6 +727,8 @@ PyInit__sha3(void) + { + PyObject *m = NULL; + ++ FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ + if ((m = 
PyModule_Create(&_SHA3module)) == NULL) { + return NULL; + } +diff --git a/setup.py b/setup.py +index e2c18982532..e2e659ef795 100644 +--- a/setup.py ++++ b/setup.py +@@ -901,31 +901,30 @@ class PyBuildExt(build_ext): + have_usable_openssl = (have_any_openssl and + openssl_ver >= min_openssl_ver) + ++ if not have_usable_openssl: ++ raise ValueError('Cannot build for RHEL without OpenSSL') ++ ++ ssl_args = { ++ 'include_dirs': ssl_incs, ++ 'library_dirs': ssl_libs, ++ 'libraries': ['ssl', 'crypto'], ++ } ++ + if have_any_openssl: + if have_usable_openssl: + # The _hashlib module wraps optimized implementations + # of hash functions from the OpenSSL library. + exts.append( Extension('_hashlib', ['_hashopenssl.c'], + depends = ['hashlib.h'], +- include_dirs = ssl_incs, +- library_dirs = ssl_libs, +- libraries = ['ssl', 'crypto']) ) ++ **ssl_args)) + else: + print("warning: openssl 0x%08x is too old for _hashlib" % + openssl_ver) + missing.append('_hashlib') + +- # We always compile these even when OpenSSL is available (issue #14693). +- # It's harmless and the object code is tiny (40-50 KB per module, +- # only loaded when actually used). +- exts.append( Extension('_sha256', ['sha256module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_sha512', ['sha512module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_md5', ['md5module.c'], +- depends=['hashlib.h']) ) +- exts.append( Extension('_sha1', ['sha1module.c'], +- depends=['hashlib.h']) ) ++ # RHEL: Always force OpenSSL for md5, sha1, sha256, sha512; ++ # don't build Python's implementations. 
++ # sha3 and blake2 have extra functionality, so do build those: + + blake2_deps = glob(os.path.join(os.getcwd(), srcdir, + 'Modules/_blake2/impl/*')) +@@ -944,6 +943,7 @@ class PyBuildExt(build_ext): + '_blake2/blake2b_impl.c', + '_blake2/blake2s_impl.c'], + define_macros=blake2_macros, ++ **ssl_args, + depends=blake2_deps) ) + + sha3_deps = glob(os.path.join(os.getcwd(), srcdir, +@@ -951,7 +951,8 @@ class PyBuildExt(build_ext): + sha3_deps.append('hashlib.h') + exts.append( Extension('_sha3', + ['_sha3/sha3module.c'], +- depends=sha3_deps)) ++ **ssl_args, ++ depends=sha3_deps + ['hashlib.h'])) + + # Modules that provide persistent dictionary-like semantics. You will + # probably want to arrange for at least one of them to be available on +-- +2.21.0 + + +From 0f35f480e2134c7d5c4ea3d68d8c0af14f56afa3 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 17:35:27 +0200 +Subject: [PATCH 07/36] Expose all hashes available to OpenSSL, using a list + +--- + Modules/_hashopenssl.c | 44 ++++++++++++++----------------------- + Modules/_hashopenssl_list.h | 21 ++++++++++++++++++ + 2 files changed, 37 insertions(+), 28 deletions(-) + create mode 100644 Modules/_hashopenssl_list.h + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 02cf6087a2e..7dfd70822b9 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -66,12 +66,9 @@ static PyTypeObject EVPtype; + static PyObject *CONST_ ## Name ## _name_obj = NULL; \ + static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL; + +-DEFINE_CONSTS_FOR_NEW(md5) +-DEFINE_CONSTS_FOR_NEW(sha1) +-DEFINE_CONSTS_FOR_NEW(sha224) +-DEFINE_CONSTS_FOR_NEW(sha256) +-DEFINE_CONSTS_FOR_NEW(sha384) +-DEFINE_CONSTS_FOR_NEW(sha512) ++#define _HASH(py_name, openssl_name) DEFINE_CONSTS_FOR_NEW(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + + + static EVPobject * +@@ -896,7 +893,7 @@ generate_hash_name_list(void) + * The first call will lazy-initialize, which reports an exception + * if 
initialization fails. + */ +-#define GEN_CONSTRUCTOR(NAME) \ ++#define GEN_CONSTRUCTOR(NAME, SSL_NAME) \ + static PyObject * \ + EVP_new_ ## NAME (PyObject *self, PyObject *args) \ + { \ +@@ -910,8 +907,8 @@ generate_hash_name_list(void) + \ + if (CONST_new_ ## NAME ## _ctx_p == NULL) { \ + EVP_MD_CTX *ctx_p = EVP_MD_CTX_new(); \ +- if (!EVP_get_digestbyname(#NAME) || \ +- !EVP_DigestInit(ctx_p, EVP_get_digestbyname(#NAME))) { \ ++ if (!EVP_get_digestbyname(SSL_NAME) || \ ++ !EVP_DigestInit(ctx_p, EVP_get_digestbyname(SSL_NAME))) { \ + _setException(PyExc_ValueError); \ + EVP_MD_CTX_free(ctx_p); \ + return NULL; \ +@@ -939,7 +936,7 @@ generate_hash_name_list(void) + {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally initialized with a string") \ +- } ++ }, + + /* used in the init function to setup a constructor: initialize OpenSSL + constructor constants if they haven't been initialized already. */ +@@ -949,12 +946,9 @@ generate_hash_name_list(void) + } \ + } while (0); + +-GEN_CONSTRUCTOR(md5) +-GEN_CONSTRUCTOR(sha1) +-GEN_CONSTRUCTOR(sha224) +-GEN_CONSTRUCTOR(sha256) +-GEN_CONSTRUCTOR(sha384) +-GEN_CONSTRUCTOR(sha512) ++#define _HASH(py_name, openssl_name) GEN_CONSTRUCTOR(py_name, openssl_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + + /*[clinic input] + _hashlib.get_fips_mode +@@ -998,12 +992,9 @@ static struct PyMethodDef EVP_functions[] = { + #endif + _HASHLIB_SCRYPT_METHODDEF + _HASHLIB_GET_FIPS_MODE_METHODDEF +- CONSTRUCTOR_METH_DEF(md5), +- CONSTRUCTOR_METH_DEF(sha1), +- CONSTRUCTOR_METH_DEF(sha224), +- CONSTRUCTOR_METH_DEF(sha256), +- CONSTRUCTOR_METH_DEF(sha384), +- CONSTRUCTOR_METH_DEF(sha512), ++#define _HASH(py_name, openssl_name) CONSTRUCTOR_METH_DEF(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + {NULL, NULL} /* Sentinel */ + }; + +@@ -1061,11 +1052,8 @@ PyInit__hashlib(void) + PyModule_AddObject(m, "HASH", (PyObject *)&EVPtype); + + /* these constants 
are used by the convenience constructors */ +- INIT_CONSTRUCTOR_CONSTANTS(md5); +- INIT_CONSTRUCTOR_CONSTANTS(sha1); +- INIT_CONSTRUCTOR_CONSTANTS(sha224); +- INIT_CONSTRUCTOR_CONSTANTS(sha256); +- INIT_CONSTRUCTOR_CONSTANTS(sha384); +- INIT_CONSTRUCTOR_CONSTANTS(sha512); ++#define _HASH(py_name, openssl_name) INIT_CONSTRUCTOR_CONSTANTS(py_name) ++#include "_hashopenssl_list.h" ++#undef _HASH + return m; + } +diff --git a/Modules/_hashopenssl_list.h b/Modules/_hashopenssl_list.h +new file mode 100644 +index 00000000000..3c11b2eb6c6 +--- /dev/null ++++ b/Modules/_hashopenssl_list.h +@@ -0,0 +1,21 @@ ++/* Call the _HASH macro with all the hashes exported by OpenSSL, ++ * at compile time. ++ * ++ * This file is meant to be included multiple times, with different values of ++ * _HASH. ++ */ ++ ++_HASH(md5, "md5") ++_HASH(sha1, "sha1") ++_HASH(sha224, "sha224") ++_HASH(sha256, "sha256") ++_HASH(sha384, "sha384") ++_HASH(sha512, "sha512") ++_HASH(blake2b, "blake2b512") ++_HASH(blake2s, "blake2s256") ++_HASH(sha3_224, "sha3-224") ++_HASH(sha3_256, "sha3-256") ++_HASH(sha3_384, "sha3-384") ++_HASH(sha3_512, "sha3-512") ++_HASH(shake_128, "shake128") ++_HASH(shake_256, "shake256") +-- +2.21.0 + + +From 7e5b74b7268e4fb7d473d24cd023493e4e87eb76 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 18:13:45 +0200 +Subject: [PATCH 08/36] Fix tests + +--- + Lib/hashlib.py | 5 +++- + Lib/test/test_hashlib.py | 58 +++++++++++++++++++++++++++++++--------- + 2 files changed, 49 insertions(+), 14 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 7db1e02601f..2def0a310c6 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -122,7 +122,10 @@ if not get_fips_mode(): + + def __get_openssl_constructor(name): + if not get_fips_mode(): +- if name in {'blake2b', 'blake2s'}: ++ if name in { ++ 'blake2b', 'blake2s', 'shake_256', 'shake_128', ++ #'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', ++ }: + # Prefer our blake2 implementation. 
+ return __get_builtin_constructor(name) + try: +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 4e6b5b607a9..e57c93b42f5 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -182,7 +182,9 @@ class HashLibTestCase(unittest.TestCase): + a = array.array("b", range(10)) + for cons in self.hash_constructors: + c = cons(a) +- if c.name in self.shakes: ++ if (c.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + c.hexdigest(16) + else: + c.hexdigest() +@@ -229,7 +231,9 @@ class HashLibTestCase(unittest.TestCase): + def test_hexdigest(self): + for cons in self.hash_constructors: + h = cons() +- if h.name in self.shakes: ++ if (h.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + self.assertIsInstance(h.digest(16), bytes) + self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16)) + else: +@@ -243,6 +247,8 @@ class HashLibTestCase(unittest.TestCase): + h = cons() + if h.name not in self.shakes: + continue ++ if cons.__name__.startswith('openssl_'): ++ continue + for digest in h.digest, h.hexdigest: + with self.assertRaises((ValueError, OverflowError)): + digest(-10) +@@ -272,7 +278,9 @@ class HashLibTestCase(unittest.TestCase): + m1.update(bees) + m1.update(cees) + m1.update(dees) +- if m1.name in self.shakes: ++ if (m1.name in self.shakes ++ and not cons.__name__.startswith('openssl_') ++ ): + args = (16,) + else: + args = () +@@ -299,15 +307,36 @@ class HashLibTestCase(unittest.TestCase): + # 2 is for hashlib.name(...) and hashlib.new(name, ...) 
+ self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in constructors: ++ if ( ++ kwargs ++ and hash_object_constructor.__name__.startswith('openssl_') ++ ): ++ return + m = hash_object_constructor(data, **kwargs) +- computed = m.hexdigest() if not shake else m.hexdigest(length) ++ if shake: ++ if hash_object_constructor.__name__.startswith('openssl_'): ++ if length > m.digest_size: ++ # OpenSSL doesn't give long digests ++ return ++ computed = m.hexdigest()[:length*2] ++ hexdigest = hexdigest[:length*2] ++ else: ++ computed = m.hexdigest(length) ++ else: ++ computed = m.hexdigest() + self.assertEqual( + computed, hexdigest, + "Hash algorithm %s constructed using %s returned hexdigest" + " %r for %d byte input data that should have hashed to %r." + % (name, hash_object_constructor, + computed, len(data), hexdigest)) +- computed = m.digest() if not shake else m.digest(length) ++ if shake: ++ if hash_object_constructor.__name__.startswith('openssl_'): ++ computed = m.digest()[:length] ++ else: ++ computed = m.digest(length) ++ else: ++ computed = m.digest() + digest = bytes.fromhex(hexdigest) + self.assertEqual(computed, digest) + if not shake: +@@ -347,12 +376,14 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(m.block_size, block_size) +- self.assertEqual(m.digest_size, digest_size) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(m.digest_size, digest_size) + if digest_length: +- self.assertEqual(len(m.digest(digest_length)), +- digest_length) +- self.assertEqual(len(m.hexdigest(digest_length)), +- 2*digest_length) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(len(m.digest(digest_length)), ++ digest_length) ++ self.assertEqual(len(m.hexdigest(digest_length)), ++ 2*digest_length) + else: + self.assertEqual(len(m.digest()), digest_size) + 
self.assertEqual(len(m.hexdigest()), 2*digest_size) +@@ -382,9 +413,10 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(capacity + rate, 1600) +- self.assertEqual(m._capacity_bits, capacity) +- self.assertEqual(m._rate_bits, rate) +- self.assertEqual(m._suffix, suffix) ++ if not hash_object_constructor.__name__.startswith('openssl_'): ++ self.assertEqual(m._capacity_bits, capacity) ++ self.assertEqual(m._rate_bits, rate) ++ self.assertEqual(m._suffix, suffix) + + @requires_sha3 + def test_extra_sha3(self): +-- +2.21.0 + + +From d6618795dd079acd8585f830b129d7f2fc9a6c52 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 11:27:57 +0200 +Subject: [PATCH 09/36] Change FIPS exceptions from _blake2, _sha3 module init + to ImportError + +--- + Include/_hashopenssl.h | 11 +++++------ + Modules/_blake2/blake2b_impl.c | 4 ++-- + Modules/_blake2/blake2module.c | 2 +- + Modules/_blake2/blake2s_impl.c | 4 ++-- + Modules/_sha3/sha3module.c | 6 +++--- + 5 files changed, 13 insertions(+), 14 deletions(-) + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +index a726c0d3fbf..47ed0030422 100644 +--- a/Include/_hashopenssl.h ++++ b/Include/_hashopenssl.h +@@ -39,7 +39,7 @@ _setException(PyObject *exc) + + __attribute__((__unused__)) + static int +-_Py_hashlib_fips_error(char *name) { ++_Py_hashlib_fips_error(PyObject *exc, char *name) { + int result = FIPS_mode(); + if (result == 0) { + // "If the library was built without support of the FIPS Object Module, +@@ -49,18 +49,17 @@ _Py_hashlib_fips_error(char *name) { + + unsigned long errcode = ERR_peek_last_error(); + if (errcode) { +- _setException(PyExc_ValueError); ++ _setException(exc); + return 1; + } + return 0; + } +- PyErr_Format(PyExc_ValueError, "%s is not available in FIPS mode", +- name); ++ PyErr_Format(exc, "%s is not available in FIPS mode", name); + return 1; + } + +-#define 
FAIL_RETURN_IN_FIPS_MODE(name) do { \ +- if (_Py_hashlib_fips_error(name)) return NULL; \ ++#define FAIL_RETURN_IN_FIPS_MODE(exc, name) do { \ ++ if (_Py_hashlib_fips_error(exc, name)) return NULL; \ + } while (0) + + #endif // !Py_HASHOPENSSL_H +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index 341e67a8fcc..f6bfce823b8 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -104,7 +104,7 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + self = new_BLAKE2bObject(type); + if (self == NULL) { +@@ -296,7 +296,7 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data) + { + Py_buffer buf; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + +diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c +index 817b7165684..a9c7cbc7ebe 100644 +--- a/Modules/_blake2/blake2module.c ++++ b/Modules/_blake2/blake2module.c +@@ -58,7 +58,7 @@ PyInit__blake2(void) + PyObject *m; + PyObject *d; + +- FAIL_RETURN_IN_FIPS_MODE("blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "blake2"); + + m = PyModule_Create(&blake2_module); + if (m == NULL) +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 0dfe0586b57..28ae5b65101 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -104,7 +104,7 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + unsigned long leaf_size = 0; + unsigned long long node_offset = 0; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + self = new_BLAKE2sObject(type); + if (self == NULL) { +@@ -296,7 +296,7 @@ _blake2_blake2s_update(BLAKE2sObject 
*self, PyObject *data) + { + Py_buffer buf; + +- FAIL_RETURN_IN_FIPS_MODE("_blake2"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); + + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 624f7f247d4..2783a75644f 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -163,7 +163,7 @@ static PyTypeObject SHAKE256type; + static SHA3object * + newSHA3object(PyTypeObject *type) + { +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_sha3"); + SHA3object *newobj; + newobj = (SHA3object *)PyObject_New(SHA3object, type); + if (newobj == NULL) { +@@ -179,7 +179,7 @@ newSHA3object(PyTypeObject *type) + static PyObject * + py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_sha3"); + SHA3object *self = NULL; + Py_buffer buf = {NULL, NULL}; + HashReturn res; +@@ -727,7 +727,7 @@ PyInit__sha3(void) + { + PyObject *m = NULL; + +- FAIL_RETURN_IN_FIPS_MODE("_sha3"); ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "_sha3"); + + if ((m = PyModule_Create(&_SHA3module)) == NULL) { + return NULL; +-- +2.21.0 + + +From eb78e4b4179842185f53edba2073ab7644db7ad3 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 11:24:09 +0200 +Subject: [PATCH 10/36] Make hashlib importable under FIPS mode + +--- + Lib/hashlib.py | 8 +++++--- + 1 file changed, 5 insertions(+), 3 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 2def0a310c6..ca1dd202251 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -132,12 +132,14 @@ def __get_openssl_constructor(name): + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. +- f() ++ if not get_fips_mode(): ++ # N.B. In "FIPS mode", there is no fallback. 
++ # If this test fails, we want to export the broken hash ++ # constructor anyway. ++ f() + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): +- if get_fips_mode(): +- raise + return __get_builtin_constructor(name) + + if not get_fips_mode(): +-- +2.21.0 + + +From 0f313f65457f1e7d0e7238a71935f5d4e0d197ee Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Fri, 26 Jul 2019 15:41:10 +0200 +Subject: [PATCH 11/36] Implement hmac.new using new built-in module, + _hmacopenssl + +--- + Lib/hmac.py | 33 ++- + Modules/_hmacopenssl.c | 396 ++++++++++++++++++++++++++++++++ + Modules/clinic/_hmacopenssl.c.h | 133 +++++++++++ + setup.py | 4 + + 4 files changed, 565 insertions(+), 1 deletion(-) + create mode 100644 Modules/_hmacopenssl.c + create mode 100644 Modules/clinic/_hmacopenssl.c.h + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 121029aa670..ed98406bd2e 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -6,6 +6,8 @@ Implements the HMAC algorithm as described by RFC 2104. + import warnings as _warnings + from _operator import _compare_digest as compare_digest + import hashlib as _hashlib ++import _hashlib as _hashlibopenssl ++import _hmacopenssl + + trans_5C = bytes((x ^ 0x5C) for x in range(256)) + trans_36 = bytes((x ^ 0x36) for x in range(256)) +@@ -37,6 +39,11 @@ class HMAC: + + Note: key and msg must be a bytes or bytearray objects. + """ ++ if _hashlib.get_fips_mode(): ++ raise ValueError( ++ 'hmac.HMAC is not available in FIPS mode. ' ++ + 'Use hmac.new().' ++ ) + + if not isinstance(key, (bytes, bytearray)): + raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__) +@@ -90,6 +97,8 @@ class HMAC: + def update(self, msg): + """Update this hashing object with the string msg. 
+ """ ++ if _hashlib.get_fips_mode(): ++ raise ValueError('hmac.HMAC is not available in FIPS mode') + self.inner.update(msg) + + def copy(self): +@@ -130,6 +139,19 @@ class HMAC: + h = self._current() + return h.hexdigest() + ++ ++def _get_openssl_name(digestmod): ++ if isinstance(digestmod, str): ++ return digestmod.lower() ++ elif callable(digestmod): ++ digestmod = digestmod(b'') ++ ++ if not isinstance(digestmod, _hashlibopenssl.HASH): ++ raise TypeError( ++ 'Only OpenSSL hashlib hashes are accepted in FIPS mode.') ++ ++ return digestmod.name.lower().replace('_', '-') ++ + def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + +@@ -141,4 +163,13 @@ def new(key, msg = None, digestmod = None): + method, and can ask for the hash value at any time by calling its digest() + method. + """ +- return HMAC(key, msg, digestmod) ++ if _hashlib.get_fips_mode(): ++ if digestmod is None: ++ digestmod = 'md5' ++ name = _get_openssl_name(digestmod) ++ result = _hmacopenssl.new(key, digestmod=name) ++ if msg: ++ result.update(msg) ++ return result ++ else: ++ return HMAC(key, msg, digestmod) +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +new file mode 100644 +index 00000000000..ca95d725f01 +--- /dev/null ++++ b/Modules/_hmacopenssl.c +@@ -0,0 +1,396 @@ ++/* Module that wraps all OpenSSL MHAC algorithm */ ++ ++/* Copyright (C) 2019 Red Hat, Inc. Red Hat, Inc. and/or its affiliates ++ * ++ * Based on _hashopenssl.c, which is: ++ * Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org) ++ * Licensed to PSF under a Contributor Agreement. 
++ * ++ * Derived from a skeleton of shamodule.c containing work performed by: ++ * ++ * Andrew Kuchling (amk@amk.ca) ++ * Greg Stein (gstein@lyra.org) ++ * ++ */ ++ ++#define PY_SSIZE_T_CLEAN ++ ++#include "Python.h" ++#include "structmember.h" ++#include "hashlib.h" ++#include "pystrhex.h" ++#include "_hashopenssl.h" ++ ++ ++#include ++ ++static PyTypeObject HmacType; ++ ++typedef struct { ++ PyObject_HEAD ++ PyObject *name; /* name of the hash algorithm */ ++ HMAC_CTX *ctx; /* OpenSSL hmac context */ ++ PyThread_type_lock lock; /* HMAC context lock */ ++} HmacObject; ++ ++#include "clinic/_hmacopenssl.c.h" ++/*[clinic input] ++module _hmacopenssl ++class _hmacopenssl.HMAC "HmacObject *" "&HmacType" ++[clinic start generated code]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c98d3f2af591c085]*/ ++ ++ ++/*[clinic input] ++_hmacopenssl.new ++ ++ key: Py_buffer ++ * ++ digestmod: str ++ ++Return a new hmac object. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, ++ const char *digestmod) ++/*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ ++{ ++ if (digestmod == NULL) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); ++ return NULL; ++ } ++ ++ /* name mut be lowercase */ ++ for (int i=0; digestmod[i]; i++) { ++ if ( ++ ((digestmod[i] < 'a') || (digestmod[i] > 'z')) ++ && ((digestmod[i] < '0') || (digestmod[i] > '9')) ++ && digestmod[i] != '-' ++ ) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be lowercase"); ++ return NULL; ++ } ++ } ++ ++ const EVP_MD *digest = EVP_get_digestbyname(digestmod); ++ if (!digest) { ++ PyErr_SetString(PyExc_ValueError, "unknown hash function"); ++ return NULL; ++ } ++ ++ PyObject *name = NULL; ++ HMAC_CTX *ctx = NULL; ++ HmacObject *retval = NULL; ++ ++ name = PyUnicode_FromFormat("hmac-%s", digestmod); ++ if (name == NULL) { ++ goto error; ++ } ++ ++ ctx = HMAC_CTX_new(); ++ if (ctx == NULL) { 
++ _setException(PyExc_ValueError); ++ goto error; ++ } ++ ++ int r = HMAC_Init_ex( ++ ctx, ++ (const char*)key->buf, ++ key->len, ++ digest, ++ NULL /*impl*/); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ goto error; ++ } ++ ++ retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ if (retval == NULL) { ++ goto error; ++ } ++ ++ retval->name = name; ++ retval->ctx = ctx; ++ retval->lock = NULL; ++ ++ return (PyObject*)retval; ++ ++error: ++ if (ctx) HMAC_CTX_free(ctx); ++ if (name) Py_DECREF(name); ++ if (retval) PyObject_Del(name); ++ return NULL; ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.copy ++ ++Return a copy (“clone”) of the HMAC object. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_copy_impl(HmacObject *self) ++/*[clinic end generated code: output=fe5ee41faf30dcf0 input=f5ed20feec42d8d0]*/ ++{ ++ HmacObject *retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ if (retval == NULL) { ++ return NULL; ++ } ++ ++ Py_INCREF(self->name); ++ retval->name = self->name; ++ ++ int r = HMAC_CTX_copy(retval->ctx, self->ctx); ++ if (r == 0) { ++ PyObject_Del(retval); ++ return _setException(PyExc_ValueError); ++ } ++ ++ return (PyObject*)retval; ++} ++ ++static void ++_hmac_dealloc(HmacObject *self) ++{ ++ if (self->lock != NULL) { ++ PyThread_free_lock(self->lock); ++ } ++ HMAC_CTX_free(self->ctx); ++ Py_XDECREF(self->name); ++ PyObject_Del(self); ++} ++ ++static PyObject * ++_hmac_repr(HmacObject *self) ++{ ++ return PyUnicode_FromFormat("<%U HMAC object @ %p>", self->name, self); ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.update ++ ++ msg: Py_buffer ++ ++Update the HMAC object with msg. 
++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_update_impl(HmacObject *self, Py_buffer *msg) ++/*[clinic end generated code: output=0efeee663a98cee5 input=0683d64f35808cb9]*/ ++{ ++ if (self->lock == NULL && msg->len >= HASHLIB_GIL_MINSIZE) { ++ self->lock = PyThread_allocate_lock(); ++ /* fail? lock = NULL and we fail over to non-threaded code. */ ++ } ++ ++ int r; ++ ++ if (self->lock != NULL) { ++ Py_BEGIN_ALLOW_THREADS ++ PyThread_acquire_lock(self->lock, 1); ++ r = HMAC_Update(self->ctx, (const unsigned char*)msg->buf, msg->len); ++ PyThread_release_lock(self->lock); ++ Py_END_ALLOW_THREADS ++ } else { ++ r = HMAC_Update(self->ctx, (const unsigned char*)msg->buf, msg->len); ++ } ++ ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return NULL; ++ } ++ Py_RETURN_NONE; ++} ++ ++static unsigned int ++_digest_size(HmacObject *self) ++{ ++ const EVP_MD *md = HMAC_CTX_get_md(self->ctx); ++ if (md == NULL) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ return EVP_MD_size(md); ++} ++ ++static int ++_digest(HmacObject *self, unsigned char *buf, unsigned int len) ++{ ++ HMAC_CTX *temp_ctx = HMAC_CTX_new(); ++ if (temp_ctx == NULL) { ++ PyErr_NoMemory(); ++ return 0; ++ } ++ int r = HMAC_CTX_copy(temp_ctx, self->ctx); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ r = HMAC_Final(temp_ctx, buf, &len); ++ HMAC_CTX_free(temp_ctx); ++ if (r == 0) { ++ _setException(PyExc_ValueError); ++ return 0; ++ } ++ return 1; ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.digest ++ ++Return the digest of the bytes passed to the update() method so far. 
++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_digest_impl(HmacObject *self) ++/*[clinic end generated code: output=3aa6dbfc46ec4957 input=bf769a10b1d9edd9]*/ ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ unsigned char buf[digest_size]; /* FIXME: C99 feature */ ++ int r = _digest(self, buf, digest_size); ++ if (r == 0) { ++ return NULL; ++ } ++ return PyBytes_FromStringAndSize((const char *)buf, digest_size); ++} ++ ++/*[clinic input] ++_hmacopenssl.HMAC.hexdigest ++ ++Return hexadecimal digest of the bytes passed to the update() method so far. ++ ++This may be used to exchange the value safely in email or other non-binary ++environments. ++[clinic start generated code]*/ ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest_impl(HmacObject *self) ++/*[clinic end generated code: output=630f6fa89f9f1e48 input=b8e60ec8b811c4cd]*/ ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ unsigned char buf[digest_size]; /* FIXME: C99 feature */ ++ int r = _digest(self, buf, digest_size); ++ if (r == 0) { ++ return NULL; ++ } ++ return _Py_strhex((const char *)buf, digest_size); ++} ++ ++ ++ ++static PyObject * ++_hmacopenssl_get_digest_size(HmacObject *self, void *closure) ++{ ++ unsigned int digest_size = _digest_size(self); ++ if (digest_size == 0) { ++ return _setException(PyExc_ValueError); ++ } ++ return PyLong_FromLong(digest_size); ++} ++ ++static PyObject * ++_hmacopenssl_get_block_size(HmacObject *self, void *closure) ++{ ++ const EVP_MD *md = HMAC_CTX_get_md(self->ctx); ++ if (md == NULL) { ++ return _setException(PyExc_ValueError); ++ } ++ return PyLong_FromLong(EVP_MD_size(md)); ++} ++ ++static PyMethodDef Hmac_methods[] = { ++ _HMACOPENSSL_HMAC_UPDATE_METHODDEF ++ _HMACOPENSSL_HMAC_DIGEST_METHODDEF ++ _HMACOPENSSL_HMAC_HEXDIGEST_METHODDEF ++ 
_HMACOPENSSL_HMAC_COPY_METHODDEF ++ {NULL, NULL} /* sentinel */ ++}; ++ ++static PyGetSetDef Hmac_getset[] = { ++ {"digest_size", (getter)_hmacopenssl_get_digest_size, NULL, NULL, NULL}, ++ {"block_size", (getter)_hmacopenssl_get_block_size, NULL, NULL, NULL}, ++ {NULL} /* Sentinel */ ++}; ++ ++static PyMemberDef Hmac_members[] = { ++ {"name", T_OBJECT, offsetof(HmacObject, name), READONLY, PyDoc_STR("HMAC name")}, ++}; ++ ++PyDoc_STRVAR(hmactype_doc, ++"The object used to calculate HMAC of a message.\n\ ++\n\ ++Methods:\n\ ++\n\ ++update() -- updates the current digest with an additional string\n\ ++digest() -- return the current digest value\n\ ++hexdigest() -- return the current digest as a string of hexadecimal digits\n\ ++copy() -- return a copy of the current hash object\n\ ++\n\ ++Attributes:\n\ ++\n\ ++name -- the name, including the hash algorithm used by this object\n\ ++digest_size -- number of bytes in digest() output\n"); ++ ++static PyTypeObject HmacType = { ++ PyVarObject_HEAD_INIT(NULL, 0) ++ "_hmacopenssl.HMAC", /*tp_name*/ ++ sizeof(HmacObject), /*tp_basicsize*/ ++ .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .tp_doc = hmactype_doc, ++ .tp_repr = (reprfunc)_hmac_repr, ++ .tp_dealloc = (destructor)_hmac_dealloc, ++ .tp_methods = Hmac_methods, ++ .tp_getset = Hmac_getset, ++ .tp_members = Hmac_members, ++}; ++ ++static struct PyMethodDef hmacopenssl_functions[] = { ++ _HMACOPENSSL_NEW_METHODDEF ++ {NULL, NULL} /* Sentinel */ ++}; ++ ++ ++ ++/* Initialize this module. */ ++ ++ ++static struct PyModuleDef _hmacopenssl_module = { ++ PyModuleDef_HEAD_INIT, ++ "_hmacopenssl", ++ NULL, ++ -1, ++ hmacopenssl_functions, ++ NULL, ++ NULL, ++ NULL, ++ NULL ++}; ++ ++PyMODINIT_FUNC ++PyInit__hmacopenssl(void) ++{ ++ /* TODO build EVP_functions openssl_* entries dynamically based ++ * on what hashes are supported rather than listing many ++ * but having some be unsupported. Only init appropriate ++ * constants. 
*/ ++ ++ Py_TYPE(&HmacType) = &PyType_Type; ++ if (PyType_Ready(&HmacType) < 0) ++ return NULL; ++ ++ PyObject *m = PyModule_Create(&_hmacopenssl_module); ++ if (m == NULL) ++ return NULL; ++ ++ Py_INCREF((PyObject *)&HmacType); ++ PyModule_AddObject(m, "HMAC", (PyObject *)&HmacType); ++ ++ return m; ++} +diff --git a/Modules/clinic/_hmacopenssl.c.h b/Modules/clinic/_hmacopenssl.c.h +new file mode 100644 +index 00000000000..b472a6eddd3 +--- /dev/null ++++ b/Modules/clinic/_hmacopenssl.c.h +@@ -0,0 +1,133 @@ ++/*[clinic input] ++preserve ++[clinic start generated code]*/ ++ ++PyDoc_STRVAR(_hmacopenssl_new__doc__, ++"new($module, /, key, *, digestmod)\n" ++"--\n" ++"\n" ++"Return a new hmac object."); ++ ++#define _HMACOPENSSL_NEW_METHODDEF \ ++ {"new", (PyCFunction)_hmacopenssl_new, METH_FASTCALL, _hmacopenssl_new__doc__}, ++ ++static PyObject * ++_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, ++ const char *digestmod); ++ ++static PyObject * ++_hmacopenssl_new(PyObject *module, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"key", "digestmod", NULL}; ++ static _PyArg_Parser _parser = {"y*$s:new", _keywords, 0}; ++ Py_buffer key = {NULL, NULL}; ++ const char *digestmod; ++ ++ if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, ++ &key, &digestmod)) { ++ goto exit; ++ } ++ return_value = _hmacopenssl_new_impl(module, &key, digestmod); ++ ++exit: ++ /* Cleanup for key */ ++ if (key.obj) { ++ PyBuffer_Release(&key); ++ } ++ ++ return return_value; ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_copy__doc__, ++"copy($self, /)\n" ++"--\n" ++"\n" ++"Return a copy (“clone”) of the HMAC object."); ++ ++#define _HMACOPENSSL_HMAC_COPY_METHODDEF \ ++ {"copy", (PyCFunction)_hmacopenssl_HMAC_copy, METH_NOARGS, _hmacopenssl_HMAC_copy__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_copy_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_copy(HmacObject *self, PyObject 
*Py_UNUSED(ignored)) ++{ ++ return _hmacopenssl_HMAC_copy_impl(self); ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_update__doc__, ++"update($self, /, msg)\n" ++"--\n" ++"\n" ++"Update the HMAC object with msg."); ++ ++#define _HMACOPENSSL_HMAC_UPDATE_METHODDEF \ ++ {"update", (PyCFunction)_hmacopenssl_HMAC_update, METH_FASTCALL, _hmacopenssl_HMAC_update__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_update_impl(HmacObject *self, Py_buffer *msg); ++ ++static PyObject * ++_hmacopenssl_HMAC_update(HmacObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"msg", NULL}; ++ static _PyArg_Parser _parser = {"y*:update", _keywords, 0}; ++ Py_buffer msg = {NULL, NULL}; ++ ++ if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, ++ &msg)) { ++ goto exit; ++ } ++ return_value = _hmacopenssl_HMAC_update_impl(self, &msg); ++ ++exit: ++ /* Cleanup for msg */ ++ if (msg.obj) { ++ PyBuffer_Release(&msg); ++ } ++ ++ return return_value; ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_digest__doc__, ++"digest($self, /)\n" ++"--\n" ++"\n" ++"Return the digest of the bytes passed to the update() method so far."); ++ ++#define _HMACOPENSSL_HMAC_DIGEST_METHODDEF \ ++ {"digest", (PyCFunction)_hmacopenssl_HMAC_digest, METH_NOARGS, _hmacopenssl_HMAC_digest__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_digest_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_digest(HmacObject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hmacopenssl_HMAC_digest_impl(self); ++} ++ ++PyDoc_STRVAR(_hmacopenssl_HMAC_hexdigest__doc__, ++"hexdigest($self, /)\n" ++"--\n" ++"\n" ++"Return hexadecimal digest of the bytes passed to the update() method so far.\n" ++"\n" ++"This may be used to exchange the value safely in email or other non-binary\n" ++"environments."); ++ ++#define _HMACOPENSSL_HMAC_HEXDIGEST_METHODDEF \ ++ {"hexdigest", (PyCFunction)_hmacopenssl_HMAC_hexdigest, METH_NOARGS, 
_hmacopenssl_HMAC_hexdigest__doc__}, ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest_impl(HmacObject *self); ++ ++static PyObject * ++_hmacopenssl_HMAC_hexdigest(HmacObject *self, PyObject *Py_UNUSED(ignored)) ++{ ++ return _hmacopenssl_HMAC_hexdigest_impl(self); ++} ++/*[clinic end generated code: output=10b6e8cac6d7a2c9 input=a9049054013a1b77]*/ +diff --git a/setup.py b/setup.py +index e2e659ef795..282aa4178ed 100644 +--- a/setup.py ++++ b/setup.py +@@ -922,6 +922,10 @@ class PyBuildExt(build_ext): + openssl_ver) + missing.append('_hashlib') + ++ exts.append( Extension('_hmacopenssl', ['_hmacopenssl.c'], ++ depends = ['hashlib.h'], ++ **ssl_args)) ++ + # RHEL: Always force OpenSSL for md5, sha1, sha256, sha512; + # don't build Python's implementations. + # sha3 and blake2 have extra functionality, so do build those: +-- +2.21.0 + + +From 37797b4d89e7b4859dc4728da91cbebf5fdb2a52 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Mon, 29 Jul 2019 12:45:11 +0200 +Subject: [PATCH 12/36] FIPS review + +* Port _hmacopenssl to multiphase init. +* Make _hmacopenssl.HMAC.copy create same type as self. +* hmac.py cosmetic nitpick +--- + Lib/hmac.py | 2 +- + Modules/_hmacopenssl.c | 112 +++++++++++++++++++++++++---------------- + 2 files changed, 70 insertions(+), 44 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index ed98406bd2e..b9bf16b84ca 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -42,7 +42,7 @@ class HMAC: + if _hashlib.get_fips_mode(): + raise ValueError( + 'hmac.HMAC is not available in FIPS mode. ' +- + 'Use hmac.new().' ++ 'Use hmac.new().' 
+ ) + + if not isinstance(key, (bytes, bytearray)): +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index ca95d725f01..216ed04f236 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -24,7 +24,10 @@ + + #include + +-static PyTypeObject HmacType; ++typedef struct hmacopenssl_state { ++ PyTypeObject *HmacType; ++} hmacopenssl_state; ++ + + typedef struct { + PyObject_HEAD +@@ -36,9 +39,9 @@ typedef struct { + #include "clinic/_hmacopenssl.c.h" + /*[clinic input] + module _hmacopenssl +-class _hmacopenssl.HMAC "HmacObject *" "&HmacType" ++class _hmacopenssl.HMAC "HmacObject *" "PyModule_GetState(module)->HmacType" + [clinic start generated code]*/ +-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c98d3f2af591c085]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ + + + /*[clinic input] +@@ -56,11 +59,18 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + const char *digestmod) + /*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ + { ++ hmacopenssl_state *state; ++ + if (digestmod == NULL) { + PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); + return NULL; + } + ++ state = PyModule_GetState(module); ++ if (state == NULL) { ++ return NULL; ++ } ++ + /* name mut be lowercase */ + for (int i=0; digestmod[i]; i++) { + if ( +@@ -105,7 +115,7 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + goto error; + } + +- retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ retval = (HmacObject *)PyObject_New(HmacObject, state->HmacType); + if (retval == NULL) { + goto error; + } +@@ -133,7 +143,9 @@ static PyObject * + _hmacopenssl_HMAC_copy_impl(HmacObject *self) + /*[clinic end generated code: output=fe5ee41faf30dcf0 input=f5ed20feec42d8d0]*/ + { +- HmacObject *retval = (HmacObject *)PyObject_New(HmacObject, &HmacType); ++ HmacObject *retval; ++ ++ retval = (HmacObject *)PyObject_New(HmacObject, (PyTypeObject 
*)PyObject_Type((PyObject *)self)); + if (retval == NULL) { + return NULL; + } +@@ -147,7 +159,7 @@ _hmacopenssl_HMAC_copy_impl(HmacObject *self) + return _setException(PyExc_ValueError); + } + +- return (PyObject*)retval; ++ return (PyObject *)retval; + } + + static void +@@ -338,19 +350,24 @@ Attributes:\n\ + name -- the name, including the hash algorithm used by this object\n\ + digest_size -- number of bytes in digest() output\n"); + +-static PyTypeObject HmacType = { +- PyVarObject_HEAD_INIT(NULL, 0) +- "_hmacopenssl.HMAC", /*tp_name*/ +- sizeof(HmacObject), /*tp_basicsize*/ +- .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, +- .tp_doc = hmactype_doc, +- .tp_repr = (reprfunc)_hmac_repr, +- .tp_dealloc = (destructor)_hmac_dealloc, +- .tp_methods = Hmac_methods, +- .tp_getset = Hmac_getset, +- .tp_members = Hmac_members, ++static PyType_Slot HmacType_slots[] = { ++ {Py_tp_doc, hmactype_doc}, ++ {Py_tp_repr, (reprfunc)_hmac_repr}, ++ {Py_tp_dealloc,(destructor)_hmac_dealloc}, ++ {Py_tp_methods, Hmac_methods}, ++ {Py_tp_getset, Hmac_getset}, ++ {Py_tp_members, Hmac_members}, ++ {0, NULL} ++}; ++ ++PyType_Spec HmacType_spec = { ++ "_hmacopenssl.HMAC", /* name */ ++ sizeof(HmacObject), /* basicsize */ ++ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .slots = HmacType_slots, + }; + ++ + static struct PyMethodDef hmacopenssl_functions[] = { + _HMACOPENSSL_NEW_METHODDEF + {NULL, NULL} /* Sentinel */ +@@ -360,37 +377,46 @@ static struct PyMethodDef hmacopenssl_functions[] = { + + /* Initialize this module. */ + +- +-static struct PyModuleDef _hmacopenssl_module = { +- PyModuleDef_HEAD_INIT, +- "_hmacopenssl", +- NULL, +- -1, +- hmacopenssl_functions, +- NULL, +- NULL, +- NULL, +- NULL +-}; +- +-PyMODINIT_FUNC +-PyInit__hmacopenssl(void) +-{ ++static int ++hmacopenssl_exec(PyObject *m) { + /* TODO build EVP_functions openssl_* entries dynamically based + * on what hashes are supported rather than listing many +- * but having some be unsupported. 
Only init appropriate ++ * and having some unsupported. Only init appropriate + * constants. */ ++ PyObject *temp; + +- Py_TYPE(&HmacType) = &PyType_Type; +- if (PyType_Ready(&HmacType) < 0) +- return NULL; ++ temp = PyType_FromSpec(&HmacType_spec); ++ if (temp == NULL) { ++ goto fail; ++ } + +- PyObject *m = PyModule_Create(&_hmacopenssl_module); +- if (m == NULL) +- return NULL; ++ if (PyModule_AddObject(m, "HMAC", temp) == -1) { ++ goto fail; ++ } ++ ++ return 0; + +- Py_INCREF((PyObject *)&HmacType); +- PyModule_AddObject(m, "HMAC", (PyObject *)&HmacType); ++fail: ++ Py_XDECREF(temp); ++ return -1; ++} + +- return m; ++static PyModuleDef_Slot hmacopenssl_slots[] = { ++ {Py_mod_exec, hmacopenssl_exec}, ++ {0, NULL}, ++}; ++ ++static struct PyModuleDef _hmacopenssl_def = { ++ PyModuleDef_HEAD_INIT, /* m_base */ ++ .m_name = "_hmacopenssl", ++ .m_methods = hmacopenssl_functions, ++ .m_slots = hmacopenssl_slots, ++ .m_size = sizeof(hmacopenssl_state) ++}; ++ ++ ++PyMODINIT_FUNC ++PyInit__hmacopenssl(void) ++{ ++ return PyModuleDef_Init(&_hmacopenssl_def); + } +-- +2.21.0 + + +From 9a3bf73d382cda99a665cab8438f3cd0388256b0 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Mon, 29 Jul 2019 13:05:04 +0200 +Subject: [PATCH 13/36] revert cosmetic nitpick and remove trailing whitespace + +--- + Lib/hmac.py | 2 +- + Modules/_hmacopenssl.c | 4 ++-- + 2 files changed, 3 insertions(+), 3 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index b9bf16b84ca..ed98406bd2e 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -42,7 +42,7 @@ class HMAC: + if _hashlib.get_fips_mode(): + raise ValueError( + 'hmac.HMAC is not available in FIPS mode. ' +- 'Use hmac.new().' ++ + 'Use hmac.new().' 
+ ) + + if not isinstance(key, (bytes, bytearray)): +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 216ed04f236..221714ca434 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -363,7 +363,7 @@ static PyType_Slot HmacType_slots[] = { + PyType_Spec HmacType_spec = { + "_hmacopenssl.HMAC", /* name */ + sizeof(HmacObject), /* basicsize */ +- .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, ++ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + .slots = HmacType_slots, + }; + +@@ -407,7 +407,7 @@ static PyModuleDef_Slot hmacopenssl_slots[] = { + }; + + static struct PyModuleDef _hmacopenssl_def = { +- PyModuleDef_HEAD_INIT, /* m_base */ ++ PyModuleDef_HEAD_INIT, /* m_base */ + .m_name = "_hmacopenssl", + .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, +-- +2.21.0 + + +From b2853963d35c60595cec7e5cd40f31b1c9c59426 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 31 Jul 2019 15:43:43 +0200 +Subject: [PATCH 14/36] Add initial tests for various hashes under FIPS mode + +--- + Lib/test/test_fips.py | 64 +++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 64 insertions(+) + create mode 100644 Lib/test/test_fips.py + +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +new file mode 100644 +index 00000000000..bee911ef405 +--- /dev/null ++++ b/Lib/test/test_fips.py +@@ -0,0 +1,64 @@ ++import unittest ++import hmac, _hmacopenssl ++import hashlib, _hashlib ++ ++ ++ ++class HashlibFipsTests(unittest.TestCase): ++ ++ @unittest.skipUnless(hashlib.get_fips_mode(), "Test only when FIPS is enabled") ++ def test_fips_imports(self): ++ """blake2s and blake2b should fail to import in FIPS mode ++ """ ++ with self.assertRaises(ValueError, msg='blake2s not available in FIPS'): ++ m = hashlib.blake2s() ++ with self.assertRaises(ValueError, msg='blake2b not available in FIPS'): ++ m = hashlib.blake2b() ++ ++ def compare_hashes(self, python_hash, openssl_hash): ++ """ ++ Compare between the 
python implementation and the openssl one that the digests ++ are the same ++ """ ++ if python_hash.name.startswith('shake_128'): ++ m = python_hash.hexdigest(16) ++ elif python_hash.name.startswith('shake_256'): ++ m = python_hash.hexdigest(32) ++ else: ++ m = python_hash.hexdigest() ++ h = openssl_hash.hexdigest() ++ ++ self.assertEqual(m, h) ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") ++ def test_blake2_hashes(self): ++ self.compare_hashes(hashlib.blake2b(b'abc'), _hashlib.openssl_blake2b(b'abc')) ++ self.compare_hashes(hashlib.blake2s(b'abc'), _hashlib.openssl_blake2s(b'abc')) ++ ++ def test_sha3_hashes(self): ++ self.compare_hashes(hashlib.sha3_224(b'abc'), _hashlib.openssl_sha3_224(b'abc')) ++ self.compare_hashes(hashlib.sha3_256(b'abc'), _hashlib.openssl_sha3_256(b'abc')) ++ self.compare_hashes(hashlib.sha3_384(b'abc'), _hashlib.openssl_sha3_384(b'abc')) ++ self.compare_hashes(hashlib.sha3_512(b'abc'), _hashlib.openssl_sha3_512(b'abc')) ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "shake hashes are not available under FIPS") ++ def test_shake_hashes(self): ++ self.compare_hashes(hashlib.shake_128(b'abc'), _hashlib.openssl_shake_128(b'abc')) ++ self.compare_hashes(hashlib.shake_256(b'abc'), _hashlib.openssl_shake_256(b'abc')) ++ ++ def test_sha(self): ++ self.compare_hashes(hashlib.sha1(b'abc'), _hashlib.openssl_sha1(b'abc')) ++ self.compare_hashes(hashlib.sha224(b'abc'), _hashlib.openssl_sha224(b'abc')) ++ self.compare_hashes(hashlib.sha256(b'abc'), _hashlib.openssl_sha256(b'abc')) ++ self.compare_hashes(hashlib.sha384(b'abc'), _hashlib.openssl_sha384(b'abc')) ++ self.compare_hashes(hashlib.sha512(b'abc'), _hashlib.openssl_sha512(b'abc')) ++ ++ def test_hmac_digests(self): ++ self.compare_hashes(_hmacopenssl.new(b'My hovercraft is full of eels', digestmod='sha384'), ++ hmac.new(b'My hovercraft is full of eels', digestmod='sha384')) ++ ++ ++ ++ ++if __name__ == "__main__": ++ unittest.main() +-- +2.21.0 + + 
+From b3a8214b5afe4809983e6a5e5d339527e8238318 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Thu, 1 Aug 2019 16:39:37 +0200 +Subject: [PATCH 15/36] Initialize HMAC type. + +--- + Modules/_hmacopenssl.c | 12 ++++++++---- + 1 file changed, 8 insertions(+), 4 deletions(-) + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 221714ca434..239445a0831 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -22,12 +22,12 @@ + #include "_hashopenssl.h" + + +-#include + + typedef struct hmacopenssl_state { + PyTypeObject *HmacType; + } hmacopenssl_state; + ++#include + + typedef struct { + PyObject_HEAD +@@ -39,7 +39,7 @@ typedef struct { + #include "clinic/_hmacopenssl.c.h" + /*[clinic input] + module _hmacopenssl +-class _hmacopenssl.HMAC "HmacObject *" "PyModule_GetState(module)->HmacType" ++class _hmacopenssl.HMAC "HmacObject *" "((hmacopenssl_state *)PyModule_GetState(module))->HmacType" + [clinic start generated code]*/ + /*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ + +@@ -71,7 +71,7 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + return NULL; + } + +- /* name mut be lowercase */ ++ /* name must be lowercase */ + for (int i=0; digestmod[i]; i++) { + if ( + ((digestmod[i] < 'a') || (digestmod[i] > 'z')) +@@ -383,7 +383,8 @@ hmacopenssl_exec(PyObject *m) { + * on what hashes are supported rather than listing many + * and having some unsupported. Only init appropriate + * constants. 
*/ +- PyObject *temp; ++ PyObject *temp = NULL; ++ hmacopenssl_state *state; + + temp = PyType_FromSpec(&HmacType_spec); + if (temp == NULL) { +@@ -394,6 +395,9 @@ hmacopenssl_exec(PyObject *m) { + goto fail; + } + ++ state = PyModule_GetState(m); ++ state->HmacType = (PyTypeObject *)temp; ++ + return 0; + + fail: +-- +2.21.0 + + +From 5bd6eb14097fe4ff2bb639e5c50260b1379d0a69 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 1 Aug 2019 17:57:05 +0200 +Subject: [PATCH 16/36] Use a stronger hash in multiprocessing handshake + +Adapted from patch by David Malcolm, +https://bugs.python.org/issue17258 +--- + Lib/multiprocessing/connection.py | 8 ++++++-- + 1 file changed, 6 insertions(+), 2 deletions(-) + +diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py +index d3797503a75..a0b1538f88b 100644 +--- a/Lib/multiprocessing/connection.py ++++ b/Lib/multiprocessing/connection.py +@@ -42,6 +42,10 @@ BUFSIZE = 8192 + # A very generous timeout when it comes to local connections... + CONNECTION_TIMEOUT = 20. + ++# The hmac module implicitly defaults to using MD5. 
++# Support using a stronger algorithm for the challenge/response code: ++HMAC_DIGEST_NAME='sha256' ++ + _mmap_counter = itertools.count() + + default_family = 'AF_INET' +@@ -718,7 +722,7 @@ def deliver_challenge(connection, authkey): + assert isinstance(authkey, bytes) + message = os.urandom(MESSAGE_LENGTH) + connection.send_bytes(CHALLENGE + message) +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + response = connection.recv_bytes(256) # reject large message + if response == digest: + connection.send_bytes(WELCOME) +@@ -732,7 +736,7 @@ def answer_challenge(connection, authkey): + message = connection.recv_bytes(256) # reject large message + assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message + message = message[len(CHALLENGE):] +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + connection.send_bytes(digest) + response = connection.recv_bytes(256) # reject large message + if response != WELCOME: +-- +2.21.0 + + +From 1dd4b2d6c4797b828ac38d8c62f1e69fce49f1e9 Mon Sep 17 00:00:00 2001 +From: Marcel Plch +Date: Fri, 2 Aug 2019 17:36:01 +0200 +Subject: [PATCH 17/36] Fix refcounting + +--- + Modules/_hmacopenssl.c | 35 ++++++++++++++++++++++++++++++++++- + 1 file changed, 34 insertions(+), 1 deletion(-) + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 239445a0831..9c2882833d1 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -373,6 +373,34 @@ static struct PyMethodDef hmacopenssl_functions[] = { + {NULL, NULL} /* Sentinel */ + }; + ++static int ++hmacopenssl_traverse(PyObject *self, visitproc visit, void *arg) ++{ ++ hmacopenssl_state *state; ++ ++ state = PyModule_GetState(self); ++ ++ if (state) { ++ Py_VISIT(state->HmacType); ++ } ++ ++ return 0; ++} ++ ++static int ++hmacopenssl_clear(PyObject *self) ++{ ++ hmacopenssl_state *state; ++ ++ state = 
PyModule_GetState(self); ++ ++ if (state) { ++ Py_CLEAR(state->HmacType); ++ } ++ ++ return 0; ++} ++ + + + /* Initialize this module. */ +@@ -396,7 +424,10 @@ hmacopenssl_exec(PyObject *m) { + } + + state = PyModule_GetState(m); ++ + state->HmacType = (PyTypeObject *)temp; ++ Py_INCREF(temp); ++ + + return 0; + +@@ -415,7 +446,9 @@ static struct PyModuleDef _hmacopenssl_def = { + .m_name = "_hmacopenssl", + .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, +- .m_size = sizeof(hmacopenssl_state) ++ .m_size = sizeof(hmacopenssl_state), ++ .m_traverse = hmacopenssl_traverse, ++ .m_clear = hmacopenssl_clear + }; + + +-- +2.21.0 + + +From 4e1351cc2d704e8da7bb8125ad0ab194e8a02ec4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 13:37:05 +0200 +Subject: [PATCH 18/36] hmac: Don't default to md5 in FIPS mode + +--- + Lib/hmac.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index ed98406bd2e..7b8821edd58 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -165,7 +165,7 @@ def new(key, msg = None, digestmod = None): + """ + if _hashlib.get_fips_mode(): + if digestmod is None: +- digestmod = 'md5' ++ raise ValueError("'digestmod' argument is mandatory in FIPS mode") + name = _get_openssl_name(digestmod) + result = _hmacopenssl.new(key, digestmod=name) + if msg: +-- +2.21.0 + + +From ea44160c242101f14d22242a0722e730ef304b8b Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 14:20:58 +0200 +Subject: [PATCH 19/36] Make _hmacopenssl.HMAC subclassable; subclass it as + hmac.HMAC under FIPS + +This removes the _hmacopenssl.new function. 
+--- + Lib/hmac.py | 27 +++++++----- + Lib/test/test_fips.py | 2 +- + Modules/_hmacopenssl.c | 75 ++++++++++++++++----------------- + Modules/clinic/_hmacopenssl.c.h | 39 +---------------- + 4 files changed, 56 insertions(+), 87 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 7b8821edd58..d479c5a4492 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -141,6 +141,8 @@ class HMAC: + + + def _get_openssl_name(digestmod): ++ if digestmod is None: ++ raise ValueError("'digestmod' argument is mandatory in FIPS mode") + if isinstance(digestmod, str): + return digestmod.lower() + elif callable(digestmod): +@@ -152,6 +154,20 @@ def _get_openssl_name(digestmod): + + return digestmod.name.lower().replace('_', '-') + ++ ++class HMAC_openssl(_hmacopenssl.HMAC): ++ def __new__(cls, key, msg = None, digestmod = None): ++ name = _get_openssl_name(digestmod) ++ result = _hmacopenssl.HMAC.__new__(cls, key, digestmod=name) ++ if msg: ++ result.update(msg) ++ return result ++ ++ ++if _hashlib.get_fips_mode(): ++ HMAC = HMAC_openssl ++ ++ + def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + +@@ -163,13 +179,4 @@ def new(key, msg = None, digestmod = None): + method, and can ask for the hash value at any time by calling its digest() + method. 
+ """ +- if _hashlib.get_fips_mode(): +- if digestmod is None: +- raise ValueError("'digestmod' argument is mandatory in FIPS mode") +- name = _get_openssl_name(digestmod) +- result = _hmacopenssl.new(key, digestmod=name) +- if msg: +- result.update(msg) +- return result +- else: +- return HMAC(key, msg, digestmod) ++ return HMAC(key, msg, digestmod) +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +index bee911ef405..34812e6098a 100644 +--- a/Lib/test/test_fips.py ++++ b/Lib/test/test_fips.py +@@ -54,7 +54,7 @@ class HashlibFipsTests(unittest.TestCase): + self.compare_hashes(hashlib.sha512(b'abc'), _hashlib.openssl_sha512(b'abc')) + + def test_hmac_digests(self): +- self.compare_hashes(_hmacopenssl.new(b'My hovercraft is full of eels', digestmod='sha384'), ++ self.compare_hashes(_hmacopenssl.HMAC(b'My hovercraft is full of eels', digestmod='sha384'), + hmac.new(b'My hovercraft is full of eels', digestmod='sha384')) + + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 9c2882833d1..7d3d9739f3a 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -41,33 +41,25 @@ typedef struct { + module _hmacopenssl + class _hmacopenssl.HMAC "HmacObject *" "((hmacopenssl_state *)PyModule_GetState(module))->HmacType" + [clinic start generated code]*/ +-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=204b7f45847f57b4]*/ ++/*[clinic end generated code: output=da39a3ee5e6b4b0d input=9fe07a087adc2cf9]*/ + + +-/*[clinic input] +-_hmacopenssl.new +- +- key: Py_buffer +- * +- digestmod: str +- +-Return a new hmac object. 
+-[clinic start generated code]*/ +- + static PyObject * +-_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, +- const char *digestmod) +-/*[clinic end generated code: output=46f1cb4e02921922 input=be8c0c2e4fad508c]*/ ++Hmac_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds) + { +- hmacopenssl_state *state; +- +- if (digestmod == NULL) { +- PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); ++ static char *kwarg_names[] = {"key", "digestmod", NULL}; ++ Py_buffer key = {NULL, NULL}; ++ char *digestmod = NULL; ++ ++ int ret = PyArg_ParseTupleAndKeywords( ++ args, kwds, "y*|$s:_hmacopenssl.HMAC", kwarg_names, ++ &key, &digestmod); ++ if (ret == 0) { + return NULL; + } + +- state = PyModule_GetState(module); +- if (state == NULL) { ++ if (digestmod == NULL) { ++ PyErr_SetString(PyExc_ValueError, "digestmod must be specified"); + return NULL; + } + +@@ -106,8 +98,8 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + + int r = HMAC_Init_ex( + ctx, +- (const char*)key->buf, +- key->len, ++ (const char*)key.buf, ++ key.len, + digest, + NULL /*impl*/); + if (r == 0) { +@@ -115,7 +107,10 @@ _hmacopenssl_new_impl(PyObject *module, Py_buffer *key, + goto error; + } + +- retval = (HmacObject *)PyObject_New(HmacObject, state->HmacType); ++ PyBuffer_Release(&key); ++ key.buf = NULL; ++ ++ retval = (HmacObject *)subtype->tp_alloc(subtype, 0); + if (retval == NULL) { + goto error; + } +@@ -130,6 +125,7 @@ error: + if (ctx) HMAC_CTX_free(ctx); + if (name) Py_DECREF(name); + if (retval) PyObject_Del(name); ++ if (key.buf) PyBuffer_Release(&key); + return NULL; + } + +@@ -145,19 +141,27 @@ _hmacopenssl_HMAC_copy_impl(HmacObject *self) + { + HmacObject *retval; + +- retval = (HmacObject *)PyObject_New(HmacObject, (PyTypeObject *)PyObject_Type((PyObject *)self)); ++ HMAC_CTX *ctx = HMAC_CTX_new(); ++ if (ctx == NULL) { ++ return _setException(PyExc_ValueError); ++ } ++ ++ int r = HMAC_CTX_copy(ctx, self->ctx); ++ if (r == 0) { ++ HMAC_CTX_free(ctx); 
++ return _setException(PyExc_ValueError); ++ } ++ ++ retval = (HmacObject *)Py_TYPE(self)->tp_alloc(Py_TYPE(self), 0); + if (retval == NULL) { ++ HMAC_CTX_free(ctx); + return NULL; + } +- ++ retval->ctx = ctx; + Py_INCREF(self->name); + retval->name = self->name; + +- int r = HMAC_CTX_copy(retval->ctx, self->ctx); +- if (r == 0) { +- PyObject_Del(retval); +- return _setException(PyExc_ValueError); +- } ++ retval->lock = NULL; + + return (PyObject *)retval; + } +@@ -169,8 +173,8 @@ _hmac_dealloc(HmacObject *self) + PyThread_free_lock(self->lock); + } + HMAC_CTX_free(self->ctx); +- Py_XDECREF(self->name); +- PyObject_Del(self); ++ Py_CLEAR(self->name); ++ Py_TYPE(self)->tp_free(self); + } + + static PyObject * +@@ -357,6 +361,7 @@ static PyType_Slot HmacType_slots[] = { + {Py_tp_methods, Hmac_methods}, + {Py_tp_getset, Hmac_getset}, + {Py_tp_members, Hmac_members}, ++ {Py_tp_new, Hmac_new}, + {0, NULL} + }; + +@@ -368,11 +373,6 @@ PyType_Spec HmacType_spec = { + }; + + +-static struct PyMethodDef hmacopenssl_functions[] = { +- _HMACOPENSSL_NEW_METHODDEF +- {NULL, NULL} /* Sentinel */ +-}; +- + static int + hmacopenssl_traverse(PyObject *self, visitproc visit, void *arg) + { +@@ -444,7 +444,6 @@ static PyModuleDef_Slot hmacopenssl_slots[] = { + static struct PyModuleDef _hmacopenssl_def = { + PyModuleDef_HEAD_INIT, /* m_base */ + .m_name = "_hmacopenssl", +- .m_methods = hmacopenssl_functions, + .m_slots = hmacopenssl_slots, + .m_size = sizeof(hmacopenssl_state), + .m_traverse = hmacopenssl_traverse, +diff --git a/Modules/clinic/_hmacopenssl.c.h b/Modules/clinic/_hmacopenssl.c.h +index b472a6eddd3..861acc11bfd 100644 +--- a/Modules/clinic/_hmacopenssl.c.h ++++ b/Modules/clinic/_hmacopenssl.c.h +@@ -2,43 +2,6 @@ + preserve + [clinic start generated code]*/ + +-PyDoc_STRVAR(_hmacopenssl_new__doc__, +-"new($module, /, key, *, digestmod)\n" +-"--\n" +-"\n" +-"Return a new hmac object."); +- +-#define _HMACOPENSSL_NEW_METHODDEF \ +- {"new", (PyCFunction)_hmacopenssl_new, 
METH_FASTCALL, _hmacopenssl_new__doc__}, +- +-static PyObject * +-_hmacopenssl_new_impl(PyObject *module, Py_buffer *key, +- const char *digestmod); +- +-static PyObject * +-_hmacopenssl_new(PyObject *module, PyObject **args, Py_ssize_t nargs, PyObject *kwnames) +-{ +- PyObject *return_value = NULL; +- static const char * const _keywords[] = {"key", "digestmod", NULL}; +- static _PyArg_Parser _parser = {"y*$s:new", _keywords, 0}; +- Py_buffer key = {NULL, NULL}; +- const char *digestmod; +- +- if (!_PyArg_ParseStack(args, nargs, kwnames, &_parser, +- &key, &digestmod)) { +- goto exit; +- } +- return_value = _hmacopenssl_new_impl(module, &key, digestmod); +- +-exit: +- /* Cleanup for key */ +- if (key.obj) { +- PyBuffer_Release(&key); +- } +- +- return return_value; +-} +- + PyDoc_STRVAR(_hmacopenssl_HMAC_copy__doc__, + "copy($self, /)\n" + "--\n" +@@ -130,4 +93,4 @@ _hmacopenssl_HMAC_hexdigest(HmacObject *self, PyObject *Py_UNUSED(ignored)) + { + return _hmacopenssl_HMAC_hexdigest_impl(self); + } +-/*[clinic end generated code: output=10b6e8cac6d7a2c9 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=d93ad460795d49b5 input=a9049054013a1b77]*/ +-- +2.21.0 + + +From e3d6a5adbe0032726dff0b1f6473ce3ff63fad51 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 16:10:36 +0200 +Subject: [PATCH 20/36] Fix _hmacopenssl.HMAC.block_size + +--- + Modules/_hmacopenssl.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Modules/_hmacopenssl.c b/Modules/_hmacopenssl.c +index 7d3d9739f3a..a24c8ba0229 100644 +--- a/Modules/_hmacopenssl.c ++++ b/Modules/_hmacopenssl.c +@@ -318,7 +318,7 @@ _hmacopenssl_get_block_size(HmacObject *self, void *closure) + if (md == NULL) { + return _setException(PyExc_ValueError); + } +- return PyLong_FromLong(EVP_MD_size(md)); ++ return PyLong_FromLong(EVP_MD_block_size(md)); + } + + static PyMethodDef Hmac_methods[] = { +-- +2.21.0 + + +From f925a1165d7b07fb4d0bba216f7dbc387b94e94d Mon Sep 17 
00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 15:02:08 +0200 +Subject: [PATCH 21/36] distutils upload: Skip md5 checksum in FIPS mode + +--- + Lib/distutils/command/upload.py | 11 ++++++++++- + Lib/distutils/tests/test_upload.py | 13 +++++++++++-- + 2 files changed, 21 insertions(+), 3 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 32dda359bad..0edb39efd4c 100644 +--- a/Lib/distutils/command/upload.py ++++ b/Lib/distutils/command/upload.py +@@ -102,7 +102,6 @@ class upload(PyPIRCCommand): + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, +- 'md5_digest': hashlib.md5(content).hexdigest(), + + # additional meta-data + 'metadata_version': '1.0', +@@ -121,6 +120,16 @@ class upload(PyPIRCCommand): + 'requires': meta.get_requires(), + 'obsoletes': meta.get_obsoletes(), + } ++ try: ++ digest = hashlib.md5(content).hexdigest() ++ except ValueError as e: ++ msg = 'calculating md5 checksum failed: %s' % e ++ self.announce(msg, log.ERROR) ++ if not hashlib.get_fips_mode(): ++ # this really shouldn't fail ++ raise ++ else: ++ data['md5_digest'] = digest + comment = '' + if command == 'bdist_rpm': + dist, version, id = platform.dist() +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index c17d8e7d54e..b4b64e97737 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -3,6 +3,7 @@ import os + import unittest + import unittest.mock as mock + from urllib.request import HTTPError ++import hashlib + + from test.support import run_unittest + +@@ -130,7 +131,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + # what did we send ? 
+ headers = dict(self.last_open.req.headers) +- self.assertEqual(headers['Content-length'], '2162') ++ if hashlib.get_fips_mode(): ++ # md5 hash is omitted ++ self.assertEqual(headers['Content-length'], '2020') ++ else: ++ self.assertEqual(headers['Content-length'], '2162') + content_type = headers['Content-type'] + self.assertTrue(content_type.startswith('multipart/form-data')) + self.assertEqual(self.last_open.req.get_method(), 'POST') +@@ -166,7 +171,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + cmd.run() + + headers = dict(self.last_open.req.headers) +- self.assertEqual(headers['Content-length'], '2172') ++ if hashlib.get_fips_mode(): ++ # md5 hash is omitted ++ self.assertEqual(headers['Content-length'], '2030') ++ else: ++ self.assertEqual(headers['Content-length'], '2172') + self.assertIn(b'long description\r', self.last_open.req.data) + + def test_upload_fails(self): +-- +2.21.0 + + +From 936a7835a1873952eab8f7ec3b9b67f63786c001 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 15:32:25 +0200 +Subject: [PATCH 22/36] Fix HMAC tests on FIPS mode + +--- + Lib/hmac.py | 3 +++ + Lib/test/test_hmac.py | 26 ++++++++++++++++++++++++++ + 2 files changed, 29 insertions(+) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index d479c5a4492..7af94c39ed6 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -157,6 +157,9 @@ def _get_openssl_name(digestmod): + + class HMAC_openssl(_hmacopenssl.HMAC): + def __new__(cls, key, msg = None, digestmod = None): ++ if not isinstance(key, (bytes, bytearray)): ++ raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__) ++ + name = _get_openssl_name(digestmod) + result = _hmacopenssl.HMAC.__new__(cls, key, digestmod=name) + if msg: +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 338e0215a41..5b090727ef6 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -250,6 +250,7 @@ class TestVectorsTestCase(unittest.TestCase): + def test_sha512_rfc4231(self): + 
self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + ++ @unittest.skipIf(hashlib.get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') + @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): +@@ -298,6 +299,14 @@ class ConstructorTestCase(unittest.TestCase): + self.fail("Standard constructor call raised exception.") + + @ignore_warning ++ def test_normal_digestmod(self): ++ # Standard constructor call. ++ failed = 0 ++ try: ++ h = hmac.HMAC(b"key", digestmod='sha1') ++ except Exception: ++ self.fail("Standard constructor call raised exception.") ++ + @requires_hashdigest('sha256') + def test_with_str_key(self): + # Pass a key of type str, which is an error, because it expects a key +@@ -366,6 +375,7 @@ class SanityTestCase(unittest.TestCase): + + class CopyTestCase(unittest.TestCase): + ++ @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. +@@ -378,6 +388,7 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + ++ @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. +@@ -390,6 +401,21 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + ++ def test_realcopy(self): ++ # Testing if the copy method created a real copy. ++ h1 = hmac.HMAC(b"key", digestmod="sha1") ++ h2 = h1.copy() ++ # Using id() in case somebody has overridden __eq__/__ne__. 
++ self.assertTrue(id(h1) != id(h2), "No real copy of the HMAC instance.") ++ old_digest = h1.digest() ++ assert h1.digest() == h2.digest() ++ h1.update(b'hi') ++ assert h1.digest() != h2.digest() ++ assert h2.digest() == old_digest ++ new_digest = h1.digest() ++ h2.update(b'hi') ++ assert h1.digest() == h2.digest() == new_digest ++ + @requires_hashdigest('sha256') + def test_equality(self): + # Testing if the copy has the same digests. +-- +2.21.0 + + +From 264c6eed5a5d060fe7bad7e4410d920cecbb083c Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 16:37:12 +0200 +Subject: [PATCH 23/36] test_tools: Skip md5sum tests in FIPS mode + +--- + Lib/test/test_tools/test_md5sum.py | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py +index fb565b73778..7028a4dc214 100644 +--- a/Lib/test/test_tools/test_md5sum.py ++++ b/Lib/test/test_tools/test_md5sum.py +@@ -4,11 +4,15 @@ import os + import unittest + from test import support + from test.support.script_helper import assert_python_ok, assert_python_failure ++import hashlib + + from test.test_tools import scriptsdir, skip_if_missing + + skip_if_missing() + ++if hashlib.get_fips_mode(): ++ raise unittest.SkipTest("md5sum won't work at all in FIPS mode") ++ + class MD5SumTests(unittest.TestCase): + @classmethod + def setUpClass(cls): +-- +2.21.0 + + +From c5373f5e7ae4ecfe8337ee3c73b594b5f0ebb411 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 17:21:16 +0200 +Subject: [PATCH 24/36] _hashopenssl: Include hash name in the error message + +--- + Modules/_hashopenssl.c | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 7dfd70822b9..0563473c627 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -431,7 +431,7 @@ EVPnew(PyObject *name_obj, + EVPobject *self; + + if (!digest && !initial_ctx) { +- 
PyErr_SetString(PyExc_ValueError, "unsupported hash type"); ++ PyErr_Format(PyExc_ValueError, "unsupported hash type %U", name_obj); + return NULL; + } + +@@ -622,7 +622,7 @@ pbkdf2_hmac(PyObject *self, PyObject *args, PyObject *kwdict) + + digest = EVP_get_digestbyname(name); + if (digest == NULL) { +- PyErr_SetString(PyExc_ValueError, "unsupported hash type"); ++ PyErr_Format(PyExc_ValueError, "unsupported hash type %s", name); + goto end; + } + +-- +2.21.0 + + +From c3f9e194eb5f86ebec4db2820b8968d6896abc8b Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 18:23:57 +0200 +Subject: [PATCH 25/36] Make hashlib tests pass in FIPS mode (with some hashlib + changes) + +--- + Lib/hashlib.py | 18 +++++++++-- + Lib/test/test_hashlib.py | 67 ++++++++++++++++++++++++++++------------ + Modules/_hashopenssl.c | 14 +++++++++ + 3 files changed, 78 insertions(+), 21 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index ca1dd202251..d3344f60b2e 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -155,7 +155,18 @@ def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must be a bytes-like object). + """ +- if not get_fips_mode(): ++ if get_fips_mode(): ++ # Use OpenSSL names for Python built-in hashes ++ orig_name = name ++ name = { ++ 'sha3_224': "sha3-224", ++ 'sha3_256': "sha3-256", ++ 'sha3_384': "sha3-384", ++ 'sha3_512': "sha3-512", ++ 'shake_128': "shake128", ++ 'shake_256': "shake256", ++ }.get(name, name) ++ else: + if name in {'blake2b', 'blake2s'}: + # Prefer our blake2 implementation. + # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. +@@ -163,7 +174,10 @@ def __hash_new(name, data=b'', **kwargs): + # salt, personal, tree hashing or SSE. 
+ return __get_builtin_constructor(name)(data, **kwargs) + try: +- return _hashlib.new(name, data) ++ retval = _hashlib.new(name, data) ++ if get_fips_mode() and name != orig_name: ++ retval._set_name(orig_name) ++ return retval + except ValueError: + # If the _hashlib module (OpenSSL) doesn't support the named + # hash, try using our builtin implementations. +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index e57c93b42f5..9745bfd6fa2 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -31,6 +31,11 @@ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + ++if hashlib.get_fips_mode(): ++ FIPS_DISABLED = {'md5', 'MD5', 'blake2b', 'blake2s'} ++else: ++ FIPS_DISABLED = set() ++ + try: + import _blake2 + except ImportError: +@@ -86,6 +91,11 @@ class HashLibTestCase(unittest.TestCase): + # Issue #14693: fallback modules are always compiled under POSIX + _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG + ++ if hashlib.get_fips_mode(): ++ shakes = set() ++ supported_hash_names = tuple( ++ n for n in supported_hash_names if n not in FIPS_DISABLED) ++ + def _conditional_import_module(self, module_name): + """Import a module and return a reference to it or None on failure.""" + try: +@@ -93,8 +103,20 @@ class HashLibTestCase(unittest.TestCase): + except ModuleNotFoundError as error: + if self._warn_on_extension_import: + warnings.warn('Did a C extension fail to compile? 
%s' % error) ++ except ImportError as error: ++ if not hashlib.get_fips_mode(): ++ raise + return None + ++ def _has_shake_extras(self, hasher): ++ """Return true if the hasher should have "shake" API (digest length)""" ++ if hasher.name not in self.shakes: ++ return False ++ _hashlib = self._conditional_import_module('_hashlib') ++ if _hashlib and isinstance(hasher, _hashlib.HASH): ++ return False ++ return True ++ + def __init__(self, *args, **kwargs): + algorithms = set() + for algorithm in self.supported_hash_names: +@@ -182,15 +204,13 @@ class HashLibTestCase(unittest.TestCase): + a = array.array("b", range(10)) + for cons in self.hash_constructors: + c = cons(a) +- if (c.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(c): + c.hexdigest(16) + else: + c.hexdigest() + + def test_algorithms_guaranteed(self): +- self.assertEqual(hashlib.algorithms_guaranteed, ++ self.assertEqual(hashlib.algorithms_guaranteed - FIPS_DISABLED, + set(_algo for _algo in self.supported_hash_names + if _algo.islower())) + +@@ -202,6 +222,12 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) + ++ @unittest.skipUnless(hashlib.get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") ++ def test_get_builtin_constructor_fips(self): ++ with self.assertRaises(AttributeError): ++ hashlib.__get_builtin_constructor ++ ++ @unittest.skipIf(hashlib.get_fips_mode(), "No builtin constructors in FIPS mode") + def test_get_builtin_constructor(self): + get_builtin_constructor = getattr(hashlib, + '__get_builtin_constructor') +@@ -231,9 +257,7 @@ class HashLibTestCase(unittest.TestCase): + def test_hexdigest(self): + for cons in self.hash_constructors: + h = cons() +- if (h.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(h): + self.assertIsInstance(h.digest(16), bytes) + 
self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16)) + else: +@@ -245,9 +269,7 @@ class HashLibTestCase(unittest.TestCase): + large_sizes = (2**29, 2**32-10, 2**32+10, 2**61, 2**64-10, 2**64+10) + for cons in self.hash_constructors: + h = cons() +- if h.name not in self.shakes: +- continue +- if cons.__name__.startswith('openssl_'): ++ if not self._has_shake_extras(h): + continue + for digest in h.digest, h.hexdigest: + with self.assertRaises((ValueError, OverflowError)): +@@ -278,9 +300,7 @@ class HashLibTestCase(unittest.TestCase): + m1.update(bees) + m1.update(cees) + m1.update(dees) +- if (m1.name in self.shakes +- and not cons.__name__.startswith('openssl_') +- ): ++ if self._has_shake_extras(m1): + args = (16,) + else: + args = () +@@ -349,7 +369,8 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(TypeError, hash_object_constructor, 'spam') + + def test_no_unicode(self): +- self.check_no_unicode('md5') ++ if not hashlib.get_fips_mode(): ++ self.check_no_unicode('md5') + self.check_no_unicode('sha1') + self.check_no_unicode('sha224') + self.check_no_unicode('sha256') +@@ -392,7 +413,8 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(name.split("_")[0], repr(m)) + + def test_blocksize_name(self): +- self.check_blocksize_name('md5', 64, 16) ++ if not hashlib.get_fips_mode(): ++ self.check_blocksize_name('md5', 64, 16) + self.check_blocksize_name('sha1', 64, 20) + self.check_blocksize_name('sha224', 64, 28) + self.check_blocksize_name('sha256', 64, 32) +@@ -432,22 +454,27 @@ class HashLibTestCase(unittest.TestCase): + self.check_blocksize_name('blake2b', 128, 64) + self.check_blocksize_name('blake2s', 64, 32) + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_0(self): + self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_1(self): + self.check('md5', b'abc', 
'900150983cd24fb0d6963f7d28e17f72') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_2(self): + self.check('md5', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'd174ab98d277d9f5a5611c2c9f419d9f') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G + 5, memuse=1, dry_run=False) + def test_case_md5_huge(self, size): + self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d') + ++ @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G - 1, memuse=1, dry_run=False) + def test_case_md5_uintmax(self, size): +@@ -829,14 +856,16 @@ class HashLibTestCase(unittest.TestCase): + m = cons(b'x' * gil_minsize) + m.update(b'1') + +- m = hashlib.md5() ++ m = hashlib.sha1() + m.update(b'1') + m.update(b'#' * gil_minsize) + m.update(b'1') +- self.assertEqual(m.hexdigest(), 'cb1e1a2cbc80be75e19935d621fb9b21') ++ self.assertEqual(m.hexdigest(), ++ 'c45f7445ca0ea087d7a1758fbea07935f267c46a') + +- m = hashlib.md5(b'x' * gil_minsize) +- self.assertEqual(m.hexdigest(), 'cfb767f225d58469c5de3632a8803958') ++ m = hashlib.sha1(b'x' * gil_minsize) ++ self.assertEqual(m.hexdigest(), ++ '63fda1efde982ba1ffe9d53035bff5c9ce4758fb') + + @unittest.skipUnless(threading, 'Threading required for this test.') + @support.reap_threads +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 0563473c627..e330423e266 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -256,11 +256,25 @@ EVP_update(EVPobject *self, PyObject *args) + Py_RETURN_NONE; + } + ++static PyObject * ++EVP_set_name(EVPobject *self, PyObject *new_name) ++{ ++ if (!PyUnicode_CheckExact(new_name)) { ++ PyErr_SetString(PyExc_TypeError, "expected string"); ++ return NULL; ++ } ++ 
Py_DECREF(self->name); ++ Py_INCREF(new_name); ++ self->name = new_name; ++ Py_RETURN_NONE; ++} ++ + static PyMethodDef EVP_methods[] = { + {"update", (PyCFunction)EVP_update, METH_VARARGS, EVP_update__doc__}, + {"digest", (PyCFunction)EVP_digest, METH_NOARGS, EVP_digest__doc__}, + {"hexdigest", (PyCFunction)EVP_hexdigest, METH_NOARGS, EVP_hexdigest__doc__}, + {"copy", (PyCFunction)EVP_copy, METH_NOARGS, EVP_copy__doc__}, ++ {"_set_name", (PyCFunction)EVP_set_name, METH_O, EVP_copy__doc__}, + {NULL, NULL} /* sentinel */ + }; + +-- +2.21.0 + + +From a40d1cb8672bc2c5403b6e9ff6eaa3324282de09 Mon Sep 17 00:00:00 2001 +From: Lumir Balhar +Date: Wed, 14 Aug 2019 14:43:07 +0200 +Subject: [PATCH 26/36] distutils upload: only add md5 if available, but + *always* use sha256 + +--- + Lib/distutils/command/upload.py | 3 ++- + Lib/distutils/tests/test_upload.py | 14 ++++++++------ + 2 files changed, 10 insertions(+), 7 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 0edb39efd4c..170acb3d6b6 100644 +--- a/Lib/distutils/command/upload.py ++++ b/Lib/distutils/command/upload.py +@@ -102,6 +102,7 @@ class upload(PyPIRCCommand): + 'content': (os.path.basename(filename),content), + 'filetype': command, + 'pyversion': pyversion, ++ 'sha256_digest': hashlib.sha256(content).hexdigest(), + + # additional meta-data + 'metadata_version': '1.0', +@@ -124,7 +125,7 @@ class upload(PyPIRCCommand): + digest = hashlib.md5(content).hexdigest() + except ValueError as e: + msg = 'calculating md5 checksum failed: %s' % e +- self.announce(msg, log.ERROR) ++ self.announce(msg, log.INFO) + if not hashlib.get_fips_mode(): + # this really shouldn't fail + raise +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index b4b64e97737..f720a7905dd 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -132,10 +132,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + # what did we 
send ? + headers = dict(self.last_open.req.headers) + if hashlib.get_fips_mode(): +- # md5 hash is omitted +- self.assertEqual(headers['Content-length'], '2020') ++ # only sha256 hash is used ++ self.assertEqual(headers['Content-length'], '2197') + else: +- self.assertEqual(headers['Content-length'], '2162') ++ # both sha256 and md5 hashes are used ++ self.assertEqual(headers['Content-length'], '2339') + content_type = headers['Content-type'] + self.assertTrue(content_type.startswith('multipart/form-data')) + self.assertEqual(self.last_open.req.get_method(), 'POST') +@@ -172,10 +173,11 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + headers = dict(self.last_open.req.headers) + if hashlib.get_fips_mode(): +- # md5 hash is omitted +- self.assertEqual(headers['Content-length'], '2030') ++ # only sha256 hash is used ++ self.assertEqual(headers['Content-length'], '2207') + else: +- self.assertEqual(headers['Content-length'], '2172') ++ # both sha256 and md5 hashes are used ++ self.assertEqual(headers['Content-length'], '2349') + self.assertIn(b'long description\r', self.last_open.req.data) + + def test_upload_fails(self): +-- +2.21.0 + + +From e0d2cca338186bbc5cb9c67bb4402dec38fed5a7 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 15:55:48 +0200 +Subject: [PATCH 27/36] Add the usedforsecurity argument back + +--- + Lib/hashlib.py | 4 ++- + Modules/_hashopenssl.c | 82 +++++++++++++++++++++++++++++++----------- + 2 files changed, 65 insertions(+), 21 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index d3344f60b2e..cd3b035b1d7 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -174,7 +174,9 @@ def __hash_new(name, data=b'', **kwargs): + # salt, personal, tree hashing or SSE. 
+ return __get_builtin_constructor(name)(data, **kwargs) + try: +- retval = _hashlib.new(name, data) ++ usedforsecurity = kwargs.pop('usedforsecurity', True) ++ retval = _hashlib.new( ++ name, data, usedforsecurity=usedforsecurity) + if get_fips_mode() and name != orig_name: + retval._set_name(orig_name) + return retval +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index e330423e266..b621c330c32 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -318,6 +318,25 @@ EVP_repr(EVPobject *self) + return PyUnicode_FromFormat("<%U HASH object @ %p>", self->name, self); + } + ++ ++static void ++mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity) ++{ ++ EVP_MD_CTX_init(ctx); ++ /* ++ If the user has declared that this digest is being used in a ++ non-security role (e.g. indexing into a data structure), set ++ the exception flag for openssl to allow it ++ */ ++ if (!usedforsecurity) { ++#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW ++ EVP_MD_CTX_set_flags(ctx, ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW); ++#endif ++ } ++} ++ ++ + #if HASH_OBJ_CONSTRUCTOR + static int + EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) +@@ -328,9 +347,10 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + Py_buffer view; + char *nameStr; + const EVP_MD *digest; ++ int usedforsecurity=1; + +- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:HASH", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O$d:HASH", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return -1; + } + +@@ -351,7 +371,8 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + PyBuffer_Release(&view); + return -1; + } +- if (!EVP_DigestInit(self->ctx, digest)) { ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) { + _setException(PyExc_ValueError); + if (data_obj) + PyBuffer_Release(&view); +@@ -440,7 +461,7 @@ static PyTypeObject EVPtype = { + static PyObject * + EVPnew(PyObject 
*name_obj, + const EVP_MD *digest, const EVP_MD_CTX *initial_ctx, +- const unsigned char *cp, Py_ssize_t len) ++ const unsigned char *cp, Py_ssize_t len, int usedforsecurity) + { + EVPobject *self; + +@@ -455,7 +476,8 @@ EVPnew(PyObject *name_obj, + if (initial_ctx) { + EVP_MD_CTX_copy(self->ctx, initial_ctx); + } else { +- if (!EVP_DigestInit(self->ctx, digest)) { ++ mc_ctx_init(self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(self->ctx, digest, NULL)) { + _setException(PyExc_ValueError); + Py_DECREF(self); + return NULL; +@@ -484,26 +506,35 @@ An optional string argument may be provided and will be\n\ + automatically hashed.\n\ + \n\ + The MD5 and SHA1 algorithms are always supported.\n"); ++static PyObject *_EVP_new_impl(PyObject *name_obj, char *name, PyObject *data_obj, int usedforsecurity); + + static PyObject * + EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; + PyObject *data_obj = NULL; +- Py_buffer view = { 0 }; +- PyObject *ret_obj; +- char *name; +- const EVP_MD *digest; ++ int usedforsecurity = 1; + +- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O:new", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O$p:new", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return NULL; + } ++ return _EVP_new_impl(name_obj, NULL, data_obj, usedforsecurity); ++} + +- if (!PyArg_Parse(name_obj, "s", &name)) { +- PyErr_SetString(PyExc_TypeError, "name must be a string"); +- return NULL; ++static PyObject * ++_EVP_new_impl(PyObject *name_obj, char *name, PyObject *data_obj, int usedforsecurity) ++{ ++ Py_buffer view = { 0 }; ++ PyObject *ret_obj; ++ const EVP_MD *digest; ++ ++ if (!name) { ++ if (!PyArg_Parse(name_obj, "s", &name)) { ++ PyErr_SetString(PyExc_TypeError, "name must be a string"); ++ return NULL; ++ } + } + + if (data_obj) +@@ -511,7 
+542,7 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + + digest = EVP_get_digestbyname(name); + +- ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len); ++ ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len, usedforsecurity); + + if (data_obj) + PyBuffer_Release(&view); +@@ -906,18 +937,27 @@ generate_hash_name_list(void) + * code that wants to make hashes of a bunch of small strings. + * The first call will lazy-initialize, which reports an exception + * if initialization fails. ++ * ++ * Note that for usedforsecurity=0, we fall back to new(). + */ + #define GEN_CONSTRUCTOR(NAME, SSL_NAME) \ + static PyObject * \ +- EVP_new_ ## NAME (PyObject *self, PyObject *args) \ ++ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kw) \ + { \ + PyObject *data_obj = NULL; \ + Py_buffer view = { 0 }; \ + PyObject *ret_obj; \ ++ int usedforsecurity = 1; \ ++ char *kwnames[] = {"", "usedforsecurity", NULL}; \ + \ +- if (!PyArg_ParseTuple(args, "|O:" #NAME , &data_obj)) { \ ++ if (!PyArg_ParseTupleAndKeywords(args, kw, "|O$p:" #NAME, kwnames, &data_obj, &usedforsecurity)) { \ + return NULL; \ + } \ ++ if (!usedforsecurity) { \ ++ return _EVP_new_impl( \ ++ CONST_ ## NAME ## _name_obj, SSL_NAME, \ ++ data_obj, usedforsecurity); \ ++ } \ + \ + if (CONST_new_ ## NAME ## _ctx_p == NULL) { \ + EVP_MD_CTX *ctx_p = EVP_MD_CTX_new(); \ +@@ -938,7 +978,8 @@ generate_hash_name_list(void) + NULL, \ + CONST_new_ ## NAME ## _ctx_p, \ + (unsigned char*)view.buf, \ +- view.len); \ ++ view.len, \ ++ usedforsecurity); \ + \ + if (data_obj) \ + PyBuffer_Release(&view); \ +@@ -947,7 +988,8 @@ generate_hash_name_list(void) + + /* a PyMethodDef structure for the constructor */ + #define CONSTRUCTOR_METH_DEF(NAME) \ +- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ ++ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \ ++ METH_VARARGS|METH_KEYWORDS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally 
initialized with a string") \ + }, +-- +2.21.0 + + +From 0d169ad7d9aa48bdb3313b6d72682d097dc9dea5 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 18:59:15 +0200 +Subject: [PATCH 28/36] Add no-op usedforsecurity argument to internal hash + implementations + +--- + Modules/_blake2/blake2b_impl.c | 6 ++++-- + Modules/_blake2/blake2s_impl.c | 6 ++++-- + Modules/_blake2/clinic/blake2b_impl.c.h | 16 +++++++++------- + Modules/_blake2/clinic/blake2s_impl.c.h | 16 +++++++++------- + Modules/_sha3/sha3module.c | 5 +++++ + 5 files changed, 31 insertions(+), 18 deletions(-) + +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index f6bfce823b8..ae9d244200a 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -87,6 +87,8 @@ _blake2.blake2b.__new__ as py_blake2b_new + inner_size: int = 0 + last_node: bool = False + ++ usedforsecurity: bool = True ++ + Return a new BLAKE2b hash object. + [clinic start generated code]*/ + +@@ -95,8 +97,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node) +-/*[clinic end generated code: output=7506d8d890e5f13b input=aca35b33c5612b4b]*/ ++ int inner_size, int last_node, int usedforsecurity) ++/*[clinic end generated code: output=02dcc52ee784622b input=c5dfcb847f9065ac]*/ + { + BLAKE2bObject *self = NULL; + Py_buffer buf; +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 28ae5b65101..bf80d6b7428 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -87,6 +87,8 @@ _blake2.blake2s.__new__ as py_blake2s_new + inner_size: int = 0 + last_node: bool = False + ++ usedforsecurity: bool = True ++ + Return a new BLAKE2s hash object. 
+ [clinic start generated code]*/ + +@@ -95,8 +97,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node) +-/*[clinic end generated code: output=fe060b258a8cbfc6 input=3abfaabe7f5f62cc]*/ ++ int inner_size, int last_node, int usedforsecurity) ++/*[clinic end generated code: output=e32ea5e22d54db91 input=af5344c57efd870d]*/ + { + BLAKE2sObject *self = NULL; + Py_buffer buf; +diff --git a/Modules/_blake2/clinic/blake2b_impl.c.h b/Modules/_blake2/clinic/blake2b_impl.c.h +index 9b2965eb6b3..9688c04dda8 100644 +--- a/Modules/_blake2/clinic/blake2b_impl.c.h ++++ b/Modules/_blake2/clinic/blake2b_impl.c.h +@@ -5,7 +5,8 @@ preserve + PyDoc_STRVAR(py_blake2b_new__doc__, + "blake2b(data=b\'\', /, *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n" + " key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n" +-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n" ++" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n" ++" usedforsecurity=True)\n" + "--\n" + "\n" + "Return a new BLAKE2b hash object."); +@@ -15,14 +16,14 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node); ++ int inner_size, int last_node, int usedforsecurity); + + static PyObject * + py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { + PyObject *return_value = NULL; +- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL}; +- static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiip:blake2b", _keywords, 0}; ++ static const char * 
const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiipp:blake2b", _keywords, 0}; + PyObject *data = NULL; + int digest_size = BLAKE2B_OUTBYTES; + Py_buffer key = {NULL, NULL}; +@@ -35,12 +36,13 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + int node_depth = 0; + int inner_size = 0; + int last_node = 0; ++ int usedforsecurity = 1; + + if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser, +- &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node)) { ++ &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node, &usedforsecurity)) { + goto exit; + } +- return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node); ++ return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node, usedforsecurity); + + exit: + /* Cleanup for key */ +@@ -121,4 +123,4 @@ _blake2_blake2b_hexdigest(BLAKE2bObject *self, PyObject *Py_UNUSED(ignored)) + { + return _blake2_blake2b_hexdigest_impl(self); + } +-/*[clinic end generated code: output=0eb559f418fc0a21 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=d5f214b052c75135 input=a9049054013a1b77]*/ +diff --git a/Modules/_blake2/clinic/blake2s_impl.c.h b/Modules/_blake2/clinic/blake2s_impl.c.h +index 42b87b7099d..5653e93044d 100644 +--- a/Modules/_blake2/clinic/blake2s_impl.c.h ++++ b/Modules/_blake2/clinic/blake2s_impl.c.h +@@ -5,7 +5,8 @@ preserve + PyDoc_STRVAR(py_blake2s_new__doc__, + "blake2s(data=b\'\', /, *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n" + " 
key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n" +-" node_offset=0, node_depth=0, inner_size=0, last_node=False)\n" ++" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n" ++" usedforsecurity=True)\n" + "--\n" + "\n" + "Return a new BLAKE2s hash object."); +@@ -15,14 +16,14 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + Py_buffer *key, Py_buffer *salt, Py_buffer *person, + int fanout, int depth, PyObject *leaf_size_obj, + PyObject *node_offset_obj, int node_depth, +- int inner_size, int last_node); ++ int inner_size, int last_node, int usedforsecurity); + + static PyObject * + py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + { + PyObject *return_value = NULL; +- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", NULL}; +- static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiip:blake2s", _keywords, 0}; ++ static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {"|O$iy*y*y*iiOOiipp:blake2s", _keywords, 0}; + PyObject *data = NULL; + int digest_size = BLAKE2S_OUTBYTES; + Py_buffer key = {NULL, NULL}; +@@ -35,12 +36,13 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + int node_depth = 0; + int inner_size = 0; + int last_node = 0; ++ int usedforsecurity = 1; + + if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser, +- &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node)) { ++ &data, &digest_size, &key, &salt, &person, &fanout, &depth, &leaf_size_obj, &node_offset_obj, &node_depth, &inner_size, &last_node, &usedforsecurity)) { + goto exit; + } +- return_value = py_blake2s_new_impl(type, 
data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node); ++ return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size_obj, node_offset_obj, node_depth, inner_size, last_node, usedforsecurity); + + exit: + /* Cleanup for key */ +@@ -121,4 +123,4 @@ _blake2_blake2s_hexdigest(BLAKE2sObject *self, PyObject *Py_UNUSED(ignored)) + { + return _blake2_blake2s_hexdigest_impl(self); + } +-/*[clinic end generated code: output=13d4b08ea9ee2d62 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=2373a3b3fa542e89 input=a9049054013a1b77]*/ +diff --git a/Modules/_sha3/sha3module.c b/Modules/_sha3/sha3module.c +index 2783a75644f..62db9cb5616 100644 +--- a/Modules/_sha3/sha3module.c ++++ b/Modules/_sha3/sha3module.c +@@ -185,6 +185,11 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) + HashReturn res; + PyObject *data = NULL; + ++ // Ignore "usedforsecurity" ++ if (PyMapping_DelItemString(kwargs, "usedforsecurity")) { ++ PyErr_Clear(); ++ } ++ + if (!_PyArg_NoKeywords(type->tp_name, kwargs)) { + return NULL; + } +-- +2.21.0 + + +From c451744d82f78f42bfa947b782a24ff7c8c6ad9d Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 19:09:39 +0200 +Subject: [PATCH 29/36] Test the usedforsecurity flag + +--- + Lib/test/test_hashlib.py | 82 ++++++++++++++++++++++++++-------------- + 1 file changed, 54 insertions(+), 28 deletions(-) + +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 9745bfd6fa2..19a2fbdf294 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -24,6 +24,7 @@ from test import support + from test.support import _4G, bigmemtest, import_fresh_module + from test.support import requires_hashdigest + from http.client import HTTPException ++from functools import partial + + # Were we compiled --with-pydebug or with #define Py_DEBUG? 
+ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') +@@ -32,8 +33,10 @@ c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + + if hashlib.get_fips_mode(): +- FIPS_DISABLED = {'md5', 'MD5', 'blake2b', 'blake2s'} ++ FIPS_UNAVAILABLE = {'blake2b', 'blake2s'} ++ FIPS_DISABLED = {'md5', 'MD5', *FIPS_UNAVAILABLE} + else: ++ FIPS_UNAVAILABLE = set() + FIPS_DISABLED = set() + + try: +@@ -78,6 +81,15 @@ def read_vectors(hash_name): + yield parts + + ++def _is_openssl_constructor(constructor): ++ if getattr(constructor, '__name__', '').startswith('openssl_'): ++ return True ++ if isinstance(constructor, partial): ++ if constructor.func.__name__.startswith('openssl_'): ++ return True ++ return False ++ ++ + class HashLibTestCase(unittest.TestCase): + supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', + 'sha224', 'SHA224', 'sha256', 'SHA256', +@@ -93,8 +105,6 @@ class HashLibTestCase(unittest.TestCase): + + if hashlib.get_fips_mode(): + shakes = set() +- supported_hash_names = tuple( +- n for n in supported_hash_names if n not in FIPS_DISABLED) + + def _conditional_import_module(self, module_name): + """Import a module and return a reference to it or None on failure.""" +@@ -120,7 +130,8 @@ class HashLibTestCase(unittest.TestCase): + def __init__(self, *args, **kwargs): + algorithms = set() + for algorithm in self.supported_hash_names: +- algorithms.add(algorithm.lower()) ++ if algorithm not in FIPS_UNAVAILABLE: ++ algorithms.add(algorithm.lower()) + + _blake2 = self._conditional_import_module('_blake2') + if _blake2: +@@ -130,15 +141,21 @@ class HashLibTestCase(unittest.TestCase): + for algorithm in algorithms: + self.constructors_to_test[algorithm] = set() + ++ def _add_constructor(algorithm, constructor): ++ constructors.add(partial(constructor, usedforsecurity=False)) ++ if algorithm not in FIPS_DISABLED: ++ constructors.add(constructor) ++ constructors.add(partial(constructor, 
usedforsecurity=True)) ++ + # For each algorithm, test the direct constructor and the use + # of hashlib.new given the algorithm name. + for algorithm, constructors in self.constructors_to_test.items(): +- constructors.add(getattr(hashlib, algorithm)) ++ _add_constructor(algorithm, getattr(hashlib, algorithm)) + def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs): + if data is None: + return hashlib.new(_alg, **kwargs) + return hashlib.new(_alg, data, **kwargs) +- constructors.add(_test_algorithm_via_hashlib_new) ++ _add_constructor(algorithm, _test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') + self._hashlib = _hashlib +@@ -150,7 +167,7 @@ class HashLibTestCase(unittest.TestCase): + for algorithm, constructors in self.constructors_to_test.items(): + constructor = getattr(_hashlib, 'openssl_'+algorithm, None) + if constructor: +- constructors.add(constructor) ++ _add_constructor(algorithm, constructor) + + def add_builtin_constructor(name): + constructor = getattr(hashlib, "__get_builtin_constructor")(name) +@@ -210,7 +227,7 @@ class HashLibTestCase(unittest.TestCase): + c.hexdigest() + + def test_algorithms_guaranteed(self): +- self.assertEqual(hashlib.algorithms_guaranteed - FIPS_DISABLED, ++ self.assertEqual(hashlib.algorithms_guaranteed, + set(_algo for _algo in self.supported_hash_names + if _algo.islower())) + +@@ -286,7 +303,9 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(h.name, self.supported_hash_names) + else: + self.assertNotIn(h.name, self.supported_hash_names) +- self.assertEqual(h.name, hashlib.new(h.name).name) ++ if h.name not in FIPS_DISABLED: ++ self.assertEqual(h.name, hashlib.new(h.name).name) ++ self.assertEqual(h.name, hashlib.new(h.name, usedforsecurity=False).name) + + def test_large_update(self): + aas = b'a' * 128 +@@ -328,13 +347,15 @@ class HashLibTestCase(unittest.TestCase): + self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in 
constructors: + if ( +- kwargs +- and hash_object_constructor.__name__.startswith('openssl_') ++ (kwargs.keys() - {'usedforsecurity'}) ++ and _is_openssl_constructor(hash_object_constructor) + ): ++ # Don't check openssl constructors with ++ # any extra keys (except usedforsecurity) + return + m = hash_object_constructor(data, **kwargs) + if shake: +- if hash_object_constructor.__name__.startswith('openssl_'): ++ if _is_openssl_constructor(hash_object_constructor): + if length > m.digest_size: + # OpenSSL doesn't give long digests + return +@@ -351,7 +372,7 @@ class HashLibTestCase(unittest.TestCase): + % (name, hash_object_constructor, + computed, len(data), hexdigest)) + if shake: +- if hash_object_constructor.__name__.startswith('openssl_'): ++ if _is_openssl_constructor(hash_object_constructor): + computed = m.digest()[:length] + else: + computed = m.digest(length) +@@ -369,8 +390,7 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(TypeError, hash_object_constructor, 'spam') + + def test_no_unicode(self): +- if not hashlib.get_fips_mode(): +- self.check_no_unicode('md5') ++ self.check_no_unicode('md5') + self.check_no_unicode('sha1') + self.check_no_unicode('sha224') + self.check_no_unicode('sha256') +@@ -397,10 +417,10 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = hash_object_constructor() + self.assertEqual(m.block_size, block_size) +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(m.digest_size, digest_size) + if digest_length: +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(len(m.digest(digest_length)), + digest_length) + self.assertEqual(len(m.hexdigest(digest_length)), +@@ -435,7 +455,7 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + m = 
hash_object_constructor() + self.assertEqual(capacity + rate, 1600) +- if not hash_object_constructor.__name__.startswith('openssl_'): ++ if not _is_openssl_constructor(hash_object_constructor): + self.assertEqual(m._capacity_bits, capacity) + self.assertEqual(m._rate_bits, rate) + self.assertEqual(m._suffix, suffix) +@@ -454,31 +474,27 @@ class HashLibTestCase(unittest.TestCase): + self.check_blocksize_name('blake2b', 128, 64) + self.check_blocksize_name('blake2s', 64, 32) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_0(self): +- self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e') ++ self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_1(self): +- self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72') ++ self.check('md5', b'abc', '900150983cd24fb0d6963f7d28e17f72', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + def test_case_md5_2(self): + self.check('md5', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', +- 'd174ab98d277d9f5a5611c2c9f419d9f') ++ 'd174ab98d277d9f5a5611c2c9f419d9f', ++ usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G + 5, memuse=1, dry_run=False) + def test_case_md5_huge(self, size): +- self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d') ++ self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d', usedforsecurity=False) + +- @unittest.skipIf(hashlib.get_fips_mode(), "md5 unacceptable in FIPS mode") + @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems') + @bigmemtest(size=_4G - 1, memuse=1, dry_run=False) + def test_case_md5_uintmax(self, size): +- self.check('md5', b'A'*size, 
'28138d306ff1b8281f1a9067e1a1a2b3') ++ self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3', usedforsecurity=False) + + # use the three examples from Federal Information Processing Standards + # Publication 180-1, Secure Hash Standard, 1995 April 17 +@@ -904,6 +920,16 @@ class HashLibTestCase(unittest.TestCase): + + self.assertEqual(expected_hash, hasher.hexdigest()) + ++ @unittest.skipUnless(hashlib.get_fips_mode(), 'Needs FIPS mode.') ++ def test_usedforsecurity_repeat(self): ++ """Make sure usedforsecurity flag isn't copied to other contexts""" ++ for i in range(3): ++ for cons in hashlib.md5, partial(hashlib.new, 'md5'): ++ self.assertRaises(ValueError, cons) ++ self.assertRaises(ValueError, partial(cons, usedforsecurity=True)) ++ self.assertEqual(cons(usedforsecurity=False).hexdigest(), ++ 'd41d8cd98f00b204e9800998ecf8427e') ++ + + class KDFTests(unittest.TestCase): + +-- +2.21.0 + + +From 327707f185572cf13da66851e0af824a81d65aa9 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 29 Aug 2019 10:25:28 +0200 +Subject: [PATCH 30/36] Skip error checking in _hashlib.get_fips_mode + +Fixes: https://bugzilla.redhat.com/show_bug.cgi?id=1745499 +--- + Modules/_hashopenssl.c | 30 ++++++++++++++++-------------- + 1 file changed, 16 insertions(+), 14 deletions(-) + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index b621c330c32..6bfb12c5618 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -1021,20 +1021,22 @@ static PyObject * + _hashlib_get_fips_mode_impl(PyObject *module) + /*[clinic end generated code: output=ad8a7793310d3f98 input=f42a2135df2a5e11]*/ + { +- int result = FIPS_mode(); +- if (result == 0) { +- // "If the library was built without support of the FIPS Object Module, +- // then the function will return 0 with an error code of +- // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." +- // But 0 is also a valid result value. 
+- +- unsigned long errcode = ERR_peek_last_error(); +- if (errcode) { +- _setException(PyExc_ValueError); +- return NULL; +- } +- } +- return PyLong_FromLong(result); ++ // XXX: This function skips error checking. ++ // This is only appropriate for RHEL. ++ ++ // From the OpenSSL docs: ++ // "If the library was built without support of the FIPS Object Module, ++ // then the function will return 0 with an error code of ++ // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." ++ // In RHEL: ++ // * we do build with FIPS, so the function always succeeds ++ // * even if it didn't, people seem used to errors being left on the ++ // OpenSSL error stack. ++ ++ // For more info, see: ++ // https://bugzilla.redhat.com/show_bug.cgi?id=1745499 ++ ++ return PyLong_FromLong(FIPS_mode()); + } + + +-- +2.21.0 + + +From 695039674b78b19d9ff26d637c11acea99c44807 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 10 Oct 2019 13:04:50 +0200 +Subject: [PATCH 31/36] Skip error checking in _Py_hashlib_fips_error + +https://bugzilla.redhat.com/show_bug.cgi?id=1760106 +--- + Include/_hashopenssl.h | 12 +++--------- + 1 file changed, 3 insertions(+), 9 deletions(-) + +diff --git a/Include/_hashopenssl.h b/Include/_hashopenssl.h +index 47ed0030422..d4cbdef984d 100644 +--- a/Include/_hashopenssl.h ++++ b/Include/_hashopenssl.h +@@ -42,16 +42,10 @@ static int + _Py_hashlib_fips_error(PyObject *exc, char *name) { + int result = FIPS_mode(); + if (result == 0) { +- // "If the library was built without support of the FIPS Object Module, +- // then the function will return 0 with an error code of +- // CRYPTO_R_FIPS_MODE_NOT_SUPPORTED (0x0f06d065)." +- // But 0 is also a valid result value. ++ // XXX: This function skips error checking. ++ // This is only appropriate for RHEL. ++ // See _hashlib.get_fips_mode for details. 
+ +- unsigned long errcode = ERR_peek_last_error(); +- if (errcode) { +- _setException(exc); +- return 1; +- } + return 0; + } + PyErr_Format(exc, "%s is not available in FIPS mode", name); +-- +2.21.0 + + +From 0f9008e92b62f5f22e2bfd0a73d5325d16eedf6f Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 21 Nov 2019 00:20:09 +0100 +Subject: [PATCH 32/36] Use usedforsecurity=False in uuid + +Partially backport commit 7cad53e6b084435a220e6604010f1fa5778bd0b1 +only for uuid +--- + Lib/uuid.py | 7 +++++-- + 1 file changed, 5 insertions(+), 2 deletions(-) + +diff --git a/Lib/uuid.py b/Lib/uuid.py +index db8b2ef94ed..7e680a24d06 100644 +--- a/Lib/uuid.py ++++ b/Lib/uuid.py +@@ -615,8 +615,11 @@ def uuid1(node=None, clock_seq=None): + def uuid3(namespace, name): + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + from hashlib import md5 +- hash = md5(namespace.bytes + bytes(name, "utf-8")).digest() +- return UUID(bytes=hash[:16], version=3) ++ digest = md5( ++ namespace.bytes + bytes(name, "utf-8"), ++ usedforsecurity=False ++ ).digest() ++ return UUID(bytes=digest[:16], version=3) + + def uuid4(): + """Generate a random UUID.""" +-- +2.21.0 + + +From 75829df72b65fb573f3b310033978e493ff700b4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 5 Aug 2019 19:12:38 +0200 +Subject: [PATCH 33/36] Fixups + +- Adjust error message of the original hmac.HMAC class +- Don't duplicate a test name +--- + Lib/hmac.py | 2 +- + Lib/test/test_hmac.py | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 7af94c39ed6..33b5be613d9 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -41,7 +41,7 @@ class HMAC: + """ + if _hashlib.get_fips_mode(): + raise ValueError( +- 'hmac.HMAC is not available in FIPS mode. ' ++ 'This class is not available in FIPS mode. ' + + 'Use hmac.new().' 
+ ) + +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 5b090727ef6..449b1366314 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -401,7 +401,7 @@ class CopyTestCase(unittest.TestCase): + self.assertTrue(id(h1.outer) != id(h2.outer), + "No real copy of the attribute 'outer'.") + +- def test_realcopy(self): ++ def test_realcopy_by_digest(self): + # Testing if the copy method created a real copy. + h1 = hmac.HMAC(b"key", digestmod="sha1") + h2 = h1.copy() +-- +2.21.0 + + +From 86f1f161b5aab08975379dbae6ebac08a697e3d7 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 19:39:48 +0200 +Subject: [PATCH 34/36] Don't re-export get_fips_mode from hashlib + +Fixes: https://bugzilla.redhat.com/show_bug.cgi?id=1745685 +--- + Lib/distutils/command/upload.py | 3 ++- + Lib/distutils/tests/test_upload.py | 5 +++-- + Lib/hashlib.py | 23 ++++++++++++----------- + Lib/hmac.py | 6 +++--- + Lib/test/test_fips.py | 6 +++--- + Lib/test/test_hashlib.py | 16 +++++++++------- + Lib/test/test_hmac.py | 8 +++++--- + Lib/test/test_logging.py | 1 + + Lib/test/test_smtplib.py | 4 +++- + Lib/test/test_tools/test_md5sum.py | 4 ++-- + Lib/test/test_urllib2_localnet.py | 1 + + 11 files changed, 44 insertions(+), 33 deletions(-) + +diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py +index 170acb3d6b6..d0a4aee001e 100644 +--- a/Lib/distutils/command/upload.py ++++ b/Lib/distutils/command/upload.py +@@ -126,7 +126,8 @@ class upload(PyPIRCCommand): + except ValueError as e: + msg = 'calculating md5 checksum failed: %s' % e + self.announce(msg, log.INFO) +- if not hashlib.get_fips_mode(): ++ from _hashlib import get_fips_mode ++ if not get_fips_mode(): + # this really shouldn't fail + raise + else: +diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py +index f720a7905dd..a198b213577 100644 +--- a/Lib/distutils/tests/test_upload.py ++++ b/Lib/distutils/tests/test_upload.py +@@ -4,6 +4,7 @@ 
import unittest + import unittest.mock as mock + from urllib.request import HTTPError + import hashlib ++from _hashlib import get_fips_mode + + from test.support import run_unittest + +@@ -131,7 +132,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + + # what did we send ? + headers = dict(self.last_open.req.headers) +- if hashlib.get_fips_mode(): ++ if get_fips_mode(): + # only sha256 hash is used + self.assertEqual(headers['Content-length'], '2197') + else: +@@ -172,7 +173,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): + cmd.run() + + headers = dict(self.last_open.req.headers) +- if hashlib.get_fips_mode(): ++ if get_fips_mode(): + # only sha256 hash is used + self.assertEqual(headers['Content-length'], '2207') + else: +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index cd3b035b1d7..3e9a4aa27a7 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -68,13 +68,13 @@ __all__ = __always_supported + ('new', 'algorithms_guaranteed', + 'algorithms_available', 'pbkdf2_hmac') + + try: +- from _hashlib import get_fips_mode ++ from _hashlib import get_fips_mode as _hashlib_get_fips_mode + except ImportError: +- def get_fips_mode(): ++ def _hashlib_get_fips_mode(): + return 0 + + +-if not get_fips_mode(): ++if not _hashlib_get_fips_mode(): + __builtin_constructor_cache = {} + + def __get_builtin_constructor(name): +@@ -121,7 +121,7 @@ if not get_fips_mode(): + + + def __get_openssl_constructor(name): +- if not get_fips_mode(): ++ if not _hashlib.get_fips_mode(): + if name in { + 'blake2b', 'blake2s', 'shake_256', 'shake_128', + #'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', +@@ -132,7 +132,7 @@ def __get_openssl_constructor(name): + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. +- if not get_fips_mode(): ++ if not _hashlib.get_fips_mode(): + # N.B. In "FIPS mode", there is no fallback. 
+ # If this test fails, we want to export the broken hash + # constructor anyway. +@@ -142,7 +142,7 @@ def __get_openssl_constructor(name): + except (AttributeError, ValueError): + return __get_builtin_constructor(name) + +-if not get_fips_mode(): ++if not _hashlib_get_fips_mode(): + def __py_new(name, data=b'', **kwargs): + """new(name, data=b'', **kwargs) - Return a new hashing object using the + named algorithm; optionally initialized with data (which must be +@@ -155,7 +155,7 @@ def __hash_new(name, data=b'', **kwargs): + """new(name, data=b'') - Return a new hashing object using the named algorithm; + optionally initialized with data (which must be a bytes-like object). + """ +- if get_fips_mode(): ++ if _hashlib.get_fips_mode(): + # Use OpenSSL names for Python built-in hashes + orig_name = name + name = { +@@ -177,7 +177,7 @@ def __hash_new(name, data=b'', **kwargs): + usedforsecurity = kwargs.pop('usedforsecurity', True) + retval = _hashlib.new( + name, data, usedforsecurity=usedforsecurity) +- if get_fips_mode() and name != orig_name: ++ if _hashlib.get_fips_mode() and name != orig_name: + retval._set_name(orig_name) + return retval + except ValueError: +@@ -185,7 +185,7 @@ def __hash_new(name, data=b'', **kwargs): + # hash, try using our builtin implementations. + # This allows for SHA224/256 and SHA384/512 support even though + # the OpenSSL library prior to 0.9.8 doesn't provide them. 
+- if get_fips_mode(): ++ if _hashlib.get_fips_mode(): + raise + return __get_builtin_constructor(name)(data) + +@@ -197,7 +197,7 @@ try: + algorithms_available = algorithms_available.union( + _hashlib.openssl_md_meth_names) + except ImportError: +- if get_fips_mode(): ++ if _hashlib_get_fips_mode(): + raise + new = __py_new + __get_hash = __get_builtin_constructor +@@ -225,5 +225,6 @@ for __func_name in __always_supported: + # Cleanup locals() + del __always_supported, __func_name, __get_hash + del __hash_new, __get_openssl_constructor +-if not get_fips_mode(): ++if not _hashlib.get_fips_mode(): + del __py_new ++del _hashlib_get_fips_mode +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 33b5be613d9..ca83d9dc0d3 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -39,7 +39,7 @@ class HMAC: + + Note: key and msg must be a bytes or bytearray objects. + """ +- if _hashlib.get_fips_mode(): ++ if _hashlibopenssl.get_fips_mode(): + raise ValueError( + 'This class is not available in FIPS mode. ' + + 'Use hmac.new().' +@@ -97,7 +97,7 @@ class HMAC: + def update(self, msg): + """Update this hashing object with the string msg. 
+ """ +- if _hashlib.get_fips_mode(): ++ if _hashlibopenssl.get_fips_mode(): + raise ValueError('hmac.HMAC is not available in FIPS mode') + self.inner.update(msg) + +@@ -167,7 +167,7 @@ class HMAC_openssl(_hmacopenssl.HMAC): + return result + + +-if _hashlib.get_fips_mode(): ++if _hashlibopenssl.get_fips_mode(): + HMAC = HMAC_openssl + + +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +index 34812e6098a..86e61e29c0b 100644 +--- a/Lib/test/test_fips.py ++++ b/Lib/test/test_fips.py +@@ -6,7 +6,7 @@ import hashlib, _hashlib + + class HashlibFipsTests(unittest.TestCase): + +- @unittest.skipUnless(hashlib.get_fips_mode(), "Test only when FIPS is enabled") ++ @unittest.skipUnless(_hashlib.get_fips_mode(), "Test only when FIPS is enabled") + def test_fips_imports(self): + """blake2s and blake2b should fail to import in FIPS mode + """ +@@ -30,7 +30,7 @@ class HashlibFipsTests(unittest.TestCase): + + self.assertEqual(m, h) + +- @unittest.skipIf(hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") ++ @unittest.skipIf(_hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") + def test_blake2_hashes(self): + self.compare_hashes(hashlib.blake2b(b'abc'), _hashlib.openssl_blake2b(b'abc')) + self.compare_hashes(hashlib.blake2s(b'abc'), _hashlib.openssl_blake2s(b'abc')) +@@ -41,7 +41,7 @@ class HashlibFipsTests(unittest.TestCase): + self.compare_hashes(hashlib.sha3_384(b'abc'), _hashlib.openssl_sha3_384(b'abc')) + self.compare_hashes(hashlib.sha3_512(b'abc'), _hashlib.openssl_sha3_512(b'abc')) + +- @unittest.skipIf(hashlib.get_fips_mode(), "shake hashes are not available under FIPS") ++ @unittest.skipIf(_hashlib.get_fips_mode(), "shake hashes are not available under FIPS") + def test_shake_hashes(self): + self.compare_hashes(hashlib.shake_128(b'abc'), _hashlib.openssl_shake_128(b'abc')) + self.compare_hashes(hashlib.shake_256(b'abc'), _hashlib.openssl_shake_256(b'abc')) +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py 
+index 19a2fbdf294..a6f9a353fd1 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -32,7 +32,9 @@ COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) + py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) + +-if hashlib.get_fips_mode(): ++from _hashlib import get_fips_mode as _get_fips_mode ++ ++if _get_fips_mode(): + FIPS_UNAVAILABLE = {'blake2b', 'blake2s'} + FIPS_DISABLED = {'md5', 'MD5', *FIPS_UNAVAILABLE} + else: +@@ -103,7 +105,7 @@ class HashLibTestCase(unittest.TestCase): + # Issue #14693: fallback modules are always compiled under POSIX + _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG + +- if hashlib.get_fips_mode(): ++ if _get_fips_mode(): + shakes = set() + + def _conditional_import_module(self, module_name): +@@ -114,7 +116,7 @@ class HashLibTestCase(unittest.TestCase): + if self._warn_on_extension_import: + warnings.warn('Did a C extension fail to compile? 
%s' % error) + except ImportError as error: +- if not hashlib.get_fips_mode(): ++ if not _get_fips_mode(): + raise + return None + +@@ -239,12 +241,12 @@ class HashLibTestCase(unittest.TestCase): + self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam') + self.assertRaises(TypeError, hashlib.new, 1) + +- @unittest.skipUnless(hashlib.get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") ++ @unittest.skipUnless(_get_fips_mode(), "Builtin constructor only unavailable in FIPS mode") + def test_get_builtin_constructor_fips(self): + with self.assertRaises(AttributeError): + hashlib.__get_builtin_constructor + +- @unittest.skipIf(hashlib.get_fips_mode(), "No builtin constructors in FIPS mode") ++ @unittest.skipIf(_get_fips_mode(), "No builtin constructors in FIPS mode") + def test_get_builtin_constructor(self): + get_builtin_constructor = getattr(hashlib, + '__get_builtin_constructor') +@@ -433,7 +435,7 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(name.split("_")[0], repr(m)) + + def test_blocksize_name(self): +- if not hashlib.get_fips_mode(): ++ if not _get_fips_mode(): + self.check_blocksize_name('md5', 64, 16) + self.check_blocksize_name('sha1', 64, 20) + self.check_blocksize_name('sha224', 64, 28) +@@ -920,7 +922,7 @@ class HashLibTestCase(unittest.TestCase): + + self.assertEqual(expected_hash, hasher.hexdigest()) + +- @unittest.skipUnless(hashlib.get_fips_mode(), 'Needs FIPS mode.') ++ @unittest.skipUnless(_get_fips_mode(), 'Needs FIPS mode.') + def test_usedforsecurity_repeat(self): + """Make sure usedforsecurity flag isn't copied to other contexts""" + for i in range(3): +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index 449b1366314..35b893509cb 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -3,6 +3,7 @@ import hmac + import hashlib + import unittest + import warnings ++from _hashlib import get_fips_mode + + from test.support import requires_hashdigest + +@@ -250,7 +251,7 @@ class 
TestVectorsTestCase(unittest.TestCase): + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + +- @unittest.skipIf(hashlib.get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') ++ @unittest.skipIf(get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') + @requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): +@@ -274,6 +275,7 @@ class TestVectorsTestCase(unittest.TestCase): + hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash) + self.fail('Expected warning about small block_size') + ++ @unittest.skipIf(get_fips_mode(), 'md5 is not default in FIPS mode.') + def test_with_digestmod_warning(self): + with self.assertWarns(PendingDeprecationWarning): + key = b"\x0b" * 16 +@@ -375,7 +377,7 @@ class SanityTestCase(unittest.TestCase): + + class CopyTestCase(unittest.TestCase): + +- @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") ++ @unittest.skipIf(get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_attributes(self): + # Testing if attributes are of same type. +@@ -388,7 +390,7 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1.outer), type(h2.outer), + "Types of outer don't match.") + +- @unittest.skipIf(hashlib.get_fips_mode(), "Internal attributes unavailable in FIPS mode") ++ @unittest.skipIf(get_fips_mode(), "Internal attributes unavailable in FIPS mode") + @requires_hashdigest('sha256') + def test_realcopy(self): + # Testing if the copy method created a real copy. 
+diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py +index d5c63b483ee..45b72e3bc7e 100644 +--- a/Lib/test/test_logging.py ++++ b/Lib/test/test_logging.py +@@ -46,6 +46,7 @@ import time + import unittest + import warnings + import weakref ++ + try: + import threading + # The following imports are needed only for tests which +diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py +index 64b3201254a..704b2bc0d35 100644 +--- a/Lib/test/test_smtplib.py ++++ b/Lib/test/test_smtplib.py +@@ -16,6 +16,8 @@ import time + import select + import errno + import textwrap ++import hashlib ++from _hashlib import get_fips_mode + + import unittest + from test import support, mock_socket +@@ -980,7 +982,7 @@ class SMTPSimTests(unittest.TestCase): + + def testAUTH_multiple(self): + # Test that multiple authentication methods are tried. +- self.serv.add_feature("AUTH BOGUS PLAIN LOGIN CRAM-MD5") ++ self.serv.add_feature("AUTH BOGUS PLAIN LOGIN") + smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15) + resp = smtp.login(sim_auth[0], sim_auth[1]) + self.assertEqual(resp, (235, b'Authentication Succeeded')) +diff --git a/Lib/test/test_tools/test_md5sum.py b/Lib/test/test_tools/test_md5sum.py +index 7028a4dc214..3ba1ca0f146 100644 +--- a/Lib/test/test_tools/test_md5sum.py ++++ b/Lib/test/test_tools/test_md5sum.py +@@ -4,13 +4,13 @@ import os + import unittest + from test import support + from test.support.script_helper import assert_python_ok, assert_python_failure +-import hashlib ++import _hashlib + + from test.test_tools import scriptsdir, skip_if_missing + + skip_if_missing() + +-if hashlib.get_fips_mode(): ++if _hashlib.get_fips_mode(): + raise unittest.SkipTest("md5sum won't work at all in FIPS mode") + + class MD5SumTests(unittest.TestCase): +diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py +index 895f97cc09a..b4fb13ad092 100644 +--- a/Lib/test/test_urllib2_localnet.py ++++ 
b/Lib/test/test_urllib2_localnet.py +@@ -6,6 +6,7 @@ import urllib.request + import http.server + import unittest + import hashlib ++from _hashlib import get_fips_mode + + from test import support + +-- +2.21.0 + + +From 3538f31cc15c633f1df2d6ed18471c1f771c4a52 Mon Sep 17 00:00:00 2001 +From: Christian Heimes +Date: Wed, 20 Nov 2019 10:59:25 +0100 +Subject: [PATCH 35/36] Use FIPS compliant CSPRNG + +Kernel's getrandom() source is not yet FIPS compliant. Use OpenSSL's +DRBG in FIPS mode and disable os.getrandom() function. + +Signed-off-by: Christian Heimes +--- + Lib/test/test_os.py | 5 +++ + Makefile.pre.in | 2 +- + Modules/posixmodule.c | 8 +++++ + Python/random.c | 75 +++++++++++++++++++++++++++++++++++++++++++ + 4 files changed, 89 insertions(+), 1 deletion(-) + +diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py +index 7a839c83fe4..91816afa47d 100644 +--- a/Lib/test/test_os.py ++++ b/Lib/test/test_os.py +@@ -1362,6 +1362,11 @@ class GetRandomTests(unittest.TestCase): + raise unittest.SkipTest("getrandom() syscall fails with ENOSYS") + else: + raise ++ except ValueError as exc: ++ if exc.args[0] == "getrandom is not FIPS compliant": ++ raise unittest.SkipTest("Skip in FIPS mode") ++ else: ++ raise + + def test_getrandom_type(self): + data = os.getrandom(16) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index d15d93509d1..40502fc36d6 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -112,7 +112,7 @@ CFLAGSFORSHARED=@CFLAGSFORSHARED@ + # C flags used for building the interpreter object files + PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CFLAGS_NODIST) $(PY_CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE + # Linker flags used for building the interpreter object files +-PY_CORE_LDFLAGS=$(PY_LDFLAGS) $(PY_LDFLAGS_NODIST) ++PY_CORE_LDFLAGS=$(PY_LDFLAGS) $(PY_LDFLAGS_NODIST) -lcrypto + # Strict or non-strict aliasing flags used to compile dtoa.c, see above + CFLAGS_ALIASING=@CFLAGS_ALIASING@ + +diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c +index 
776a3d249a8..647dbd3b4d6 100644 +--- a/Modules/posixmodule.c ++++ b/Modules/posixmodule.c +@@ -397,6 +397,9 @@ static int win32_can_symlink = 0; + #define MODNAME "posix" + #endif + ++/* for FIPS check in os.getrandom() */ ++#include ++ + #ifdef MS_WINDOWS + /* defined in fileutils.c */ + PyAPI_FUNC(void) _Py_time_t_to_FILE_TIME(time_t, int, FILETIME *); +@@ -12196,6 +12199,11 @@ os_getrandom_impl(PyObject *module, Py_ssize_t size, int flags) + return posix_error(); + } + ++ if (FIPS_mode()) { ++ PyErr_SetString(PyExc_ValueError, "getrandom is not FIPS compliant"); ++ return NULL; ++ } ++ + bytes = PyBytes_FromStringAndSize(NULL, size); + if (bytes == NULL) { + PyErr_NoMemory(); +diff --git a/Python/random.c b/Python/random.c +index d4747d61eb8..ab3cee29a39 100644 +--- a/Python/random.c ++++ b/Python/random.c +@@ -410,6 +410,77 @@ dev_urandom_close(void) + } + #endif /* !MS_WINDOWS */ + ++#include ++#include ++#include ++ ++#if (OPENSSL_VERSION_NUMBER < 0x10101000L) || defined(LIBRESSL_VERSION_NUMBER) ++# error "py_openssl_drbg_urandom requires OpenSSL 1.1.1 for fork safety" ++#endif ++ ++static void ++py_openssl_set_exception(PyObject* exc) { ++ unsigned long errcode; ++ const char *lib, *func, *reason; ++ ++ errcode = ERR_peek_last_error(); ++ if (!errcode) { ++ PyErr_SetString(exc, "unknown reasons"); ++ } ++ ERR_clear_error(); ++ ++ lib = ERR_lib_error_string(errcode); ++ func = ERR_func_error_string(errcode); ++ reason = ERR_reason_error_string(errcode); ++ ++ if (lib && func) { ++ PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); ++ } ++ else if (lib) { ++ PyErr_Format(exc, "[%s] %s", lib, reason); ++ } ++ else { ++ PyErr_SetString(exc, reason); ++ } ++} ++ ++static int ++py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) ++{ ++ int res; ++ static int init = 0; ++ ++ if (!init) { ++ init = 1; ++ res = OPENSSL_init_crypto(OPENSSL_INIT_ATFORK, NULL); ++ if (res == 0) { ++ if (raise) { ++ py_openssl_set_exception(PyExc_RuntimeError); ++ } ++ 
return 0; ++ } ++ } ++ ++ if (size > INT_MAX) { ++ if (raise) { ++ PyErr_Format(PyExc_OverflowError, ++ "RAND_bytes() size is limited to 2GB."); ++ } ++ return -1; ++ } ++ ++ res = RAND_bytes((unsigned char*)buffer, (int)size); ++ ++ if (res == 1) { ++ return 1; ++ } else { ++ if (raise) { ++ py_openssl_set_exception(PyExc_RuntimeError); ++ } ++ return 0; ++ } ++} ++ + + /* Fill buffer with pseudo-random bytes generated by a linear congruent + generator (LCG): +@@ -494,6 +565,10 @@ pyurandom(void *buffer, Py_ssize_t size, int blocking, int raise) + return 0; + } + ++ if (FIPS_mode()) { ++ return py_openssl_drbg_urandom(buffer, size, raise); ++ } ++ + #ifdef MS_WINDOWS + return win32_urandom((unsigned char *)buffer, size, raise); + #else +-- +2.21.0 + + +From ed4103bbd15a09bff4fb94a38d1b3f02df5e7c96 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 28 Nov 2019 17:26:02 +0100 +Subject: [PATCH 36/36] Fixups for FIPS compliant CSPRNG + +--- + Lib/test/test_os.py | 3 ++- + Python/random.c | 33 +++------------------------------ + 2 files changed, 5 insertions(+), 31 deletions(-) + +diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py +index 91816afa47d..f2572bb6c44 100644 +--- a/Lib/test/test_os.py ++++ b/Lib/test/test_os.py +@@ -27,6 +27,7 @@ import time + import unittest + import uuid + import warnings ++from _hashlib import get_fips_mode + from test import support + try: + import threading +@@ -1363,7 +1364,7 @@ class GetRandomTests(unittest.TestCase): + else: + raise + except ValueError as exc: +- if exc.args[0] == "getrandom is not FIPS compliant": ++ if get_fips_mode() and exc.args[0] == "getrandom is not FIPS compliant": + raise unittest.SkipTest("Skip in FIPS mode") + else: + raise +diff --git a/Python/random.c b/Python/random.c +index ab3cee29a39..222d651407c 100644 +--- a/Python/random.c ++++ b/Python/random.c +@@ -410,40 +410,13 @@ dev_urandom_close(void) + } + #endif /* !MS_WINDOWS */ + +-#include +-#include + #include ++#include 
<_hashopenssl.h> + + #if (OPENSSL_VERSION_NUMBER < 0x10101000L) || defined(LIBRESSL_VERSION_NUMBER) + # error "py_openssl_drbg_urandom requires OpenSSL 1.1.1 for fork safety" + #endif + +-static void +-py_openssl_set_exception(PyObject* exc) { +- unsigned long errcode; +- const char *lib, *func, *reason; +- +- errcode = ERR_peek_last_error(); +- if (!errcode) { +- PyErr_SetString(exc, "unknown reasons"); +- } +- ERR_clear_error(); +- +- lib = ERR_lib_error_string(errcode); +- func = ERR_func_error_string(errcode); +- reason = ERR_reason_error_string(errcode); +- +- if (lib && func) { +- PyErr_Format(exc, "[%s: %s] %s", lib, func, reason); +- } +- else if (lib) { +- PyErr_Format(exc, "[%s] %s", lib, reason); +- } +- else { +- PyErr_SetString(exc, reason); +- } +-} +- + static int + py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + { +@@ -455,7 +428,7 @@ py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + res = OPENSSL_init_crypto(OPENSSL_INIT_ATFORK, NULL); + if (res == 0) { + if (raise) { +- py_openssl_set_exception(PyExc_RuntimeError); ++ _setException(PyExc_RuntimeError); + } + return 0; + } +@@ -475,7 +448,7 @@ py_openssl_drbg_urandom(char *buffer, Py_ssize_t size, int raise) + return 1; + } else { + if (raise) { +- py_openssl_set_exception(PyExc_RuntimeError); ++ _setException(PyExc_RuntimeError); + } + return 0; + } +-- +2.21.0 + diff --git a/SOURCES/00330-CVE-2018-20852.patch b/SOURCES/00330-CVE-2018-20852.patch new file mode 100644 index 0000000..380fc33 --- /dev/null +++ b/SOURCES/00330-CVE-2018-20852.patch @@ -0,0 +1,93 @@ +diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py +index adf956d..97599d4 100644 +--- a/Lib/http/cookiejar.py ++++ b/Lib/http/cookiejar.py +@@ -1148,6 +1148,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host, erhn = eff_request_host(request) + domain = cookie.domain + ++ if domain and not domain.startswith("."): ++ dotdomain = "." 
+ domain ++ else: ++ dotdomain = domain ++ + # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't + if (cookie.version == 0 and + (self.strict_ns_domain & self.DomainStrictNonDomain) and +@@ -1160,7 +1165,7 @@ class DefaultCookiePolicy(CookiePolicy): + _debug(" effective request-host name %s does not domain-match " + "RFC 2965 cookie domain %s", erhn, domain) + return False +- if cookie.version == 0 and not ("."+erhn).endswith(domain): ++ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): + _debug(" request-host %s does not match Netscape cookie domain " + "%s", req_host, domain) + return False +@@ -1174,7 +1179,11 @@ class DefaultCookiePolicy(CookiePolicy): + req_host = "."+req_host + if not erhn.startswith("."): + erhn = "."+erhn +- if not (req_host.endswith(domain) or erhn.endswith(domain)): ++ if domain and not domain.startswith("."): ++ dotdomain = "." + domain ++ else: ++ dotdomain = domain ++ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): + #_debug(" request domain %s does not match cookie domain %s", + # req_host, domain) + return False +diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py +index abc625d..6e1b308 100644 +--- a/Lib/test/test_http_cookiejar.py ++++ b/Lib/test/test_http_cookiejar.py +@@ -415,6 +415,7 @@ class CookieTests(unittest.TestCase): + ("http://foo.bar.com/", ".foo.bar.com", True), + ("http://foo.bar.com/", "foo.bar.com", True), + ("http://foo.bar.com/", ".bar.com", True), ++ ("http://foo.bar.com/", "bar.com", True), + ("http://foo.bar.com/", "com", True), + ("http://foo.com/", "rhubarb.foo.com", False), + ("http://foo.com/", ".foo.com", True), +@@ -425,6 +426,8 @@ class CookieTests(unittest.TestCase): + ("http://foo/", "foo", True), + ("http://foo/", "foo.local", True), + ("http://foo/", ".local", True), ++ ("http://barfoo.com", ".foo.com", False), ++ ("http://barfoo.com", "foo.com", False), + ]: + request = urllib.request.Request(url) + r = 
pol.domain_return_ok(domain, request) +@@ -959,6 +962,33 @@ class CookieTests(unittest.TestCase): + c.add_cookie_header(req) + self.assertFalse(req.has_header("Cookie")) + ++ c.clear() ++ ++ pol.set_blocked_domains([]) ++ req = urllib.request.Request("http://acme.com/") ++ res = FakeResponse(headers, "http://acme.com/") ++ cookies = c.make_cookies(res, req) ++ c.extract_cookies(res, req) ++ self.assertEqual(len(c), 1) ++ ++ req = urllib.request.Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertTrue(req.has_header("Cookie")) ++ ++ req = urllib.request.Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(pol.return_ok(cookies[0], req)) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ p = pol.set_blocked_domains(["acme.com"]) ++ req = urllib.request.Request("http://acme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ ++ req = urllib.request.Request("http://badacme.com/") ++ c.add_cookie_header(req) ++ self.assertFalse(req.has_header("Cookie")) ++ + def test_secure(self): + for ns in True, False: + for whitespace in " ", "": diff --git a/SOURCES/00332-CVE-2019-16056.patch b/SOURCES/00332-CVE-2019-16056.patch new file mode 100644 index 0000000..ca2e3d4 --- /dev/null +++ b/SOURCES/00332-CVE-2019-16056.patch @@ -0,0 +1,95 @@ +diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py +index 737951e4b1b1..bc9c9b6241d4 100644 +--- a/Lib/email/_header_value_parser.py ++++ b/Lib/email/_header_value_parser.py +@@ -1561,6 +1561,8 @@ def get_domain(value): + token, value = get_dot_atom(value) + except errors.HeaderParseError: + token, value = get_atom(value) ++ if value and value[0] == '@': ++ raise errors.HeaderParseError('Invalid Domain') + if leader is not None: + token[:0] = [leader] + domain.append(token) +diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py +index cdfa3729adc7..41ff6f8c000d 100644 +--- a/Lib/email/_parseaddr.py ++++ 
b/Lib/email/_parseaddr.py +@@ -379,7 +379,12 @@ def getaddrspec(self): + aslist.append('@') + self.pos += 1 + self.gotonext() +- return EMPTYSTRING.join(aslist) + self.getdomain() ++ domain = self.getdomain() ++ if not domain: ++ # Invalid domain, return an empty address instead of returning a ++ # local part to denote failed parsing. ++ return EMPTYSTRING ++ return EMPTYSTRING.join(aslist) + domain + + def getdomain(self): + """Get the complete domain name from an address.""" +@@ -394,6 +399,10 @@ def getdomain(self): + elif self.field[self.pos] == '.': + self.pos += 1 + sdlist.append('.') ++ elif self.field[self.pos] == '@': ++ # bpo-34155: Don't parse domains with two `@` like ++ # `a@malicious.org@important.com`. ++ return EMPTYSTRING + elif self.field[self.pos] in self.atomends: + break + else: +diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py +index a2c900fa7fd2..02ef3e1006c6 100644 +--- a/Lib/test/test_email/test__header_value_parser.py ++++ b/Lib/test/test_email/test__header_value_parser.py +@@ -1418,6 +1418,16 @@ def test_get_addr_spec_dot_atom(self): + self.assertEqual(addr_spec.domain, 'example.com') + self.assertEqual(addr_spec.addr_spec, 'star.a.star@example.com') + ++ def test_get_addr_spec_multiple_domains(self): ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@a.star@example.com') ++ ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@a@example.com') ++ ++ with self.assertRaises(errors.HeaderParseError): ++ parser.get_addr_spec('star@172.17.0.1@example.com') ++ + # get_obs_route + + def test_get_obs_route_simple(self): +diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py +index f97ccc6711cc..68d052279987 100644 +--- a/Lib/test/test_email/test_email.py ++++ b/Lib/test/test_email/test_email.py +@@ -3035,6 +3035,20 @@ def test_parseaddr_empty(self): + self.assertEqual(utils.parseaddr('<>'), ('', 
'')) + self.assertEqual(utils.formataddr(utils.parseaddr('<>')), '') + ++ def test_parseaddr_multiple_domains(self): ++ self.assertEqual( ++ utils.parseaddr('a@b@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ utils.parseaddr('a@b.c@c'), ++ ('', '') ++ ) ++ self.assertEqual( ++ utils.parseaddr('a@172.17.0.1@c'), ++ ('', '') ++ ) ++ + def test_noquote_dump(self): + self.assertEqual( + utils.formataddr(('A Silly Person', 'person@dom.ain')), +diff --git a/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst +new file mode 100644 +index 000000000000..50292e29ed1d +--- /dev/null ++++ b/Misc/NEWS.d/next/Security/2019-05-04-13-33-37.bpo-34155.MJll68.rst +@@ -0,0 +1 @@ ++Fix parsing of invalid email addresses with more than one ``@`` (e.g. a@b@c.com.) to not return the part before 2nd ``@`` as valid email address. Patch by maxking & jpic. diff --git a/SOURCES/00333-reduce-pgo-tests.patch b/SOURCES/00333-reduce-pgo-tests.patch new file mode 100644 index 0000000..7dcd3b9 --- /dev/null +++ b/SOURCES/00333-reduce-pgo-tests.patch @@ -0,0 +1,296 @@ +diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py +index 538ff05..e7f2013 100644 +--- a/Lib/test/libregrtest/cmdline.py ++++ b/Lib/test/libregrtest/cmdline.py +@@ -263,7 +263,9 @@ def _create_parser(): + help='only write the name of test cases that will be run' + ' , don\'t execute them') + group.add_argument('-P', '--pgo', dest='pgo', action='store_true', +- help='enable Profile Guided Optimization training') ++ help='enable Profile Guided Optimization (PGO) training') ++ group.add_argument('--pgo-extended', action='store_true', ++ help='enable extended PGO training (slower training)') + group.add_argument('--fail-env-changed', action='store_true', + help='if a test file alters the environment, mark ' + 'the test as failed') +@@ -339,6 +341,8 @@ def _parse_args(args, **kwargs): + parser.error("-G/--failfast needs either -v 
or -W") + if ns.pgo and (ns.verbose or ns.verbose2 or ns.verbose3): + parser.error("--pgo/-v don't go together!") ++ if ns.pgo_extended: ++ ns.pgo = True # pgo_extended implies pgo + + if ns.nowindows: + print("Warning: the --nowindows (-n) option is deprecated. " +diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py +index b6d05f6..524dbfa 100644 +--- a/Lib/test/libregrtest/main.py ++++ b/Lib/test/libregrtest/main.py +@@ -17,6 +17,7 @@ from test.libregrtest.runtest import ( + INTERRUPTED, CHILD_ERROR, TEST_DID_NOT_RUN, + PROGRESS_MIN_TIME, format_test_result) + from test.libregrtest.setup import setup_tests ++from test.libregrtest.pgo import setup_pgo_tests + from test.libregrtest.utils import removepy, count, format_duration, printlist + from test import support + try: +@@ -214,6 +215,10 @@ class Regrtest: + + removepy(self.tests) + ++ if self.ns.pgo: ++ # add default PGO tests if no tests are specified ++ setup_pgo_tests(self.ns) ++ + stdtests = STDTESTS[:] + nottests = NOTTESTS.copy() + if self.ns.exclude: +@@ -601,6 +606,7 @@ class Regrtest: + input("Press any key to continue...") + + support.PGO = self.ns.pgo ++ support.PGO_EXTENDED = self.ns.pgo_extended + + setup_tests(self.ns) + +diff --git a/Lib/test/libregrtest/pgo.py b/Lib/test/libregrtest/pgo.py +new file mode 100644 +index 0000000..379ff05 +--- /dev/null ++++ b/Lib/test/libregrtest/pgo.py +@@ -0,0 +1,55 @@ ++# Set of tests run by default if --pgo is specified. The tests below were ++# chosen based on the following criteria: either they exercise a commonly used ++# C extension module or type, or they run some relatively typical Python code. ++# Long running tests should be avoided because the PGO instrumented executable ++# runs slowly. 
++PGO_TESTS = [ ++ 'test_array', ++ 'test_base64', ++ 'test_binascii', ++ 'test_binop', ++ 'test_bisect', ++ 'test_bytes', ++ 'test_bz2', ++ 'test_cmath', ++ 'test_codecs', ++ 'test_collections', ++ 'test_complex', ++ 'test_dataclasses', ++ 'test_datetime', ++ 'test_decimal', ++ 'test_difflib', ++ 'test_embed', ++ 'test_float', ++ 'test_fstring', ++ 'test_functools', ++ 'test_generators', ++ 'test_hashlib', ++ 'test_heapq', ++ 'test_int', ++ 'test_itertools', ++ 'test_json', ++ 'test_long', ++ 'test_lzma', ++ 'test_math', ++ 'test_memoryview', ++ 'test_operator', ++ 'test_ordered_dict', ++ 'test_pickle', ++ 'test_pprint', ++ 'test_re', ++ 'test_set', ++ 'test_sqlite', ++ 'test_statistics', ++ 'test_struct', ++ 'test_tabnanny', ++ 'test_time', ++ 'test_unicode', ++ 'test_xml_etree', ++ 'test_xml_etree_c', ++] ++ ++def setup_pgo_tests(ns): ++ if not ns.args and not ns.pgo_extended: ++ # run default set of tests for PGO training ++ ns.args = PGO_TESTS[:] +diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py +index 764057a..468ee46 100644 +--- a/Lib/test/pickletester.py ++++ b/Lib/test/pickletester.py +@@ -2039,6 +2039,7 @@ class AbstractPickleTests(unittest.TestCase): + + FRAME_SIZE_TARGET = 64 * 1024 + ++ @support.skip_if_pgo_task + def check_frame_opcodes(self, pickled): + """ + Check the arguments of FRAME opcodes in a protocol 4+ pickle. 
+@@ -2059,6 +2060,7 @@ class AbstractPickleTests(unittest.TestCase): + frame_size = len(pickled) - last_pos - frame_opcode_size + self.assertEqual(frame_size, last_arg) + ++ @support.skip_if_pgo_task + def test_framing_many_objects(self): + obj = list(range(10**5)) + for proto in range(4, pickle.HIGHEST_PROTOCOL + 1): +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index 66c0fed..e80a819 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -953,6 +953,10 @@ SAVEDCWD = os.getcwd() + # useful for PGO + PGO = False + ++# Set by libregrtest/main.py if we are running the extended (time consuming) ++# PGO task. If this is True, PGO is also True. ++PGO_EXTENDED = False ++ + @contextlib.contextmanager + def temp_dir(path=None, quiet=False): + """Return a context manager that creates a temporary directory. +@@ -2442,6 +2446,11 @@ def skip_unless_xattr(test): + msg = "no non-broken extended attribute support" + return test if ok else unittest.skip(msg)(test) + ++def skip_if_pgo_task(test): ++ """Skip decorator for tests not run in (non-extended) PGO task""" ++ ok = not PGO or PGO_EXTENDED ++ msg = "Not run for (non-extended) PGO task" ++ return test if ok else unittest.skip(msg)(test) + + def fs_is_case_insensitive(directory): + """Detects if the file system for the specified directory is case-insensitive.""" +diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py +index f340f23..ebb151c 100644 +--- a/Lib/test/test_bz2.py ++++ b/Lib/test/test_bz2.py +@@ -654,6 +654,7 @@ class BZ2CompressorTest(BaseTest): + data += bz2c.flush() + self.assertEqual(ext_decompress(data), self.TEXT) + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=2) + def testCompress4G(self, size): + # "Test BZ2Compressor.compress()/flush() with >4GiB input" +@@ -712,6 +713,7 @@ class BZ2DecompressorTest(BaseTest): + self.assertRaises(EOFError, bz2d.decompress, b"anything") + self.assertRaises(EOFError, bz2d.decompress, b"") + ++ 
@support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=3.3) + def testDecompress4G(self, size): + # "Test BZ2Decompressor.decompress() with >4GiB input" +diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py +index 9317951..8c1d016 100644 +--- a/Lib/test/test_itertools.py ++++ b/Lib/test/test_itertools.py +@@ -2023,6 +2023,7 @@ class RegressionTests(unittest.TestCase): + self.assertRaises(AssertionError, list, cycle(gen1())) + self.assertEqual(hist, [0,1]) + ++ @support.skip_if_pgo_task + def test_long_chain_of_empty_iterables(self): + # Make sure itertools.chain doesn't run into recursion limits when + # dealing with long chains of empty iterables. Even with a high +diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py +index 3dc2c1e..117de0a 100644 +--- a/Lib/test/test_lzma.py ++++ b/Lib/test/test_lzma.py +@@ -333,6 +333,7 @@ class CompressorDecompressorTestCase(unittest.TestCase): + + # Test with inputs larger than 4GiB. + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=2) + def test_compressor_bigmem(self, size): + lzc = LZMACompressor() +@@ -344,6 +345,7 @@ class CompressorDecompressorTestCase(unittest.TestCase): + finally: + ddata = None + ++ @support.skip_if_pgo_task + @bigmemtest(size=_4G + 100, memuse=3) + def test_decompressor_bigmem(self, size): + lzd = LZMADecompressor() +diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py +index 5347bb1..9d83217 100644 +--- a/Lib/test/test_regrtest.py ++++ b/Lib/test/test_regrtest.py +@@ -6,6 +6,7 @@ Note: test_regrtest cannot be run twice in parallel. + + import contextlib + import faulthandler ++import glob + import io + import os.path + import platform +@@ -532,6 +533,31 @@ class BaseTestCase(unittest.TestCase): + return proc.stdout + + ++class CheckActualTests(BaseTestCase): ++ """ ++ Check that regrtest appears to find the expected set of tests. 
++ """ ++ ++ def test_finds_expected_number_of_tests(self): ++ args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests'] ++ output = self.run_python(args) ++ rough_number_of_tests_found = len(output.splitlines()) ++ actual_testsuite_glob = os.path.join(os.path.dirname(__file__), ++ 'test*.py') ++ rough_counted_test_py_files = len(glob.glob(actual_testsuite_glob)) ++ # We're not trying to duplicate test finding logic in here, ++ # just give a rough estimate of how many there should be and ++ # be near that. This is a regression test to prevent mishaps ++ # such as https://bugs.python.org/issue37667 in the future. ++ # If you need to change the values in here during some ++ # mythical future test suite reorganization, don't go ++ # overboard with logic and keep that goal in mind. ++ self.assertGreater(rough_number_of_tests_found, ++ rough_counted_test_py_files*9//10, ++ msg='Unexpectedly low number of tests found in:\n' ++ f'{", ".join(output.splitlines())}') ++ ++ + class ProgramsTestCase(BaseTestCase): + """ + Test various ways to run the Python test suite. Use options close +diff --git a/Makefile.pre.in b/Makefile.pre.in +index b452289..cc428ac 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -247,9 +247,10 @@ TCLTK_INCLUDES= @TCLTK_INCLUDES@ + TCLTK_LIBS= @TCLTK_LIBS@ + + # The task to run while instrumented when building the profile-opt target. +-# We exclude unittests with -x that take a rediculious amount of time to +-# run in the instrumented training build or do not provide much value. +-PROFILE_TASK=-m test.regrtest --pgo ++# To speed up profile generation, we don't run the full unit test suite ++# by default. The default is "-m test --pgo". 
To run more tests, use ++# PROFILE_TASK="-m test --pgo-extended" ++PROFILE_TASK= @PROFILE_TASK@ + + # report files for gcov / lcov coverage report + COVERAGE_INFO= $(abs_builddir)/coverage.info +diff --git a/configure.ac b/configure.ac +index c071ec3..816fc5a 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -1308,6 +1308,14 @@ else + DEF_MAKE_RULE="all" + fi + ++AC_ARG_VAR(PROFILE_TASK, Python args for PGO generation task) ++AC_MSG_CHECKING(PROFILE_TASK) ++if test -z "$PROFILE_TASK" ++then ++ PROFILE_TASK='-m test --pgo' ++fi ++AC_MSG_RESULT($PROFILE_TASK) ++ + # Make llvm-relatec checks work on systems where llvm tools are not installed with their + # normal names in the default $PATH (ie: Ubuntu). They exist under the + # non-suffixed name in their versioned llvm directory. diff --git a/SOURCES/check-pyc-and-pyo-timestamps.py b/SOURCES/check-pyc-and-pyo-timestamps.py new file mode 100644 index 0000000..262a985 --- /dev/null +++ b/SOURCES/check-pyc-and-pyo-timestamps.py @@ -0,0 +1,62 @@ +"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files.""" + +import importlib.util +import os +import sys + +# list of test and other files that we expect not to have bytecode +not_compiled = [ + '/usr/bin/pathfix.py', + 'test/bad_coding.py', + 'test/bad_coding2.py', + 'test/badsyntax_3131.py', + 'test/badsyntax_future3.py', + 'test/badsyntax_future4.py', + 'test/badsyntax_future5.py', + 'test/badsyntax_future6.py', + 'test/badsyntax_future7.py', + 'test/badsyntax_future8.py', + 'test/badsyntax_future9.py', + 'test/badsyntax_future10.py', + 'test/badsyntax_async1.py', + 'test/badsyntax_async2.py', + 'test/badsyntax_async3.py', + 'test/badsyntax_async4.py', + 'test/badsyntax_async5.py', + 'test/badsyntax_async6.py', + 'test/badsyntax_async7.py', + 'test/badsyntax_async8.py', + 'test/badsyntax_async9.py', + 'test/badsyntax_pep3120.py', + 'lib2to3/tests/data/bom.py', + 'lib2to3/tests/data/crlf.py', + 'lib2to3/tests/data/different_encoding.py', + 
'lib2to3/tests/data/false_encoding.py', + 'lib2to3/tests/data/py2_test_grammar.py', + '.debug-gdb.py', +] +failed = 0 + + +def bytecode_expected(source): + for f in not_compiled: + if source.endswith(f): + return False + return True + + +compiled = filter(lambda f: bytecode_expected(f), sys.argv[1:]) +for f in compiled: + # check both pyo and pyc + to_check = map(lambda b: importlib.util.cache_from_source(f, b), (True, False)) + f_mtime = os.path.getmtime(f) + for c in to_check: + c_mtime = os.path.getmtime(c) + if c_mtime < f_mtime: + sys.stderr.write('Failed bytecompilation timestamps check: ') + sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(c, f)) + failed += 1 + +if failed: + sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed)) + sys.exit(1) diff --git a/SOURCES/get-source.sh b/SOURCES/get-source.sh new file mode 100755 index 0000000..32c3d23 --- /dev/null +++ b/SOURCES/get-source.sh @@ -0,0 +1,28 @@ +#! /bin/bash -ex + +# Download a release of Python (if missing) and remove .exe files from it + +version=$1 + +if [ -z "${version}" ]; then + echo "Usage: $0 VERSION" >& 2 + echo "" >& 2 + echo "example: $0 3.6.6" >& 2 + exit 1 +fi + +versionedname=Python-${version} +orig_archive=${versionedname}.tar.xz +new_archive=${versionedname}-noexe.tar.xz + +if [ ! 
-e ${orig_archive} ]; then + wget -N https://www.python.org/ftp/python/${version}/${orig_archive} +fi + +deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$') + +# tar --delete does not operate on compressed archives, so do +# xz compression/decompression explicitly +xz --decompress --stdout ${orig_archive} | \ + tar --delete -v ${deleted_names} | \ + xz --compress --stdout -3 -T0 > ${new_archive} diff --git a/SOURCES/idle3.appdata.xml b/SOURCES/idle3.appdata.xml new file mode 100644 index 0000000..94f87a2 --- /dev/null +++ b/SOURCES/idle3.appdata.xml @@ -0,0 +1,35 @@ + + + + + idle3.desktop + IDLE3 + CC0 + Python-2.0 + Python 3 Integrated Development and Learning Environment + +

+ IDLE is Python’s Integrated Development and Learning Environment. + The GUI is uniform between Windows, Unix, and Mac OS X. + IDLE provides an easy way to start writing, running, and debugging + Python code. +

+

+ IDLE is written in pure Python, and uses the tkinter GUI toolkit. + It provides: +

+
    +
  • a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,
  • +
  • a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,
  • +
  • search within any window, replace within editor windows, and search through multiple files (grep),
  • +
  • a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.
  • +
+
+ https://docs.python.org/3/library/idle.html + + http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png + http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png + http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png + + zbyszek@in.waw.pl +
diff --git a/SOURCES/idle3.desktop b/SOURCES/idle3.desktop new file mode 100644 index 0000000..dc1d3c3 --- /dev/null +++ b/SOURCES/idle3.desktop @@ -0,0 +1,11 @@ +[Desktop Entry] +Version=1.0 +Name=IDLE 3 +Comment=Python 3 Integrated Development and Learning Environment +Exec=idle3 %F +TryExec=idle3 +Terminal=false +Type=Application +Icon=idle3 +Categories=Development;IDE; +MimeType=text/x-python; \ No newline at end of file diff --git a/SOURCES/no-python b/SOURCES/no-python new file mode 100755 index 0000000..1510fb5 --- /dev/null +++ b/SOURCES/no-python @@ -0,0 +1,8 @@ +#! /bin/bash + +echo "For more information about this script," +echo "please see the manual page of the same name." + +echo "Run: man unversioned-python" +exit 2 + diff --git a/SOURCES/unversioned-python.1 b/SOURCES/unversioned-python.1 new file mode 100644 index 0000000..98e5817 --- /dev/null +++ b/SOURCES/unversioned-python.1 @@ -0,0 +1,57 @@ +.\" unversioned-python.1 +.TH UNVERSIONED-PYTHON 1 "17 September 2018" +.SH NAME +unversioned-python \- info on how to set up the `python` command. +.SH SYNOPSIS +.B unversioned-python +.SH DESCRIPTION +.B unversioned-python +The "unversioned" `python` command (/usr/bin/python) is missing by default. +We recommend using `python3` or `python2` instead. +If using the explicit versioned command is inconvenient, +you can use `alternatives` to configure `python` to launch +either Python 3 or Python 2. + +Note: The `python3` or `python2` package needs to be installed before its +functionality is selected. + +.SH EXAMPLES +.B alternatives +.B --config +.IR python + + Interactively select what the `python` command runs. + + +.B alternatives +.B --set +.IR python +.IR /usr/bin/python3 + + Configure the `python` command to run Python 3 + + Note: this is non-standard behavior according to [PEP 394]. 
+ + +.B alternatives +.B --set +.IR python +.IR /usr/bin/python2 + + Configure the `python` command to run Python 2 + + Note: please review the support lifecycle of python2 before relying on it + + +.B alternatives +.B --auto +.IR python + + Undo configuration changes and revert to the default (missing `python` command) + + +.SH LINKS + +.B [PEP 394]: +.IR https://www.python.org/dev/peps/pep-0394/ + diff --git a/SPECS/python3.spec b/SPECS/python3.spec new file mode 100644 index 0000000..d786e5c --- /dev/null +++ b/SPECS/python3.spec @@ -0,0 +1,3070 @@ +# ================== +# Top-level metadata +# ================== + +Name: python3 +Summary: Interpreter of the Python programming language +URL: https://www.python.org/ + +%global pybasever 3.6 + +# pybasever without the dot: +%global pyshortver 36 + +# WARNING When rebasing to a new Python version, +# remember to update the python3-docs package as well +Version: %{pybasever}.8 +Release: 23%{?dist} +License: Python + + +# ================================== +# Conditionals controlling the build +# ================================== + +# Note that the bcond macros are named for the CLI option they create. 
+# "%%bcond_without" means "ENABLE by default and create a --without option" + +# Whether to use RPM build wheels from the python-{pip,setuptools}-wheel package +# Uses upstream bundled prebuilt wheels otherwise +%bcond_without rpmwheels + +# Expensive optimizations (mainly, profile-guided optimizations) +%bcond_without optimizations + +# Run the test suite in %%check +%bcond_without tests + +# Extra build for debugging the interpreter or C-API extensions +# (the -debug subpackages) +%bcond_without debug_build + +# Support for the GDB debugger +%bcond_without gdb_hooks + +# The dbm.gnu module (key-value database) +%bcond_without gdbm + +# Main interpreter loop optimization +%bcond_without computed_gotos + +# Support for the Valgrind debugger/profiler +%ifarch %{valgrind_arches} +%bcond_without valgrind +%else +# Some arches don't have valgrind, disable support for it there. +%bcond_with valgrind +%endif + + +# ================================== +# Notes from bootstrapping Python 3.6 +# ================================== +# +# A new Python major version (3.X) breaks ABI and bytecode compatibility, +# so all packages depending on it need to be rebuilt. +# +# Due to a dependency cycle between Python, gdb, rpm, pip, setuptools, wheel, +# and other packages, this isn't straightforward. +# Build in the following order: +# +# 1. At the same time: +# - gdb without python support (add %%global _without_python 1 on top of +# gdb's SPEC file) +# - python-rpm-generators with bootstrapping_python set to 1 +# (this can be done also during step 2., but should be done before 3.) +# 2. python3 without rewheel (use %%bcond_with rewheel instead of +# %%bcond_without) +# 3. At the same time: +# - gdb with python support (remove %%global _without_python 1 on top of +# gdb's SPEC file) +# - python-rpm-generators with bootstrapping_python set to 0 +# (this can be done at any later step without negative effects) +# 4. rpm +# 5. python-setuptools with bootstrap set to 1 +# 6. 
python-pip with build_wheel set to 0 +# 7. python-wheel with %%bcond_without bootstrap +# 8. python-setuptools with bootstrap set to 0 and also with_check set to 0 +# 9. python-pip with build_wheel set to 1 +# 10. pyparsing +# 11. python3 with rewheel +# +# Then the most important packages have to be built, in dependency order. +# These were: +# python-sphinx, pytest, python-requests, cloud-init, dnf, anaconda, abrt +# +# After these have been built, a targeted rebuild should be done for the rest. + + +# ===================== +# General global macros +# ===================== + +%global pylibdir %{_libdir}/python%{pybasever} +%global dynload_dir %{pylibdir}/lib-dynload + +# ABIFLAGS, LDVERSION and SOABI are in the upstream configure.ac +# See PEP 3149 for some background: http://www.python.org/dev/peps/pep-3149/ +%global ABIFLAGS_optimized m +%global ABIFLAGS_debug dm + +%global LDVERSION_optimized %{pybasever}%{ABIFLAGS_optimized} +%global LDVERSION_debug %{pybasever}%{ABIFLAGS_debug} + +%global SOABI_optimized cpython-%{pyshortver}%{ABIFLAGS_optimized}-%{_arch}-linux%{_gnu} +%global SOABI_debug cpython-%{pyshortver}%{ABIFLAGS_debug}-%{_arch}-linux%{_gnu} + +# All bytecode files are in a __pycache__ subdirectory, with a name +# reflecting the version of the bytecode. +# See PEP 3147: http://www.python.org/dev/peps/pep-3147/ +# For example, +# foo/bar.py +# has bytecode at: +# foo/__pycache__/bar.cpython-%%{pyshortver}.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-1.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-2.pyc +%global bytecode_suffixes .cpython-%{pyshortver}*.pyc + +# Python's configure script defines SOVERSION, and this is used in the Makefile +# to determine INSTSONAME, the name of the libpython DSO: +# LDLIBRARY='libpython$(VERSION).so' +# INSTSONAME="$LDLIBRARY".$SOVERSION +# We mirror this here in order to make it easier to add the -gdb.py hooks. 
+# (if these get out of sync, the payload of the libs subpackage will fail +# and halt the build) +%global py_SOVERSION 1.0 +%global py_INSTSONAME_optimized libpython%{LDVERSION_optimized}.so.%{py_SOVERSION} +%global py_INSTSONAME_debug libpython%{LDVERSION_debug}.so.%{py_SOVERSION} + +# Disable automatic bytecompilation. The python3 binary is not yet +# available in /usr/bin when Python is built. Also, the bytecompilation fails +# on files that test invalid syntax. +%undefine py_auto_byte_compile + +# For multilib support, files that are different between 32- and 64-bit arches +# need different filenames. Use "64" or "32" according to the word size. +# Currently, the best way to determine an architecture's word size happens to +# be checking %%{_lib}. +%if "%{_lib}" == "lib64" +%global wordsize 64 +%else +%global wordsize 32 +%endif + + +# ======================= +# Build-time requirements +# ======================= + +# (keep this list alphabetized) + +BuildRequires: autoconf +BuildRequires: bluez-libs-devel +BuildRequires: bzip2 +BuildRequires: bzip2-devel +BuildRequires: desktop-file-utils +BuildRequires: expat-devel + +BuildRequires: findutils +BuildRequires: gcc-c++ +%if %{with gdbm} +BuildRequires: gdbm-devel >= 1:1.13 +%endif +BuildRequires: glibc-devel +BuildRequires: gmp-devel +BuildRequires: libappstream-glib +BuildRequires: libffi-devel +BuildRequires: libnsl2-devel +BuildRequires: libtirpc-devel +BuildRequires: libGL-devel +BuildRequires: libX11-devel +BuildRequires: ncurses-devel + +BuildRequires: openssl-devel +BuildRequires: pkgconfig +BuildRequires: readline-devel +BuildRequires: redhat-rpm-config >= 118 +BuildRequires: sqlite-devel +BuildRequires: gdb + +BuildRequires: tar +BuildRequires: tcl-devel +BuildRequires: tix-devel +BuildRequires: tk-devel + +%if %{with valgrind} +BuildRequires: valgrind-devel +%endif + +BuildRequires: xz-devel +BuildRequires: zlib-devel + +BuildRequires: /usr/bin/dtrace + +# workaround 
http://bugs.python.org/issue19804 (test_uuid requires ifconfig) +BuildRequires: /usr/sbin/ifconfig + +%if %{with rpmwheels} +BuildRequires: python3-setuptools-wheel +BuildRequires: python3-pip-wheel + +# Verify that the BuildRoot includes python36. +# Not actually needed for build. +BuildRequires: python36-devel +%endif + + +# ======================= +# Source code and patches +# ======================= + +# The upstream tarball includes questionable executable files for Windows, +# which we should not ship even in the SRPM. +# Run the "get-source.sh" with the version as argument to download the upstream +# tarball and generate a version with the .exe files removed. For example: +# $ ./get-source.sh 3.7.0 + +Source: Python-%{version}-noexe.tar.xz + +# A script to remove .exe files from the source distribution +Source1: get-source.sh + +# A simple script to check timestamps of bytecode files +# Run in check section with Python that is currently being built +# Written by bkabrda +Source8: check-pyc-and-pyo-timestamps.py + +# Desktop menu entry for idle3 +Source10: idle3.desktop + +# AppData file for idle3 +Source11: idle3.appdata.xml + +# unversioned-python script +Source12: no-python + +# unversioned-python man page +Source13: unversioned-python.1 + +# 00001 # +# Fixup distutils/unixccompiler.py to remove standard library path from rpath: +# Was Patch0 in ivazquez' python3000 specfile: +Patch1: 00001-rpath.patch + +# 00102 # +# Change the various install paths to use /usr/lib64/ instead or /usr/lib +# Only used when "%%{_lib}" == "lib64" +# Not yet sent upstream. 
+Patch102: 00102-lib64.patch + +# 00111 # +# Patch the Makefile.pre.in so that the generated Makefile doesn't try to build +# a libpythonMAJOR.MINOR.a +# See https://bugzilla.redhat.com/show_bug.cgi?id=556092 +# Downstream only: not appropriate for upstream +Patch111: 00111-no-static-lib.patch + +# 00132 # +# Add non-standard hooks to unittest for use in the "check" phase below, when +# running selftests within the build: +# @unittest._skipInRpmBuild(reason) +# for tests that hang or fail intermittently within the build environment, and: +# @unittest._expectedFailureInRpmBuild +# for tests that always fail within the build environment +# +# The hooks only take effect if WITHIN_PYTHON_RPM_BUILD is set in the +# environment, which we set manually in the appropriate portion of the "check" +# phase below (and which potentially other python-* rpms could set, to reuse +# these unittest hooks in their own "check" phases) +Patch132: 00132-add-rpmbuild-hooks-to-unittest.patch + +# 00155 # +# Avoid allocating thunks in ctypes unless absolutely necessary, to avoid +# generating SELinux denials on "import ctypes" and "import uuid" when +# embedding Python within httpd +# See https://bugzilla.redhat.com/show_bug.cgi?id=814391 +Patch155: 00155-avoid-ctypes-thunks.patch + +# 00160 # +# Python 3.3 added os.SEEK_DATA and os.SEEK_HOLE, which may be present in the +# header files in the build chroot, but may not be supported in the running +# kernel, hence we disable this test in an rpm build. 
+# Adding these was upstream issue http://bugs.python.org/issue10142 +# Not yet sent upstream +Patch160: 00160-disable-test_fs_holes-in-rpm-build.patch + +# 00163 # +# Some tests within test_socket fail intermittently when run inside Koji; +# disable them using unittest._skipInRpmBuild +# Not yet sent upstream +Patch163: 00163-disable-parts-of-test_socket-in-rpm-build.patch + +# 00170 # +# In debug builds, try to print repr() when a C-level assert fails in the +# garbage collector (typically indicating a reference-counting error +# somewhere else e.g in an extension module) +# The new macros/functions within gcmodule.c are hidden to avoid exposing +# them within the extension API. +# Sent upstream: http://bugs.python.org/issue9263 +# See https://bugzilla.redhat.com/show_bug.cgi?id=614680 +Patch170: 00170-gc-assertions.patch + +# 00178 # +# Don't duplicate various FLAGS in sysconfig values +# http://bugs.python.org/issue17679 +# Does not affect python2 AFAICS (different sysconfig values initialization) +Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch + +# 00189 # +# Instead of bundled wheels, use our RPM packaged wheels from +# /usr/share/python3-wheels +Patch189: 00189-use-rpm-wheels.patch + +# 00205 # +# LIBPL variable in makefile takes LIBPL from configure.ac +# but the LIBPL variable defined there doesn't respect libdir macro +Patch205: 00205-make-libpl-respect-lib64.patch + +# 00251 +# Set values of prefix and exec_prefix in distutils install command +# to /usr/local if executable is /usr/bin/python* and RPM build +# is not detected to make pip and distutils install into separate location +# Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe +Patch251: 00251-change-user-install-location.patch + +# 00262 # +# Backport of PEP 538: Coercing the legacy C locale to a UTF-8 based locale +# https://www.python.org/dev/peps/pep-0538/ +# Fedora Change: https://fedoraproject.org/wiki/Changes/python3_c.utf-8_locale +# Original proposal: 
https://bugzilla.redhat.com/show_bug.cgi?id=1404918 +Patch262: 00262-pep538_coerce_legacy_c_locale.patch + +# 00274 # +# Upstream uses Debian-style architecture naming. Change to match Fedora. +Patch274: 00274-fix-arch-names.patch + +# 00294 # +# Define TLS cipher suite on build time depending +# on the OpenSSL default cipher suite selection. +# Fixed upstream on CPython's 3.7 branch: +# https://bugs.python.org/issue31429 +# See also: https://bugzilla.redhat.com/show_bug.cgi?id=1489816 +Patch294: 00294-define-TLS-cipher-suite-on-build-time.patch + +# 00316 # +# We remove the exe files from distutil's bdist_wininst +# So we mark the command as unsupported - and the tests are skipped +# Fixed upstream and backported from the 3.7 branch: https://bugs.python.org/issue10945 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1754040 +Patch316: 00316-mark-bdist_wininst-unsupported.patch + +# 00317 # +# Security fix for CVE-2019-5010: Fix segfault in ssl's cert parser +# https://bugzilla.redhat.com/show_bug.cgi?id=1666789 +# Fixed upstream: https://bugs.python.org/issue35746 +Patch317: 00317-CVE-2019-5010.patch + +# 00318 # +# Various fixes for TLS 1.3 and OpenSSL 1.1.1 +# https://bugzilla.redhat.com/show_bug.cgi?id=1639531 + +# test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 +# https://bugs.python.org/issue32947#msg333990 +# https://github.com/python/cpython/pull/11612 + +# Encrypt private key test files with AES256 +# https://bugs.python.org/issue38271 +# https://github.com/python/cpython/pull/16396 + +# Prefer PROTOCOL_TLS_CLIENT/SERVER (partial backport) +# https://bugs.python.org/issue31346 +# https://github.com/python/cpython/pull/3058 + +# Enable TLS 1.3 in tests (partial backport) +# https://bugs.python.org/issue33618 +# https://github.com/python/cpython/pull/7082 + +# OpenSSL 1.1.1-pre1 / TLS 1.3 fixes (partial backport) +# https://bugs.python.org/issue32947 +# https://github.com/python/cpython/pull/5923 +Patch318: 00318-fixes-for-tls-13.patch + +# 00319 # +# 
Fix test_tarfile on ppc64 +# https://bugzilla.redhat.com/show_bug.cgi?id=1639490 +# https://bugs.python.org/issue35772 +Patch319: 00319-test_tarfile_ppc64.patch + +# 00320 # +# Security fix for CVE-2019-9636 and CVE-2019-10160: Information Disclosure due to urlsplit improper NFKC normalization +# Fixed upstream: https://bugs.python.org/issue36216 and https://bugs.python.org/issue36742 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1689318 +Patch320: 00320-CVE-2019-9636-and-CVE-2019-10160.patch + +# 00324 # +# Disallow control chars in http URLs +# Security fix for CVE-2019-9740 and CVE-2019-9947 +# Fixed upstream: https://bugs.python.org/issue30458 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1704365 +# and https://bugzilla.redhat.com/show_bug.cgi?id=1703531 +Patch324: 00324-disallow-control-chars-in-http-urls.patch + +# 00325 # +# Unnecessary URL scheme exists to allow local_file:// reading file in urllib +# Security fix for CVE-2019-9948 +# Fixed upstream: https://bugs.python.org/issue35907 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1714643 +Patch325: 00325-CVE-2019-9948.patch + +# 00326 # +# Don't set the post-handshake authentication verify flag on client side +# on TLS 1.3, as it also implicitly enables cert chain validation and an +# SSL/TLS connection will fail when verify mode is set to CERT_NONE. 
+# Fixed upstream: https://bugs.python.org/issue37428 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1725721 +Patch326: 00326-do-not-set-PHA-verify-flag-on-client-side.patch + +# 00327 # +# Enable TLS 1.3 post-handshake authentication in http.client for default +# context or if a cert_file is passed to HTTPSConnection +# Fixed upstream: https://bugs.python.org/issue37440 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1671353 +Patch327: 00327-enable-tls-1.3-PHA-in-http.client.patch + +# 00329 # +# Support OpenSSL FIPS mode +# - Fallback implementations md5, sha1, sha256, sha512 are removed in favor of OpenSSL wrappers +# - In FIPS mode, OpenSSL wrappers are always used in hashlib +# - add a new "usedforsecurity" keyword argument to the various digest +# algorithms in hashlib so that you can whitelist a callsite with +# "usedforsecurity=False" +# The change has been implemented upstream since Python 3.9: +# https://bugs.python.org/issue9216 +# - OpenSSL wrappers for the hashes blake2{b512,s256}, +# sha3_{224,256,384,512}, shake_{128,256} are now exported from _hashlib +# - In FIPS mode, the blake2, sha3 and shake hashes use OpenSSL wrappers +# and do not offer extended functionality (keys, tree hashing, custom digest size) +# - In FIPS mode, hmac.HMAC can only be instantiated with an OpenSSL wrapper +# or an string with OpenSSL hash name as the "digestmod" argument. +# The argument must be specified (instead of defaulting to ‘md5’). +# +# - Also while in FIPS mode, we utilize OpenSSL's DRBG and disable the +# os.getrandom() function. 
+# +# Upstream changes that have also been backported with this patch +# to allow tests to pass on stricter environments: +# +# Avoid MD5 or check for MD5 availability +# https://bugs.python.org/issue38270 +# https://github.com/python/cpython/pull/16393 +# https://github.com/python/cpython/pull/16437 +# https://github.com/python/cpython/pull/17446 +# +# add usedforsecurity to hashlib constructors (partial backport for fixing a uuid test) +# https://github.com/python/cpython/pull/16044 +# Resolves: rhbz#1731424 +Patch329: 00329-fips.patch + +# 00330 # +# Fix CVE-2018-20852: cookie domain check returning incorrect results +# Fixed upstream: https://bugs.python.org/issue35121 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1741553 +Patch330: 00330-CVE-2018-20852.patch + +# 00332 # +# Fix CVE-2019-16056: Don't parse email addresses containing +# multiple '@' characters. +# Fixed upstream: https://bugs.python.org/issue34155 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1750776 +Patch332: 00332-CVE-2019-16056.patch + +# 00333 # +# Reduce the number of tests run during the profile guided optimizations build, +# as running the whole test suite during profiling increases the build time +# substantially, with negligible performance gain. +# Fixed upstream and backported from the 3.8 branch: +# https://bugs.python.org/issue36044 +# https://bugs.python.org/issue37667 +# Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1749576 +Patch333: 00333-reduce-pgo-tests.patch + +# (New patches go here ^^^) +# +# When adding new patches to "python" and "python3" in Fedora, EL, etc., +# please try to keep the patch numbers in-sync between all specfiles. 
+# +# More information, and a patch number catalog, is at: +# +# https://fedoraproject.org/wiki/SIGs/Python/PythonPatches + + +# ========================================== +# Descriptions, and metadata for subpackages +# ========================================== + +%package -n platform-python +Summary: Internal interpreter of the Python programming language + +Conflicts: python3 < 3.6.6-13 + +# Packages with Python modules in standard locations automatically +# depend on python(abi). Provide that here. +Provides: python(abi) = %{pybasever} + +Requires: %{name}-libs%{?_isa} = %{version}-%{release} + +%if %{with rpmwheels} + +# RHEL8 was forked from F28 and thus required python3-setuptools here +# for the rewheel module to work. We've since backported the use of RPM +# prepared wheels from F29+ into RHEL8, and thus this dependency isn't +# strictly needed. +# However, it is possible, that some packages in BaseOS actually depend on +# setuptools without declaring the dependency in their spec file. Thus +# we're keeping this dependency here to avoid the possibility of breaking +# them. +Requires: platform-python-setuptools +# For python3-pip the Requires has been reduced to Recommends, as there are +# generally less packages that depend on pip than packages that depend on +# setuptools at runtime, and thus there's less chance of breakage. +# (rhbz#1756217). +Recommends: platform-python-pip + +Requires: python3-setuptools-wheel +Requires: python3-pip-wheel +%endif + +# Runtime require alternatives +Requires: %{_sbindir}/alternatives +Requires(post): %{_sbindir}/alternatives +Requires(postun): %{_sbindir}/alternatives + +# This prevents ALL subpackages built from this spec to require +# /usr/bin/python3*. Granularity per subpackage is impossible. +# It's intended for the libs package not to drag in the interpreter, see +# https://bugzilla.redhat.com/show_bug.cgi?id=1547131 +# All others require %%{name} anyway. 
+%global __requires_exclude ^/usr/bin/python3 + + +# The description is the same for the SRPM and the main `platform-python` subpackage: +%description +This is the internal interpreter of the Python language for the system. +To use Python yourself, please install one of the available Python 3 packages, +for example python36. + + +# The description is the same for the SRPM and the main `platform-python` subpackage: +%description -n platform-python +This is the internal interpreter of the Python language for the system. +To use Python yourself, please install one of the available Python 3 packages, +for example python36. + + +%package libs +Summary: Python runtime libraries + +# The "enum" module is included in the standard library. +# Provide an upgrade path from the external library. +Provides: python3-enum34 = 1.0.4-5%{?dist} +Obsoletes: python3-enum34 < 1.0.4-5%{?dist} + +# Python 3 built with glibc >= 2.24.90-26 needs to require it +# See https://bugzilla.redhat.com/show_bug.cgi?id=1410644 +Requires: glibc%{?_isa} >= 2.24.90-26 + +Requires: chkconfig + +%if %{with gdbm} +# When built with this (as guarded by the BuildRequires above), require it +Requires: gdbm%{?_isa} >= 1:1.13 +%endif + +%if %{with rpmwheels} +Requires: python3-setuptools-wheel +Requires: python3-pip-wheel +%else +Provides: bundled(python3-pip) = 18.1 +Provides: bundled(python3-setuptools) = 40.6.2 +%endif + +# There are files in the standard library that have python shebang. +# We've filtered the automatic requirement out so libs are installable without +# the main package. This however makes it pulled in by default. 
+# See https://bugzilla.redhat.com/show_bug.cgi?id=1547131 +Recommends: platform-python%{?_isa} = %{version}-%{release} + +%description libs +This package contains runtime libraries for use by Python: +- the majority of the Python standard library +- a dynamically linked library for use by applications that embed Python as + a scripting language, and by the main "python3" executable + + +# The contents of this package were moved into the `platform-python-devel` subpackage. +# See a comment above the definition of that subpackage for details. +%package devel +Summary: Libraries and header files needed for Python development +Requires: platform-python-devel%{?_isa} = %{version}-%{release} +Requires: platform-python = %{version}-%{release} +Requires: python36-devel + +%description devel +This package contains the header files and configuration needed to compile +Python extension modules (typically written in C or C++), to embed Python +into other programs, and to make binary distributions for Python libraries. + +It also contains the necessary macros to build RPM packages with Python modules +and 2to3 tool, an automatic source converter from Python 2.X. + +It also makes the "python3" and "python3-config" commands available +for compatibility with some build systems. +When building packages, prefer requiring platform-python-devel and using +the %%{__python3} macro instead, if possible. + + + +# The `platform-python-devel` (previously python3-libs-devel) +# subpackage was created because the `python3-devel` subpackage needs +# to pull into the buildroot the /usr/bin/python3{,-config} symlinks (for old +# buildsystems that are hard to switch to platform-python). But these symlinks +# cannot be shipped in RHEL8 to customers. Therefore we'll ship only +# `platform-python-devel` and have the `python3-devel` package require it in +# the buildroot, as well as pull in the symlinks. 
+%package -n platform-python-devel +Summary: Libraries and header files needed for Python development +Provides: %{name}-libs-devel = %{version}-%{release} +Provides: %{name}-libs-devel%{?_isa} = %{version}-%{release} +Obsoletes: %{name}-libs-devel < 3.6.6-12 +Requires: platform-python = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +BuildRequires: python-rpm-macros +Requires: python-rpm-macros +Requires: python3-rpm-macros +Requires: python3-rpm-generators + +# This is not "API" (packages that need setuptools should still BuildRequire it) +# However some packages apparently can build both with and without setuptools +# producing egg-info as file or directory (depending on setuptools presence). +# Directory-to-file updates are problematic in RPM, so we ensure setuptools is +# installed when -devel is required. +# See https://bugzilla.redhat.com/show_bug.cgi?id=1623914 +# See https://fedoraproject.org/wiki/Packaging:Directory_Replacement +Requires: platform-python-setuptools + +Provides: %{name}-2to3 = %{version}-%{release} +Provides: 2to3 = %{version}-%{release} + +Conflicts: platform-python < %{version}-%{release} +Conflicts: python3 < 3.6.6-9 + +%description -n platform-python-devel +This package contains the header files and configuration needed to compile +Python extension modules (typically written in C or C++), to embed Python +into other programs, and to make binary distributions for Python libraries. + +It also contains the necessary macros to build RPM packages with Python modules +and 2to3 tool, an automatic source converter from Python 2.X. 
+ + +%package idle +Summary: A basic graphical development environment for Python +Requires: platform-python = %{version}-%{release} +Requires: %{name}-tkinter = %{version}-%{release} + +Provides: idle3 = %{version}-%{release} + +Provides: %{name}-tools = %{version}-%{release} +Provides: %{name}-tools%{?_isa} = %{version}-%{release} +Obsoletes: %{name}-tools < %{version}-%{release} + +# python36 installs the alternatives master symlink to which we attach a slave +Requires: python36 +Requires(post): python36 +Requires(postun): python36 + +%description idle +IDLE is Python’s Integrated Development and Learning Environment. + +IDLE has the following features: Python shell window (interactive +interpreter) with colorizing of code input, output, and error messages; +multi-window text editor with multiple undo, Python colorizing, +smart indent, call tips, auto completion, and other features; +search within any window, replace within editor windows, and +search through multiple files (grep); debugger with persistent +breakpoints, stepping, and viewing of global and local namespaces; +configuration, browsers, and other dialogs. + + +%package tkinter +Summary: A GUI toolkit for Python +Requires: platform-python = %{version}-%{release} + +%description tkinter +The Tkinter (Tk interface) library is a graphical user interface toolkit for +the Python programming language. + + +%package test +Summary: The self-test suite for the main python3 package +Requires: platform-python = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} + +%description test +The self-test suite for the Python interpreter. + +This is only useful to test Python itself. For testing general Python code, +you should use the unittest module from %{name}-libs, or a library such as +%{name}-pytest or %{name}-nose. 
+ + +%if %{with debug_build} +%package -n platform-python-debug +Conflicts: python3-debug < 3.6.6-13 +Summary: Debug version of the Python runtime + +# The debug build is an all-in-one package version of the regular build, and +# shares the same .py/.pyc files and directories as the regular build. Hence +# we depend on all of the subpackages of the regular build: +Requires: platform-python%{?_isa} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +Requires: platform-python-devel%{?_isa} = %{version}-%{release} +Requires: %{name}-test%{?_isa} = %{version}-%{release} +Requires: %{name}-tkinter%{?_isa} = %{version}-%{release} +Requires: %{name}-idle%{?_isa} = %{version}-%{release} + +%description -n platform-python-debug +python3-debug provides a version of the Python runtime with numerous debugging +features enabled, aimed at advanced Python users such as developers of Python +extension modules. + +This version uses more memory and will be slower than the regular Python build, +but is useful for tracking down reference-counting issues and other bugs. + +The bytecode format is unchanged, so that .pyc files are compatible between +this and the standard version of Python, but the debugging features mean that +C/C++ extension modules are ABI-incompatible and must be built for each version +separately. + +The debug build shares installation directories with the standard Python +runtime, so that .py and .pyc files can be shared. +Compiled extension modules use a special ABI flag ("d") in the filename, +so extensions for both versions can co-exist in the same directory. +%endif # with debug_build + + +# ====================================================== +# The prep phase of the build: +# ====================================================== + +%prep +%setup -q -n Python-%{version}%{?prerel} + +# Remove bundled libraries to ensure that we're using the system copy. 
+rm -r Modules/expat +rm -r Modules/zlib + +# +# Apply patches: +# +%patch1 -p1 + +%if "%{_lib}" == "lib64" +%patch102 -p1 +%endif +%patch111 -p1 +%patch132 -p1 +%patch155 -p1 +%patch160 -p1 +%patch163 -p1 +%patch170 -p1 +%patch178 -p1 + +%if %{with rpmwheels} +%patch189 -p1 +rm Lib/ensurepip/_bundled/*.whl +%endif + +%patch205 -p1 +%patch251 -p1 +%patch262 -p1 +%patch274 -p1 +%patch294 -p1 +%patch316 -p1 +%patch317 -p1 +%patch318 -p1 +%patch319 -p1 +%patch320 -p1 +%patch324 -p1 +%patch325 -p1 +%patch326 -p1 +%patch327 -p1 +%patch329 -p1 +%patch330 -p1 +%patch332 -p1 +%patch333 -p1 + + +# Remove files that should be generated by the build +# (This is after patching, so that we can use patches directly from upstream) +rm configure pyconfig.h.in + + +# ====================================================== +# Configuring and building the code: +# ====================================================== + +%build + +# Regenerate the configure script and pyconfig.h.in +autoconf +autoheader + +# Remember the current directory (which has sources and the configure script), +# so we can refer to it after we "cd" elsewhere. +topdir=$(pwd) + +# Get proper option names from bconds +%if %{with computed_gotos} +%global computed_gotos_flag yes +%else +%global computed_gotos_flag no +%endif + +%if %{with optimizations} +%global optimizations_flag "--enable-optimizations" +%else +%global optimizations_flag "--disable-optimizations" +%endif + +# Set common compiler/linker flags +# We utilize the %%extension_...flags macros here so users building C/C++ +# extensions with our python won't get all the compiler/linker flags used +# in RHEL RPMs. 
+# Standard library built here will still use the %%build_...flags, +# RHEL packages utilizing %%py3_build will use them as well +# https://fedoraproject.org/wiki/Changes/Python_Extension_Flags +export CFLAGS="%{extension_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CFLAGS_NODIST="%{build_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CXXFLAGS="%{extension_cxxflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CPPFLAGS="$(pkg-config --cflags-only-I libffi)" +export OPT="%{extension_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export LINKCC="gcc" +export CFLAGS="$CFLAGS $(pkg-config --cflags openssl)" +export LDFLAGS="%{extension_ldflags} -g $(pkg-config --libs-only-L openssl)" +export LDFLAGS_NODIST="%{build_ldflags} -g $(pkg-config --libs-only-L openssl)" + +# We can build several different configurations of Python: regular and debug. +# Define a common function that does one build: +BuildPython() { + ConfName=$1 + ExtraConfigArgs=$2 + MoreCFlags=$3 + + # Each build is done in its own directory + ConfDir=build/$ConfName + echo STARTING: BUILD OF PYTHON FOR CONFIGURATION: $ConfName + mkdir -p $ConfDir + pushd $ConfDir + + # Normally, %%configure looks for the "configure" script in the current + # directory. + # Since we changed directories, we need to tell %%configure where to look. + %global _configure $topdir/configure + +%configure \ + --enable-ipv6 \ + --enable-shared \ + --with-computed-gotos=%{computed_gotos_flag} \ + --with-dbmliborder=gdbm:ndbm:bdb \ + --with-system-expat \ + --with-system-ffi \ + --enable-loadable-sqlite-extensions \ + --with-dtrace \ + --with-lto \ + --with-ssl-default-suites=openssl \ +%if %{with valgrind} + --with-valgrind \ +%endif + $ExtraConfigArgs \ + %{nil} + + # Invoke the build + %make_build CFLAGS_NODIST="$CFLAGS_NODIST $MoreCFlags" + + popd + echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Call the above to build each configuration. 
+ +%if %{with debug_build} +BuildPython debug \ + "--without-ensurepip --with-pydebug" \ + "-Og" +%endif # with debug_build + +BuildPython optimized \ + "--without-ensurepip %{optimizations_flag}" \ + "" + +# ====================================================== +# Installing the built code: +# ====================================================== + +%install + +# As in %%build, remember the current directory +topdir=$(pwd) + +# We install a collection of hooks for gdb that make it easier to debug +# executables linked against libpython3* (such as /usr/bin/python3 itself) +# +# These hooks are implemented in Python itself (though they are for the version +# of python that gdb is linked with) +# +# gdb-archer looks for them in the same path as the ELF file or its .debug +# file, with a -gdb.py suffix. +# We put them next to the debug file, because ldconfig would complain if +# it found non-library files directly in /usr/lib/ +# (see https://bugzilla.redhat.com/show_bug.cgi?id=562980) +# +# We'll put these files in the debuginfo package by installing them to e.g.: +# /usr/lib/debug/usr/lib/libpython3.2.so.1.0.debug-gdb.py +# (note that the debug path is /usr/lib/debug for both 32/64 bit) +# +# See https://fedoraproject.org/wiki/Features/EasierPythonDebugging for more +# information + +%if %{with gdb_hooks} +DirHoldingGdbPy=%{_prefix}/lib/debug/%{_libdir} +mkdir -p %{buildroot}$DirHoldingGdbPy +%endif # with gdb_hooks + +# Multilib support for pyconfig.h +# 32- and 64-bit versions of pyconfig.h are different. For multilib support +# (making it possible to install 32- and 64-bit versions simultaneously), +# we need to install them under different filenames, and to make the common +# "pyconfig.h" include the right file based on architecture. 
+# See https://bugzilla.redhat.com/show_bug.cgi?id=192747
+# Filenames are defined here:
+%global _pyconfig32_h pyconfig-32.h
+%global _pyconfig64_h pyconfig-64.h
+%global _pyconfig_h pyconfig-%{wordsize}.h
+
+# Use a common function to do an install for all our configurations:
+InstallPython() {
+
+  ConfName=$1
+  PyInstSoName=$2
+  MoreCFlags=$3
+  LDVersion=$4
+
+  # Switch to the directory with this configuration's built files
+  ConfDir=build/$ConfName
+  echo STARTING: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName
+  mkdir -p $ConfDir
+  pushd $ConfDir
+
+  make \
+    DESTDIR=%{buildroot} \
+    INSTALL="install -p" \
+    EXTRA_CFLAGS="$MoreCFlags" \
+    install
+
+  popd
+
+%if %{with gdb_hooks}
+  # See comment on $DirHoldingGdbPy above
+  PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName-%{version}-%{release}.%{_arch}.debug-gdb.py
+  cp Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy
+%endif # with gdb_hooks
+
+  # Rename the -devel script that differs on different arches to arch specific name
+  mv %{buildroot}%{_bindir}/python${LDVersion}-{,`uname -m`-}config
+  echo -e '#!/bin/sh\nexec %{_libexecdir}/platform-python'${LDVersion}'-`uname -m`-config "$@"' > \
+    %{buildroot}%{_bindir}/python${LDVersion}-config
+  echo '[ $? -eq 127 ] && echo "Could not find %{_libexecdir}/platform-python'${LDVersion}'-`uname -m`-config. Look around to see available arches."
>&2' >> \
+    %{buildroot}%{_bindir}/python${LDVersion}-config
+  chmod +x %{buildroot}%{_bindir}/python${LDVersion}-config
+
+  # Platform Python: Move the python*-config to libexec
+  mkdir -p %{buildroot}%{_libexecdir}
+  mv %{buildroot}%{_bindir}/python${LDVersion}-config %{buildroot}%{_libexecdir}/platform-python${LDVersion}-config
+  ln -s %{_libexecdir}/platform-python${LDVersion}-config %{buildroot}%{_bindir}/python${LDVersion}-config
+  mv %{buildroot}%{_bindir}/python${LDVersion}-`uname -m`-config %{buildroot}%{_libexecdir}/platform-python${LDVersion}-`uname -m`-config
+  ln -s %{_libexecdir}/platform-python${LDVersion}-`uname -m`-config %{buildroot}%{_bindir}/python${LDVersion}-`uname -m`-config
+
+  # Platform Python: Move optimized and debug executables
+  mv %{buildroot}%{_bindir}/python${LDVersion} %{buildroot}%{_libexecdir}/platform-python${LDVersion}
+  ln -s %{_libexecdir}/platform-python${LDVersion} %{buildroot}%{_bindir}/python${LDVersion}
+
+  # Make python3-devel multilib-ready
+  mv %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h \
+     %{buildroot}%{_includedir}/python${LDVersion}/%{_pyconfig_h}
+  cat > %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h << EOF
+#include <bits/wordsize.h>
+
+#if __WORDSIZE == 32
+#include "%{_pyconfig32_h}"
+#elif __WORDSIZE == 64
+#include "%{_pyconfig64_h}"
+#else
+#error "Unknown word size"
+#endif
+EOF
+
+  echo FINISHED: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName
+}
+
+# Install the "debug" build first; any common files will be overridden with
+# later builds
+%if %{with debug_build}
+InstallPython debug \
+  %{py_INSTSONAME_debug} \
+  -O0 \
+  %{LDVERSION_debug}
+%endif # with debug_build
+
+# Now the optimized build:
+InstallPython optimized \
+  %{py_INSTSONAME_optimized} \
+  "" \
+  %{LDVERSION_optimized}
+
+# Install directories for additional packages
+install -d -m 0755 %{buildroot}%{pylibdir}/site-packages/__pycache__
+%if "%{_lib}" == "lib64"
+# The 64-bit version needs to create "site-packages" in /usr/lib/ (for
+#
pure-Python modules) as well as in /usr/lib64/ (for packages with extension +# modules). +# Note that rpmlint will complain about hardcoded library path; +# this is intentional. +install -d -m 0755 %{buildroot}%{_prefix}/lib/python%{pybasever}/site-packages/__pycache__ +%endif + +# add idle3 to menu +install -D -m 0644 Lib/idlelib/Icons/idle_16.png %{buildroot}%{_datadir}/icons/hicolor/16x16/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_32.png %{buildroot}%{_datadir}/icons/hicolor/32x32/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_48.png %{buildroot}%{_datadir}/icons/hicolor/48x48/apps/idle3.png +desktop-file-install --dir=%{buildroot}%{_datadir}/applications %{SOURCE10} + +# Install and validate appdata file +mkdir -p %{buildroot}%{_metainfodir} +cp -a %{SOURCE11} %{buildroot}%{_metainfodir} +appstream-util validate-relax --nonet %{buildroot}%{_metainfodir}/idle3.appdata.xml + +# Make sure distutils looks at the right pyconfig.h file +# See https://bugzilla.redhat.com/show_bug.cgi?id=201434 +# Similar for sysconfig: sysconfig.get_config_h_filename tries to locate +# pyconfig.h so it can be parsed, and needs to do this at runtime in site.py +# when python starts up (see https://bugzilla.redhat.com/show_bug.cgi?id=653058) +# +# Split this out so it goes directly to the pyconfig-32.h/pyconfig-64.h +# variants: +sed -i -e "s/'pyconfig.h'/'%{_pyconfig_h}'/" \ + %{buildroot}%{pylibdir}/distutils/sysconfig.py \ + %{buildroot}%{pylibdir}/sysconfig.py + +# Install pathfix.py to bindir +# See https://github.com/fedora-python/python-rpm-porting/issues/24 +cp -p Tools/scripts/pathfix.py %{buildroot}%{_bindir}/ + +# Switch all shebangs to refer to the specific Python version. +# This currently only covers files matching ^[a-zA-Z0-9_]+\.py$, +# so handle files named using other naming scheme separately. 
+# - Files in /usr/bin/ (without .py) are listed manually +LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \ + Tools/scripts/pathfix.py \ + -i "%{_libexecdir}/platform-python" -pn \ + %{buildroot} \ + %{?with_gdb_hooks:%{buildroot}$DirHoldingGdbPy/*.py} \ + %{buildroot}%{_bindir}/{idle,pydoc,pyvenv-,2to3-}%{pybasever} + +# The python-config.py files need to be shebanged with the given LDVERSION'ed +# executable +for LDVersion in %{LDVERSION_optimized} %{LDVERSION_debug} +do + LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \ + Tools/scripts/pathfix.py \ + -i "%{_libexecdir}/platform-python${LDVersion}" -pn \ + %{buildroot}%{pylibdir}/config-${LDVersion}-%{_arch}-linux%{_gnu}/python-config.py +done + +# Remove tests for python3-tools which was removed in +# https://bugzilla.redhat.com/show_bug.cgi?id=1312030 +rm -rf %{buildroot}%{pylibdir}/test/test_tools + +# Remove shebang lines from .py files that aren't executable, and +# remove executability from .py files that don't have a shebang line: +find %{buildroot} -name \*.py \ + \( \( \! -perm /u+x,g+x,o+x -exec sed -e '/^#!/Q 0' -e 'Q 1' {} \; \ + -print -exec sed -i '1d' {} \; \) -o \( \ + -perm /u+x,g+x,o+x ! -exec grep -m 1 -q '^#!' {} \; \ + -exec chmod a-x {} \; \) \) + +# Get rid of DOS batch files: +find %{buildroot} -name \*.bat -exec rm {} \; + +# Get rid of backup files: +find %{buildroot}/ -name "*~" -exec rm -f {} \; +find . -name "*~" -exec rm -f {} \; + +# Get rid of a stray copy of the license: +rm %{buildroot}%{pylibdir}/LICENSE.txt + +# Do bytecompilation with the newly installed interpreter. 
+# This is similar to the script in macros.pybytecompile +# compile *.pyc +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("%{buildroot}")[2], optimize=opt) for opt in range(3) for f in sys.argv[1:]]' || : + +# Since we have pathfix.py in bindir, this is created, but we don't want it +rm -rf %{buildroot}%{_bindir}/__pycache__ + +# Fixup permissions for shared libraries from non-standard 555 to standard 755: +find %{buildroot} -perm 555 -exec chmod 755 {} \; + +# Install macros for rpm: +mkdir -p %{buildroot}/%{_rpmconfigdir}/macros.d/ + +# Create "/usr/bin/python3-debug" (and /usr/libexec/platform-python-debug), +# a symlink to the python3 debug binary, to +# avoid the user having to know the precise version and ABI flags. +# See e.g. 
https://bugzilla.redhat.com/show_bug.cgi?id=676748 +%if %{with debug_build} +ln -s \ + %{_bindir}/python%{LDVERSION_debug} \ + %{buildroot}%{_bindir}/python3-debug +ln -s \ + %{_libexecdir}/platform-python%{LDVERSION_debug} \ + %{buildroot}%{_libexecdir}/platform-python-debug +%endif + + +# Platform Python: Move the executable to libexec & provide a symlink from bindir +# (this symlink will be moved to the python36 module) +mkdir -p %{buildroot}%{_libexecdir} +mv %{buildroot}%{_bindir}/python%{pybasever} %{buildroot}%{_libexecdir}/platform-python%{pybasever} +ln -s %{_libexecdir}/platform-python%{pybasever} %{buildroot}%{_bindir}/python%{pybasever} +ln -s ./platform-python%{pybasever} %{buildroot}%{_libexecdir}/platform-python + +# Platform Python: Add symlink from platform-python-config +ln -s ./platform-python%{LDVERSION_optimized}-config %{buildroot}%{_libexecdir}/platform-python%{pybasever}-config +ln -s ./platform-python%{pybasever}-config %{buildroot}%{_libexecdir}/platform-python-config + +# Remove symlinks 3 > 3.6 for pydoc, idle, manpage +rm %{buildroot}%{_bindir}/pydoc3 +rm %{buildroot}%{_bindir}/idle3 +rm %{buildroot}%{_mandir}/man1/python3.1 + +# Install the unversioned-python script and its man page +install -m 755 %{SOURCE12} %{buildroot}%{_libexecdir}/no-python +install -m 644 %{SOURCE13} %{buildroot}%{_mandir}/man1/unversioned-python.1 +# Touch the files that are controlled by `alternatives` so we can declare them +# as ghosts in the files section +touch %{buildroot}%{_bindir}/unversioned-python +touch %{buildroot}%{_bindir}/idle3 +touch %{buildroot}%{_mandir}/man1/python.1.gz + + +# ====================================================== +# Checks for packaging issues +# ====================================================== + +%check +# first of all, check timestamps of bytecode files +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + 
PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_libexecdir}/platform-python %{SOURCE8} + +# Ensure that the curses module was linked against libncursesw.so, rather than +# libncurses.so +# See https://bugzilla.redhat.com/show_bug.cgi?id=539917 +ldd %{buildroot}/%{dynload_dir}/_curses*.so \ + | grep curses \ + | grep libncurses.so && (echo "_curses.so linked against libncurses.so" ; exit 1) + +# Ensure that the debug modules are linked against the debug libpython, and +# likewise for the optimized modules and libpython: +for Module in %{buildroot}/%{dynload_dir}/*.so ; do + case $Module in + *.%{SOABI_debug}) + ldd $Module | grep %{py_INSTSONAME_optimized} && + (echo Debug module $Module linked against optimized %{py_INSTSONAME_optimized} ; exit 1) + + ;; + *.%{SOABI_optimized}) + ldd $Module | grep %{py_INSTSONAME_debug} && + (echo Optimized module $Module linked against debug %{py_INSTSONAME_debug} ; exit 1) + ;; + esac +done + +# ====================================================== +# Running the upstream test suite +# ====================================================== + +topdir=$(pwd) +CheckPython() { + ConfName=$1 + ConfDir=$(pwd)/build/$ConfName + + export OPENSSL_CONF=/non-existing-file + + echo STARTING: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + + # Note that we're running the tests using the version of the code in the + # builddir, not in the buildroot. 
+ + # Run the upstream test suite, setting "WITHIN_PYTHON_RPM_BUILD" so that the + # our non-standard decorators take effect on the relevant tests: + # @unittest._skipInRpmBuild(reason) + # @unittest._expectedFailureInRpmBuild + # test_faulthandler.test_register_chain currently fails on ppc64le and + # aarch64, see upstream bug http://bugs.python.org/issue21131 + WITHIN_PYTHON_RPM_BUILD= \ + LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \ + -wW --slowest --findleaks \ + -x test_bdist_rpm \ + %ifarch %{mips64} + -x test_ctypes \ + %endif + + echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + +} + +%if %{with tests} + +# Check each of the configurations: +%if %{with debug_build} +CheckPython debug +%endif # with debug_build +CheckPython optimized + +%endif # with tests + + +%post -n platform-python +# Default with no /usr/bin/python symlink +alternatives --install %{_bindir}/unversioned-python \ + python \ + %{_libexecdir}/no-python \ + 404 \ + --slave %{_mandir}/man1/python.1.gz \ + unversioned-python-man \ + %{_mandir}/man1/unversioned-python.1.gz + +%postun -n platform-python +# Do this only during uninstall process (not during update) +if [ $1 -eq 0 ]; then + alternatives --remove python \ + %{_libexecdir}/no-python + +fi + + +%post -n python3-idle +alternatives --add-slave python3 %{_bindir}/python3.6 \ + %{_bindir}/idle3 \ + idle3 \ + %{_bindir}/idle3.6 + +%postun -n python3-idle +# Do this only during uninstall process (not during update) +if [ $1 -eq 0 ]; then + alternatives --remove-slave python3 %{_bindir}/python3.6 \ + idle3 +fi + + +%files -n platform-python +%license LICENSE +%doc README.rst +%{_bindir}/pydoc* + +%exclude %{_bindir}/python3 +%exclude %{_bindir}/python%{pybasever} +%exclude %{_bindir}/python%{LDVERSION_optimized} +%{_libexecdir}/platform-python +%{_libexecdir}/platform-python%{pybasever} +%{_libexecdir}/platform-python%{LDVERSION_optimized} +%{_libexecdir}/no-python +%ghost %{_bindir}/unversioned-python +%ghost 
%{_mandir}/man1/python.1.gz + +%exclude %{_bindir}/pyvenv +%{_bindir}/pyvenv-%{pybasever} + +%{_mandir}/man1/python3.6.1* +%{_mandir}/man1/unversioned-python.1* + +%files libs +%license LICENSE +%doc README.rst + +%dir %{pylibdir} +%dir %{dynload_dir} + +%{pylibdir}/lib2to3 +%exclude %{pylibdir}/lib2to3/tests + +%dir %{pylibdir}/unittest/ +%dir %{pylibdir}/unittest/__pycache__/ +%{pylibdir}/unittest/*.py +%{pylibdir}/unittest/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/asyncio/ +%dir %{pylibdir}/asyncio/__pycache__/ +%{pylibdir}/asyncio/*.py +%{pylibdir}/asyncio/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/venv/ +%dir %{pylibdir}/venv/__pycache__/ +%{pylibdir}/venv/*.py +%{pylibdir}/venv/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/venv/scripts + +%{pylibdir}/wsgiref +%{pylibdir}/xmlrpc + +%dir %{pylibdir}/ensurepip/ +%dir %{pylibdir}/ensurepip/__pycache__/ +%{pylibdir}/ensurepip/*.py +%{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes} +%if %{with rpmwheels} +%exclude %{pylibdir}/ensurepip/_bundled +%else +%dir %{pylibdir}/ensurepip/_bundled +%{pylibdir}/ensurepip/_bundled/*.whl +%endif + +# The majority of the test module lives in the test subpackage +# However test.support is in libs - it contains stuff used when testing your code +# https://bugzilla.redhat.com/show_bug.cgi?id=1651215 +%dir %{pylibdir}/test/ +%dir %{pylibdir}/test/__pycache__/ +%dir %{pylibdir}/test/support/ +%dir %{pylibdir}/test/support/__pycache__/ +%{pylibdir}/test/__init__.py +%{pylibdir}/test/__pycache__/__init__%{bytecode_suffixes} +%{pylibdir}/test/support/*.py +%{pylibdir}/test/support/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/ +%dir %{pylibdir}/concurrent/__pycache__/ +%{pylibdir}/concurrent/*.py +%{pylibdir}/concurrent/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/futures/ +%dir %{pylibdir}/concurrent/futures/__pycache__/ +%{pylibdir}/concurrent/futures/*.py 
+%{pylibdir}/concurrent/futures/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/pydoc_data + +%{dynload_dir}/_blake2.%{SOABI_optimized}.so +%{dynload_dir}/_sha3.%{SOABI_optimized}.so +%{dynload_dir}/_hmacopenssl.%{SOABI_optimized}.so + +%{dynload_dir}/_asyncio.%{SOABI_optimized}.so +%{dynload_dir}/_bisect.%{SOABI_optimized}.so +%{dynload_dir}/_bz2.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_cn.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_hk.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_jp.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_kr.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_tw.%{SOABI_optimized}.so +%{dynload_dir}/_crypt.%{SOABI_optimized}.so +%{dynload_dir}/_csv.%{SOABI_optimized}.so +%{dynload_dir}/_ctypes.%{SOABI_optimized}.so +%{dynload_dir}/_curses.%{SOABI_optimized}.so +%{dynload_dir}/_curses_panel.%{SOABI_optimized}.so +%{dynload_dir}/_dbm.%{SOABI_optimized}.so +%{dynload_dir}/_decimal.%{SOABI_optimized}.so +%{dynload_dir}/_elementtree.%{SOABI_optimized}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_optimized}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_optimized}.so +%{dynload_dir}/_heapq.%{SOABI_optimized}.so +%{dynload_dir}/_json.%{SOABI_optimized}.so +%{dynload_dir}/_lsprof.%{SOABI_optimized}.so +%{dynload_dir}/_lzma.%{SOABI_optimized}.so +%{dynload_dir}/_multibytecodec.%{SOABI_optimized}.so +%{dynload_dir}/_multiprocessing.%{SOABI_optimized}.so +%{dynload_dir}/_opcode.%{SOABI_optimized}.so +%{dynload_dir}/_pickle.%{SOABI_optimized}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_optimized}.so +%{dynload_dir}/_random.%{SOABI_optimized}.so +%{dynload_dir}/_socket.%{SOABI_optimized}.so +%{dynload_dir}/_sqlite3.%{SOABI_optimized}.so +%{dynload_dir}/_ssl.%{SOABI_optimized}.so +%{dynload_dir}/_struct.%{SOABI_optimized}.so +%{dynload_dir}/array.%{SOABI_optimized}.so +%{dynload_dir}/audioop.%{SOABI_optimized}.so +%{dynload_dir}/binascii.%{SOABI_optimized}.so 
+%{dynload_dir}/cmath.%{SOABI_optimized}.so +%{dynload_dir}/_datetime.%{SOABI_optimized}.so +%{dynload_dir}/fcntl.%{SOABI_optimized}.so +%{dynload_dir}/grp.%{SOABI_optimized}.so +%{dynload_dir}/math.%{SOABI_optimized}.so +%{dynload_dir}/mmap.%{SOABI_optimized}.so +%{dynload_dir}/nis.%{SOABI_optimized}.so +%{dynload_dir}/ossaudiodev.%{SOABI_optimized}.so +%{dynload_dir}/parser.%{SOABI_optimized}.so +%{dynload_dir}/pyexpat.%{SOABI_optimized}.so +%{dynload_dir}/readline.%{SOABI_optimized}.so +%{dynload_dir}/resource.%{SOABI_optimized}.so +%{dynload_dir}/select.%{SOABI_optimized}.so +%{dynload_dir}/spwd.%{SOABI_optimized}.so +%{dynload_dir}/syslog.%{SOABI_optimized}.so +%{dynload_dir}/termios.%{SOABI_optimized}.so +%{dynload_dir}/_testmultiphase.%{SOABI_optimized}.so +%{dynload_dir}/unicodedata.%{SOABI_optimized}.so +%{dynload_dir}/xxlimited.%{SOABI_optimized}.so +%{dynload_dir}/zlib.%{SOABI_optimized}.so + +%dir %{pylibdir}/site-packages/ +%dir %{pylibdir}/site-packages/__pycache__/ +%{pylibdir}/site-packages/README.txt +%{pylibdir}/*.py +%dir %{pylibdir}/__pycache__/ +%{pylibdir}/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/collections/ +%dir %{pylibdir}/collections/__pycache__/ +%{pylibdir}/collections/*.py +%{pylibdir}/collections/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/ctypes/ +%dir %{pylibdir}/ctypes/__pycache__/ +%{pylibdir}/ctypes/*.py +%{pylibdir}/ctypes/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/ctypes/macholib + +%{pylibdir}/curses + +%dir %{pylibdir}/dbm/ +%dir %{pylibdir}/dbm/__pycache__/ +%{pylibdir}/dbm/*.py +%{pylibdir}/dbm/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/distutils/ +%dir %{pylibdir}/distutils/__pycache__/ +%{pylibdir}/distutils/*.py +%{pylibdir}/distutils/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/distutils/README +%{pylibdir}/distutils/command +%exclude %{pylibdir}/distutils/command/wininst-*.exe + +%dir %{pylibdir}/email/ +%dir %{pylibdir}/email/__pycache__/ +%{pylibdir}/email/*.py 
+%{pylibdir}/email/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/email/mime +%doc %{pylibdir}/email/architecture.rst + +%{pylibdir}/encodings + +%{pylibdir}/html +%{pylibdir}/http + +%dir %{pylibdir}/importlib/ +%dir %{pylibdir}/importlib/__pycache__/ +%{pylibdir}/importlib/*.py +%{pylibdir}/importlib/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/json/ +%dir %{pylibdir}/json/__pycache__/ +%{pylibdir}/json/*.py +%{pylibdir}/json/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/logging +%{pylibdir}/multiprocessing + +%dir %{pylibdir}/sqlite3/ +%dir %{pylibdir}/sqlite3/__pycache__/ +%{pylibdir}/sqlite3/*.py +%{pylibdir}/sqlite3/__pycache__/*%{bytecode_suffixes} + +%exclude %{pylibdir}/turtle.py +%exclude %{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} + +%{pylibdir}/urllib +%{pylibdir}/xml + +%if "%{_lib}" == "lib64" +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever} +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages/__pycache__/ +%endif + +# "Makefile" and the config-32/64.h file are needed by +# distutils/sysconfig.py:_init_posix(), so we include them in the core +# package, along with their parent directories (bug 531901): +%dir %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/ +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%dir %{_includedir}/python%{LDVERSION_optimized}/ +%{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} + +%{_libdir}/%{py_INSTSONAME_optimized} +%{_libdir}/libpython3.so + +%files devel + + +%files -n platform-python-devel +%{_bindir}/2to3 +# TODO: Remove 2to3-3.7 once rebased to 3.7 +%{_bindir}/2to3-%{pybasever} +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/* +%exclude %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%exclude %{pylibdir}/distutils/command/wininst-*.exe +%{_includedir}/python%{LDVERSION_optimized}/*.h +%exclude 
%{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} +%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit + +%exclude %{_bindir}/python3-config +%exclude %{_bindir}/python%{pybasever}-config +%exclude %{_bindir}/python%{LDVERSION_optimized}-config +%exclude %{_bindir}/python%{LDVERSION_optimized}-*-config +%{_libexecdir}/platform-python-config +%{_libexecdir}/platform-python%{pybasever}-config +%{_libexecdir}/platform-python%{LDVERSION_optimized}-config +%{_libexecdir}/platform-python%{LDVERSION_optimized}-*-config + +%{_bindir}/pathfix.py +%{_libdir}/libpython%{LDVERSION_optimized}.so +%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}.pc +%{_libdir}/pkgconfig/python-%{pybasever}.pc +%{_libdir}/pkgconfig/python3.pc + +%files idle +%{_bindir}/idle%{pybasever} +%{pylibdir}/idlelib +%{_metainfodir}/idle3.appdata.xml +%{_datadir}/applications/idle3.desktop +%{_datadir}/icons/hicolor/*/apps/idle3.* +%ghost %{_bindir}/idle3 + +%files tkinter +%{pylibdir}/tkinter +%exclude %{pylibdir}/tkinter/test +%{dynload_dir}/_tkinter.%{SOABI_optimized}.so +%{pylibdir}/turtle.py +%{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} +%dir %{pylibdir}/turtledemo +%{pylibdir}/turtledemo/*.py +%{pylibdir}/turtledemo/*.cfg +%dir %{pylibdir}/turtledemo/__pycache__/ +%{pylibdir}/turtledemo/__pycache__/*%{bytecode_suffixes} + +%files test +%{pylibdir}/ctypes/test +%{pylibdir}/distutils/tests +%{pylibdir}/sqlite3/test +%{pylibdir}/test +%{dynload_dir}/_ctypes_test.%{SOABI_optimized}.so +%{dynload_dir}/_testbuffer.%{SOABI_optimized}.so +%{dynload_dir}/_testcapi.%{SOABI_optimized}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_optimized}.so +%{pylibdir}/lib2to3/tests +%{pylibdir}/tkinter/test +%{pylibdir}/unittest/test + +# stuff already owned by the libs subpackage +# test requires libs, so we are safe not owning those dirs +%exclude %dir %{pylibdir}/test/ +%exclude %dir %{pylibdir}/test/__pycache__/ +%exclude %{pylibdir}/test/__init__.py +%exclude 
%{pylibdir}/test/__pycache__/__init__%{bytecode_suffixes} +%exclude %{pylibdir}/test/support/ + +# We don't bother splitting the debug build out into further subpackages: +# if you need it, you're probably a developer. + +# Hence the manifest is the combination of analogous files in the manifests of +# all of the other subpackages + +%if %{with debug_build} +%files -n platform-python-debug +# Analog of the core subpackage's files: +%exclude %{_bindir}/python%{LDVERSION_debug} +%{_libexecdir}/platform-python%{LDVERSION_debug} +%exclude %{_bindir}/python3-debug +%{_libexecdir}/platform-python-debug + +# Analog of the -libs subpackage's files: +# ...with debug builds of the built-in "extension" modules: + +%{dynload_dir}/_blake2.%{SOABI_debug}.so +%{dynload_dir}/_sha3.%{SOABI_debug}.so +%{dynload_dir}/_hmacopenssl.%{SOABI_debug}.so + +%{dynload_dir}/_asyncio.%{SOABI_debug}.so +%{dynload_dir}/_bisect.%{SOABI_debug}.so +%{dynload_dir}/_bz2.%{SOABI_debug}.so +%{dynload_dir}/_codecs_cn.%{SOABI_debug}.so +%{dynload_dir}/_codecs_hk.%{SOABI_debug}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_debug}.so +%{dynload_dir}/_codecs_jp.%{SOABI_debug}.so +%{dynload_dir}/_codecs_kr.%{SOABI_debug}.so +%{dynload_dir}/_codecs_tw.%{SOABI_debug}.so +%{dynload_dir}/_crypt.%{SOABI_debug}.so +%{dynload_dir}/_csv.%{SOABI_debug}.so +%{dynload_dir}/_ctypes.%{SOABI_debug}.so +%{dynload_dir}/_curses.%{SOABI_debug}.so +%{dynload_dir}/_curses_panel.%{SOABI_debug}.so +%{dynload_dir}/_dbm.%{SOABI_debug}.so +%{dynload_dir}/_decimal.%{SOABI_debug}.so +%{dynload_dir}/_elementtree.%{SOABI_debug}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_debug}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_debug}.so +%{dynload_dir}/_heapq.%{SOABI_debug}.so +%{dynload_dir}/_json.%{SOABI_debug}.so +%{dynload_dir}/_lsprof.%{SOABI_debug}.so +%{dynload_dir}/_lzma.%{SOABI_debug}.so +%{dynload_dir}/_multibytecodec.%{SOABI_debug}.so +%{dynload_dir}/_multiprocessing.%{SOABI_debug}.so +%{dynload_dir}/_opcode.%{SOABI_debug}.so 
+%{dynload_dir}/_pickle.%{SOABI_debug}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_debug}.so +%{dynload_dir}/_random.%{SOABI_debug}.so +%{dynload_dir}/_socket.%{SOABI_debug}.so +%{dynload_dir}/_sqlite3.%{SOABI_debug}.so +%{dynload_dir}/_ssl.%{SOABI_debug}.so +%{dynload_dir}/_struct.%{SOABI_debug}.so +%{dynload_dir}/array.%{SOABI_debug}.so +%{dynload_dir}/audioop.%{SOABI_debug}.so +%{dynload_dir}/binascii.%{SOABI_debug}.so +%{dynload_dir}/cmath.%{SOABI_debug}.so +%{dynload_dir}/_datetime.%{SOABI_debug}.so +%{dynload_dir}/fcntl.%{SOABI_debug}.so +%{dynload_dir}/grp.%{SOABI_debug}.so +%{dynload_dir}/math.%{SOABI_debug}.so +%{dynload_dir}/mmap.%{SOABI_debug}.so +%{dynload_dir}/nis.%{SOABI_debug}.so +%{dynload_dir}/ossaudiodev.%{SOABI_debug}.so +%{dynload_dir}/parser.%{SOABI_debug}.so +%{dynload_dir}/pyexpat.%{SOABI_debug}.so +%{dynload_dir}/readline.%{SOABI_debug}.so +%{dynload_dir}/resource.%{SOABI_debug}.so +%{dynload_dir}/select.%{SOABI_debug}.so +%{dynload_dir}/spwd.%{SOABI_debug}.so +%{dynload_dir}/syslog.%{SOABI_debug}.so +%{dynload_dir}/termios.%{SOABI_debug}.so +%{dynload_dir}/_testmultiphase.%{SOABI_debug}.so +%{dynload_dir}/unicodedata.%{SOABI_debug}.so +%{dynload_dir}/zlib.%{SOABI_debug}.so + +# No need to split things out the "Makefile" and the config-32/64.h file as we +# do for the regular build above (bug 531901), since they're all in one package +# now; they're listed below, under "-devel": + +%{_libdir}/%{py_INSTSONAME_debug} + +# Analog of the -devel subpackage's files: +%{pylibdir}/config-%{LDVERSION_debug}-%{_arch}-linux%{_gnu} +%{_includedir}/python%{LDVERSION_debug} + +%exclude %{_bindir}/python%{LDVERSION_debug}-config +%exclude %{_bindir}/python%{LDVERSION_debug}-*-config +%{_libexecdir}/platform-python%{LDVERSION_debug}-config +%{_libexecdir}/platform-python%{LDVERSION_debug}-*-config + +%{_libdir}/libpython%{LDVERSION_debug}.so +%{_libdir}/libpython%{LDVERSION_debug}.so.1.0 +%{_libdir}/pkgconfig/python-%{LDVERSION_debug}.pc + +# Analog of the 
-tools subpackage's files: +# None for now; we could build precanned versions that have the appropriate +# shebang if needed + +# Analog of the tkinter subpackage's files: +%{dynload_dir}/_tkinter.%{SOABI_debug}.so + +# Analog of the -test subpackage's files: +%{dynload_dir}/_ctypes_test.%{SOABI_debug}.so +%{dynload_dir}/_testbuffer.%{SOABI_debug}.so +%{dynload_dir}/_testcapi.%{SOABI_debug}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_debug}.so + +%endif # with debug_build + +# We put the debug-gdb.py file inside /usr/lib/debug to avoid noise from ldconfig +# See https://bugzilla.redhat.com/show_bug.cgi?id=562980 +# +# The /usr/lib/rpm/redhat/macros defines %%__debug_package to use +# debugfiles.list, and it appears that everything below /usr/lib/debug and +# (/usr/src/debug) gets added to this file (via LISTFILES) in +# /usr/lib/rpm/find-debuginfo.sh +# +# Hence by installing it below /usr/lib/debug we ensure it is added to the +# -debuginfo subpackage +# (if it doesn't, then the rpmbuild ought to fail since the debug-gdb.py +# payload file would be unpackaged) + +# Workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1476593 +%undefine _debuginfo_subpackages + +# ====================================================== +# Finally, the changelog: +# ====================================================== + +%changelog +* Wed Nov 27 2019 Charalampos Stratakis - 3.6.8-23 +- Modify the test suite to better handle disabled SSL/TLS versions and FIPS mode +- Use OpenSSL's DRBG and disable os.getrandom() function in FIPS mode +Resolves: rhbz#1754028, rhbz#1754027, rhbz#1754026, rhbz#1774471 + +* Thu Oct 24 2019 Tomas Orsava - 3.6.8-22 +- Changed Requires into Recommends for python3-pip to allow a lower RHEL8 + footprint for containers and other minimal environments +Resolves: rhbz#1756217 + +* Wed Oct 16 2019 Tomas Orsava - 3.6.8-21 +- Patch 329 (FIPS) modified: Added workaround for mod_ssl: + Skip error checking in _Py_hashlib_fips_error +Resolves: rhbz#1760106 + 
+* Mon Oct 14 2019 Charalampos Stratakis - 3.6.8-20 +- Security fix for CVE-2019-16056 +Resolves: rhbz#1750776 + +* Wed Oct 09 2019 Charalampos Stratakis - 3.6.8-19 +- Skip windows specific test_get_exe_bytes test case and enable test_distutils +Resolves: rhbz#1754040 + +* Mon Oct 07 2019 Charalampos Stratakis - 3.6.8-18 +- Reduce the number of tests running during the profile guided optimizations build +- Enable profile guided optimizations for all the supported architectures +Resolves: rhbz#1749576 + +* Mon Oct 07 2019 Charalampos Stratakis - 3.6.8-17 +- Security fix for CVE-2018-20852 +Resolves: rhbz#1741553 + +* Fri Oct 04 2019 Charalampos Stratakis - 3.6.8-16 +- Properly pass the -Og optimization flag to the debug build +Resolves: rhbz#1712977 and rhbz#1714733 + +* Thu Aug 29 2019 Tomas Orsava - 3.6.8-15 +- Patch 329 that adds support for OpenSSL FIPS mode has been improved and + bugfixed +Resolves: rhbz#1744670 rhbz#1745499 rhbz#1745685 + +* Tue Aug 06 2019 Tomas Orsava - 3.6.8-14 +- Adding a new patch 329 that adds support for OpenSSL FIPS mode +- Explicitly listing man pages in files section to fix an RPM warning +Resolves: rhbz#1731424 + +* Tue Jul 02 2019 Charalampos Stratakis - 3.6.8-13 +- Do not set PHA verify flag on client side (rhbz#1725721) +- Enable TLS 1.3 post-handshake authentication in http.client (rhbz#1671353) + +* Fri Jun 21 2019 Miro Hrončok - 3.6.8-12 +- Use RPM built wheels of pip and setuptools in ensurepip instead of our rewheel patch +- Require platform-python-setuptools from platform-python-devel to prevent packaging errors +Resolves: rhbz#1701286 + +* Fri Jun 07 2019 Charalampos Stratakis - 3.6.8-11 +- Fix for CVE-2019-10160 +Resolves: rhbz#1689318 + +* Wed May 29 2019 Charalampos Stratakis - 3.6.8-10 +- Security fix for CVE-2019-9948 +Resolves: rhbz#1714643 + +* Tue May 21 2019 Miro Hrončok - 3.6.8-9 +- Reduced default build flags used to build extension modules + https://fedoraproject.org/wiki/Changes/Python_Extension_Flags 
+Resolves: rhbz#1634784 + +* Mon May 13 2019 Tomas Orsava - 3.6.8-8 +- gzip the unversioned-python man page +Resolves: rhbz#1665514 + +* Wed May 08 2019 Charalampos Stratakis - 3.6.8-7 +- Disallow control chars in http URLs +- Fixes CVE-2019-9740 and CVE-2019-9947 +Resolves: rhbz#1704365 and rhbz#1703531 + +* Fri May 03 2019 Charalampos Stratakis - 3.6.8-6 +- Updated fix for CVE-2019-9636 (rhbz#1689318) + +* Wed Apr 3 2019 Miro Hrončok - 3.6.8-5 +- Security fix for CVE-2019-9636 (rhbz#1689318) + +* Wed Mar 20 2019 Victor Stinner - 3.6.8-4 +- Security fix for CVE-2019-5010 (rhbz#1666789) + +* Wed Mar 13 2019 Victor Stinner - 3.6.8-3 +- Fix test_tarfile on ppc64 (rhbz#1639490) + +* Fri Jan 18 2019 Victor Stinner - 3.6.8-2 +- test_ssl fixes for TLS 1.3 and OpenSSL 1.1.1 (rhbz#1639531) + +* Wed Jan 09 2019 Charalampos Stratakis - 3.6.8-1 +- Update to 3.6.8 +Resolves: rhbz#1658271 + +* Wed Nov 21 2018 Miro Hrončok - 3.6.7-4 +- Make sure the entire test.support module is in python3-libs +Resolves: rhbz#1651215 + +* Tue Nov 13 2018 Charalampos Stratakis - 3.6.7-3 +- Add choices for sort option of cProfile for better output +Resolves: rhbz#1640151 + +* Mon Nov 05 2018 Tomas Orsava - 3.6.7-2 +- Switch to requiring platform-python-pip/setuptools instead of the python3- + versions +- Resolves: rhbz#1638836 + +* Thu Oct 25 2018 Charalampos Stratakis - 3.6.7-1 +- Update to 3.6.7 (rhbz#1627739) +- Re-enable test_gdb (rhbz#1639536) +- Re-enable test_faulthandler (rhbz#1640147) + +* Tue Oct 16 2018 Charalampos Stratakis - 3.6.6-19 +- Add compatibility fixes for openssl 1.1.1 and tls 1.3 +Resolves: rhbz#1610023 + +* Tue Oct 16 2018 Charalampos Stratakis - 3.6.6-18 +- Fix test_dbm_gnu for gdbm 1.15 which fails on ppc64le +Resolves: rhbz#1638710 + +* Sun Oct 14 2018 Tomas Orsava - 3.6.6-17 +- Add Requires (/post/postun) on /usr/sbin/alternatives +- Resolves: rhbz#1632625 + +* Fri Oct 12 2018 Petr Viktorin - 3.6.6-16 +- Remove Windows binaries from the source archive +- Resolves: 
rhbz#1633219 + +* Wed Oct 10 2018 Petr Viktorin - 3.6.6-15 +- Compile the debug build with -Og rather than -O0 +- Resolves: rhbz#1624162 + +* Wed Oct 10 2018 Miro Hrončok - 3.6.6-14 +- Security fix for CVE-2018-14647 +- Resolves: rhbz#1632096 + +* Sun Oct 07 2018 Tomas Orsava - 3.6.6-13 +- Stop providing the `python3` and `python3-debug` names from the + platform-python/-debug subpackages +- The `python3` and `python3-debug` names are now provided from the python36 + component +- Conflict with older versions of `python3` and `python3-debug` +- Related: rhbz#1619153 + +* Tue Oct 02 2018 Tomas Orsava - 3.6.6-12.2 +- Fix update of idle3's alternative symlink +- Resolves: rhbz#1632625 + +* Mon Oct 01 2018 Tomas Orsava - 3.6.6-12.1 +- Add idle3 to the alternatives system +- Resolves: rhbz#1632625 + +* Fri Sep 28 2018 Tomas Orsava - 3.6.6-12 +- Rename the python3-debug subpackage to platform-python-debug +- Provide the `python3-debug` name for backwards compatibility until it's taken + over by the python36 component +- Rename the python3-libs-devel subpackage to platform-python-devel for + symmetry with the `platform-python` and `platform-python-debug` package +- Add symlink /usr/libexec/platform-python-debug that was mistakenly omitted +- Related: rhbz#1619153 + +* Fri Sep 28 2018 Tomas Orsava - 3.6.6-11 +- Implement `alternatives` for choosing /usr/bin/python +- Provide the default `no-python` alternative +- Resolves: rhbz#1632625 + +* Wed Sep 19 2018 Tomas Orsava - 3.6.6-10 +- Provide the `python3` name with _isa until some packages can be rebuilt +- Resolves: rhbz#1619153 + +* Tue Sep 11 2018 Tomas Orsava - 3.6.6-9 +- Rename the python3 subpackage to platform-python +- Provide the `python3` name for backwards compatibility until it's taken over + by the python36 component +- The python36 component that contains /usr/bin/python3 will Provide the + name `python3` in its upcoming update +- Resolves: rhbz#1619153 + +* Tue Sep 04 2018 Lumír Balhar - 3.6.6-8 +- Remove 
/usr/bin/idle3 symlink +- Resolves: rhbz#1623811 + +* Wed Aug 15 2018 Lumír Balhar - 3.6.6-7 +- Remove 3 > 3.6 symlinks for pydoc and python manpage +- Resolves: rhbz#1615727 + +* Sun Aug 12 2018 Troy Dawson +- Disable %%check so package will build for Mass Rebuild +- Related: bug#1614611 + +* Mon Aug 06 2018 Petr Viktorin - 3.6.6-6 +- Make `devel` subpackage require python36-devel again + (and get /usr/bin/python3 and /usr/bin/python3-config from that). +- Remove /usr/bin/python3* executables +- Use pip36 instead of `pip3` + +* Fri Aug 03 2018 Petr Viktorin - 3.6.6-5 +- Fix the `devel` subpackage to require python3, rather than python36-devel, + and provide /usr/bin/python3-config itself. + +* Wed Aug 01 2018 Tomas Orsava - 3.6.6-4 +- Create the `libs-devel` subpackage and move `devel` contents there +- `devel` subpackage is only for the buildroot and requires `python36-devel` + to get /usr/bin/python3{,-config} symlinks there +- `devel` subpackage will not be shipped into RHEL8, only `libs-devel` will +- `debug` subpackage now runtime requires `libs-devel` instead of `devel` + +* Wed Aug 01 2018 Charalampos Stratakis - 3.6.6-3 +- Disable ssl related tests for now + +* Wed Jul 25 2018 Petr Kubat - 3.6.6-2 +- Rebuilt for gdbm + +* Thu Jul 19 2018 Charalampos Stratakis - 3.6.6-1 +- Update to Python 3.6.6 + +* Thu Jul 19 2018 Tomas Orsava - 3.6.5-7 +- Fix %%py_byte_compile macro: when invoked with a Python 2 binary it also + mistakenly ran py3_byte_compile + +* Thu Jul 19 2018 Charalampos Stratakis - 3.6.5-6 +- Do not include the unversioned pyvenv binary in the rpm + +* Tue Jul 03 2018 Tomas Orsava - 3.6.5-5 +- Remove old system-python Provides/Obsoletes/symlinks/patches from Fedora + +* Wed May 09 2018 Tomas Orsava - 3.6.5-4 +- Switch all shebangs to point to the Platform-Python executables + +* Wed May 09 2018 Tomas Orsava - 3.6.5-3 +- Platform-Python: Rebase implementation from RHEL8 Alpha: +- Move the main executable to /usr/libexec/platform-python +- Move 
/usr/bin/python*-config and /usr/bin/pythonX.Ym scripts to /usr/libexec/ +- Provide symlink to the main executable and other scripts from /usr/bin/, + these will be later shipped only in the python36 module + +* Wed May 09 2018 Tomas Orsava - 3.6.5-2 +- Remove Obsoletes and Provides that are not relevant for RHEL + +* Thu Mar 29 2018 Charalampos Stratakis - 3.6.5-1 +- Update to 3.6.5 + +* Sat Mar 24 2018 Miro Hrončok - 3.6.4-20 +- Fix broken macro invocation and broken building of C Python extensions +Resolves: rhbz#1560103 + +* Fri Mar 16 2018 Miro Hrončok - 3.6.4-19 +- Add -n option for pathfix.py +Resolves: rhbz#1546990 + +* Thu Mar 15 2018 Miro Hrončok - 3.6.4-18 +- Fix the py_byte_compile macro to work on Python 2 +- Remove the pybytecompile macro file from the flat package +Resolves: rhbz#1484993 + +* Tue Mar 13 2018 Charalampos Stratakis - 3.6.4-17 +- Do not send IP addresses in SNI TLS extension + +* Sat Feb 24 2018 Florian Weimer - 3.6.4-16 +- Rebuild with new LDFLAGS from redhat-rpm-config + +* Wed Feb 21 2018 Miro Hrončok - 3.6.4-15 +- Filter out automatic /usr/bin/python3.X requirement, + recommend the main package from libs instead +Resolves: rhbz#1547131 + +* Thu Feb 15 2018 Iryna Shcherbina - 3.6.4-14 +- Remove the python3-tools package (#rhbz 1312030) +- Move /usr/bin/2to3 to python3-devel +- Move /usr/bin/idle and idlelib to python3-idle +- Provide python3-tools from python3-idle + +* Fri Feb 09 2018 Igor Gnatenko - 3.6.4-13 +- Escape macros in %%changelog + +* Fri Feb 02 2018 Michal Cyprian - 3.6.4-12 +- Remove sys.executable check from change-user-install-location patch +Resolves: rhbz#1532287 + +* Thu Feb 01 2018 Charalampos Stratakis - 3.6.4-11 +- Define TLS cipher suite on build time. 
+ +* Wed Jan 31 2018 Tomas Orsava - 3.6.4-10 +- Disable test_gdb for all arches and test_buffer for ppc64le in anticipation + of the F28 mass rebuild +- Re-enable these tests after the mass rebuild when they can be properly + addressed + +* Tue Jan 23 2018 Charalampos Stratakis - 3.6.4-9 +- Restore the PyExc_RecursionErrorInst public symbol + +* Tue Jan 23 2018 Björn Esser - 3.6.4-8 +- Add patch to explicitly link _ctypes module with -ldl (#1537489) +- Refactored patch for libxcrypt +- Re-enable strict symbol checks in the link editor + +* Mon Jan 22 2018 Björn Esser - 3.6.4-7 +- Add patch for libxcrypt +- Disable strict symbol checks in the link editor + +* Sat Jan 20 2018 Björn Esser - 3.6.4-6 +- Rebuilt for switch to libxcrypt + +* Fri Jan 19 2018 Charalampos Stratakis - 3.6.4-5 +- Fix localeconv() encoding for LC_NUMERIC + +* Thu Jan 18 2018 Igor Gnatenko - 3.6.4-4 +- R: gdbm-devel → R: gdbm for python3-libs + +* Wed Jan 17 2018 Miro Hrončok - 3.6.4-3 +- Require large enough gdbm (fixup for previous bump) + +* Tue Jan 16 2018 Charalampos Stratakis - 3.6.4-2 +- Rebuild for reverted gdbm 1.13 on Fedora 27 + +* Mon Jan 15 2018 Charalampos Stratakis - 3.6.4-1 +- Update to version 3.6.4 + +* Fri Jan 12 2018 Charalampos Stratakis - 3.6.3-5 +- Fix the compilation of the nis module. 
+ +* Tue Nov 21 2017 Miro Hrončok - 3.6.3-4 +- Raise the release of platform-python obsoletes for better maintainability + +* Wed Nov 15 2017 Miro Hrončok - 3.6.3-3 +- Obsolete platform-python and its subpackages + +* Mon Oct 09 2017 Charalampos Stratakis - 3.6.3-2 +- Fix memory corruption due to allocator mix +Resolves: rhbz#1498207 + +* Fri Oct 06 2017 Charalampos Stratakis - 3.6.3-1 +- Update to Python 3.6.3 + +* Fri Sep 29 2017 Miro Hrončok - 3.6.2-19 +- Move pathfix.py to bindir, https://github.com/fedora-python/python-rpm-porting/issues/24 +- Make the -devel package require redhat-rpm-config +Resolves: rhbz#1496757 + +* Wed Sep 13 2017 Iryna Shcherbina - 3.6.2-18 +- Fix /usr/bin/env dependency from python3-tools +Resolves: rhbz#1482118 + +* Wed Sep 06 2017 Iryna Shcherbina - 3.6.2-17 +- Include `-g` in the flags sent to the linker (LDFLAGS) +Resolves: rhbz#1483222 + +* Tue Sep 05 2017 Petr Viktorin - 3.6.2-16 +- Specfile cleanup +- Make the main description also applicable to the SRPM +- Add audiotest.au to the test package + +* Fri Sep 01 2017 Miro Hrončok - 3.6.2-15 +- Remove %%{pylibdir}/Tools/scripts/2to3 + +* Fri Sep 01 2017 Miro Hrončok - 3.6.2-14 +- Expat >= 2.1.0 is everywhere, remove explicit requires +- Conditionalize systemtap-devel BuildRequires +- For consistency, require /usr/sbin/ifconfig instead of net-tools + +* Mon Aug 28 2017 Petr Viktorin - 3.6.2-13 +- Rename patch files to be consistent +- Run autotools to generate the configure script before building +- Merge lib64 patches (104 into 102) +- Skip test_bdist_rpm using test config rather than a patch (removes patch 137) +- Remove patches 157 and 186, which had test changes left over after upstreaming +- Remove patch 188, a temporary workaround for hashlib tests +- Merge patches 180, 206, 243, 5001 (architecture naming) into new patch 274 +- Move python2-tools conflicts to tools subpackage (it was wrongly in tkinter) + +* Mon Aug 28 2017 Michal Cyprian - 3.6.2-12 +- Use python3 style of 
calling super() without arguments in rpath + patch to prevent recursion in UnixCCompiler subclasses +Resolves: rhbz#1458122 + +* Mon Aug 21 2017 Petr Viktorin - 3.6.2-11 +- Add bcond for --without optimizations +- Reword package descriptions +- Remove Group declarations +- Skip failing test_float_with_comma + +* Mon Aug 21 2017 Miro Hrončok - 3.6.2-10 +- Remove system-python, see https://fedoraproject.org/wiki/Changes/Platform_Python_Stack + +* Wed Aug 16 2017 Petr Viktorin - 3.6.2-9 +- Use bconds for configuring the build +- Reorganize the initial sections + +* Wed Aug 16 2017 Miro Hrončok - 3.6.2-8 +- Have /usr/bin/2to3 (rhbz#1111275) +- Provide 2to3 and idle3, list them in summary and description (rhbz#1076401) + +* Fri Aug 11 2017 Michal Cyprian - 3.6.2-7 +- Revert "Add --executable option to install.py command" + This enhancement is currently not needed and it can possibly + collide with `pip --editable` option + +* Mon Aug 07 2017 Iryna Shcherbina - 3.6.2-6 +- Fix the "urllib FTP protocol stream injection" vulnerability +Resolves: rhbz#1478916 + +* Tue Aug 01 2017 Tomas Orsava - 3.6.2-5 +- Dropped BuildRequires on db4-devel which was useful for Python 2 (module + bsddb), however, no longer needed for Python 3 +- Tested building Python 3 with and without the dependency, all tests pass and + filelists of resulting RPMs are identical + +* Sun Jul 30 2017 Florian Weimer - 3.6.2-4 +- Do not generate debuginfo subpackages (#1476593) +- Rebuild with binutils fix for ppc64le (#1475636) + +* Thu Jul 27 2017 Fedora Release Engineering - 3.6.2-3 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_27_Mass_Rebuild + +* Tue Jul 25 2017 Charalampos Stratakis - 3.6.2-2 +- Make test_asyncio to not depend on the current SIGHUP signal handler. 
+ +* Tue Jul 18 2017 Charalampos Stratakis - 3.6.2-1 +- Update to Python 3.6.2 + +* Tue Jun 27 2017 Charalampos Stratakis - 3.6.1-10 +- Update to the latest upstream implementation of PEP 538 + +* Mon Jun 26 2017 Michal Cyprian - 3.6.1-9 +- Make pip and distutils in user environment install into separate location + +* Fri Jun 23 2017 Charalampos Stratakis - 3.6.1-8 +- Fix test_alpn_protocols from test_ssl +- Do not require rebundled setuptools dependencies + +* Tue May 16 2017 Tomas Orsava - 3.6.1-7 +- Added a dependency to the devel subpackage on python3-rpm-generators which + have been excised out of rpm-build +- Updated notes on bootstrapping Python on top of this specfile accordingly +- Involves: rhbz#1410631, rhbz#1444925 + +* Tue May 09 2017 Charalampos Stratakis - 3.6.1-6 +- Enable profile guided optimizations for x86_64 and i686 architectures +- Update to a newer implementation of PEP 538 +- Update description to reflect that Python 3 is now the default Python + +* Fri May 05 2017 Charalampos Stratakis - 3.6.1-5 +- Update PEP 538 to the latest upstream implementation + +* Tue Apr 18 2017 Charalampos Stratakis - 3.6.1-4 +- Enable link time optimizations +- Move windows executables to the devel subpackage (rhbz#1426257) + +* Thu Apr 13 2017 Tomas Orsava - 3.6.1-3 +- Rename python3.Xdm-config script from -debug to be arch specific +Resolves: rhbz#1179073 + +* Wed Apr 05 2017 Charalampos Stratakis - 3.6.1-2 +- Install the Makefile in its proper location (rhbz#1438219) + +* Wed Mar 22 2017 Iryna Shcherbina - 3.6.1-1 +- Update to version 3.6.1 final + +* Tue Mar 21 2017 Tomas Orsava - 3.6.1-0.2.rc1 +- Fix syntax error in %%py_byte_compile macro (rhbz#1433569) + +* Thu Mar 16 2017 Iryna Shcherbina - 3.6.1-0.1.rc1 +- Update to Python 3.6.1 release candidate 1 +- Add patch 264 to skip a known test failure on aarch64 + +* Fri Mar 10 2017 Charalampos Stratakis - 3.6.0-21 +- Use proper command line parsing in _testembed +- Backport of PEP 538: Coercing the legacy C 
locale to a UTF-8 based locale + https://fedoraproject.org/wiki/Changes/python3_c.utf-8_locale + +* Mon Feb 27 2017 Charalampos Stratakis - 3.6.0-20 +- Add desktop entry and appdata.xml file for IDLE 3 (rhbz#1392049) + +* Fri Feb 24 2017 Michal Cyprian - 3.6.0-19 +- Revert "Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables..." to prevent build failures + of packages using alternate build tools + +* Tue Feb 21 2017 Michal Cyprian - 3.6.0-18 +- Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables +- Use new %%_module_build macro + +* Fri Feb 17 2017 Michal Cyprian - 3.6.0-13 +- Add --executable option to install.py command + +* Wed Feb 15 2017 Charalampos Stratakis - 3.6.0-12 +- BuildRequire the new dependencies of setuptools when rewheel mode is enabled +in order for the virtualenvs to work properly + +* Sat Feb 11 2017 Fedora Release Engineering - 3.6.0-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild + +* Wed Feb 01 2017 Stephen Gallagher - 3.6.0-10 +- Add missing %%license macro + +* Thu Jan 26 2017 Tomas Orsava - 3.6.0-9 +- Modify the runtime dependency of python3-libs on system-python-libs again, + because previous attempt didn't work properly with dnf resolving mechanism + +* Wed Jan 25 2017 Tomas Orsava - 3.6.0-8 +- Modify the runtime dependency of python3-libs on system-python-libs to use + just the version and release number, but not the dist tag due to Modularity + +* Mon Jan 16 2017 Charalampos Stratakis - 3.6.0-7 +- Fix error check, so that Random.seed actually uses OS randomness (rhbz#1412275) +- Skip test_aead_aes_gcm during rpmbuild + +* Thu Jan 12 2017 Igor Gnatenko - 3.6.0-6 +- Rebuild for readline 7.x + +* Tue Jan 10 2017 Charalampos Stratakis - 3.6.0-5 +- Require glibc >= 2.24.90-26 for system-python-libs (rhbz#1410644) + +* Mon Jan 09 2017 Charalampos Stratakis - 3.6.0-4 +- Define HAVE_LONG_LONG as 1 for backwards compatibility + +* Thu Jan 05 2017 Miro 
Hrončok - 3.6.0-3 +- Don't blow up on EL7 kernel (random generator) (rhbz#1410175) + +* Tue Dec 27 2016 Charalampos Stratakis - 3.6.0-1 +- Update to Python 3.6.0 final + +* Fri Dec 09 2016 Charalampos Stratakis - 3.6.0-0.6.rc1 +- Enable rewheel + +* Wed Dec 07 2016 Charalampos Stratakis - 3.6.0-0.5.rc1 +- Update to Python 3.6.0 release candidate 1 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.6.0-0.4.b4 +- Update to Python 3.6.0 beta 4 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.5.2-7 +- Set to work with pip version 9.0.1 + +* Wed Oct 12 2016 Charalampos Stratakis - 3.5.2-6 +- Use proper patch numbering and base upstream branch for +porting ssl and hashlib modules to OpenSSL 1.1.0 +- Drop hashlib patch for now +- Add riscv64 arch to 64bit and no-valgrind arches + +* Tue Oct 11 2016 Tomáš Mráz - 3.5.2-5 +- Make it build with OpenSSL-1.1.0 based on upstream patch + +* Wed Sep 14 2016 Charalampos Stratakis - 3.5.2-4 +- Obsolete and Provide python35 package + +* Mon Sep 12 2016 Charalampos Stratakis - 3.5.2-3 +- Update %%py_byte_compile macro +- Remove unused configure flags (rhbz#1374357) + +* Fri Sep 09 2016 Tomas Orsava - 3.5.2-2 +- Updated .pyc 'bytecompilation with the newly installed interpreter' to also + recompile optimized .pyc files +- Removed .pyo 'bytecompilation with the newly installed interpreter', as .pyo + files are no more +- Resolves rhbz#1373635 + +* Mon Aug 15 2016 Tomas Orsava - 3.5.2-1 +- Rebased to version 3.5.2 +- Set to work with pip version 8.1.2 +- Removed patches 207, 237, 241 as fixes are already contained in Python 3.5.2 +- Removed arch or environment specific patches 194, 196, 203, and 208 + as test builds indicate they are no longer needed +- Updated patches 102, 146, and 242 to work with the new Python codebase +- Removed patches 200, 201, 5000 which weren't even being applied + +* Tue Aug 09 2016 Charalampos Stratakis - 3.5.1-15 +- Fix for CVE-2016-1000110 HTTPoxy attack +- SPEC file cleanup + +* Mon Aug 01 2016 Michal Toman - 
3.5.1-14 +- Build properly on MIPS + +* Tue Jul 19 2016 Fedora Release Engineering - 3.5.1-13 +- https://fedoraproject.org/wiki/Changes/Automatic_Provides_for_Python_RPM_Packages + +* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-12 +- Refactor patch for properly fixing CVE-2016-5636 + +* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-11 +- Fix test_pyexpat failure with Expat version of 2.2.0 + +* Fri Jul 08 2016 Miro Hrončok - 3.5.1-10 +- Move xml module to system-python-libs + +* Thu Jun 16 2016 Tomas Orsava - 3.5.1-9 +- Fix for: CVE-2016-0772 python: smtplib StartTLS stripping attack +- Raise an error when STARTTLS fails +- rhbz#1303647: https://bugzilla.redhat.com/show_bug.cgi?id=1303647 +- rhbz#1346345: https://bugzilla.redhat.com/show_bug.cgi?id=1346345 +- Fixed upstream: https://hg.python.org/cpython/rev/d590114c2394 + +* Mon Jun 13 2016 Charalampos Stratakis - 3.5.1-8 +- Added patch for fixing possible integer overflow and heap corruption in zipimporter.get_data() + +* Fri Mar 04 2016 Miro Hrončok - 3.5.1-7 +- Move distutils to system-python-libs + +* Wed Feb 24 2016 Robert Kuska - 3.5.1-6 +- Provide python3-enum34 + +* Fri Feb 19 2016 Miro Hrončok - 3.5.1-5 +- Provide System Python packages and macros + +* Thu Feb 04 2016 Fedora Release Engineering - 3.5.1-4 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_24_Mass_Rebuild + +* Wed Jan 13 2016 Orion Poplwski - 3.5.1-2 +- Drop python3 macros, require python/python3-rpm-macros + +* Mon Dec 14 2015 Robert Kuska - 3.5.1-1 +- Update to 3.5.1 +- Removed patch 199 and 207 (upstream) + +* Sun Nov 15 2015 Robert Kuska - 3.5.0-5 +- Remove versioned libpython from devel package + +* Fri Nov 13 2015 Than Ngo 3.5.0-4 +- add correct arch for ppc64/ppc64le to fix build failure + +* Wed Nov 11 2015 Robert Kuska - 3.5.0-3 +- Hide the private _Py_atomic_xxx symbols from public header + +* Wed Oct 14 2015 Robert Kuska - 3.5.0-2 +- Rebuild with wheel set to 1 + +* Tue Sep 15 2015 Matej Stuchlik - 3.5.0-1 +- Update to 3.5.0 
+ +* Mon Jun 29 2015 Thomas Spura - 3.4.3-4 +- python3-devel: Require python-macros for version independent macros such as + python_provide. See fpc#281 and fpc#534. + +* Thu Jun 18 2015 Fedora Release Engineering - 3.4.3-3 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_23_Mass_Rebuild + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-4 +- Use 1024bit DH key in test_ssl +- Use -O0 when compiling -debug build +- Update pip version variable to the version we actually ship + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-3 +- Make relocating Python by changing _prefix actually work +Resolves: rhbz#1231801 + +* Mon May 4 2015 Peter Robinson 3.4.3-2 +- Disable test_gdb on aarch64 (rhbz#1196181), it joins all other non x86 arches + +* Thu Mar 12 2015 Matej Stuchlik - 3.4.3-1 +- Updated to 3.4.3 +- BuildPython now accepts additional build options +- Temporarily disabled test_gdb on arm (rhbz#1196181) + +* Wed Feb 25 2015 Matej Stuchlik - 3.4.2-7 +- Fixed undefined behaviour in faulthandler which caused test to hang on x86_64 + (http://bugs.python.org/issue23433) + +* Sat Feb 21 2015 Till Maas - 3.4.2-6 +- Rebuilt for Fedora 23 Change + https://fedoraproject.org/wiki/Changes/Harden_all_packages_with_position-independent_code + +* Tue Feb 17 2015 Ville Skyttä - 3.4.2-5 +- Own systemtap dirs (#710733) + +* Mon Jan 12 2015 Dan Horák - 3.4.2-4 +- build with valgrind on ppc64le +- disable test_gdb on s390(x) until rhbz#1181034 is resolved + +* Tue Dec 16 2014 Robert Kuska - 3.4.2-3 +- New patches: 170 (gc asserts), 200 (gettext headers), + 201 (gdbm memory leak) + +* Thu Dec 11 2014 Robert Kuska - 3.4.2-2 +- OpenSSL disabled SSLv3 in SSLv23 method + +* Thu Nov 13 2014 Matej Stuchlik - 3.4.2-1 +- Update to 3.4.2 +- Refreshed patches: 156 (gdb autoload) +- Removed: 195 (Werror declaration), 197 (CVE-2014-4650) + +* Mon Nov 03 2014 Slavek Kabrda - 3.4.1-16 +- Fix CVE-2014-4650 - CGIHTTPServer URL handling +Resolves: rhbz#1113529 + +* Sun Sep 07 2014 Karsten Hopp 3.4.1-15 +- exclude 
test_gdb on ppc* (rhbz#1132488) + +* Thu Aug 21 2014 Slavek Kabrda - 3.4.1-14 +- Update rewheel patch with fix from https://github.com/bkabrda/rewheel/pull/1 + +* Sun Aug 17 2014 Fedora Release Engineering - 3.4.1-13 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_22_Mass_Rebuild + +* Sun Jun 8 2014 Peter Robinson 3.4.1-12 +- aarch64 has valgrind, just list those that don't support it + +* Sun Jun 08 2014 Fedora Release Engineering - 3.4.1-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild + +* Wed Jun 04 2014 Karsten Hopp 3.4.1-10 +- bump release and rebuild to link with the correct tcl/tk libs on ppcle + +* Tue Jun 03 2014 Matej Stuchlik - 3.4.1-9 +- Change paths to bundled projects in rewheel patch + +* Fri May 30 2014 Miro Hrončok - 3.4.1-8 +- In config script, use uname -m to write the arch + +* Thu May 29 2014 Dan Horák - 3.4.1-7 +- update the arch list where valgrind exists - %%power64 includes also + ppc64le which is not supported yet + +* Thu May 29 2014 Miro Hrončok - 3.4.1-6 +- Forward arguments to the arch specific config script +Resolves: rhbz#1102683 + +* Wed May 28 2014 Miro Hrončok - 3.4.1-5 +- Rename python3.Xm-config script to arch specific. +Resolves: rhbz#1091815 + +* Tue May 27 2014 Bohuslav Kabrda - 3.4.1-4 +- Use python3-*, not python-* runtime requires on setuptools and pip +- rebuild for tcl-8.6 + +* Tue May 27 2014 Matej Stuchlik - 3.4.1-3 +- Update the rewheel module + +* Mon May 26 2014 Miro Hrončok - 3.4.1-2 +- Fix multilib dependencies. 
+Resolves: rhbz#1091815 + +* Sun May 25 2014 Matej Stuchlik - 3.4.1-1 +- Update to Python 3.4.1 + +* Sun May 25 2014 Matej Stuchlik - 3.4.0-8 +- Fix test_gdb failure on ppc64le +Resolves: rhbz#1095355 + +* Thu May 22 2014 Miro Hrončok - 3.4.0-7 +- Add macro %%python3_version_nodots + +* Sun May 18 2014 Matej Stuchlik - 3.4.0-6 +- Disable test_faulthandler, test_gdb on aarch64 +Resolves: rhbz#1045193 + +* Fri May 16 2014 Matej Stuchlik - 3.4.0-5 +- Don't add Werror=declaration-after-statement for extension + modules through setup.py (PyBT#21121) + +* Mon May 12 2014 Matej Stuchlik - 3.4.0-4 +- Add setuptools and pip to Requires + +* Tue Apr 29 2014 Matej Stuchlik - 3.4.0-3 +- Point __os_install_post to correct brp-* files + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-2 +- Temporarily disable tests requiring SIGHUP (rhbz#1088233) + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-1 +- Update to Python 3.4 final +- Add patch adding the rewheel module +- Merge patches from master + +* Wed Jan 08 2014 Bohuslav Kabrda - 3.4.0-0.1.b2 +- Update to Python 3.4 beta 2. +- Refreshed patches: 55 (systemtap), 146 (hashlib-fips), 154 (test_gdb noise) +- Dropped patches: 114 (statvfs constants), 177 (platform unicode) + +* Mon Nov 25 2013 Bohuslav Kabrda - 3.4.0-0.1.b1 +- Update to Python 3.4 beta 1. +- Refreshed patches: 102 (lib64), 111 (no static lib), 125 (less verbose COUNT +ALLOCS), 141 (fix COUNT_ALLOCS in test_module), 146 (hashlib fips), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port) +- Removed patch 00187 (remove pthread atfork; upstreamed) + +* Mon Nov 04 2013 Bohuslav Kabrda - 3.4.0-0.1.a4 +- Update to Python 3.4 alpha 4. 
+- Refreshed patches: 55 (systemtap), 102 (lib64), 111 (no static lib), +114 (statvfs flags), 132 (unittest rpmbuild hooks), 134 (fix COUNT_ALLOCS in +test_sys), 143 (tsc on ppc64), 146 (hashlib fips), 153 (test gdb noise), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port), 186 (dont raise +from py_compile) +- Removed patches: 129 (test_subprocess nonreadable dir - no longer fails in +Koji), 142 (the mock issue that caused this is fixed) +- Added patch 187 (remove thread atfork) - will be in next version +- Refreshed script for checking pyc and pyo timestamps with new ignored files. +- The fips patch is disabled for now until upstream makes a final decision +what to do with sha3 implementation for 3.4.0. + +* Wed Oct 30 2013 Bohuslav Kabrda - 3.3.2-7 +- Bytecompile all *.py files properly during build (rhbz#1023607) + +* Fri Aug 23 2013 Matej Stuchlik - 3.3.2-6 +- Added fix for CVE-2013-4238 (rhbz#996399) + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-5 +- fix up indentation in arm patch + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-4 +- disable a test that fails on arm +- enable valgrind support on arm arches + +* Tue Jul 02 2013 Bohuslav Kabrda - 3.3.2-3 +- Fix build with libffi containing multilib wrapper for ffi.h (rhbz#979696). + +* Mon May 20 2013 Bohuslav Kabrda - 3.3.2-2 +- Add patch for CVE-2013-2099 (rhbz#963261). + +* Thu May 16 2013 Bohuslav Kabrda - 3.3.2-1 +- Updated to Python 3.3.2. +- Refreshed patches: 153 (gdb test noise) +- Dropped patches: 175 (configure -Wformat, fixed upstream), 182 (gdb +test threads) +- Synced patch numbers with python.spec. + +* Thu May 9 2013 David Malcolm - 3.3.1-4 +- fix test.test_gdb.PyBtTests.test_threads on ppc64 (patch 181; rhbz#960010) + +* Thu May 02 2013 Bohuslav Kabrda - 3.3.1-3 +- Add patch that enables building on ppc64p7 (replace the sed, so that +we get consistent with python2 spec and it's more obvious that we're doing it. 
+ +* Wed Apr 24 2013 Bohuslav Kabrda - 3.3.1-2 +- Add fix for gdb tests failing on arm, rhbz#951802. + +* Tue Apr 09 2013 Bohuslav Kabrda - 3.3.1-1 +- Updated to Python 3.3.1. +- Refreshed patches: 55 (systemtap), 111 (no static lib), 146 (hashlib fips), +153 (fix test_gdb noise), 157 (uid, gid overflow - fixed upstream, just +keeping few more downstream tests) +- Removed patches: 3 (audiotest.au made it to upstream tarball) +- Removed workaround for http://bugs.python.org/issue14774, discussed in +http://bugs.python.org/issue15298 and fixed in revision 24d52d3060e8. + +* Mon Mar 25 2013 David Malcolm - 3.3.0-10 +- fix gcc 4.8 incompatibility (rhbz#927358); regenerate autotool intermediates + +* Mon Mar 25 2013 David Malcolm - 3.3.0-9 +- renumber patches to keep them in sync with python.spec + +* Fri Mar 15 2013 Toshio Kuratomi - 3.3.0-8 +- Fix error in platform.platform() when non-ascii byte strings are decoded to + unicode (rhbz#922149) + +* Thu Mar 14 2013 Toshio Kuratomi - 3.3.0-7 +- Fix up shared library extension (rhbz#889784) + +* Thu Mar 07 2013 Karsten Hopp 3.3.0-6 +- add ppc64p7 build target, optimized for Power7 + +* Mon Mar 4 2013 David Malcolm - 3.3.0-5 +- add workaround for ENOPROTOOPT seen running selftests in Koji +(rhbz#913732) + +* Mon Mar 4 2013 David Malcolm - 3.3.0-4 +- remove config flag from /etc/rpm/macros.{python3|pybytecompile} + +* Mon Feb 11 2013 David Malcolm - 3.3.0-3 +- add aarch64 (rhbz#909783) + +* Thu Nov 29 2012 David Malcolm - 3.3.0-2 +- add BR on bluez-libs-devel (rhbz#879720) + +* Sat Sep 29 2012 David Malcolm - 3.3.0-1 +- 3.3.0rc3 -> 3.3.0; drop alphatag + +* Mon Sep 24 2012 David Malcolm - 3.3.0-0.6.rc3 +- 3.3.0rc2 -> 3.3.0rc3 + +* Mon Sep 10 2012 David Malcolm - 3.3.0-0.5.rc2 +- 3.3.0rc1 -> 3.3.0rc2; refresh patch 55 + +* Mon Aug 27 2012 David Malcolm - 3.3.0-0.4.rc1 +- 3.3.0b2 -> 3.3.0rc1; refresh patches 3, 55 + +* Mon Aug 13 2012 David Malcolm - 3.3.0-0.3.b2 +- 3.3b1 -> 3.3b2; drop upstreamed patch 152; refresh patches 3, 
regenerate autotools patch; rework systemtap
patch from 300 to 5000 +- specfile cleanups + +* Mon Apr 30 2012 David Malcolm - 3.2.3-5 +- fix test_gdb.py (patch 156; rhbz#817072) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-4 +- avoid allocating thunks in ctypes unless absolutely necessary, to avoid +generating SELinux denials on "import ctypes" and "import uuid" when embedding +Python within httpd (patch 155; rhbz#814391) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-3 +- add explicit version requirements on expat to avoid linkage problems with +XML_SetHashSalt + +* Thu Apr 12 2012 David Malcolm - 3.2.3-2 +- fix test_gdb (patch 153) + +* Wed Apr 11 2012 David Malcolm - 3.2.3-1 +- 3.2.3; refresh patch 102 (lib64); drop upstream patches 148 (gdbm magic +values), 149 (__pycache__ fix); add patch 152 (test_gdb regex) + +* Thu Feb 9 2012 Thomas Spura - 3.2.2-13 +- use newly installed python for byte compiling (now for real) + +* Sun Feb 5 2012 Thomas Spura - 3.2.2-12 +- use newly installed python for byte compiling (#787498) + +* Wed Jan 4 2012 Ville Skyttä - 3.2.2-11 +- Build with $RPM_LD_FLAGS (#756863). +- Use xz-compressed source tarball. 
+ +* Wed Dec 07 2011 Karsten Hopp 3.2.2-10 +- disable rAssertAlmostEqual in test_cmath on PPC (#750811) + +* Mon Oct 17 2011 Rex Dieter - 3.2.2-9 +- python3-devel missing autogenerated pkgconfig() provides (#746751) + +* Mon Oct 10 2011 David Malcolm - 3.2.2-8 +- cherrypick fix for distutils not using __pycache__ when byte-compiling +files (rhbz#722578) + +* Fri Sep 30 2011 David Malcolm - 3.2.2-7 +- re-enable gdbm (patch 148; rhbz#742242) + +* Fri Sep 16 2011 David Malcolm - 3.2.2-6 +- add a sys._debugmallocstats() function (patch 147) + +* Wed Sep 14 2011 David Malcolm - 3.2.2-5 +- support OpenSSL FIPS mode in _hashlib and hashlib; don't build the _md5 and +_sha* modules, relying on _hashlib in hashlib (rhbz#563986; patch 146) + +* Tue Sep 13 2011 David Malcolm - 3.2.2-4 +- disable gdbm module to prepare for gdbm soname bump + +* Mon Sep 12 2011 David Malcolm - 3.2.2-3 +- renumber and rename patches for consistency with python.spec (8 to 55, 106 +to 104, 6 to 111, 104 to 113, 105 to 114, 125, 131, 130 to 143) + +* Sat Sep 10 2011 David Malcolm - 3.2.2-2 +- rewrite of "check", introducing downstream-only hooks for skipping specific +cases in an rpmbuild (patch 132), and fixing/skipping failing tests in a more +fine-grained manner than before; (patches 106, 133-142 sparsely, moving +patches for consistency with python.spec: 128 to 134, 126 to 135, 127 to 141) + +* Tue Sep 6 2011 David Malcolm - 3.2.2-1 +- 3.2.2 + +* Thu Sep 1 2011 David Malcolm - 3.2.1-7 +- run selftests with "--verbose" +- disable parts of test_io on ppc (rhbz#732998) + +* Wed Aug 31 2011 David Malcolm - 3.2.1-6 +- use "--findleaks --verbose3" when running test suite + +* Tue Aug 23 2011 David Malcolm - 3.2.1-5 +- re-enable and fix the --with-tsc option on ppc64, and rework it on 32-bit +ppc to avoid aliasing violations (patch 130; rhbz#698726) + +* Tue Aug 23 2011 David Malcolm - 3.2.1-4 +- don't use --with-tsc on ppc64 debug builds (rhbz#698726) + +* Thu Aug 18 2011 David Malcolm - 3.2.1-3 +- 
add %%python3_version to the rpm macros (rhbz#719082) + +* Mon Jul 11 2011 Dennis Gilmore - 3.2.1-2 +- disable some tests on sparc arches + +* Mon Jul 11 2011 David Malcolm - 3.2.1-1 +- 3.2.1; refresh lib64 patch (102), subprocess unit test patch (129), disabling +of static library build (due to Modules/_testembed; patch 6), autotool +intermediates (patch 300) + +* Fri Jul 8 2011 David Malcolm - 3.2-5 +- use the gdb hooks from the upstream tarball, rather than keeping our own copy + +* Fri Jul 8 2011 David Malcolm - 3.2-4 +- don't run test_openpty and test_pty in %%check + +* Fri Jul 8 2011 David Malcolm - 3.2-3 +- cleanup of BuildRequires; add comment headings to specfile sections + +* Tue Apr 19 2011 David Malcolm - 3.2-2 +- fix the libpython.stp systemtap tapset (rhbz#697730) + +* Mon Feb 21 2011 David Malcolm - 3.2-1 +- 3.2 +- drop alphatag +- regenerate autotool patch + +* Mon Feb 14 2011 David Malcolm - 3.2-0.13.rc3 +- add a /usr/bin/python3-debug symlink within the debug subpackage + +* Mon Feb 14 2011 David Malcolm - 3.2-0.12.rc3 +- 3.2rc3 +- regenerate autotool patch + +* Wed Feb 09 2011 Fedora Release Engineering - 3.2-0.11.rc2 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_15_Mass_Rebuild + +* Mon Jan 31 2011 David Malcolm - 3.2-0.10.rc2 +- 3.2rc2 + +* Mon Jan 17 2011 David Malcolm - 3.2-0.9.rc1 +- 3.2rc1 +- rework patch 6 (static lib removal) +- remove upstreamed patch 130 (ppc debug build) +- regenerate patch 300 (autotool intermediates) +- updated packaging to reflect upstream rewrite of "Demo" (issue 7962) +- added libpython3.so and 2to3-3.2 + +* Wed Jan 5 2011 David Malcolm - 3.2-0.8.b2 +- set EXTRA_CFLAGS to our CFLAGS, rather than overriding OPT, fixing a linker +error with dynamic annotations (when configured using --with-valgrind) +- fix the ppc build of the debug configuration (patch 130; rhbz#661510) + +* Tue Jan 4 2011 David Malcolm - 3.2-0.7.b2 +- add --with-valgrind to configuration (on architectures that support this) + +* Wed Dec 29 
2010 David Malcolm - 3.2-0.6.b2 +- work around test_subprocess failure seen in koji (patch 129) + +* Tue Dec 28 2010 David Malcolm - 3.2-0.5.b2 +- 3.2b2 +- rework patch 3 (removal of mimeaudio tests), patch 6 (no static libs), +patch 8 (systemtap), patch 102 (lib64) +- remove patch 4 (rendered redundant by upstream r85537), patch 103 (PEP 3149), +patch 110 (upstreamed expat fix), patch 111 (parallel build fix for grammar +fixed upstream) +- regenerate patch 300 (autotool intermediates) +- workaround COUNT_ALLOCS weakref issues in test suite (patch 126, patch 127, +patch 128) +- stop using runtest.sh in %%check (dropped by upstream), replacing with +regrtest; fixup list of failing tests +- introduce "pyshortver", "SOABI_optimized" and "SOABI_debug" macros +- rework manifests of shared libraries to use "SOABI_" macros, reflecting +PEP 3149 +- drop itertools, operator and _collections modules from the manifests as py3k +commit r84058 moved these inside libpython; json/tests moved to test/json_tests +- move turtle code into the tkinter subpackage + +* Wed Nov 17 2010 David Malcolm - 3.2-0.5.a1 +- fix sysconfig to not rely on the -devel subpackage (rhbz#653058) + +* Thu Sep 9 2010 David Malcolm - 3.2-0.4.a1 +- move most of the content of the core package to the libs subpackage, given +that the libs aren't meaningfully usable without the standard libraries + +* Wed Sep 8 2010 David Malcolm - 3.2-0.3.a1 +- Move test.support to core package (rhbz#596258) +- Add various missing __pycache__ directories to payload + +* Sun Aug 22 2010 Toshio Kuratomi - 3.2-0.2.a1 +- Add __pycache__ directory for site-packages + +* Sun Aug 22 2010 Thomas Spura - 3.2-0.1.a1 +- on 64bit "stdlib" was still "/usr/lib/python*" (modify *lib64.patch) +- make find-provides-without-python-sonames.sh 64bit aware + +* Sat Aug 21 2010 David Malcolm - 3.2-0.0.a1 +- 3.2a1; add alphatag +- rework %%files in the light of PEP 3147 (__pycache__) +- drop our configuration patch to Setup.dist (patch 0): setup.py 
should do a +better job of things, and the %%files explicitly lists our modules (r82746 +appears to break the old way of doing things). This leads to various modules +changing from "foomodule.so" to "foo.so". It also leads to the optimized build +dropping the _sha1, _sha256 and _sha512 modules, but these are provided by +_hashlib; _weakref becomes a builtin module; xxsubtype goes away (it's only for +testing/devel purposes) +- fixup patches 3, 4, 6, 8, 102, 103, 105, 111 for the rebase +- remove upstream patches: 7 (system expat), 106, 107, 108 (audioop reformat +plus CVE-2010-1634 and CVE-2010-2089), 109 (CVE-2008-5983) +- add machinery for rebuilding "configure" and friends, using the correct +version of autoconf (patch 300) +- patch the debug build's usage of COUNT_ALLOCS to be less verbose (patch 125) +- "modulator" was removed upstream +- drop "-b" from patch applications affecting .py files to avoid littering the +installation tree + +* Thu Aug 19 2010 Toshio Kuratomi - 3.1.2-13 +- Turn on computed-gotos. +- Fix for parallel make and graminit.c + +* Fri Jul 2 2010 David Malcolm - 3.1.2-12 +- rebuild + +* Fri Jul 2 2010 David Malcolm - 3.1.2-11 +- Fix an incompatibility between pyexpat and the system expat-2.0.1 that led to +a segfault running test_pyexpat.py (patch 110; upstream issue 9054; rhbz#610312) + +* Fri Jun 4 2010 David Malcolm - 3.1.2-10 +- ensure that the compiler is invoked with "-fwrapv" (rhbz#594819) +- reformat whitespace in audioop.c (patch 106) +- CVE-2010-1634: fix various integer overflow checks in the audioop +module (patch 107) +- CVE-2010-2089: further checks within the audioop module (patch 108) +- CVE-2008-5983: the new PySys_SetArgvEx entry point from r81399 (patch 109) + +* Thu May 27 2010 Dan Horák - 3.1.2-9 +- reading the timestamp counter is available only on some arches (see Python/ceval.c) + +* Wed May 26 2010 David Malcolm - 3.1.2-8 +- add flags for statvfs.f_flag to the constant list in posixmodule (i.e. 
"os") +(patch 105) + +* Tue May 25 2010 David Malcolm - 3.1.2-7 +- add configure-time support for COUNT_ALLOCS and CALL_PROFILE debug options +(patch 104); enable them and the WITH_TSC option within the debug build + +* Mon May 24 2010 David Malcolm - 3.1.2-6 +- build and install two different configurations of Python 3: debug and +standard, packaging the debug build in a new "python3-debug" subpackage +(patch 103) + +* Tue Apr 13 2010 David Malcolm - 3.1.2-5 +- exclude test_http_cookies when running selftests, due to hang seen on +http://koji.fedoraproject.org/koji/taskinfo?taskID=2088463 (cancelled after +11 hours) +- update python-gdb.py from v5 to py3k version submitted upstream + +* Wed Mar 31 2010 David Malcolm - 3.1.2-4 +- update python-gdb.py from v4 to v5 (improving performance and stability, +adding commands) + +* Thu Mar 25 2010 David Malcolm - 3.1.2-3 +- update python-gdb.py from v3 to v4 (fixing infinite recursion on reference +cycles and tracebacks on bytes 0x80-0xff in strings, adding handlers for sets +and exceptions) + +* Wed Mar 24 2010 David Malcolm - 3.1.2-2 +- refresh gdb hooks to v3 (reworking how they are packaged) + +* Sun Mar 21 2010 David Malcolm - 3.1.2-1 +- update to 3.1.2: http://www.python.org/download/releases/3.1.2/ +- drop upstreamed patch 2 (.pyc permissions handling) +- drop upstream patch 5 (fix for the test_tk and test_ttk_* selftests) +- drop upstreamed patch 200 (path-fixing script) + +* Sat Mar 20 2010 David Malcolm - 3.1.1-28 +- fix typo in libpython.stp (rhbz:575336) + +* Fri Mar 12 2010 David Malcolm - 3.1.1-27 +- add pyfuntop.stp example (source 7) +- convert usage of $$RPM_BUILD_ROOT to %%{buildroot} throughout, for +consistency with python.spec + +* Mon Feb 15 2010 Thomas Spura - 3.1.1-26 +- rebuild for new package of redhat-rpm-config (rhbz:564527) +- use 'install -p' when running 'make install' + +* Fri Feb 12 2010 David Malcolm - 3.1.1-25 +- split configure options into multiple lines for easy of editing +- add 
systemtap static markers (wcohen, mjw, dmalcolm; patch 8), a systemtap +tapset defining "python.function.entry" and "python.function.return" to make +the markers easy to use (dmalcolm; source 5), and an example of using the +tapset to the docs (dmalcolm; source 6) (rhbz:545179) + +* Mon Feb 8 2010 David Malcolm - 3.1.1-24 +- move the -gdb.py file from %%{_libdir}/INSTSONAME-gdb.py to +%%{_prefix}/lib/debug/%%{_libdir}/INSTSONAME.debug-gdb.py to avoid noise from +ldconfig (bug 562980), and which should also ensure it becomes part of the +debuginfo subpackage, rather than the libs subpackage +- introduce %%{py_SOVERSION} and %%{py_INSTSONAME} to reflect the upstream +configure script, and to avoid fragile scripts that try to figure this out +dynamically (e.g. for the -gdb.py change) + +* Mon Feb 8 2010 David Malcolm - 3.1.1-23 +- add gdb hooks for easier debugging (Source 4) + +* Thu Jan 28 2010 David Malcolm - 3.1.1-22 +- update python-3.1.1-config.patch to remove downstream customization of build +of pyexpat and elementtree modules +- add patch adapted from upstream (patch 7) to add support for building against +system expat; add --with-system-expat to "configure" invocation +- remove embedded copies of expat and zlib from source tree during "prep" + +* Mon Jan 25 2010 David Malcolm - 3.1.1-21 +- introduce %%{dynload_dir} macro +- explicitly list all lib-dynload files, rather than dynamically gathering the +payload into a temporary text file, so that we can be sure what we are +shipping +- introduce a macros.pybytecompile source file, to help with packaging python3 +modules (Source3; written by Toshio) +- rename "2to3-3" to "python3-2to3" to better reflect python 3 module packaging +plans + +* Mon Jan 25 2010 David Malcolm - 3.1.1-20 +- change python-3.1.1-config.patch to remove our downstream change to curses +configuration in Modules/Setup.dist, so that the curses modules are built using +setup.py with the downstream default (linking against libncursesw.so, 
rather +than libncurses.so), rather than within the Makefile; add a test to %%install +to verify the dso files that the curses module is linked against the correct +DSO (bug 539917; changes _cursesmodule.so -> _curses.so) + +* Fri Jan 22 2010 David Malcolm - 3.1.1-19 +- add %%py3dir macro to macros.python3 (to be used during unified python 2/3 +builds for setting up the python3 copy of the source tree) + +* Wed Jan 20 2010 David Malcolm - 3.1.1-18 +- move lib2to3 from -tools subpackage to main package (bug 556667) + +* Sun Jan 17 2010 David Malcolm - 3.1.1-17 +- patch Makefile.pre.in to avoid building static library (patch 6, bug 556092) + +* Fri Jan 15 2010 David Malcolm - 3.1.1-16 +- use the %%{_isa} macro to ensure that the python-devel dependency on python +is for the correct multilib arch (#555943) +- delete bundled copy of libffi to make sure we use the system one + +* Fri Jan 15 2010 David Malcolm - 3.1.1-15 +- fix the URLs output by pydoc so they point at python.org's 3.1 build of the +docs, rather than the 2.6 build + +* Wed Jan 13 2010 David Malcolm - 3.1.1-14 +- replace references to /usr with %%{_prefix}; replace references to +/usr/include with %%{_includedir} (Toshio) + +* Mon Jan 11 2010 David Malcolm - 3.1.1-13 +- fix permission on find-provides-without-python-sonames.sh from 775 to 755 + +* Mon Jan 11 2010 David Malcolm - 3.1.1-12 +- remove build-time requirements on tix and tk, since we already have +build-time requirements on the -devel subpackages for each of these (Thomas +Spura) +- replace usage of %%define with %%global (Thomas Spura) +- remove forcing of CC=gcc as this old workaround for bug 109268 appears to +longer be necessary +- move various test files from the "tools"/"tkinter" subpackages to the "test" +subpackage + +* Thu Jan 7 2010 David Malcolm - 3.1.1-11 +- add %%check section (thanks to Thomas Spura) +- update patch 4 to use correct shebang line +- get rid of stray patch file from buildroot + +* Tue Nov 17 2009 Andrew McNabb - 
3.1.1-10 +- switched a few instances of "find |xargs" to "find -exec" for consistency. +- made the description of __os_install_post more accurate. + +* Wed Nov 4 2009 David Malcolm - 3.1.1-9 +- add macros.python3 to the -devel subpackage, containing common macros for use +when packaging python3 modules + +* Tue Nov 3 2009 David Malcolm - 3.1.1-8 +- add a provides of "python(abi)" (see bug 532118) +- fix issues identified by a.badger in package review (bug 526126, comment 39): + - use "3" thoughout metadata, rather than "3.*" + - remove conditional around "pkg-config openssl" + - use standard cleanup of RPM_BUILD_ROOT + - replace hardcoded references to /usr with _prefix macro + - stop removing egg-info files + - use /usr/bin/python3.1 rather than /use/bin/env python3.1 when fixing +up shebang lines + - stop attempting to remove no-longer-present .cvsignore files + - move the post/postun sections above the "files" sections + +* Thu Oct 29 2009 David Malcolm - 3.1.1-7 +- remove commented-away patch 51 (python-2.6-distutils_rpm.patch): the -O1 +flag is used by default in the upstream code +- "Makefile" and the config-32/64.h file are needed by distutils/sysconfig.py +_init_posix(), so we include them in the core package, along with their parent +directories (bug 531901) + +* Tue Oct 27 2009 David Malcolm - 3.1.1-6 +- reword description, based on suggestion by amcnabb +- fix the test_email and test_imp selftests (patch 3 and patch 4 respectively) +- fix the test_tk and test_ttk_* selftests (patch 5) +- fix up the specfile's handling of shebang/perms to avoid corrupting +test_httpservers.py (sed command suggested by amcnabb) + +* Thu Oct 22 2009 David Malcolm - 3.1.1-5 +- fixup importlib/_bootstrap.py so that it correctly handles being unable to +open .pyc files for writing (patch 2, upstream issue 7187) +- actually apply the rpath patch (patch 1) + +* Thu Oct 22 2009 David Malcolm - 3.1.1-4 +- update patch0's setup of the crypt module to link it against libcrypt +- 
update patch0 to comment "datetimemodule" back out, so that it is built +using setup.py (see Setup, option 3), thus linking it statically against +timemodule.c and thus avoiding a run-time "undefined symbol: +_PyTime_DoubleToTimet" failure on "import datetime" + +* Wed Oct 21 2009 David Malcolm - 3.1.1-3 +- remove executable flag from various files that shouldn't have it +- fix end-of-line encodings +- fix a character encoding + +* Tue Oct 20 2009 David Malcolm - 3.1.1-2 +- disable invocation of brp-python-bytecompile in postprocessing, since +it would be with the wrong version of python (adapted from ivazquez' +python3000 specfile) +- use a custom implementation of __find_provides in order to filter out bogus +provides lines for the various .so modules +- fixup distutils/unixccompiler.py to remove standard library path from rpath +(patch 1, was Patch0 in ivazquez' python3000 specfile) +- split out libraries into a -libs subpackage +- update summaries and descriptions, basing content on ivazquez' specfile +- fixup executable permissions on .py, .xpm and .xbm files, based on work in +ivazquez's specfile +- get rid of DOS batch files +- fixup permissions for shared libraries from non-standard 555 to standard 755 +- move /usr/bin/python*-config to the -devel subpackage +- mark various directories as being documentation + +* Thu Sep 24 2009 Andrew McNabb 3.1.1-1 +- Initial package for Python 3. +