Mock Version: 4.1 Mock Version: 4.1 Mock Version: 4.1 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'], chrootPath='/var/lib/mock/dist-ocs23-python311-build-60851-2252/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=981gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'f8863dc71972410ab44c16fc1cb9dcf9', '-D', '/var/lib/mock/dist-ocs23-python311-build-60851-2252/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: noarch Building for target noarch Wrote: /builddir/build/SRPMS/python-kafka-2.0.2-3.ocs23.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'], chrootPath='/var/lib/mock/dist-ocs23-python311-build-60851-2252/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=981gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', 
'--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '9de94019037b4733a663b60addeb9101', '-D', '/var/lib/mock/dist-ocs23-python311-build-60851-2252/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.8sNm9E + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf kafka-python-2.0.2 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/2.0.2.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd kafka-python-2.0.2 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
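The two rpmbuild passes above (-bs for the SRPM, -br for dynamic buildrequires) are driven by mock inside systemd-nspawn. A minimal sketch of reproducing the same SRPM and the %prep unpack step by hand, assuming the spec and sources from this log are checked out locally (the mock config name is inferred from the chroot path above; the rest is illustrative, not taken from this log):

    # Rebuild the SRPM in a throwaway chroot, as mock does for the -bs pass above
    mock -r dist-ocs23-python311 --buildsrpm --spec python-kafka.spec --sources ./SOURCES
    # Or run only the unpack/patch step, the way the %prep scriptlet that continues below does
    rpmbuild -bp --nodeps --define "_sourcedir $PWD/SOURCES" python-kafka.spec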
+ /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/test_conn.py.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/test_default_records.py.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/test_legacy_records.py.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/setup.py.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/test_assignors.py.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + /usr/lib/rpm/rpmuncompress /builddir/build/SOURCES/fix_test_assert_error.patch + /usr/bin/patch -p0 -s --fuzz=0 --no-backup-if-mismatch -f + install -m 644 /builddir/build/SOURCES/LICENSE_doc /builddir/build/BUILD/kafka-python-2.0.2/LICENSE_doc + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.JRIKdi + umask 022 + cd /builddir/build/BUILD + cd kafka-python-2.0.2 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -r Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 65.5.1) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.37.0) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. warnings.warn(msg, warning_class) HOOK STDOUT: running egg_info HOOK STDOUT: creating kafka_python.egg-info HOOK STDOUT: writing kafka_python.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to kafka_python.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to kafka_python.egg-info/requires.txt HOOK STDOUT: writing top-level names to kafka_python.egg-info/top_level.txt HOOK STDOUT: writing manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'kafka_python.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.37.0) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. 
warnings.warn(msg, warning_class) HOOK STDOUT: running dist_info HOOK STDOUT: writing kafka_python.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to kafka_python.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to kafka_python.egg-info/requires.txt HOOK STDOUT: writing top-level names to kafka_python.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: creating '/builddir/build/BUILD/kafka-python-2.0.2/kafka_python-2.0.2.dist-info' Handling crc32c ; extra == 'crc32c' from wheel metadata: Requires-Dist Ignoring alien requirement: crc32c ; extra == 'crc32c' Handling lz4 ; extra == 'lz4' from wheel metadata: Requires-Dist Ignoring alien requirement: lz4 ; extra == 'lz4' Handling python-snappy ; extra == 'snappy' from wheel metadata: Requires-Dist Ignoring alien requirement: python-snappy ; extra == 'snappy' Handling python-zstandard ; extra == 'zstd' from wheel metadata: Requires-Dist Ignoring alien requirement: python-zstandard ; extra == 'zstd' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-kafka-2.0.2-3.ocs23.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'], chrootPath='/var/lib/mock/dist-ocs23-python311-build-60851-2252/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=981gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=False) Using nspawn with args ['--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'c4f4629c08374b2c8c38503b18b97234', '-D', '/var/lib/mock/dist-ocs23-python311-build-60851-2252/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', 
'--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: noarch Building for target noarch Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.fZHnZ0 + umask 022 + cd /builddir/build/BUILD + cd kafka-python-2.0.2 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv kafka_python-2.0.2.dist-info/ removed 'kafka_python-2.0.2.dist-info/METADATA' removed 'kafka_python-2.0.2.dist-info/top_level.txt' removed 'kafka_python-2.0.2.dist-info/LICENSE' removed directory 'kafka_python-2.0.2.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -r Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 65.5.1) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.37.0) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. warnings.warn(msg, warning_class) HOOK STDOUT: running egg_info HOOK STDOUT: writing kafka_python.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to kafka_python.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to kafka_python.egg-info/requires.txt HOOK STDOUT: writing top-level names to kafka_python.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'kafka_python.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.37.0) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. 
warnings.warn(msg, warning_class) HOOK STDOUT: running dist_info HOOK STDOUT: writing kafka_python.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to kafka_python.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to kafka_python.egg-info/requires.txt HOOK STDOUT: writing top-level names to kafka_python.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'kafka_python.egg-info/SOURCES.txt' HOOK STDOUT: creating '/builddir/build/BUILD/kafka-python-2.0.2/kafka_python-2.0.2.dist-info' Handling crc32c ; extra == 'crc32c' from wheel metadata: Requires-Dist Ignoring alien requirement: crc32c ; extra == 'crc32c' Handling lz4 ; extra == 'lz4' from wheel metadata: Requires-Dist Ignoring alien requirement: lz4 ; extra == 'lz4' Handling python-snappy ; extra == 'snappy' from wheel metadata: Requires-Dist Ignoring alien requirement: python-snappy ; extra == 'snappy' Handling python-zstandard ; extra == 'zstd' from wheel metadata: Requires-Dist Ignoring alien requirement: python-zstandard ; extra == 'zstd' + RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.FMp82Y + umask 022 + cd /builddir/build/BUILD + PYTHONPATH=:/usr/lib/python3.10/site-packages/:/usr/lib64/python3.10/site-packages/ + export PYTHONPATH + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd kafka-python-2.0.2 + mkdir -p /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS 
-specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + TMPDIR=/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir + /usr/bin/python3 -m pip wheel --wheel-dir /builddir/build/BUILD/kafka-python-2.0.2/pyproject-wheeldir --no-deps --use-pep517 --no-build-isolation --disable-pip-version-check --no-clean --progress-bar off --verbose . Processing /builddir/build/BUILD/kafka-python-2.0.2 Preparing metadata (pyproject.toml): started Running command Preparing metadata (pyproject.toml) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. warnings.warn(msg, warning_class) running dist_info creating /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info writing /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/PKG-INFO writing dependency_links to /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/dependency_links.txt writing requirements to /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/requires.txt writing top-level names to /builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/top_level.txt writing manifest file '/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/SOURCES.txt' reading manifest file '/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license file 'LICENSE' writing manifest file '/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python.egg-info/SOURCES.txt' creating '/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-modern-metadata-7bkl_wk0/kafka_python-2.0.2.dist-info' Preparing metadata (pyproject.toml): finished with status 'done' Building wheels for collected packages: kafka-python Building wheel for kafka-python (pyproject.toml): started Running command Building wheel for kafka-python (pyproject.toml) /usr/lib/python3.11/site-packages/setuptools/config/setupcfg.py:508: SetuptoolsDeprecationWarning: The license_file parameter is deprecated, use license_files instead. 
warnings.warn(msg, warning_class) running bdist_wheel running build running build_py creating build creating build/lib creating build/lib/kafka copying kafka/scram.py -> build/lib/kafka copying kafka/util.py -> build/lib/kafka copying kafka/conn.py -> build/lib/kafka copying kafka/future.py -> build/lib/kafka copying kafka/version.py -> build/lib/kafka copying kafka/__init__.py -> build/lib/kafka copying kafka/codec.py -> build/lib/kafka copying kafka/cluster.py -> build/lib/kafka copying kafka/client_async.py -> build/lib/kafka copying kafka/errors.py -> build/lib/kafka copying kafka/structs.py -> build/lib/kafka creating build/lib/kafka/coordinator copying kafka/coordinator/base.py -> build/lib/kafka/coordinator copying kafka/coordinator/__init__.py -> build/lib/kafka/coordinator copying kafka/coordinator/heartbeat.py -> build/lib/kafka/coordinator copying kafka/coordinator/protocol.py -> build/lib/kafka/coordinator copying kafka/coordinator/consumer.py -> build/lib/kafka/coordinator creating build/lib/kafka/oauth copying kafka/oauth/__init__.py -> build/lib/kafka/oauth copying kafka/oauth/abstract.py -> build/lib/kafka/oauth creating build/lib/kafka/partitioner copying kafka/partitioner/__init__.py -> build/lib/kafka/partitioner copying kafka/partitioner/default.py -> build/lib/kafka/partitioner creating build/lib/kafka/producer copying kafka/producer/future.py -> build/lib/kafka/producer copying kafka/producer/buffer.py -> build/lib/kafka/producer copying kafka/producer/__init__.py -> build/lib/kafka/producer copying kafka/producer/sender.py -> build/lib/kafka/producer copying kafka/producer/record_accumulator.py -> build/lib/kafka/producer copying kafka/producer/kafka.py -> build/lib/kafka/producer creating build/lib/kafka/admin copying kafka/admin/client.py -> build/lib/kafka/admin copying kafka/admin/__init__.py -> build/lib/kafka/admin copying kafka/admin/new_topic.py -> build/lib/kafka/admin copying kafka/admin/acl_resource.py -> build/lib/kafka/admin copying kafka/admin/new_partitions.py -> build/lib/kafka/admin copying kafka/admin/config_resource.py -> build/lib/kafka/admin creating build/lib/kafka/record copying kafka/record/_crc32c.py -> build/lib/kafka/record copying kafka/record/util.py -> build/lib/kafka/record copying kafka/record/legacy_records.py -> build/lib/kafka/record copying kafka/record/__init__.py -> build/lib/kafka/record copying kafka/record/memory_records.py -> build/lib/kafka/record copying kafka/record/default_records.py -> build/lib/kafka/record copying kafka/record/abc.py -> build/lib/kafka/record creating build/lib/kafka/vendor copying kafka/vendor/selectors34.py -> build/lib/kafka/vendor copying kafka/vendor/__init__.py -> build/lib/kafka/vendor copying kafka/vendor/six.py -> build/lib/kafka/vendor copying kafka/vendor/enum34.py -> build/lib/kafka/vendor copying kafka/vendor/socketpair.py -> build/lib/kafka/vendor creating build/lib/kafka/protocol copying kafka/protocol/group.py -> build/lib/kafka/protocol copying kafka/protocol/api.py -> build/lib/kafka/protocol copying kafka/protocol/fetch.py -> build/lib/kafka/protocol copying kafka/protocol/message.py -> build/lib/kafka/protocol copying kafka/protocol/pickle.py -> build/lib/kafka/protocol copying kafka/protocol/__init__.py -> build/lib/kafka/protocol copying kafka/protocol/offset.py -> build/lib/kafka/protocol copying kafka/protocol/metadata.py -> build/lib/kafka/protocol copying kafka/protocol/struct.py -> build/lib/kafka/protocol copying kafka/protocol/produce.py -> build/lib/kafka/protocol copying 
kafka/protocol/admin.py -> build/lib/kafka/protocol copying kafka/protocol/parser.py -> build/lib/kafka/protocol copying kafka/protocol/types.py -> build/lib/kafka/protocol copying kafka/protocol/abstract.py -> build/lib/kafka/protocol copying kafka/protocol/frame.py -> build/lib/kafka/protocol copying kafka/protocol/commit.py -> build/lib/kafka/protocol creating build/lib/kafka/serializer copying kafka/serializer/__init__.py -> build/lib/kafka/serializer copying kafka/serializer/abstract.py -> build/lib/kafka/serializer creating build/lib/kafka/consumer copying kafka/consumer/group.py -> build/lib/kafka/consumer copying kafka/consumer/__init__.py -> build/lib/kafka/consumer copying kafka/consumer/subscription_state.py -> build/lib/kafka/consumer copying kafka/consumer/fetcher.py -> build/lib/kafka/consumer creating build/lib/kafka/metrics copying kafka/metrics/metric_name.py -> build/lib/kafka/metrics copying kafka/metrics/metric_config.py -> build/lib/kafka/metrics copying kafka/metrics/__init__.py -> build/lib/kafka/metrics copying kafka/metrics/metrics.py -> build/lib/kafka/metrics copying kafka/metrics/kafka_metric.py -> build/lib/kafka/metrics copying kafka/metrics/measurable.py -> build/lib/kafka/metrics copying kafka/metrics/compound_stat.py -> build/lib/kafka/metrics copying kafka/metrics/quota.py -> build/lib/kafka/metrics copying kafka/metrics/measurable_stat.py -> build/lib/kafka/metrics copying kafka/metrics/dict_reporter.py -> build/lib/kafka/metrics copying kafka/metrics/metrics_reporter.py -> build/lib/kafka/metrics copying kafka/metrics/stat.py -> build/lib/kafka/metrics creating build/lib/kafka/coordinator/assignors copying kafka/coordinator/assignors/__init__.py -> build/lib/kafka/coordinator/assignors copying kafka/coordinator/assignors/roundrobin.py -> build/lib/kafka/coordinator/assignors copying kafka/coordinator/assignors/abstract.py -> build/lib/kafka/coordinator/assignors copying kafka/coordinator/assignors/range.py -> build/lib/kafka/coordinator/assignors creating build/lib/kafka/coordinator/assignors/sticky copying kafka/coordinator/assignors/sticky/partition_movements.py -> build/lib/kafka/coordinator/assignors/sticky copying kafka/coordinator/assignors/sticky/sticky_assignor.py -> build/lib/kafka/coordinator/assignors/sticky copying kafka/coordinator/assignors/sticky/__init__.py -> build/lib/kafka/coordinator/assignors/sticky copying kafka/coordinator/assignors/sticky/sorted_set.py -> build/lib/kafka/coordinator/assignors/sticky creating build/lib/kafka/metrics/stats copying kafka/metrics/stats/sampled_stat.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/percentile.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/total.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/rate.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/__init__.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/avg.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/histogram.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/max_stat.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/min_stat.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/sensor.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/count.py -> build/lib/kafka/metrics/stats copying kafka/metrics/stats/percentiles.py -> build/lib/kafka/metrics/stats installing to build/bdist.linux-x86_64/wheel running install running install_lib creating build/bdist.linux-x86_64 creating 
build/bdist.linux-x86_64/wheel creating build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/coordinator copying build/lib/kafka/coordinator/base.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator copying build/lib/kafka/coordinator/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator creating build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors creating build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors/sticky copying build/lib/kafka/coordinator/assignors/sticky/partition_movements.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors/sticky copying build/lib/kafka/coordinator/assignors/sticky/sticky_assignor.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors/sticky copying build/lib/kafka/coordinator/assignors/sticky/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors/sticky copying build/lib/kafka/coordinator/assignors/sticky/sorted_set.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors/sticky copying build/lib/kafka/coordinator/assignors/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors copying build/lib/kafka/coordinator/assignors/roundrobin.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors copying build/lib/kafka/coordinator/assignors/abstract.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors copying build/lib/kafka/coordinator/assignors/range.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator/assignors copying build/lib/kafka/coordinator/heartbeat.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator copying build/lib/kafka/coordinator/protocol.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator copying build/lib/kafka/coordinator/consumer.py -> build/bdist.linux-x86_64/wheel/kafka/coordinator creating build/bdist.linux-x86_64/wheel/kafka/oauth copying build/lib/kafka/oauth/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/oauth copying build/lib/kafka/oauth/abstract.py -> build/bdist.linux-x86_64/wheel/kafka/oauth creating build/bdist.linux-x86_64/wheel/kafka/partitioner copying build/lib/kafka/partitioner/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/partitioner copying build/lib/kafka/partitioner/default.py -> build/bdist.linux-x86_64/wheel/kafka/partitioner copying build/lib/kafka/scram.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/future.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/buffer.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/sender.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/record_accumulator.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/producer/kafka.py -> build/bdist.linux-x86_64/wheel/kafka/producer copying build/lib/kafka/util.py -> build/bdist.linux-x86_64/wheel/kafka copying build/lib/kafka/conn.py -> build/bdist.linux-x86_64/wheel/kafka copying build/lib/kafka/future.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/admin copying build/lib/kafka/admin/client.py -> build/bdist.linux-x86_64/wheel/kafka/admin copying build/lib/kafka/admin/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/admin copying build/lib/kafka/admin/new_topic.py -> build/bdist.linux-x86_64/wheel/kafka/admin copying 
build/lib/kafka/admin/acl_resource.py -> build/bdist.linux-x86_64/wheel/kafka/admin copying build/lib/kafka/admin/new_partitions.py -> build/bdist.linux-x86_64/wheel/kafka/admin copying build/lib/kafka/admin/config_resource.py -> build/bdist.linux-x86_64/wheel/kafka/admin creating build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/_crc32c.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/util.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/legacy_records.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/memory_records.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/default_records.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/record/abc.py -> build/bdist.linux-x86_64/wheel/kafka/record copying build/lib/kafka/version.py -> build/bdist.linux-x86_64/wheel/kafka copying build/lib/kafka/__init__.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/vendor/selectors34.py -> build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/vendor/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/vendor/six.py -> build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/vendor/enum34.py -> build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/vendor/socketpair.py -> build/bdist.linux-x86_64/wheel/kafka/vendor copying build/lib/kafka/codec.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/group.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/api.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/fetch.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/message.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/pickle.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/offset.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/metadata.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/struct.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/produce.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/admin.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/parser.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/types.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/abstract.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/frame.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/protocol/commit.py -> build/bdist.linux-x86_64/wheel/kafka/protocol copying build/lib/kafka/cluster.py -> build/bdist.linux-x86_64/wheel/kafka copying build/lib/kafka/client_async.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/serializer copying build/lib/kafka/serializer/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/serializer copying 
build/lib/kafka/serializer/abstract.py -> build/bdist.linux-x86_64/wheel/kafka/serializer copying build/lib/kafka/errors.py -> build/bdist.linux-x86_64/wheel/kafka creating build/bdist.linux-x86_64/wheel/kafka/consumer copying build/lib/kafka/consumer/group.py -> build/bdist.linux-x86_64/wheel/kafka/consumer copying build/lib/kafka/consumer/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/consumer copying build/lib/kafka/consumer/subscription_state.py -> build/bdist.linux-x86_64/wheel/kafka/consumer copying build/lib/kafka/consumer/fetcher.py -> build/bdist.linux-x86_64/wheel/kafka/consumer creating build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/metric_name.py -> build/bdist.linux-x86_64/wheel/kafka/metrics creating build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/sampled_stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/percentile.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/total.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/rate.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/avg.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/histogram.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/max_stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/min_stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/sensor.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/count.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/stats/percentiles.py -> build/bdist.linux-x86_64/wheel/kafka/metrics/stats copying build/lib/kafka/metrics/metric_config.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/__init__.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/metrics.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/kafka_metric.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/measurable.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/compound_stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/quota.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/measurable_stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/dict_reporter.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/metrics_reporter.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/metrics/stat.py -> build/bdist.linux-x86_64/wheel/kafka/metrics copying build/lib/kafka/structs.py -> build/bdist.linux-x86_64/wheel/kafka running install_egg_info running egg_info writing kafka_python.egg-info/PKG-INFO writing dependency_links to kafka_python.egg-info/dependency_links.txt writing requirements to kafka_python.egg-info/requires.txt writing top-level names to kafka_python.egg-info/top_level.txt reading manifest file 'kafka_python.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' adding license 
file 'LICENSE' writing manifest file 'kafka_python.egg-info/SOURCES.txt' Copying kafka_python.egg-info to build/bdist.linux-x86_64/wheel/kafka_python-2.0.2-py3.11.egg-info running install_scripts creating build/bdist.linux-x86_64/wheel/kafka_python-2.0.2.dist-info/WHEEL creating '/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir/pip-wheel-apuywkgi/tmpzzwtcszf/kafka_python-2.0.2-py2.py3-none-any.whl' and adding 'build/bdist.linux-x86_64/wheel' to it adding 'kafka/__init__.py' adding 'kafka/client_async.py' adding 'kafka/cluster.py' adding 'kafka/codec.py' adding 'kafka/conn.py' adding 'kafka/errors.py' adding 'kafka/future.py' adding 'kafka/scram.py' adding 'kafka/structs.py' adding 'kafka/util.py' adding 'kafka/version.py' adding 'kafka/admin/__init__.py' adding 'kafka/admin/acl_resource.py' adding 'kafka/admin/client.py' adding 'kafka/admin/config_resource.py' adding 'kafka/admin/new_partitions.py' adding 'kafka/admin/new_topic.py' adding 'kafka/consumer/__init__.py' adding 'kafka/consumer/fetcher.py' adding 'kafka/consumer/group.py' adding 'kafka/consumer/subscription_state.py' adding 'kafka/coordinator/__init__.py' adding 'kafka/coordinator/base.py' adding 'kafka/coordinator/consumer.py' adding 'kafka/coordinator/heartbeat.py' adding 'kafka/coordinator/protocol.py' adding 'kafka/coordinator/assignors/__init__.py' adding 'kafka/coordinator/assignors/abstract.py' adding 'kafka/coordinator/assignors/range.py' adding 'kafka/coordinator/assignors/roundrobin.py' adding 'kafka/coordinator/assignors/sticky/__init__.py' adding 'kafka/coordinator/assignors/sticky/partition_movements.py' adding 'kafka/coordinator/assignors/sticky/sorted_set.py' adding 'kafka/coordinator/assignors/sticky/sticky_assignor.py' adding 'kafka/metrics/__init__.py' adding 'kafka/metrics/compound_stat.py' adding 'kafka/metrics/dict_reporter.py' adding 'kafka/metrics/kafka_metric.py' adding 'kafka/metrics/measurable.py' adding 'kafka/metrics/measurable_stat.py' adding 'kafka/metrics/metric_config.py' adding 'kafka/metrics/metric_name.py' adding 'kafka/metrics/metrics.py' adding 'kafka/metrics/metrics_reporter.py' adding 'kafka/metrics/quota.py' adding 'kafka/metrics/stat.py' adding 'kafka/metrics/stats/__init__.py' adding 'kafka/metrics/stats/avg.py' adding 'kafka/metrics/stats/count.py' adding 'kafka/metrics/stats/histogram.py' adding 'kafka/metrics/stats/max_stat.py' adding 'kafka/metrics/stats/min_stat.py' adding 'kafka/metrics/stats/percentile.py' adding 'kafka/metrics/stats/percentiles.py' adding 'kafka/metrics/stats/rate.py' adding 'kafka/metrics/stats/sampled_stat.py' adding 'kafka/metrics/stats/sensor.py' adding 'kafka/metrics/stats/total.py' adding 'kafka/oauth/__init__.py' adding 'kafka/oauth/abstract.py' adding 'kafka/partitioner/__init__.py' adding 'kafka/partitioner/default.py' adding 'kafka/producer/__init__.py' adding 'kafka/producer/buffer.py' adding 'kafka/producer/future.py' adding 'kafka/producer/kafka.py' adding 'kafka/producer/record_accumulator.py' adding 'kafka/producer/sender.py' adding 'kafka/protocol/__init__.py' adding 'kafka/protocol/abstract.py' adding 'kafka/protocol/admin.py' adding 'kafka/protocol/api.py' adding 'kafka/protocol/commit.py' adding 'kafka/protocol/fetch.py' adding 'kafka/protocol/frame.py' adding 'kafka/protocol/group.py' adding 'kafka/protocol/message.py' adding 'kafka/protocol/metadata.py' adding 'kafka/protocol/offset.py' adding 'kafka/protocol/parser.py' adding 'kafka/protocol/pickle.py' adding 'kafka/protocol/produce.py' adding 'kafka/protocol/struct.py' adding 
'kafka/protocol/types.py' adding 'kafka/record/__init__.py' adding 'kafka/record/_crc32c.py' adding 'kafka/record/abc.py' adding 'kafka/record/default_records.py' adding 'kafka/record/legacy_records.py' adding 'kafka/record/memory_records.py' adding 'kafka/record/util.py' adding 'kafka/serializer/__init__.py' adding 'kafka/serializer/abstract.py' adding 'kafka/vendor/__init__.py' adding 'kafka/vendor/enum34.py' adding 'kafka/vendor/selectors34.py' adding 'kafka/vendor/six.py' adding 'kafka/vendor/socketpair.py' adding 'kafka_python-2.0.2.dist-info/LICENSE' adding 'kafka_python-2.0.2.dist-info/METADATA' adding 'kafka_python-2.0.2.dist-info/WHEEL' adding 'kafka_python-2.0.2.dist-info/top_level.txt' adding 'kafka_python-2.0.2.dist-info/RECORD' removing build/bdist.linux-x86_64/wheel Building wheel for kafka-python (pyproject.toml): finished with status 'done' Created wheel for kafka-python: filename=kafka_python-2.0.2-py2.py3-none-any.whl size=246541 sha256=2de9dee50989f5d9e2c323cfea4ea7565c4afdac58721b86046198039106e123 Stored in directory: /builddir/.cache/pip/wheels/a3/38/f2/411206d705787bb1c5e15d51f76a8fffb23079284a1d566845 Successfully built kafka-python + /usr/bin/make -O -j32 V=1 VERBOSE=1 doc make -C docs html make[1]: Entering directory '/builddir/build/BUILD/kafka-python-2.0.2/docs' sphinx-build -b html -d _build/doctrees . _build/html Running Sphinx v4.3.1 making output directory... done building [mo]: targets for 0 po files that are out of date building [html]: targets for 15 source files that are out of date updating environment: [new config] 15 added, 0 changed, 0 removed reading sources... [ 6%] apidoc/BrokerConnection reading sources... [ 13%] apidoc/ClusterMetadata reading sources... [ 20%] apidoc/KafkaAdminClient reading sources... [ 26%] apidoc/KafkaClient reading sources... [ 33%] apidoc/KafkaConsumer reading sources... [ 40%] apidoc/KafkaProducer reading sources... [ 46%] apidoc/modules reading sources... [ 53%] changelog reading sources... [ 60%] compatibility reading sources... [ 66%] index reading sources... [ 73%] install reading sources... [ 80%] license reading sources... [ 86%] support reading sources... [ 93%] tests reading sources... [100%] usage looking for now-outdated files... none found pickling environment... done checking consistency... done preparing documents... done writing output... [ 6%] apidoc/BrokerConnection writing output... [ 13%] apidoc/ClusterMetadata writing output... [ 20%] apidoc/KafkaAdminClient writing output... [ 26%] apidoc/KafkaClient writing output... [ 33%] apidoc/KafkaConsumer writing output... [ 40%] apidoc/KafkaProducer writing output... [ 46%] apidoc/modules writing output... [ 53%] changelog writing output... [ 60%] compatibility writing output... [ 66%] index writing output... [ 73%] install writing output... [ 80%] license writing output... [ 86%] support writing output... [ 93%] tests writing output... [100%] usage generating indices... genindex done writing additional pages... search done copying static files... done copying extra files... done dumping search index in English (code: en)... done dumping object inventory... done build succeeded, 13 warnings. The HTML pages are in _build/html. Build finished. The HTML pages are in _build/html. 
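The docs build above finished with "build succeeded, 13 warnings." A minimal sketch for replaying that sphinx-build run by hand while chasing the autodoc import failures reported next, assuming the source tree path from this log (putting the unbuilt package on PYTHONPATH is one way to let "import kafka" resolve during autodoc):

    cd /builddir/build/BUILD/kafka-python-2.0.2
    # same builder (-b) and doctree cache (-d) as the docs/ Makefile invocation above
    PYTHONPATH=$PWD sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html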
make[1]: Leaving directory '/builddir/build/BUILD/kafka-python-2.0.2/docs'
open file:///builddir/build/BUILD/kafka-python-2.0.2/docs/_build/html/index.html
WARNING: html_static_path entry '_static' does not exist
WARNING: autodoc: failed to import class 'BrokerConnection' from module 'kafka'; the following exception was raised: No module named 'kafka'
/builddir/build/BUILD/kafka-python-2.0.2/docs/apidoc/ClusterMetadata.rst:2: WARNING: Title underline too short.
ClusterMetadata
===========
WARNING: autodoc: failed to import class 'cluster.ClusterMetadata' from module 'kafka'; the following exception was raised: No module named 'kafka'
/builddir/build/BUILD/kafka-python-2.0.2/docs/apidoc/KafkaAdminClient.rst:2: WARNING: Title underline too short.
KafkaAdminClient
===========
WARNING: autodoc: failed to import class 'KafkaAdminClient' from module 'kafka'; the following exception was raised: No module named 'kafka'
WARNING: autodoc: failed to import class 'KafkaClient' from module 'kafka'; the following exception was raised: No module named 'kafka'
WARNING: autodoc: failed to import class 'KafkaConsumer' from module 'kafka'; the following exception was raised: No module named 'kafka'
WARNING: autodoc: failed to import class 'KafkaProducer' from module 'kafka'; the following exception was raised: No module named 'kafka'
/builddir/build/BUILD/kafka-python-2.0.2/docs/changelog.rst:284: WARNING: Title underline too short.
1.4.4 (Nov 20, 2018)
##########
/builddir/build/BUILD/kafka-python-2.0.2/docs/changelog.rst:284: WARNING: Title underline too short.
1.4.4 (Nov 20, 2018)
##########
/builddir/build/BUILD/kafka-python-2.0.2/docs/install.rst:35: WARNING: Title underline too short.
Optional crc32c install
********************
/builddir/build/BUILD/kafka-python-2.0.2/docs/install.rst:35: WARNING: Title underline too short.
Optional crc32c install ******************** + rm -rf docs/_build/html/.buildinfo + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.Mx3GVK + umask 022 + cd /builddir/build/BUILD + PYTHONPATH=:/usr/lib/python3.10/site-packages/:/usr/lib64/python3.10/site-packages/ + export PYTHONPATH + '[' /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch '!=' / ']' + rm -rf /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch ++ dirname /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch + mkdir -p /builddir/build/BUILDROOT + mkdir /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch + cd kafka-python-2.0.2 ++ ls /builddir/build/BUILD/kafka-python-2.0.2/pyproject-wheeldir/kafka_python-2.0.2-py2.py3-none-any.whl ++ xargs basename --multiple ++ sed -E 's/([^-]+)-([^-]+)-.+\.whl/\1==\2/' + specifier=kafka_python==2.0.2 + TMPDIR=/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir + /usr/bin/python3 -m pip install --root /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch --no-deps --disable-pip-version-check --progress-bar off --verbose --ignore-installed --no-warn-script-location --no-index --no-cache-dir --find-links /builddir/build/BUILD/kafka-python-2.0.2/pyproject-wheeldir kafka_python==2.0.2 Using pip 22.3.1 from /usr/lib/python3.11/site-packages/pip (python 3.11) Looking in links: /builddir/build/BUILD/kafka-python-2.0.2/pyproject-wheeldir Processing ./pyproject-wheeldir/kafka_python-2.0.2-py2.py3-none-any.whl Installing collected packages: kafka_python Successfully installed kafka_python-2.0.2 + '[' -d /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/bin ']' + rm -f /builddir/build/BUILD/pyproject-ghost-distinfo + site_dirs=() + '[' -d /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages ']' + site_dirs+=("/usr/lib/python3.11/site-packages") + '[' /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib64/python3.11/site-packages '!=' /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages ']' + '[' -d /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib64/python3.11/site-packages ']' + for site_dir in ${site_dirs[@]} + for distinfo in /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch$site_dir/*.dist-info + echo '%ghost /usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info' + sed -i s/pip/rpm/ /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/INSTALLER + PYTHONPATH=/usr/lib/rpm/OpenCloudOS + /usr/bin/python3 -B /usr/lib/rpm/OpenCloudOS/pyproject_preprocess_record.py --buildroot /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch --record /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/RECORD --output /builddir/build/BUILD/pyproject-record + rm -fv /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/RECORD removed '/builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/RECORD' + rm -fv /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/REQUESTED removed '/builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages/kafka_python-2.0.2.dist-info/REQUESTED' ++ wc -l 
/builddir/build/BUILD/pyproject-ghost-distinfo ++ cut -f1 '-d ' + lines=1 + '[' 1 -ne 1 ']' + install -pm 755 kafka/record/_crc32c.py /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch//usr/lib/python3.11/site-packages/kafka/record/_crc32c.py + '[' -f /usr/bin/pathfix3.11.py ']' + pathfix=/usr/bin/pathfix3.11.py + '[' -z s ']' + shebang_flags=-kas + /usr/bin/pathfix3.11.py -pni /usr/bin/python3 -kas /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch//usr/lib/python3.11/site-packages/kafka/record/_crc32c.py /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch//usr/lib/python3.11/site-packages/kafka/record/_crc32c.py: updating + /usr/bin/python3 /usr/lib/rpm/OpenCloudOS/pyproject_save_files.py --output-files /builddir/build/BUILD/pyproject-files --output-modules /builddir/build/BUILD/pyproject-modules --buildroot /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch --sitelib /usr/lib/python3.11/site-packages --sitearch /usr/lib64/python3.11/site-packages --python-version 3.11 --pyproject-record /builddir/build/BUILD/pyproject-record --prefix /usr kafka + /usr/bin/find-debuginfo -j32 --strict-build-id -m -i --build-id-seed 2.0.2-3.ocs23 --unique-debug-suffix -2.0.2-3.ocs23.noarch --unique-debug-src-base python-kafka-2.0.2-3.ocs23.noarch -S debugsourcefiles.list /builddir/build/BUILD/kafka-python-2.0.2 find: 'debug': No such file or directory + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/OpenCloudOS/brp-ldconfig + /usr/lib/rpm/brp-compress + /usr/lib/rpm/OpenCloudOS/brp-strip-lto /usr/bin/strip + /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip + /usr/lib/rpm/check-rpaths + /usr/lib/rpm/OpenCloudOS/brp-mangle-shebangs + /usr/lib/rpm/OpenCloudOS/brp-python-bytecompile '' 1 0 Bytecompiling .py files below /builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11 using python3.11 + /usr/lib/rpm/OpenCloudOS/brp-python-hardlink Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.XpSAbk + umask 022 + cd /builddir/build/BUILD + PYTHONPATH=:/usr/lib/python3.10/site-packages/:/usr/lib64/python3.10/site-packages/ + export PYTHONPATH + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic 
-fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd kafka-python-2.0.2 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + PATH=/builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/sbin + PYTHONPATH=/builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib64/python3.11/site-packages:/builddir/build/BUILDROOT/python-kafka-2.0.2-3.ocs23.noarch/usr/lib/python3.11/site-packages::/usr/lib/python3.10/site-packages/:/usr/lib64/python3.10/site-packages/ + PYTHONDONTWRITEBYTECODE=1 + PYTEST_ADDOPTS=' --ignore=/builddir/build/BUILD/kafka-python-2.0.2/.pyproject-builddir' + /usr/bin/pytest --ignore=test/test_consumer_integration.py --ignore=test/record/test_util.py test ============================= test session starts ============================== platform linux -- Python 3.11.4, pytest-7.4.0, pluggy-1.0.0 rootdir: /builddir/build/BUILD/kafka-python-2.0.2 configfile: tox.ini plugins: mock-3.10.0 collected 1115 items test/test_acl_comparisons.py ... [ 0%] test/test_admin.py .... [ 0%] test/test_admin_integration.py sssssssss [ 1%] test/test_api_object_implementation.py ................................. [ 4%] ........................................................................ [ 10%] ........................................................................ [ 17%] ........................................................................ [ 23%] ........................................................................ [ 30%] ........................................................................ [ 36%] ........................................................................ [ 43%] ........................................................................ [ 49%] ............................................................. [ 55%] test/test_assignors.py ................................................. [ 59%] ........................................................................ [ 65%] ....................................................... [ 70%] test/test_client_async.py ...................... [ 72%] test/test_cluster.py . [ 72%] test/test_codec.py .....sss. [ 73%] test/test_conn.py ........................... [ 76%] test/test_consumer.py .... [ 76%] test/test_consumer_group.py sssss [ 76%] test/test_coordinator.py ............................................... [ 81%] .................... [ 82%] test/test_fetcher.py ............................. [ 85%] test/test_metrics.py ................ [ 86%] test/test_object_conversion.py ................. [ 88%] test/test_package.py .. [ 88%] test/test_partition_movements.py ... [ 88%] test/test_partitioner.py ........ [ 89%] test/test_producer.py .sssss.sssss [ 90%] test/test_protocol.py ............ 
=================================== FAILURES ===================================
_________________________ test_read_write_serde_v2[3] __________________________

compression_type = 3

    @pytest.mark.parametrize("compression_type", [
        DefaultRecordBatch.CODEC_NONE, DefaultRecordBatch.CODEC_GZIP,
        DefaultRecordBatch.CODEC_SNAPPY, DefaultRecordBatch.CODEC_LZ4
    ])
    def test_read_write_serde_v2(compression_type):
        builder = DefaultRecordBatchBuilder(
            magic=2, compression_type=compression_type, is_transactional=1,
            producer_id=123456, producer_epoch=123, base_sequence=9999,
            batch_size=999999)
        headers = [("header1", b"aaa"), ("header2", b"bbb")]
        for offset in range(10):
            builder.append(
                offset, timestamp=9999999, key=b"test", value=b"Super",
                headers=headers)
>       buffer = builder.build()

test/record/test_default_records.py:31:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/default_records.py:541: in build
    send_compressed = self._maybe_compress()
kafka/record/default_records.py:516: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        elif compression_type == self.CODEC_ZSTD:
            checker, name = codecs.has_zstd, "zstd"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/default_records.py:118: UnsupportedCodecError
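
This first failure is representative: the remaining nine below repeat the same UnsupportedCodecError for compression_type=3 (lz4). A minimal reproduction of the failing path, with the constructor arguments copied from the test shown above and run with the same PYTHONPATH as %check, would look roughly like this:

# Minimal reproduction sketch of the failing code path: building any record
# batch with compression_type=3 (lz4) fails when the lz4 bindings are absent.
from kafka.errors import UnsupportedCodecError
from kafka.record.default_records import DefaultRecordBatchBuilder

builder = DefaultRecordBatchBuilder(
    magic=2, compression_type=3, is_transactional=0,
    producer_id=-1, producer_epoch=-1, base_sequence=-1, batch_size=1024)
builder.append(0, timestamp=None, key=None, value=b"M" * 2000, headers=[])

try:
    builder.build()          # -> _maybe_compress() -> _assert_has_codec()
except UnsupportedCodecError as exc:
    print(exc)               # Libraries for lz4 compression codec not found
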
___________________ test_unavailable_codec[0-3-lz4-has_lz4] ____________________

magic = 0, compression_type = 3, name = 'lz4', checker_name = 'has_lz4'

    @pytest.mark.parametrize("compression_type,name,checker_name", [
        (DefaultRecordBatch.CODEC_GZIP, "gzip", "has_gzip"),
        (DefaultRecordBatch.CODEC_SNAPPY, "snappy", "has_snappy"),
        (DefaultRecordBatch.CODEC_LZ4, "lz4", "has_lz4")
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_unavailable_codec(magic, compression_type, name, checker_name):
        builder = DefaultRecordBatchBuilder(
            magic=2, compression_type=compression_type, is_transactional=0,
            producer_id=-1, producer_epoch=-1, base_sequence=-1, batch_size=1024)
        builder.append(0, timestamp=None, key=None, value=b"M" * 2000, headers=[])
>       correct_buffer = builder.build()

test/record/test_default_records.py:194:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/default_records.py:541: in build
    send_compressed = self._maybe_compress()
kafka/record/default_records.py:516: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        elif compression_type == self.CODEC_ZSTD:
            checker, name = codecs.has_zstd, "zstd"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/default_records.py:118: UnsupportedCodecError
___________________ test_unavailable_codec[1-3-lz4-has_lz4] ____________________

magic = 1, compression_type = 3, name = 'lz4', checker_name = 'has_lz4'

    @pytest.mark.parametrize("compression_type,name,checker_name", [
        (DefaultRecordBatch.CODEC_GZIP, "gzip", "has_gzip"),
        (DefaultRecordBatch.CODEC_SNAPPY, "snappy", "has_snappy"),
        (DefaultRecordBatch.CODEC_LZ4, "lz4", "has_lz4")
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_unavailable_codec(magic, compression_type, name, checker_name):
        builder = DefaultRecordBatchBuilder(
            magic=2, compression_type=compression_type, is_transactional=0,
            producer_id=-1, producer_epoch=-1, base_sequence=-1, batch_size=1024)
        builder.append(0, timestamp=None, key=None, value=b"M" * 2000, headers=[])
>       correct_buffer = builder.build()

test/record/test_default_records.py:194:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/default_records.py:541: in build
    send_compressed = self._maybe_compress()
kafka/record/default_records.py:516: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        elif compression_type == self.CODEC_ZSTD:
            checker, name = codecs.has_zstd, "zstd"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/default_records.py:118: UnsupportedCodecError
______________ test_read_write_serde_v0_v1_with_compression[0-3] _______________

compression_type = 3, magic = 0

    @pytest.mark.parametrize("compression_type", [
        LegacyRecordBatch.CODEC_GZIP, LegacyRecordBatch.CODEC_SNAPPY,
        LegacyRecordBatch.CODEC_LZ4
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_read_write_serde_v0_v1_with_compression(compression_type, magic):
        builder = LegacyRecordBatchBuilder(
            magic=magic, compression_type=compression_type, batch_size=9999999)
        for offset in range(10):
            builder.append(
                offset, timestamp=9999999, key=b"test", value=b"Super")
>       buffer = builder.build()

test/record/test_legacy_records.py:48:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
______________ test_read_write_serde_v0_v1_with_compression[1-3] _______________

compression_type = 3, magic = 1

    @pytest.mark.parametrize("compression_type", [
        LegacyRecordBatch.CODEC_GZIP, LegacyRecordBatch.CODEC_SNAPPY,
        LegacyRecordBatch.CODEC_LZ4
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_read_write_serde_v0_v1_with_compression(compression_type, magic):
        builder = LegacyRecordBatchBuilder(
            magic=magic, compression_type=compression_type, batch_size=9999999)
        for offset in range(10):
            builder.append(
                offset, timestamp=9999999, key=b"test", value=b"Super")
>       buffer = builder.build()

test/record/test_legacy_records.py:48:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
___________________ test_unavailable_codec[0-3-lz4-has_lz4] ____________________

magic = 0, compression_type = 3, name = 'lz4', checker_name = 'has_lz4'

    @pytest.mark.parametrize("compression_type,name,checker_name", [
        (LegacyRecordBatch.CODEC_GZIP, "gzip", "has_gzip"),
        (LegacyRecordBatch.CODEC_SNAPPY, "snappy", "has_snappy"),
        (LegacyRecordBatch.CODEC_LZ4, "lz4", "has_lz4")
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_unavailable_codec(magic, compression_type, name, checker_name):
        builder = LegacyRecordBatchBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024)
        builder.append(0, timestamp=None, key=None, value=b"M")
>       correct_buffer = builder.build()

test/record/test_legacy_records.py:186:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
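
The gzip- and snappy-parameterized cases of these same tests pass, which suggests that only the lz4 bindings are missing from the build root. A quick probe of the availability helpers that _assert_has_codec() consults (a sketch; run it with the same PYTHONPATH as %check so the freshly installed kafka package is importable):

# Probe sketch: report which compression codecs kafka-python can use here.
from kafka import codec

for name, probe in [("gzip", codec.has_gzip),
                    ("snappy", codec.has_snappy),
                    ("lz4", codec.has_lz4),
                    ("zstd", codec.has_zstd)]:
    print("{:<6} {}".format(name, "available" if probe() else "missing"))
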
___________________ test_unavailable_codec[1-3-lz4-has_lz4] ____________________

magic = 1, compression_type = 3, name = 'lz4', checker_name = 'has_lz4'

    @pytest.mark.parametrize("compression_type,name,checker_name", [
        (LegacyRecordBatch.CODEC_GZIP, "gzip", "has_gzip"),
        (LegacyRecordBatch.CODEC_SNAPPY, "snappy", "has_snappy"),
        (LegacyRecordBatch.CODEC_LZ4, "lz4", "has_lz4")
    ])
    @pytest.mark.parametrize("magic", [0, 1])
    def test_unavailable_codec(magic, compression_type, name, checker_name):
        builder = LegacyRecordBatchBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024)
        builder.append(0, timestamp=None, key=None, value=b"M")
>       correct_buffer = builder.build()

test/record/test_legacy_records.py:186:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
_______________________ test_memory_records_builder[0-3] _______________________

magic = 0, compression_type = 3

    @pytest.mark.parametrize("compression_type", [0, 1, 2, 3])
    @pytest.mark.parametrize("magic", [0, 1, 2])
    def test_memory_records_builder(magic, compression_type):
        builder = MemoryRecordsBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024 * 10)
        base_size = builder.size_in_bytes()  # V2 has a header before

        msg_sizes = []
        for offset in range(10):
            metadata = builder.append(
                timestamp=10000 + offset, key=b"test", value=b"Super")
            msg_sizes.append(metadata.size)
            assert metadata.offset == offset
            if magic > 0:
                assert metadata.timestamp == 10000 + offset
            else:
                assert metadata.timestamp == -1
            assert builder.next_offset() == offset + 1

        # Error appends should not leave junk behind, like null bytes or something
        with pytest.raises(TypeError):
            builder.append(
                timestamp=None, key="test", value="Super")  # Not bytes, but str

        assert not builder.is_full()
        size_before_close = builder.size_in_bytes()
        assert size_before_close == sum(msg_sizes) + base_size

        # Size should remain the same after closing. No trailing bytes
>       builder.close()

test/record/test_records.py:199:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/memory_records.py:162: in close
    self._buffer = bytes(self._builder.build())
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
_______________________ test_memory_records_builder[1-3] _______________________

magic = 1, compression_type = 3

    @pytest.mark.parametrize("compression_type", [0, 1, 2, 3])
    @pytest.mark.parametrize("magic", [0, 1, 2])
    def test_memory_records_builder(magic, compression_type):
        builder = MemoryRecordsBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024 * 10)
        base_size = builder.size_in_bytes()  # V2 has a header before

        msg_sizes = []
        for offset in range(10):
            metadata = builder.append(
                timestamp=10000 + offset, key=b"test", value=b"Super")
            msg_sizes.append(metadata.size)
            assert metadata.offset == offset
            if magic > 0:
                assert metadata.timestamp == 10000 + offset
            else:
                assert metadata.timestamp == -1
            assert builder.next_offset() == offset + 1

        # Error appends should not leave junk behind, like null bytes or something
        with pytest.raises(TypeError):
            builder.append(
                timestamp=None, key="test", value="Super")  # Not bytes, but str

        assert not builder.is_full()
        size_before_close = builder.size_in_bytes()
        assert size_before_close == sum(msg_sizes) + base_size

        # Size should remain the same after closing. No trailing bytes
>       builder.close()

test/record/test_records.py:199:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/memory_records.py:162: in close
    self._buffer = bytes(self._builder.build())
kafka/record/legacy_records.py:469: in build
    self._maybe_compress()
kafka/record/legacy_records.py:442: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/legacy_records.py:126: UnsupportedCodecError
_______________________ test_memory_records_builder[2-3] _______________________

magic = 2, compression_type = 3

    @pytest.mark.parametrize("compression_type", [0, 1, 2, 3])
    @pytest.mark.parametrize("magic", [0, 1, 2])
    def test_memory_records_builder(magic, compression_type):
        builder = MemoryRecordsBuilder(
            magic=magic, compression_type=compression_type, batch_size=1024 * 10)
        base_size = builder.size_in_bytes()  # V2 has a header before

        msg_sizes = []
        for offset in range(10):
            metadata = builder.append(
                timestamp=10000 + offset, key=b"test", value=b"Super")
            msg_sizes.append(metadata.size)
            assert metadata.offset == offset
            if magic > 0:
                assert metadata.timestamp == 10000 + offset
            else:
                assert metadata.timestamp == -1
            assert builder.next_offset() == offset + 1

        # Error appends should not leave junk behind, like null bytes or something
        with pytest.raises(TypeError):
            builder.append(
                timestamp=None, key="test", value="Super")  # Not bytes, but str

        assert not builder.is_full()
        size_before_close = builder.size_in_bytes()
        assert size_before_close == sum(msg_sizes) + base_size

        # Size should remain the same after closing. No trailing bytes
>       builder.close()

test/record/test_records.py:199:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
kafka/record/memory_records.py:162: in close
    self._buffer = bytes(self._builder.build())
kafka/record/default_records.py:541: in build
    send_compressed = self._maybe_compress()
kafka/record/default_records.py:516: in _maybe_compress
    self._assert_has_codec(self._compression_type)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = compression_type = 3

    def _assert_has_codec(self, compression_type):
        if compression_type == self.CODEC_GZIP:
            checker, name = codecs.has_gzip, "gzip"
        elif compression_type == self.CODEC_SNAPPY:
            checker, name = codecs.has_snappy, "snappy"
        elif compression_type == self.CODEC_LZ4:
            checker, name = codecs.has_lz4, "lz4"
        elif compression_type == self.CODEC_ZSTD:
            checker, name = codecs.has_zstd, "zstd"
        if not checker():
>           raise UnsupportedCodecError(
                "Libraries for {} compression codec not found".format(name))
E           kafka.errors.UnsupportedCodecError: UnsupportedCodecError: Libraries for lz4 compression codec not found

kafka/record/default_records.py:118: UnsupportedCodecError
============================= slowest 10 durations =============================
0.19s call test/test_assignors.py::test_sticky_large_assignment_with_multiple_consumers_leaving
0.11s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[4-20-38]
0.10s call test/test_coordinator.py::test_fetch_committed_offsets
0.10s call test/test_coordinator.py::test_commit_offsets_sync
0.09s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[37-20-39]
0.08s call test/test_codec.py::test_gzip
0.08s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[16-20-33]
0.07s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[55-20-38]
0.07s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[24-19-26]
0.07s call test/test_assignors.py::test_reassignment_with_random_subscriptions_and_changes[17-18-29]
=========================== short test summary info ============================
FAILED test/record/test_default_records.py::test_read_write_serde_v2[3] - kaf...
FAILED test/record/test_default_records.py::test_unavailable_codec[0-3-lz4-has_lz4]
FAILED test/record/test_default_records.py::test_unavailable_codec[1-3-lz4-has_lz4]
FAILED test/record/test_legacy_records.py::test_read_write_serde_v0_v1_with_compression[0-3]
FAILED test/record/test_legacy_records.py::test_read_write_serde_v0_v1_with_compression[1-3]
FAILED test/record/test_legacy_records.py::test_unavailable_codec[0-3-lz4-has_lz4]
FAILED test/record/test_legacy_records.py::test_unavailable_codec[1-3-lz4-has_lz4]
FAILED test/record/test_records.py::test_memory_records_builder[0-3] - kafka....
FAILED test/record/test_records.py::test_memory_records_builder[1-3] - kafka....
FAILED test/record/test_records.py::test_memory_records_builder[2-3] - kafka....
================= 10 failed, 1069 passed, 36 skipped in 6.21s ==================
error: Bad exit status from /var/tmp/rpm-tmp.XpSAbk (%check)
    Bad exit status from /var/tmp/rpm-tmp.XpSAbk (%check)
RPM build errors:
Child return code was: 1
EXCEPTION: [Error('Command failed: \n # /usr/bin/systemd-nspawn -q -M c4f4629c08374b2c8c38503b18b97234 -D /var/lib/mock/dist-ocs23-python311-build-60851-2252/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin --setenv=PROMPT_COMMAND=printf "\\033]0;\\007" --setenv=PS1= \\s-\\v\\$ --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c /usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec\n', 1)]
Traceback (most recent call last):
  File "/usr/lib/python3.10/site-packages/mockbuild/trace_decorator.py", line 93, in trace
    result = func(*args, **kw)
  File "/usr/lib/python3.10/site-packages/mockbuild/util.py", line 597, in do_with_status
    raise exception.Error("Command failed: \n # %s\n%s" % (command, output), child.returncode)
mockbuild.exception.Error: Command failed:
 # /usr/bin/systemd-nspawn -q -M c4f4629c08374b2c8c38503b18b97234 -D /var/lib/mock/dist-ocs23-python311-build-60851-2252/root -a -u mockbuild --capability=cap_ipc_lock --bind=/tmp/mock-resolv.3mppfq_w:/etc/resolv.conf --bind=/dev/btrfs-control --bind=/dev/mapper/control --bind=/dev/loop-control --bind=/dev/loop0 --bind=/dev/loop1 --bind=/dev/loop2 --bind=/dev/loop3 --bind=/dev/loop4 --bind=/dev/loop5 --bind=/dev/loop6 --bind=/dev/loop7 --bind=/dev/loop8 --bind=/dev/loop9 --bind=/dev/loop10 --bind=/dev/loop11 --console=pipe --setenv=TERM=vt100 --setenv=SHELL=/bin/bash --setenv=HOME=/builddir --setenv=HOSTNAME=mock --setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin --setenv=PROMPT_COMMAND=printf "\033]0;\007" --setenv=PS1= \s-\v\$ --setenv=LANG=C.UTF-8 --resolv-conf=off bash --login -c /usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-kafka.spec
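
The %check failure is what propagates up as the bad exit status and the mockbuild exception above. Two plausible ways out are to pull the lz4 Python bindings into the build root or to keep the lz4-parameterized cases out of the run; the conftest.py fragment below is a hypothetical sketch of the second option, not something taken from this spec or from upstream.

# Hypothetical conftest.py fragment (illustration only): skip the
# lz4-parameterized record tests when the lz4 bindings are not installed,
# instead of letting them die with UnsupportedCodecError.
import pytest

try:
    from kafka import codec
    HAS_LZ4 = codec.has_lz4()
except ImportError:
    HAS_LZ4 = True  # kafka itself not importable; let the tests report that


def pytest_collection_modifyitems(config, items):
    if HAS_LZ4:
        return
    skip_lz4 = pytest.mark.skip(reason="lz4 compression bindings not installed")
    for item in items:
        callspec = getattr(item, "callspec", None)
        compression = callspec.params.get("compression_type") if callspec else None
        # CODEC_LZ4 is 3 in both the legacy and the v2 record formats, which
        # also covers ids like test_memory_records_builder[0-3] that do not
        # carry "lz4" in their name.
        if compression == 3 or "lz4" in item.name:
            item.add_marker(skip_lz4)
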