Mock Version: 3.5 Mock Version: 3.5 Mock Version: 3.5 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueraiseExc=FalseprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.jJmppM + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf rdflib-6.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/rdflib-6.2.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd rdflib-6.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.OBhyEy + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + echo 'python3dist(toml)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -x tests Import error: No module named 'toml' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
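The "Child return code was: 11" / "Dynamic buildrequires detected" / "Going to install missing buildrequires" pattern above repeats once per pass: rpmbuild -br stops after %generate_buildrequires, writes the *.buildreqs.nosrc.rpm, and exits with code 11 so that mock can install whatever the pass emitted (here python3dist(toml), which is why this first pass died with "No module named 'toml'") and rerun. The sketch below is only a hypothetical illustration of that driver loop, not mock's real code; install_generated_buildrequires is a placeholder for the package-manager step that root.log records.

    import subprocess

    def install_generated_buildrequires() -> bool:
        # Placeholder: mock installs the BuildRequires listed in the freshly
        # written *.buildreqs.nosrc.rpm (see root.log) and reports whether
        # anything new was actually added to the chroot.
        return False

    def build_with_dynamic_buildrequires(spec: str, max_passes: int = 10) -> None:
        for _ in range(max_passes):
            # rpmbuild -br stops after %generate_buildrequires and exits 11
            # once the buildreqs source RPM has been written.
            rc = subprocess.call(["rpmbuild", "-br", "--noclean", "--nodeps", spec])
            if rc != 11:
                break
            if not install_generated_buildrequires():
                break  # nothing new to install: the BuildRequires set is complete
        # final full build; %prep is skipped because the tree is already unpacked
        subprocess.call(["rpmbuild", "-ba", "--noprep", "--noclean", "--nodeps", spec])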
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueraiseExc=FalseprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.N6sVeF + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf rdflib-6.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/rdflib-6.2.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd rdflib-6.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.xqpQKt + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + echo 'python3dist(toml)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -x tests Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 68.0.0) Handling wheel from default build backend Requirement not satisfied: wheel Exiting dependency generation pass: build backend + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
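The per-dependency lines in this pass ("Requirement satisfied: setuptools >= 40.8 (installed: setuptools 68.0.0)", "Requirement not satisfied: wheel") behave like a lookup of the installed distribution followed by a version-specifier check. A rough stand-alone approximation with importlib.metadata and packaging is sketched below; it is not the pyproject_buildrequires.py source, just the same idea.

    from importlib.metadata import version, PackageNotFoundError
    from packaging.requirements import Requirement

    def report(req_string: str) -> None:
        req = Requirement(req_string)            # e.g. "setuptools >= 40.8"
        try:
            installed = version(req.name)        # version of the installed dist, if any
        except PackageNotFoundError:
            print(f"Requirement not satisfied: {req_string}")
            return
        if req.specifier.contains(installed, prereleases=True):
            print(f"Requirement satisfied: {req_string} (installed: {req.name} {installed})")
        else:
            print(f"Requirement not satisfied: {req_string}")

    report("setuptools >= 40.8")
    report("wheel")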
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueraiseExc=FalseprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.ow2qOV + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf rdflib-6.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/rdflib-6.2.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd rdflib-6.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.pzHhgC + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + echo 'python3dist(toml)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -x tests Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 68.0.0) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution HOOK STDOUT: running egg_info HOOK STDOUT: writing rdflib.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to rdflib.egg-info/dependency_links.txt HOOK STDOUT: writing entry points to rdflib.egg-info/entry_points.txt HOOK STDOUT: writing requirements to rdflib.egg-info/requires.txt HOOK STDOUT: writing top-level names to rdflib.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'rdflib.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'rdflib.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution Handling isodate from wheel metadata: Requires-Dist Requirement not satisfied: isodate Handling pyparsing from 
wheel metadata: Requires-Dist Requirement not satisfied: pyparsing Handling setuptools from wheel metadata: Requires-Dist Requirement satisfied: setuptools (installed: setuptools 68.0.0) Handling importlib-metadata ; python_version < "3.8.0" from wheel metadata: Requires-Dist Ignoring alien requirement: importlib-metadata ; python_version < "3.8.0" Handling berkeleydb ; extra == 'berkeleydb' from wheel metadata: Requires-Dist Ignoring alien requirement: berkeleydb ; extra == 'berkeleydb' Handling black ==22.6.0 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: black ==22.6.0 ; extra == 'dev' Handling flake8 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flake8 ; extra == 'dev' Handling isort ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: isort ; extra == 'dev' Handling mypy ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: mypy ; extra == 'dev' Handling pep8-naming ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: pep8-naming ; extra == 'dev' Handling types-setuptools ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: types-setuptools ; extra == 'dev' Handling flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' Handling myst-parser ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: myst-parser ; extra == 'docs' Handling sphinx <6 ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx <6 ; extra == 'docs' Handling sphinxcontrib-apidoc ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-apidoc ; extra == 'docs' Handling sphinxcontrib-kroki ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-kroki ; extra == 'docs' Handling sphinx-autodoc-typehints ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx-autodoc-typehints ; extra == 'docs' Handling html5lib ; extra == 'html' from wheel metadata: Requires-Dist Ignoring alien requirement: html5lib ; extra == 'html' Handling networkx ; extra == 'networkx' from wheel metadata: Requires-Dist Ignoring alien requirement: networkx ; extra == 'networkx' Handling html5lib ; extra == 'tests' from wheel metadata: Requires-Dist Requirement not satisfied: html5lib ; extra == 'tests' Handling pytest ; extra == 'tests' from wheel metadata: Requires-Dist Requirement not satisfied: pytest ; extra == 'tests' Handling pytest-cov ; extra == 'tests' from wheel metadata: Requires-Dist Requirement not satisfied: pytest-cov ; extra == 'tests' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
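Once the build backend's own requirements are met, the generator asks the setuptools backend for its PEP 517 metadata and walks the Requires-Dist entries shown above (isodate, pyparsing, the extras, and so on). The sketch below drives the same two standard hooks directly; it assumes it is run from inside the unpacked rdflib-6.2.0 source tree, and the printed values are simply whatever that tree's metadata declares.

    import email
    import pathlib
    import tempfile
    from setuptools import build_meta

    # Extra requirements the backend wants before it can build a wheel
    # (on this setuptools version that is where 'wheel' comes from).
    print(build_meta.get_requires_for_build_wheel())

    # Generate the .dist-info metadata and read its Requires-Dist entries.
    with tempfile.TemporaryDirectory() as tmp:
        distinfo = build_meta.prepare_metadata_for_build_wheel(tmp)
        metadata_path = pathlib.Path(tmp) / distinfo / "METADATA"
        metadata = email.message_from_bytes(metadata_path.read_bytes())
        for dep in metadata.get_all("Requires-Dist", []):
            print("Requires-Dist:", dep)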
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueraiseExc=FalseprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -br --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.qSmj2s + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf rdflib-6.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/rdflib-6.2.0.tar.gz + STATUS=0 + '[' 0 -ne 0 ']' + cd rdflib-6.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.yg95iK + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + echo 'python3dist(toml)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -x tests Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 68.0.0) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution HOOK STDOUT: running egg_info HOOK STDOUT: writing rdflib.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to rdflib.egg-info/dependency_links.txt HOOK STDOUT: writing entry points to rdflib.egg-info/entry_points.txt HOOK STDOUT: writing requirements to rdflib.egg-info/requires.txt HOOK STDOUT: writing top-level names to rdflib.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'rdflib.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'rdflib.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution Handling isodate from wheel metadata: Requires-Dist Requirement satisfied: isodate (installed: isodate 0.6.1) 
Handling pyparsing from wheel metadata: Requires-Dist Requirement satisfied: pyparsing (installed: pyparsing 3.0.9) Handling setuptools from wheel metadata: Requires-Dist Requirement satisfied: setuptools (installed: setuptools 68.0.0) Handling importlib-metadata ; python_version < "3.8.0" from wheel metadata: Requires-Dist Ignoring alien requirement: importlib-metadata ; python_version < "3.8.0" Handling berkeleydb ; extra == 'berkeleydb' from wheel metadata: Requires-Dist Ignoring alien requirement: berkeleydb ; extra == 'berkeleydb' Handling black ==22.6.0 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: black ==22.6.0 ; extra == 'dev' Handling flake8 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flake8 ; extra == 'dev' Handling isort ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: isort ; extra == 'dev' Handling mypy ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: mypy ; extra == 'dev' Handling pep8-naming ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: pep8-naming ; extra == 'dev' Handling types-setuptools ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: types-setuptools ; extra == 'dev' Handling flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' Handling myst-parser ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: myst-parser ; extra == 'docs' Handling sphinx <6 ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx <6 ; extra == 'docs' Handling sphinxcontrib-apidoc ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-apidoc ; extra == 'docs' Handling sphinxcontrib-kroki ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-kroki ; extra == 'docs' Handling sphinx-autodoc-typehints ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx-autodoc-typehints ; extra == 'docs' Handling html5lib ; extra == 'html' from wheel metadata: Requires-Dist Ignoring alien requirement: html5lib ; extra == 'html' Handling networkx ; extra == 'networkx' from wheel metadata: Requires-Dist Ignoring alien requirement: networkx ; extra == 'networkx' Handling html5lib ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: html5lib ; extra == 'tests' (installed: html5lib 1.1) Handling pytest ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: pytest ; extra == 'tests' (installed: pytest 7.4.0) Handling pytest-cov ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: pytest-cov ; extra == 'tests' (installed: pytest-cov 4.1.0) + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
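Which of those Requires-Dist entries become BuildRequires is decided by evaluating each dependency's environment marker with the requested extra filled in: the script was invoked with "-x tests", so markers like extra == 'tests' pass, while the 'dev'/'docs'/'berkeleydb' extras and the python_version < "3.8.0" dependency are reported as "alien" and ignored. A small sketch of that marker evaluation with packaging, run under the Python 3.11 used in this chroot, is below.

    from packaging.requirements import Requirement

    requested_extra = "tests"   # corresponds to the "-x tests" option in the log
    deps = [
        'importlib-metadata ; python_version < "3.8.0"',
        "black ==22.6.0 ; extra == 'dev'",
        "html5lib ; extra == 'tests'",
        "pytest ; extra == 'tests'",
    ]
    for dep in deps:
        req = Requirement(dep)
        wanted = req.marker is None or req.marker.evaluate({"extra": requested_extra})
        print(("handle: " if wanted else "ignore alien requirement: ") + dep)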
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'], chrootPath='/home/lib/mock/dist-ocs23-loongarch64-build-224563-16804/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=86400uid=983gid=135user='mockbuild'nspawn_args=[]unshare_net=TrueprintOutput=False) Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --noclean --target noarch --nodeps /builddir/build/SPECS/python-rdflib.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'} and shell False Building target platforms: noarch Building for target noarch Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.5kxZ4Y + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + echo 'python3dist(toml)' + rm -rfv rdflib-6.2.0.dist-info/ removed 'rdflib-6.2.0.dist-info/entry_points.txt' removed 'rdflib-6.2.0.dist-info/top_level.txt' removed 'rdflib-6.2.0.dist-info/METADATA' removed 'rdflib-6.2.0.dist-info/LICENSE' removed directory 'rdflib-6.2.0.dist-info/' + '[' -f /usr/bin/python3 ']' + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -s /usr/lib/rpm/OpenCloudOS/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 -x tests Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 68.0.0) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution HOOK STDOUT: running egg_info HOOK STDOUT: writing rdflib.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to rdflib.egg-info/dependency_links.txt HOOK STDOUT: writing entry points to rdflib.egg-info/entry_points.txt HOOK STDOUT: writing requirements to rdflib.egg-info/requires.txt HOOK STDOUT: writing top-level names to rdflib.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'rdflib.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: adding license file 'LICENSE' HOOK STDOUT: writing manifest file 'rdflib.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.41.2) warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution Handling isodate from wheel metadata: Requires-Dist Requirement satisfied: isodate (installed: isodate 0.6.1) Handling pyparsing from wheel metadata: Requires-Dist Requirement satisfied: pyparsing (installed: 
pyparsing 3.0.9) Handling setuptools from wheel metadata: Requires-Dist Requirement satisfied: setuptools (installed: setuptools 68.0.0) Handling importlib-metadata ; python_version < "3.8.0" from wheel metadata: Requires-Dist Ignoring alien requirement: importlib-metadata ; python_version < "3.8.0" Handling berkeleydb ; extra == 'berkeleydb' from wheel metadata: Requires-Dist Ignoring alien requirement: berkeleydb ; extra == 'berkeleydb' Handling black ==22.6.0 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: black ==22.6.0 ; extra == 'dev' Handling flake8 ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flake8 ; extra == 'dev' Handling isort ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: isort ; extra == 'dev' Handling mypy ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: mypy ; extra == 'dev' Handling pep8-naming ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: pep8-naming ; extra == 'dev' Handling types-setuptools ; extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: types-setuptools ; extra == 'dev' Handling flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' from wheel metadata: Requires-Dist Ignoring alien requirement: flakeheaven ; (python_version >= "3.8.0") and extra == 'dev' Handling myst-parser ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: myst-parser ; extra == 'docs' Handling sphinx <6 ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx <6 ; extra == 'docs' Handling sphinxcontrib-apidoc ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-apidoc ; extra == 'docs' Handling sphinxcontrib-kroki ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinxcontrib-kroki ; extra == 'docs' Handling sphinx-autodoc-typehints ; extra == 'docs' from wheel metadata: Requires-Dist Ignoring alien requirement: sphinx-autodoc-typehints ; extra == 'docs' Handling html5lib ; extra == 'html' from wheel metadata: Requires-Dist Ignoring alien requirement: html5lib ; extra == 'html' Handling networkx ; extra == 'networkx' from wheel metadata: Requires-Dist Ignoring alien requirement: networkx ; extra == 'networkx' Handling html5lib ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: html5lib ; extra == 'tests' (installed: html5lib 1.1) Handling pytest ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: pytest ; extra == 'tests' (installed: pytest 7.4.0) Handling pytest-cov ; extra == 'tests' from wheel metadata: Requires-Dist Requirement satisfied: pytest-cov ; extra == 'tests' (installed: pytest-cov 4.1.0) + RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.Q3NBuM + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables 
-fstack-clash-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd rdflib-6.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + /usr/bin/python3 setup.py build '--executable=/usr/bin/python3 -s' running build running build_py creating build creating build/lib creating build/lib/rdflib copying rdflib/__init__.py -> build/lib/rdflib copying rdflib/_type_checking.py -> build/lib/rdflib copying rdflib/collection.py -> build/lib/rdflib copying rdflib/compare.py -> build/lib/rdflib copying rdflib/compat.py -> build/lib/rdflib copying rdflib/container.py -> build/lib/rdflib copying rdflib/events.py -> build/lib/rdflib copying rdflib/exceptions.py -> build/lib/rdflib copying rdflib/graph.py -> build/lib/rdflib copying rdflib/parser.py -> build/lib/rdflib copying rdflib/paths.py -> build/lib/rdflib copying rdflib/plugin.py -> build/lib/rdflib copying rdflib/query.py -> build/lib/rdflib copying rdflib/resource.py -> build/lib/rdflib copying rdflib/serializer.py -> build/lib/rdflib copying rdflib/store.py -> build/lib/rdflib copying rdflib/term.py -> build/lib/rdflib copying rdflib/util.py -> build/lib/rdflib copying rdflib/void.py -> build/lib/rdflib creating build/lib/rdflib/extras copying rdflib/extras/__init__.py -> build/lib/rdflib/extras copying rdflib/extras/cmdlineutils.py -> build/lib/rdflib/extras copying rdflib/extras/describer.py -> build/lib/rdflib/extras copying rdflib/extras/external_graph_libs.py -> build/lib/rdflib/extras copying rdflib/extras/infixowl.py -> build/lib/rdflib/extras creating build/lib/rdflib/namespace copying rdflib/namespace/_BRICK.py -> build/lib/rdflib/namespace copying rdflib/namespace/_CSVW.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DC.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DCAM.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DCAT.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DCMITYPE.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DCTERMS.py -> build/lib/rdflib/namespace copying rdflib/namespace/_DOAP.py -> build/lib/rdflib/namespace copying rdflib/namespace/_FOAF.py -> build/lib/rdflib/namespace copying rdflib/namespace/_GEO.py -> build/lib/rdflib/namespace copying rdflib/namespace/_ODRL2.py -> 
build/lib/rdflib/namespace copying rdflib/namespace/_ORG.py -> build/lib/rdflib/namespace copying rdflib/namespace/_OWL.py -> build/lib/rdflib/namespace copying rdflib/namespace/_PROF.py -> build/lib/rdflib/namespace copying rdflib/namespace/_PROV.py -> build/lib/rdflib/namespace copying rdflib/namespace/_QB.py -> build/lib/rdflib/namespace copying rdflib/namespace/_RDF.py -> build/lib/rdflib/namespace copying rdflib/namespace/_RDFS.py -> build/lib/rdflib/namespace copying rdflib/namespace/_SDO.py -> build/lib/rdflib/namespace copying rdflib/namespace/_SH.py -> build/lib/rdflib/namespace copying rdflib/namespace/_SKOS.py -> build/lib/rdflib/namespace copying rdflib/namespace/_SOSA.py -> build/lib/rdflib/namespace copying rdflib/namespace/_SSN.py -> build/lib/rdflib/namespace copying rdflib/namespace/_TIME.py -> build/lib/rdflib/namespace copying rdflib/namespace/_VANN.py -> build/lib/rdflib/namespace copying rdflib/namespace/_VOID.py -> build/lib/rdflib/namespace copying rdflib/namespace/_WGS.py -> build/lib/rdflib/namespace copying rdflib/namespace/_XSD.py -> build/lib/rdflib/namespace copying rdflib/namespace/__init__.py -> build/lib/rdflib/namespace creating build/lib/rdflib/plugins copying rdflib/plugins/__init__.py -> build/lib/rdflib/plugins creating build/lib/rdflib/tools copying rdflib/tools/__init__.py -> build/lib/rdflib/tools copying rdflib/tools/csv2rdf.py -> build/lib/rdflib/tools copying rdflib/tools/defined_namespace_creator.py -> build/lib/rdflib/tools copying rdflib/tools/graphisomorphism.py -> build/lib/rdflib/tools copying rdflib/tools/rdf2dot.py -> build/lib/rdflib/tools copying rdflib/tools/rdfpipe.py -> build/lib/rdflib/tools copying rdflib/tools/rdfs2dot.py -> build/lib/rdflib/tools creating build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/RDFVOC.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/__init__.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/hext.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/jsonld.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/notation3.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/nquads.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/ntriples.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/rdfxml.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/trig.py -> build/lib/rdflib/plugins/parsers copying rdflib/plugins/parsers/trix.py -> build/lib/rdflib/plugins/parsers creating build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/__init__.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/hext.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/jsonld.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/longturtle.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/n3.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/nquads.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/nt.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/rdfxml.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/trig.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/trix.py -> build/lib/rdflib/plugins/serializers copying rdflib/plugins/serializers/turtle.py -> build/lib/rdflib/plugins/serializers copying 
rdflib/plugins/serializers/xmlwriter.py -> build/lib/rdflib/plugins/serializers creating build/lib/rdflib/plugins/shared copying rdflib/plugins/shared/__init__.py -> build/lib/rdflib/plugins/shared creating build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/__init__.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/aggregates.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/algebra.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/datatypes.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/evaluate.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/evalutils.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/operators.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/parser.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/parserutils.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/processor.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/sparql.py -> build/lib/rdflib/plugins/sparql copying rdflib/plugins/sparql/update.py -> build/lib/rdflib/plugins/sparql creating build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/__init__.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/auditable.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/berkeleydb.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/concurrent.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/memory.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/regexmatching.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/sparqlconnector.py -> build/lib/rdflib/plugins/stores copying rdflib/plugins/stores/sparqlstore.py -> build/lib/rdflib/plugins/stores creating build/lib/rdflib/plugins/shared/jsonld copying rdflib/plugins/shared/jsonld/__init__.py -> build/lib/rdflib/plugins/shared/jsonld copying rdflib/plugins/shared/jsonld/context.py -> build/lib/rdflib/plugins/shared/jsonld copying rdflib/plugins/shared/jsonld/errors.py -> build/lib/rdflib/plugins/shared/jsonld copying rdflib/plugins/shared/jsonld/keys.py -> build/lib/rdflib/plugins/shared/jsonld copying rdflib/plugins/shared/jsonld/util.py -> build/lib/rdflib/plugins/shared/jsonld creating build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/__init__.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/csvresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/graph.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/jsonresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/rdfresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/tsvresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/txtresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/plugins/sparql/results/xmlresults.py -> build/lib/rdflib/plugins/sparql/results copying rdflib/py.typed -> build/lib/rdflib + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.tajTfX + umask 022 + cd /builddir/build/BUILD + '[' /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch '!=' / ']' + rm -rf /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch ++ dirname /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch + mkdir -p 
/builddir/build/BUILDROOT + mkdir /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch + cd rdflib-6.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + /usr/bin/python3 setup.py install -O1 --skip-build --root /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch running install /usr/lib/python3.11/site-packages/setuptools/_distutils/cmd.py:66: SetuptoolsDeprecationWarning: setup.py install is deprecated. !! ******************************************************************************** Please avoid running ``setup.py`` directly. Instead, use pypa/build, pypa/installer or other standards-based tools. Follow the current Python packaging guidelines when building Python RPM packages. See https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html and https://docs.fedoraproject.org/en-US/packaging-guidelines/Python/ for details. ******************************************************************************** !! self.initialize_options() running install_lib creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11 creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/_type_checking.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/collection.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/compare.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/compat.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/container.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/events.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/exceptions.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/graph.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/parser.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/paths.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/plugin.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/query.py -> 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/resource.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/serializer.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/store.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/term.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/util.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib copying build/lib/rdflib/void.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras copying build/lib/rdflib/extras/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras copying build/lib/rdflib/extras/cmdlineutils.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras copying build/lib/rdflib/extras/describer.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras copying build/lib/rdflib/extras/external_graph_libs.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras copying build/lib/rdflib/extras/infixowl.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_BRICK.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_CSVW.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DC.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DCAM.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DCAT.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DCMITYPE.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DCTERMS.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_DOAP.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_FOAF.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_GEO.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying 
build/lib/rdflib/namespace/_ODRL2.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_ORG.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_OWL.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_PROF.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_PROV.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_QB.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_RDF.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_RDFS.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_SDO.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_SH.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_SKOS.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_SOSA.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_SSN.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_TIME.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_VANN.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_VOID.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_WGS.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/_XSD.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace copying build/lib/rdflib/namespace/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins copying build/lib/rdflib/plugins/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/RDFVOC.py -> 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/hext.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/jsonld.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/notation3.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/nquads.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/ntriples.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/rdfxml.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/trig.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers copying build/lib/rdflib/plugins/parsers/trix.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/hext.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/jsonld.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/longturtle.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/n3.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/nquads.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/nt.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/rdfxml.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/trig.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/trix.py -> 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/turtle.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers copying build/lib/rdflib/plugins/serializers/xmlwriter.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared copying build/lib/rdflib/plugins/shared/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld copying build/lib/rdflib/plugins/shared/jsonld/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld copying build/lib/rdflib/plugins/shared/jsonld/context.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld copying build/lib/rdflib/plugins/shared/jsonld/errors.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld copying build/lib/rdflib/plugins/shared/jsonld/keys.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld copying build/lib/rdflib/plugins/shared/jsonld/util.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/aggregates.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/algebra.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/datatypes.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/evaluate.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/evalutils.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/operators.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/parser.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/parserutils.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/processor.py -> 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/sparql.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql copying build/lib/rdflib/plugins/sparql/update.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/csvresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/graph.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/jsonresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/rdfresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/tsvresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/txtresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results copying build/lib/rdflib/plugins/sparql/results/xmlresults.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/auditable.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/berkeleydb.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/concurrent.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/memory.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/regexmatching.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/sparqlconnector.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores copying build/lib/rdflib/plugins/stores/sparqlstore.py -> 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores creating /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/__init__.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/csv2rdf.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/defined_namespace_creator.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/graphisomorphism.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/rdf2dot.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/rdfpipe.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/tools/rdfs2dot.py -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools copying build/lib/rdflib/py.typed -> /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/_type_checking.py to _type_checking.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/collection.py to collection.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/compare.py to compare.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/compat.py to compat.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/container.py to container.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/events.py to events.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/exceptions.py to exceptions.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/graph.py to graph.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/parser.py to parser.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/paths.py to paths.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugin.py to plugin.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/query.py to query.cpython-311.pyc byte-compiling 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/resource.py to resource.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/serializer.py to serializer.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/store.py to store.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/term.py to term.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/util.py to util.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/void.py to void.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/cmdlineutils.py to cmdlineutils.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py to describer.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/external_graph_libs.py to external_graph_libs.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/infixowl.py to infixowl.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_BRICK.py to _BRICK.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_CSVW.py to _CSVW.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DC.py to _DC.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DCAM.py to _DCAM.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DCAT.py to _DCAT.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DCMITYPE.py to _DCMITYPE.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DCTERMS.py to _DCTERMS.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_DOAP.py to _DOAP.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_FOAF.py to _FOAF.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_GEO.py to _GEO.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_ODRL2.py to _ODRL2.cpython-311.pyc byte-compiling 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_ORG.py to _ORG.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_OWL.py to _OWL.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_PROF.py to _PROF.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_PROV.py to _PROV.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_QB.py to _QB.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_RDF.py to _RDF.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_RDFS.py to _RDFS.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_SDO.py to _SDO.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_SH.py to _SH.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_SKOS.py to _SKOS.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_SOSA.py to _SOSA.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_SSN.py to _SSN.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_TIME.py to _TIME.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_VANN.py to _VANN.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_VOID.py to _VOID.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_WGS.py to _WGS.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/_XSD.py to _XSD.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/namespace/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/RDFVOC.py to RDFVOC.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/hext.py to hext.cpython-311.pyc byte-compiling 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/jsonld.py to jsonld.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/notation3.py to notation3.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/nquads.py to nquads.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/ntriples.py to ntriples.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/rdfxml.py to rdfxml.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/trig.py to trig.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/trix.py to trix.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/hext.py to hext.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/jsonld.py to jsonld.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/longturtle.py to longturtle.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/n3.py to n3.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/nquads.py to nquads.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/nt.py to nt.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/rdfxml.py to rdfxml.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/trig.py to trig.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/trix.py to trix.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/turtle.py to turtle.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/serializers/xmlwriter.py to xmlwriter.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld/__init__.py to 
__init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld/context.py to context.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld/errors.py to errors.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld/keys.py to keys.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/shared/jsonld/util.py to util.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/aggregates.py to aggregates.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/algebra.py to algebra.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/datatypes.py to datatypes.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/evaluate.py to evaluate.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/evalutils.py to evalutils.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/operators.py to operators.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/parser.py to parser.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/parserutils.py to parserutils.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/processor.py to processor.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/sparql.py to sparql.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/update.py to update.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/csvresults.py to csvresults.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/graph.py to graph.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/jsonresults.py to jsonresults.cpython-311.pyc byte-compiling 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/rdfresults.py to rdfresults.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/tsvresults.py to tsvresults.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/txtresults.py to txtresults.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/sparql/results/xmlresults.py to xmlresults.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/auditable.py to auditable.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/berkeleydb.py to berkeleydb.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/concurrent.py to concurrent.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/memory.py to memory.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/regexmatching.py to regexmatching.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/sparqlconnector.py to sparqlconnector.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/stores/sparqlstore.py to sparqlstore.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/__init__.py to __init__.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/csv2rdf.py to csv2rdf.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/defined_namespace_creator.py to defined_namespace_creator.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/graphisomorphism.py to graphisomorphism.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/rdf2dot.py to rdf2dot.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/rdfpipe.py to rdfpipe.cpython-311.pyc byte-compiling /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/rdfs2dot.py to rdfs2dot.cpython-311.pyc writing byte-compilation script '/tmp/tmp9m_6fkr3.py' /usr/bin/python3 /tmp/tmp9m_6fkr3.py removing /tmp/tmp9m_6fkr3.py running install_egg_info running egg_info writing rdflib.egg-info/PKG-INFO writing dependency_links to 
rdflib.egg-info/dependency_links.txt writing entry points to rdflib.egg-info/entry_points.txt writing requirements to rdflib.egg-info/requires.txt writing top-level names to rdflib.egg-info/top_level.txt reading manifest file 'rdflib.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' warning: no files found matching 'ez_setup.py' warning: no files found matching 'skiptests.list' no previously-included directories found matching 'docs/_build' warning: no previously-included files matching '*.pyc' found anywhere in distribution warning: no previously-included files matching '*$py.class' found anywhere in distribution adding license file 'LICENSE' writing manifest file 'rdflib.egg-info/SOURCES.txt' Copying rdflib.egg-info to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib-6.2.0-py3.11.egg-info running install_scripts Installing csv2rdf script to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin Installing rdf2dot script to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin Installing rdfgraphisomorphism script to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin Installing rdfpipe script to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin Installing rdfs2dot script to /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin + rm -rfv /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/bin/__pycache__ + chmod +x /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/ntriples.py + chmod +x /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/notation3.py + chmod +x /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/rdfpipe.py + chmod +x /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/infixowl.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/external_graph_libs.py + for lib in /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/pyRdfa/extras/httpheader.py + sed '1{\@^#!/usr/bin/env python@d}' /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py + touch -r /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py.new + mv /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py.new /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py + for lib in /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/describer.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/pyRdfa/extras/httpheader.py + sed '1{\@^#!/usr/bin/env python@d}' 
/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/pyRdfa/extras/httpheader.py /var/tmp/rpm-tmp.tajTfX: line 51: /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/pyRdfa/extras/httpheader.py.new: No such file or directory + sed -i '1s=^#!/usr/bin/\(python\|env python\).*=#!/usr/bin/python3=' /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/infixowl.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/extras/external_graph_libs.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/ntriples.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/tools/rdfpipe.py /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11/site-packages/rdflib/plugins/parsers/notation3.py + /usr/bin/find-debuginfo -j32 --strict-build-id -m -i --build-id-seed 6.2.0-4.ocs23 --unique-debug-suffix -6.2.0-4.ocs23.noarch --unique-debug-src-base python-rdflib-6.2.0-4.ocs23.noarch -S debugsourcefiles.list /builddir/build/BUILD/rdflib-6.2.0 find: 'debug': No such file or directory + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/OpenCloudOS/brp-ldconfig + /usr/lib/rpm/brp-compress + /usr/lib/rpm/OpenCloudOS/brp-strip-lto /usr/bin/strip + /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip + /usr/lib/rpm/check-rpaths + /usr/lib/rpm/OpenCloudOS/brp-mangle-shebangs + /usr/lib/rpm/OpenCloudOS/brp-python-bytecompile '' 1 0 Bytecompiling .py files below /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/lib/python3.11 using python3.11 + /usr/lib/rpm/OpenCloudOS/brp-python-hardlink Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.WajDPj + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-cc1 -fstack-protector-strong -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/OpenCloudOS/OpenCloudOS-hardened-ld -Wl,--build-id=sha1' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd 
rdflib-6.2.0 + /usr/bin/python3 -m pytest ============================= test session starts ============================== platform linux -- Python 3.11.6, pytest-7.4.0, pluggy-1.3.0 rootdir: /builddir/build/BUILD/rdflib-6.2.0 configfile: pyproject.toml plugins: cov-4.1.0 collected 7093 items / 2 skipped docs/namespaces_and_bindings.rst . [ 0%] docs/rdf_terms.rst . [ 0%] rdflib/__init__.py F [ 0%] rdflib/collection.py .... [ 0%] rdflib/compare.py .. [ 0%] rdflib/container.py . [ 0%] rdflib/events.py . [ 0%] rdflib/graph.py ....... [ 0%] rdflib/parser.py . [ 0%] rdflib/paths.py . [ 0%] rdflib/query.py . [ 0%] rdflib/resource.py . [ 0%] rdflib/term.py ................ [ 0%] rdflib/util.py ...... [ 0%] rdflib/extras/describer.py ...... [ 0%] rdflib/extras/infixowl.py F......... [ 0%] rdflib/namespace/__init__.py ...... [ 0%] rdflib/plugins/parsers/jsonld.py . [ 0%] rdflib/plugins/parsers/notation3.py ... [ 0%] rdflib/plugins/parsers/nquads.py . [ 1%] rdflib/plugins/serializers/jsonld.py . [ 1%] rdflib/plugins/shared/jsonld/util.py . [ 1%] rdflib/plugins/stores/sparqlstore.py . [ 1%] rdflib/tools/csv2rdf.py . [ 1%] rdflib/tools/rdfpipe.py . [ 1%] test/test_having.py ... [ 1%] test/test_mulpath_n3.py . [ 1%] test/test_n3.py ...........s.... [ 1%] test/test_namespacemanager.py ................. [ 1%] test/test_nt_misc.py ...................... [ 1%] test/test_roundtrip.py .................x............................... [ 2%] ........................................................................ [ 3%] ........................................................................ [ 4%] ............................x...x....................................... [ 5%] ........................................................................ [ 6%] ........................................................................ [ 7%] ........................................................................ [ 8%] ........................................................................ [ 9%] ........................................................................ [ 10%] ........................................................................ [ 11%] .................................................x...................... [ 12%] ..x..xx..........x.........x....x....................................... [ 13%] ...................x.................................................... [ 14%] .....................................x.........................x........ [ 15%] ........................................................................ [ 16%] ........................................................................ [ 17%] ........................................................................ [ 18%] ........................................................................ [ 19%] ....................x........................x.......................... [ 20%] ........................................................................ [ 21%] ........................................................................ [ 22%] ....................xx.......x......................x.x................. [ 23%] x............................................................x.....x.... [ 24%] ........x. [ 25%] test/test_skolem_genid.py . [ 25%] test/test_trig.py ........s.. [ 25%] test/test_turtle_quoting.py ............................................ [ 25%] ........................................................................ [ 26%] ........................................................................ 
[ 27%] ........................................................................ [ 28%] ........................................................................ [ 29%] ........................................................................ [ 30%] ........................................................................ [ 31%] ........................................................................ [ 32%] ........................................................................ [ 33%] ................. [ 34%] test/test_turtle_sort_issue613.py .. [ 34%] test/test_typing.py . [ 34%] test/test_util.py ..............................................xxx..... [ 35%] ..................... [ 35%] test/data/suites/trix/test_trix.py ..........x....xxx..xxx.xxx.......... [ 35%] ......x..x.x...................xxxxxxxxx [ 36%] test/jsonld/test_api.py . [ 36%] test/jsonld/test_compaction.py ........... [ 36%] test/jsonld/test_context.py ................ [ 36%] test/jsonld/test_localsuite.py .. [ 36%] test/jsonld/test_named_graphs.py ... [ 36%] test/jsonld/test_onedotone.py .......................................... [ 37%] ........................F............................................... [ 38%] .....................................................FF................. [ 39%] ...................................................................FFFF. [ 40%] .... [ 40%] test/jsonld/test_pythonparse.py . [ 40%] test/jsonld/test_testsuite.py .......................................... [ 41%] ........................................................................ [ 42%] ............. [ 42%] test/jsonld/test_util.py x................. [ 42%] test/test_conjunctivegraph/test_conjunctive_graph.py .... [ 42%] test/test_conjunctivegraph/test_conjunctivegraph_generators.py ........ [ 42%] test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py . [ 42%] .... [ 42%] test/test_dataset/test_dataset.py .s.s.s.s. [ 42%] test/test_dataset/test_dataset_generators.py ........ [ 43%] test/test_extras/test_extras_external_graph_libs.py ss [ 43%] test/test_extras/test_infixowl/test_annotatableterms.py ... [ 43%] test/test_extras/test_infixowl/test_basic.py ...F [ 43%] test/test_extras/test_infixowl/test_booleanclass.py xx..xx.. [ 43%] test/test_extras/test_infixowl/test_class.py ....... [ 43%] test/test_extras/test_infixowl/test_componentterms.py ... [ 43%] test/test_extras/test_infixowl/test_context.py F [ 43%] test/test_extras/test_infixowl/test_cover.py .xX.................. [ 43%] test/test_extras/test_infixowl/test_enumerated_class.py .. [ 43%] test/test_extras/test_infixowl/test_individual.py ..... [ 43%] test/test_extras/test_infixowl/test_logic_structuring.py . [ 43%] test/test_extras/test_infixowl/test_manchester_syntax.py .. [ 43%] test/test_extras/test_infixowl/test_ontology.py .. [ 43%] test/test_extras/test_infixowl/test_property.py . [ 43%] test/test_extras/test_infixowl/test_restriction.py ..x..x.x.x.x...xx.x.x [ 44%] xx [ 44%] test/test_graph/test_aggregate_graphs.py .. [ 44%] test/test_graph/test_batch_add.py .......... [ 44%] test/test_graph/test_canonicalization.py ..........x [ 44%] test/test_graph/test_container.py ................. [ 44%] test/test_graph/test_diff.py ..x..x [ 44%] test/test_graph/test_graph.py ...........F.. [ 45%] test/test_graph/test_graph_cbd.py ... [ 45%] test/test_graph/test_graph_context.py ..........ssssssssss [ 45%] test/test_graph/test_graph_formula.py ..s [ 45%] test/test_graph/test_graph_generators.py ....... [ 45%] test/test_graph/test_graph_http.py ...... 
[ 45%] test/test_graph/test_graph_items.py . [ 45%] test/test_graph/test_graph_operator.py ... [ 45%] test/test_graph/test_graph_store.py ........................... [ 46%] test/test_graph/test_namespace_rebinding.py ....x......... [ 46%] test/test_graph/test_skolemization.py ......... [ 46%] test/test_graph/test_slice.py . [ 46%] test/test_graph/test_variants.py .X...x.x.... [ 46%] test/test_issues/test_issue084.py ........ [ 46%] test/test_issues/test_issue1003.py ......... [ 46%] test/test_issues/test_issue1043.py . [ 46%] test/test_issues/test_issue1141.py ... [ 46%] test/test_issues/test_issue1160.py . [ 46%] test/test_issues/test_issue1404.py . [ 46%] test/test_issues/test_issue1484.py .. [ 46%] test/test_issues/test_issue160.py . [ 47%] test/test_issues/test_issue161.py . [ 47%] test/test_issues/test_issue1808.py . [ 47%] test/test_issues/test_issue184.py . [ 47%] test/test_issues/test_issue1873.py .. [ 47%] test/test_issues/test_issue190.py xx [ 47%] test/test_issues/test_issue1998.py . [ 47%] test/test_issues/test_issue200.py . [ 47%] test/test_issues/test_issue209.py . [ 47%] test/test_issues/test_issue223.py . [ 47%] test/test_issues/test_issue247.py . [ 47%] test/test_issues/test_issue248.py . [ 47%] test/test_issues/test_issue274.py ................................. [ 47%] test/test_issues/test_issue363.py .. [ 47%] test/test_issues/test_issue379.py .. [ 47%] test/test_issues/test_issue381.py ....... [ 47%] test/test_issues/test_issue432.py . [ 47%] test/test_issues/test_issue446.py . [ 47%] test/test_issues/test_issue492.py . [ 47%] test/test_issues/test_issue523.py . [ 47%] test/test_issues/test_issue532.py . [ 47%] test/test_issues/test_issue535.py . [ 47%] test/test_issues/test_issue545.py . [ 47%] test/test_issues/test_issue554.py . [ 47%] test/test_issues/test_issue563.py .. [ 47%] test/test_issues/test_issue579.py . [ 47%] test/test_issues/test_issue604.py . [ 47%] test/test_issues/test_issue655.py . [ 47%] test/test_issues/test_issue715.py . [ 48%] test/test_issues/test_issue733.py .. [ 48%] test/test_issues/test_issue801.py . [ 48%] test/test_issues/test_issue893.py . [ 48%] test/test_issues/test_issue910.py .... [ 48%] test/test_issues/test_issue920.py . [ 48%] test/test_issues/test_issue923.py . [ 48%] test/test_issues/test_issue953.py . [ 48%] test/test_issues/test_issue977.py .. [ 48%] test/test_issues/test_issue980.py . [ 48%] test/test_issues/test_issue_git_200.py . [ 48%] test/test_issues/test_issue_git_336.py . [ 48%] test/test_literal/test_datetime.py ....... [ 48%] test/test_literal/test_duration.py ........ [ 48%] test/test_literal/test_hex_binary.py .. [ 48%] test/test_literal/test_literal.py ...................................... [ 49%] ........................................................................ [ 50%] ........................................................................ [ 51%] .xxxxxxxxxx. [ 51%] test/test_literal/test_normalized_string.py .... [ 51%] test/test_literal/test_term.py ....... [ 51%] test/test_literal/test_tokendatatype.py ..... [ 51%] test/test_literal/test_uriref_literal_comparison.py .......... [ 51%] test/test_literal/test_xmlliterals.py ...... [ 51%] test/test_misc/test_b64_binary.py . [ 51%] test/test_misc/test_bnode_ncname.py . [ 51%] test/test_misc/test_collection.py . [ 51%] test/test_misc/test_conventions.py . [ 51%] test/test_misc/test_create_input_source.py .. [ 51%] test/test_misc/test_events.py . [ 51%] test/test_misc/test_parse_file_guess_format.py ..... [ 51%] test/test_misc/test_path_div_future.py . 
[ 51%] test/test_misc/test_plugins.py .. [ 51%] test/test_misc/test_prefix_types.py . [ 51%] test/test_misc/test_resource.py . [ 51%] test/test_namespace/test_definednamespace.py ........................... [ 52%] ........................................................................ [ 53%] ......................................... [ 53%] test/test_namespace/test_definednamespace_creator.py ... [ 53%] test/test_namespace/test_definednamespace_dir.py . [ 53%] test/test_namespace/test_namespace.py ........x......................... [ 54%] ............ [ 54%] test/test_parsers/test_broken_parse_data_from_jena.py xx.xxxx. [ 54%] test/test_parsers/test_empty_xml_base.py .. [ 54%] test/test_parsers/test_n3parse_of_rdf_lists.py .. [ 54%] test/test_parsers/test_nquads.py ........... [ 54%] test/test_parsers/test_parser.py .... [ 54%] test/test_parsers/test_parser_helpers.py . [ 54%] test/test_parsers/test_parser_hext.py ...... [ 55%] test/test_parsers/test_parser_reads_from_pathlike_object.py . [ 55%] test/test_parsers/test_parser_structure.py . [ 55%] test/test_parsers/test_parser_turtlelike.py ............................ [ 55%] ...........................................................x.x.......... [ 56%] ........................................................................ [ 57%] ........................................ [ 58%] test/test_parsers/test_swap_n3.py .....x..xxxxx...x..xx.xxx.x... [ 58%] test/test_parsers/test_trix_parse.py ... [ 58%] test/test_serializers/test_finalnewline.py . [ 58%] test/test_serializers/test_prettyxml.py ....... [ 58%] test/test_serializers/test_serializer.py .................x.x.x.x.x.x.x. [ 59%] x.x.x....................xxxxxxxxxxxxxxxxxxxx........................... [ 60%] ...........................................x.x.x.x.x.x.x.x.x.x.......... [ 61%] ........................................ [ 61%] test/test_serializers/test_serializer_hext.py ...... [ 61%] test/test_serializers/test_serializer_longturtle.py . [ 61%] test/test_serializers/test_serializer_n3.py .... [ 61%] test/test_serializers/test_serializer_trix.py .. [ 61%] test/test_serializers/test_serializer_turtle.py ..... [ 61%] test/test_serializers/test_serializer_xml.py ..... [ 62%] test/test_serializers/test_xmlwriter_qname.py ..... [ 62%] test/test_sparql/test_agg_distinct.py .... [ 62%] test/test_sparql/test_agg_undef.py ....... [ 62%] test/test_sparql/test_construct_bindings.py . [ 62%] test/test_sparql/test_datetime_processing.py .... [ 62%] test/test_sparql/test_evaluate_bind.py ... [ 62%] test/test_sparql/test_expressions.py ...... [ 62%] test/test_sparql/test_forward_slash_escapes.py .x.x.x [ 62%] test/test_sparql/test_initbindings.py .......................... [ 62%] test/test_sparql/test_operators.py ... [ 62%] test/test_sparql/test_prefixed_name.py ................................. [ 63%] ..................... [ 63%] test/test_sparql/test_prepare.py .F [ 63%] test/test_sparql/test_result.py ............xxxx........xx............ [ 64%] test/test_sparql/test_service.py FFFFFFFFF.. [ 64%] test/test_sparql/test_sparql.py .............x...x..................... [ 64%] test/test_sparql/test_sparql_parser.py .. [ 64%] test/test_sparql/test_translate_algebra.py ............................. [ 65%] ..x.........x [ 65%] test/test_sparql/test_tsvresults.py . [ 65%] test/test_store/test_namespace_binding.py .............................. [ 66%] .......................................x.x [ 66%] test/test_store/test_nodepickler.py ... [ 66%] test/test_store/test_store.py ... 
[ 66%] test/test_store/test_store_auditable.py ........................ [ 67%] test/test_store/test_store_berkeleydb.py ssssss [ 67%] test/test_store/test_store_memorystore.py .. [ 67%] test/test_store/test_store_sparqlstore.py ......... [ 67%] test/test_store/test_store_sparqlupdatestore_mock.py . [ 67%] test/test_store/test_store_triple_store.py ... [ 67%] test/test_tools/test_csv2rdf.py .. [ 67%] test/test_w3c_spec/test_n3_w3c.py ......x...................x........... [ 67%] ...........xxxxxxxxxx........xxxxxxxxxxxxx.............................. [ 68%] .x.x.xx.........xxx..x.x................................................ [ 69%] ..............................................................x......... [ 70%] ................x................. [ 71%] test/test_w3c_spec/test_nquads_w3c.py .................................. [ 71%] ..xxxx.........xxs................................. [ 72%] test/test_w3c_spec/test_nt_w3c.py ...................xxxx.........xxx... [ 73%] .............................x [ 73%] test/test_w3c_spec/test_rdfxml_w3c.py .................................. [ 74%] ........................................................................ [ 75%] ............................................................... [ 75%] test/test_w3c_spec/test_sparql10_w3c.py ..................xx............ [ 76%] ...........xx..............................x............................ [ 77%] ....................................................x................... [ 78%] ........................................................................ [ 79%] x.....................s.........................x......x..x............. [ 80%] .................x............x................x........................ [ 81%] ........................x..............xxx...xxxxxxxx... [ 82%] test/test_w3c_spec/test_sparql11_w3c.py ...............x.xxxx........... [ 82%] .x...................................................................... [ 83%] .............................x...x....x...xxxxxxx.xxx.....xxxxx......... [ 84%] ..xx......xxxx..xxxxxxxxxxxxxxxx.....x.x................................ [ 85%] ..............................xx........................................ [ 86%] ...........xsssssss..................................................... [ 87%] .xxx.............xxx..x................................................. [ 88%] .................xx.....xxx.x.................ssssssssssssssssssssssssss [ 89%] sssssssssss [ 89%] test/test_w3c_spec/test_sparql_rdflib.py .........................xxxx [ 90%] test/test_w3c_spec/test_trig_w3c.py ....xxxx............................ [ 90%] ........................................................................ [ 91%] ....x................................................................... [ 92%] ........xxxxx..........xxxx...xx..xx.x.........x.xxxx..xxx........xx.... [ 93%] ..............................xxxx.x.......xxxx..............x.....x.... [ 94%] ........... [ 95%] test/test_w3c_spec/test_turtle_w3c.py .................................. [ 95%] ...................................................................x.... [ 96%] .......................................................................x [ 97%] xxxx...........xxxx...xx..xx.x.............xxxx..xxx........xx.......... [ 98%] ........................xxxx.x........... [ 99%] test/utils/helper.py . [ 99%] test/utils/test/test_httpservermock.py .. [ 99%] test/utils/test/test_iri.py ......... [ 99%] test/utils/test/test_result.py ........... 
[ 99%] test/utils/test/test_testutils.py ................................. [100%] =================================== FAILURES =================================== _______________________________ [doctest] rdflib _______________________________ 008 009 The primary interface `rdflib` exposes to work with RDF is 010 `rdflib.graph.Graph`. 011 012 A tiny example: 013 014 >>> from rdflib import Graph, URIRef, Literal 015 016 >>> g = Graph() 017 >>> result = g.parse("http://www.w3.org/2000/10/swap/test/meet/blue.rdf") UNEXPECTED EXCEPTION: URLError(gaierror(-3, 'Temporary failure in name resolution')) Traceback (most recent call last): File "/usr/lib64/python3.11/urllib/request.py", line 1348, in do_open h.request(req.get_method(), req.selector, req.data, headers, File "/usr/lib64/python3.11/http/client.py", line 1286, in request self._send_request(method, url, body, headers, encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1332, in _send_request self.endheaders(body, encode_chunked=encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1281, in endheaders self._send_output(message_body, encode_chunked=encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1041, in _send_output self.send(msg) File "/usr/lib64/python3.11/http/client.py", line 979, in send self.connect() File "/usr/lib64/python3.11/http/client.py", line 945, in connect self.sock = self._create_connection( ^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/socket.py", line 827, in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/socket.py", line 962, in getaddrinfo for res in _socket.getaddrinfo(host, port, family, type, proto, flags): ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ socket.gaierror: [Errno -3] Temporary failure in name resolution During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib64/python3.11/doctest.py", line 1351, in __run exec(compile(example.source, filename, "single", File "", line 1, in File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/graph.py", line 1306, in parse source = create_input_source( ^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 404, in create_input_source ) = _create_input_source_from_location( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 458, in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 282, in __init__ response: HTTPResponse = _urlopen(req) ^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 270, in _urlopen return urlopen(req) ^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 216, in urlopen return opener.open(url, data, timeout) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 519, in open response = self._open(req, data) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 536, in _open result = self._call_chain(self.handle_open, protocol, protocol + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 496, in _call_chain result = func(*args) ^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 1377, in http_open return self.do_open(http.client.HTTPConnection, req) 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 1351, in do_open raise URLError(err) urllib.error.URLError: /builddir/build/BUILD/rdflib-6.2.0/rdflib/__init__.py:17: UnexpectedException _______________________ [doctest] rdflib.extras.infixowl _______________________ 138 >>> a.subClassOf = [exNs.MusicalWork] 139 140 We can then access the rdfs:subClassOf relationships 141 142 >>> print(list(a.subClassOf)) 143 [Class: ex:MusicalWork ] 144 145 This can also be used against already populated graphs: 146 147 >>> owlGraph = Graph().parse(str(OWL)) UNEXPECTED EXCEPTION: URLError(gaierror(-3, 'Temporary failure in name resolution')) Traceback (most recent call last): File "/usr/lib64/python3.11/urllib/request.py", line 1348, in do_open h.request(req.get_method(), req.selector, req.data, headers, File "/usr/lib64/python3.11/http/client.py", line 1286, in request self._send_request(method, url, body, headers, encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1332, in _send_request self.endheaders(body, encode_chunked=encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1281, in endheaders self._send_output(message_body, encode_chunked=encode_chunked) File "/usr/lib64/python3.11/http/client.py", line 1041, in _send_output self.send(msg) File "/usr/lib64/python3.11/http/client.py", line 979, in send self.connect() File "/usr/lib64/python3.11/http/client.py", line 945, in connect self.sock = self._create_connection( ^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/socket.py", line 827, in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/socket.py", line 962, in getaddrinfo for res in _socket.getaddrinfo(host, port, family, type, proto, flags): ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ socket.gaierror: [Errno -3] Temporary failure in name resolution During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/usr/lib64/python3.11/doctest.py", line 1351, in __run exec(compile(example.source, filename, "single", File "", line 1, in File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/graph.py", line 1306, in parse source = create_input_source( ^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 404, in create_input_source ) = _create_input_source_from_location( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 458, in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 282, in __init__ response: HTTPResponse = _urlopen(req) ^^^^^^^^^^^^^ File "/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py", line 270, in _urlopen return urlopen(req) ^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 216, in urlopen return opener.open(url, data, timeout) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 519, in open response = self._open(req, data) ^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 536, in _open result = self._call_chain(self.handle_open, protocol, protocol + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 496, in _call_chain result = func(*args) ^^^^^^^^^^^ File "/usr/lib64/python3.11/urllib/request.py", line 
1377, in http_open return self.do_open(http.client.HTTPConnection, req)
File "/usr/lib64/python3.11/urllib/request.py", line 1351, in do_open
    raise URLError(err)
urllib.error.URLError
/builddir/build/BUILD/rdflib-6.2.0/rdflib/extras/infixowl.py:147: UnexpectedException
_ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tc034-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-c034-toRdf/c034-in.jsonld-toRdf/c034-out.nq-False-options66] _
host = 'w3c.github.io', port = 443, family = 0, proto = 0, flags = 0
/usr/lib64/python3.11/socket.py:962: in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
E   socket.gaierror: [Errno -3] Temporary failure in name resolution
During handling of the above exception, another exception occurred:
rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tc034')
func = do_test_parser, suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf'
num = 'c034', inputpath = 'toRdf/c034-in.jsonld', expectedpath = 'toRdf/c034-out.nq', context = False
options = {'specVersion': 'json-ld-1.1'}
/builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: in test_suite
    func(suite_base, cat, num, inputpath, expectedpath, context, options)
/builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:169: in parse
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:221: in _add_to_graph
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:305: in _key_to_graph
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:134: in get_context_for_term
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:119: in _subcontext
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:375: in load
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:404: in _prep_sources
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context
/builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json
/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source
/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location
/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__
/builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen
/usr/lib64/python3.11/urllib/request.py:216: in urlopen
/usr/lib64/python3.11/urllib/request.py:519: in open
/usr/lib64/python3.11/urllib/request.py:536: in _open
/usr/lib64/python3.11/urllib/request.py:496: in _call_chain
/usr/lib64/python3.11/urllib/request.py:1391: in https_open
/usr/lib64/python3.11/urllib/request.py:1351: in do_open
    raise URLError(err)
E   urllib.error.URLError
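Every test_suite failure in this run fails the same way: the build chroot has no working name resolution, so the JSON-LD 1.1 suite's attempts to fetch remote contexts from w3c.github.io die in getaddrinfo before any connection is opened, and urllib then wraps the gaierror in a URLError. A minimal sketch of that failure mode, assuming it is run on a host without DNS (the URL is purely illustrative):

import socket
import urllib.error
import urllib.request

try:
    # Name resolution of w3c.github.io fails with gaierror(-3) in an offline sandbox;
    # urllib.request's do_open converts the OSError into a URLError.
    urllib.request.urlopen("https://w3c.github.io/json-ld-api/tests/", timeout=5)
except urllib.error.URLError as exc:
    # exc.reason carries the original error raised by socket.getaddrinfo.
    print(type(exc.reason).__name__, exc.reason)
    assert isinstance(exc.reason, socket.gaierror)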
headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#te126') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'e126', inputpath = 'toRdf/e126-in.jsonld' expectedpath = 'toRdf/e126-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:154: in parse context.load(local_context, context.base) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:375: in load self._prep_sources(base, source, sources, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:404: in _prep_sources new_ctx = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. 
""" host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError _ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#te127-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-e127-toRdf/e127-in.jsonld-toRdf/e127-out.nq-False-options168] _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#te127') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'e127', inputpath = 'toRdf/e127-in.jsonld' expectedpath = 'toRdf/e127-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:154: in parse context.load(local_context, context.base) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:375: in load self._prep_sources(base, source, sources, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:404: in _prep_sources new_ctx = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. 
""" host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError _ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso05-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so05-toRdf/so05-in.jsonld-toRdf/so05-out.nq-False-options253] _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso05') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'so05', inputpath = 'toRdf/so05-in.jsonld' expectedpath = 'toRdf/so05-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:169: in parse self._add_to_graph(dataset, graph, context, node, topcontext) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:184: in _add_to_graph context = context.get_context_for_type(node) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:151: in get_context_for_type subcontext = self.subcontext(typeterm.context, propagate=False) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:104: in subcontext return parent._subcontext(source, propagate) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:119: in _subcontext ctx.load(source) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:380: in load self._read_source(source, source_url, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:453: in _read_source imported = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError _ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso08-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so08-toRdf/so08-in.jsonld-toRdf/so08-out.nq-False-options254] _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso08') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'so08', inputpath = 'toRdf/so08-in.jsonld' expectedpath = 'toRdf/so08-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:154: in parse context.load(local_context, context.base) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:380: in load self._read_source(source, source_url, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:453: in _read_source imported = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. 
""" host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError _ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso09-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so09-toRdf/so09-in.jsonld-toRdf/so09-out.nq-False-options255] _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso09') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'so09', inputpath = 'toRdf/so09-in.jsonld' expectedpath = 'toRdf/so09-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:154: in parse context.load(local_context, context.base) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:380: in load self._read_source(source, source_url, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:453: in _read_source imported = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. 
""" host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError _ test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso11-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so11-toRdf/so11-in.jsonld-toRdf/so11-out.nq-False-options256] _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'w3c.github.io', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: rdf_test_uri = rdflib.term.URIRef('https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso11') func = suite_base = 'https://w3c.github.io/json-ld-api/tests/', cat = 'toRdf' num = 'so11', inputpath = 'toRdf/so11-in.jsonld' expectedpath = 'toRdf/so11-out.nq', context = False options = {'specVersion': 'json-ld-1.1'} @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), ) def test_suite( rdf_test_uri: URIRef, func, suite_base, cat, num, inputpath, expectedpath, context, options, ): > func(suite_base, cat, num, inputpath, expectedpath, context, options) /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/test_onedotone.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /builddir/build/BUILD/rdflib-6.2.0/test/jsonld/runner.py:142: in do_test_parser to_rdf( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:134: in to_rdf return parser.parse(data, context, dataset) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/parsers/jsonld.py:154: in parse context.load(local_context, context.base) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:380: in load self._read_source(source, source_url, referenced_contexts) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:453: in _read_source imported = self._fetch_context( /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/context.py:440: in _fetch_context source = source_to_json(source_url) /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/shared/jsonld/util.py:35: in source_to_json source = create_input_source(source, format="json-ld") /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) /builddir/build/BUILD/rdflib-6.2.0/rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'w3c.github.io', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. 
""" host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ___________________________ test_infix_owl_example1 ____________________________ self = http_class = req = , http_conn_args = {} host = 'www.w3.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
___________________________ test_infix_owl_example1 ____________________________
host = 'www.w3.org', port = 80, family = 0, proto = 0, flags = 0
/usr/lib64/python3.11/socket.py:962: in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
E   socket.gaierror: [Errno -3] Temporary failure in name resolution
During handling of the above exception, another exception occurred:
    def test_infix_owl_example1():
        g = Graph(identifier=context0)
        g.bind("ex", EXNS)
        Individual.factoryGraph = g
        classD = Class(EXNS.D)  # noqa: N806
        anonClass = EXNS.someProp << some >> classD  # noqa: N806
        assert str(anonClass) == "( ex:someProp SOME ex:D )"
        a = Class(EXNS.Opera, graph=g)
        # Now we can assert rdfs:subClassOf and owl:equivalentClass relationships
        # (in the underlying graph) with other classes using the 'subClassOf'
        # and 'equivalentClass' descriptors which can be set to a list
        # of objects for the corresponding predicates.
        a.subClassOf = [EXNS.MusicalWork]
        # We can then access the rdfs:subClassOf relationships
        assert str(list(a.subClassOf)) == "[Class: ex:MusicalWork ]"
        # [Class: ex:MusicalWork ]
        # This can also be used against already populated graphs:
>       owlgraph = Graph().parse(str(OWL))
test/test_extras/test_infixowl/test_basic.py:109:
rdflib/graph.py:1306: in parse
rdflib/parser.py:404: in create_input_source
rdflib/parser.py:458: in _create_input_source_from_location
rdflib/parser.py:282: in __init__
rdflib/parser.py:270: in _urlopen
/usr/lib64/python3.11/urllib/request.py:216: in urlopen
/usr/lib64/python3.11/urllib/request.py:519: in open
/usr/lib64/python3.11/urllib/request.py:536: in _open
/usr/lib64/python3.11/urllib/request.py:496: in _call_chain
/usr/lib64/python3.11/urllib/request.py:1377: in http_open
/usr/lib64/python3.11/urllib/request.py:1351: in do_open
    raise URLError(err)
E   urllib.error.URLError
_________________________________ test_context _________________________________
host = 'www.w3.org', port = 80, family = 0, proto = 0, flags = 0
/usr/lib64/python3.11/urllib/request.py:1348: in do_open
    h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding'))
/usr/lib64/python3.11/http/client.py:1286: in request
/usr/lib64/python3.11/http/client.py:979: in send
/usr/lib64/python3.11/http/client.py:945: in connect
/usr/lib64/python3.11/socket.py:827: in create_connection
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: graph = )> def test_context(graph): # Now we have an empty graph, we can construct OWL classes in it # using the Python classes defined in this module a = Class(EXNS.Opera, graph=graph) # Now we can assert rdfs:subClassOf and owl:equivalentClass relationships # (in the underlying graph) with other classes using the 'subClassOf' # and 'equivalentClass' descriptors which can be set to a list # of objects for the corresponding predicates. a.subClassOf = [EXNS.MusicalWork] # We can then access the rdfs:subClassOf relationships assert str(list(a.subClassOf)) == "[Class: ex:MusicalWork ]" # This can also be used against already populated graphs: > owlgraph = Graph().parse(str(OWL)) test/test_extras/test_infixowl/test_context.py:51: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ rdflib/graph.py:1306: in parse source = create_input_source( rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1377: in http_open return self.do_open(http.client.HTTPConnection, req) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = , http_conn_args = {} host = 'www.w3.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ______________________ test_guess_format_for_parse[None] _______________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'www.google.com' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'www.google.com', port = 443, family = 0 type = , proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. 
The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: make_graph = .make_graph at 0xffe9d8d1c0> def test_guess_format_for_parse(make_graph: GraphFactory): graph = make_graph() # files with pytest.raises(ParserError): graph.parse(__file__) # here we are trying to parse a Python file!! # .nt can be parsed by Turtle Parser graph.parse(os.path.join(TEST_DATA_DIR, "suites", "nt_misc", "anons-01.nt")) # RDF/XML graph.parse( os.path.join( TEST_DATA_DIR, "suites", "w3c", "rdf-xml", "datatypes", "test001.rdf" ) ) # XML # bad filename but set format graph.parse( os.path.join(TEST_DATA_DIR, "w3c-rdfxml-test001.borked"), format="xml", ) with pytest.raises(ParserError): graph.parse(data="rubbish") # Turtle - default graph.parse( data=" ." ) # Turtle - format given graph.parse( data=" .", format="turtle", ) # RDF/XML - format given rdf = """ """ graph.parse(data=rdf, format="xml") # URI # only getting HTML with pytest.raises(PluginException): > graph.parse(location="https://www.google.com") test/test_graph/test_graph.py:338: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ rdflib/graph.py:1306: in parse source = create_input_source( rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( rdflib/parser.py:458: in _create_input_source_from_location input_source = URLInputSource(absolute_location, format) rdflib/parser.py:282: in __init__ response: HTTPResponse = _urlopen(req) rdflib/parser.py:270: in _urlopen return urlopen(req) /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None} host = 'www.google.com' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. 
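Every network-touching failure in this log bottoms out in the same place: socket.getaddrinfo() raises gaierror [Errno -3] because the build root has no usable resolver, and urllib's do_open() wraps that OSError in URLError. A minimal stdlib reproduction, assuming it is run in an environment with no DNS, looks like this:

    import socket
    from urllib.error import URLError

    try:
        socket.getaddrinfo("www.w3.org", 80, 0, socket.SOCK_STREAM)
    except socket.gaierror as err:
        # [Errno -3] Temporary failure in name resolution
        raise URLError(err) from err   # mirrors what urllib.request's do_open() does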
# So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ______________________________ test_prepare_query ______________________________ def test_prepare_query(): q = prepareQuery( "SELECT ?name WHERE { ?person foaf:knows/foaf:name ?name . }", initNs={"foaf": FOAF}, ) g = Graph() > g.parse( location=os.path.join( os.path.dirname(__file__), "..", "..", "examples", "foaf.n3" ), format="n3", ) test/test_sparql/test_prepare.py:34: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ rdflib/graph.py:1306: in parse source = create_input_source( rdflib/parser.py:404: in create_input_source ) = _create_input_source_from_location( _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ file = None, format = 'n3', input_source = None location = '/builddir/build/BUILD/rdflib-6.2.0/test/test_sparql/../../examples/foaf.n3' def _create_input_source_from_location( file: Optional[Union[BinaryIO, TextIO]], format: Optional[str], input_source: Optional[InputSource], location: str, ) -> Tuple[URIRef, bool, Optional[Union[BinaryIO, TextIO]], Optional[InputSource]]: # Fix for Windows problem https://github.com/RDFLib/rdflib/issues/145 and # https://github.com/RDFLib/rdflib/issues/1430 # NOTE: using pathlib.Path.exists on a URL fails on windows as it is not a # valid path. However os.path.exists() returns false for a URL on windows # which is why it is being used instead. if os.path.exists(location): location = pathlib.Path(location).absolute().as_uri() base = pathlib.Path.cwd().as_uri() absolute_location = URIRef(rdflib.util._iri2uri(location), base=base) if absolute_location.startswith("file:///"): filename = url2pathname(absolute_location.replace("file:///", "/")) > file = open(filename, "rb") E FileNotFoundError: [Errno 2] No such file or directory: '/builddir/build/BUILD/rdflib-6.2.0/examples/foaf.n3' rdflib/parser.py:456: FileNotFoundError _________________________________ test_service _________________________________ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. 
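test_prepare_query above fails differently: not DNS, but a missing file, because the examples/foaf.n3 it reads is not present under the unpacked source tree used for this build. A sketch of the same prepareQuery flow fed with inline Turtle instead of that file is shown below; the data and URIs are invented for illustration.

    from rdflib import Graph, URIRef
    from rdflib.namespace import FOAF
    from rdflib.plugins.sparql import prepareQuery

    q = prepareQuery(
        "SELECT ?name WHERE { ?person foaf:knows/foaf:name ?name . }",
        initNs={"foaf": FOAF},
    )

    g = Graph()
    g.parse(
        data="""
            @prefix foaf: <http://xmlns.com/foaf/0.1/> .
            <http://example.org/alice> foaf:knows <http://example.org/bob> .
            <http://example.org/bob> foaf:name "Bob" .
        """,
        format="turtle",
    )

    for row in g.query(q, initBindings={"person": URIRef("http://example.org/alice")}):
        print(row.name)   # -> Bob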
# So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 80, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service(): g = Graph() q = """select ?sameAs ?dbpComment where { service { select ?dbpHypernym ?dbpComment where { ?sameAs ; ?dbpComment . 
} } } limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:42: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1377: in http_open return self.do_open(http.client.HTTPConnection, req) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying ____________________________ test_service_with_bind ____________________________ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
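The test_service* failures share one shape: the query contains a SERVICE block, rdflib evaluates it by sending the inner query over HTTP to the remote SPARQL endpoint (a DBpedia.org URL whose exact path is stripped from this log), and query_with_retry backs off repeatedly before re-raising the URLError. A sketch of such a query against a placeholder endpoint, assuming some reachable SPARQL endpoint exists at that URL:

    from rdflib import Graph

    # The endpoint URL is a placeholder; the real tests point at DBpedia.org.
    q = """
    SELECT ?s ?p ?o WHERE {
      SERVICE <http://localhost:3030/ds/sparql> {
        SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 2
      }
    }
    """

    # Evaluating the SERVICE block issues an HTTP request to the endpoint,
    # which is exactly the step that fails in this offline build root.
    for row in Graph().query(q):
        print(row)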
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 80, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_bind(): g = Graph() q = """select ?sameAs ?dbpComment ?subject where { bind ( as ?subject) service { select ?sameAs ?dbpComment ?subject where { ?sameAs ; ?dbpComment ; ?subject . 
} } } limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:66: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:123: in evalLazyJoin for b in evalPart(c, join.p2): rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1377: in http_open return self.do_open(http.client.HTTPConnection, req) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying ___________________________ test_service_with_values ___________________________ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 80, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_values(): g = Graph() q = """select ?sameAs ?dbpComment ?subject where { values (?sameAs ?subject) {( ) ( )} service { select ?sameAs ?dbpComment ?subject where { ?sameAs ; ?dbpComment ; ?subject . 
} } } limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:88: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:123: in evalLazyJoin for b in evalPart(c, join.p2): rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1377: in http_open return self.do_open(http.client.HTTPConnection, req) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = , http_conn_args = {} host = 'DBpedia.org', h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying ______________________ test_service_with_implicit_select _______________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_implicit_select(): g = Graph() q = """select ?s ?p ?o where { service { values (?s ?p ?o) {( 1) ( 2)} }} limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying _________________ test_service_with_implicit_select_and_prefix _________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
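The "implicit select" variants exercise a SERVICE block that contains only a graph pattern, with no inner SELECT; the captured warnings near the end of this log show rdflib forwarding that pattern to the endpoint rewritten as the query's prefixes plus SELECT REDUCED * WHERE { ... }. The two spellings below are equivalent from the endpoint's point of view; the endpoint URL and URNs are placeholders.

    # Explicit inner SELECT, as in test_service / test_service_with_bind:
    explicit = """
    SELECT ?s ?p ?o WHERE {
      SERVICE <http://localhost:3030/ds/sparql> {
        SELECT ?s ?p ?o WHERE { VALUES (?s ?p ?o) { (<urn:ex:a> <urn:ex:b> 1) } }
      }
    } LIMIT 2
    """

    # Implicit form, as in the test_service_with_implicit_select* tests;
    # rdflib wraps the bare pattern in SELECT REDUCED * before sending it.
    implicit = """
    SELECT ?s ?p ?o WHERE {
      SERVICE <http://localhost:3030/ds/sparql> {
        VALUES (?s ?p ?o) { (<urn:ex:a> <urn:ex:b> 1) }
      }
    } LIMIT 2
    """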
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_implicit_select_and_prefix(): g = Graph() q = """prefix ex: select ?s ?p ?o where { service { values (?s ?p ?o) {(ex:a ex:b 1) ( 2)} }} limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:121: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying __________________ test_service_with_implicit_select_and_base __________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. 
addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_implicit_select_and_base(): g = Graph() q = """base select ?s ?p ?o where { service { values (?s ?p ?o) {( 1) ( 2)} }} limit 2""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:138: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying ------------------------------ Captured log call ------------------------------- 2024-07-27T18:10:55.237 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.238 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.412 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.413 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.636 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.636 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.937 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:55.937 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:56.353 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:56.354 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:56.941 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:56.941 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:57.783 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 
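The captured stdout shows the test helper backing off geometrically (0.10 s, 0.15 s, 0.23 s, ... 2.56 s, roughly a factor of 1.5 per attempt) before re-raising the URLError. The real helper is query_with_retry in test/utils/helper.py; the sketch below is only an illustration of that pattern, and the retry count, initial wait, and growth factor are assumptions chosen to match the printed output:

    import time
    from urllib.error import URLError

    def query_with_retry(graph, query, retries=10, wait=0.1, factor=1.5):
        """Sketch: retry a SPARQL query with exponential backoff on network errors."""
        for attempt in range(retries):
            try:
                result = graph.query(query)
                result.bindings  # force evaluation so lazy network errors surface now
                return result
            except URLError:
                if attempt == retries - 1:
                    raise
                print(f"Network error during query, waiting for {wait:.2f}s and retrying")
                time.sleep(wait)
                wait *= factor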
2024-07-27T18:10:57.783 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:59.003 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:10:59.004 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:11:00.793 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:11:00.794 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:11:03.436 WARNING rdflib.term term.py:277:__new__ BASE does not look like a valid URI, trying to serialize this will break. 2024-07-27T18:11:03.437 WARNING rdflib.term term.py:277:__new__ BASE PREFIX xml: PREFIX xsd: PREFIX rdfs: PREFIX rdf: PREFIX owl: SELECT REDUCED * WHERE { values (?s ?p ?o) {( 1) ( 2)} } does not look like a valid URI, trying to serialize this will break. ________________ test_service_with_implicit_select_and_allcaps _________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'dbpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'dbpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_with_implicit_select_and_allcaps(): g = Graph() q = """SELECT ?s WHERE { SERVICE { ?s ?sameAs . 
} } LIMIT 3""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:155: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'dbpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying _____________________________ test_simple_not_null _____________________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
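All of these failures follow the same pattern: rdflib federates the SERVICE block out over HTTP (evalServiceQuery -> urlopen), so the query only succeeds if the remote endpoint is reachable. For reference, a self-contained SERVICE query looks like the following; the localhost URL is an assumption for illustration, whereas the failing tests point at DBpedia's public endpoint:

    from rdflib import Graph

    g = Graph()
    q = """
    SELECT ?s WHERE {
        SERVICE <http://localhost:3030/ds/sparql> {   # hypothetical endpoint
            ?s ?p ?o .
        }
    } LIMIT 3
    """
    # Needs a running SPARQL endpoint at the SERVICE URL; in the isolated
    # mock chroot this raises URLError exactly like the tests above.
    for row in g.query(q):
        print(row.s)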
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'DBpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_simple_not_null(): """Test service returns simple literals not as NULL. 
Issue: https://github.com/RDFLib/rdflib/issues/1278 """ g = Graph() q = """SELECT ?s ?p ?o WHERE { SERVICE { VALUES (?s ?p ?o) {( "c")} } }""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:181: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:338: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'DBpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying ___________________________ test_service_node_types ____________________________ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'dbpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: > h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) /usr/lib64/python3.11/urllib/request.py:1348: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.11/http/client.py:1286: in request self._send_request(method, url, body, headers, encode_chunked) /usr/lib64/python3.11/http/client.py:1332: in _send_request self.endheaders(body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1281: in endheaders self._send_output(message_body, encode_chunked=encode_chunked) /usr/lib64/python3.11/http/client.py:1041: in _send_output self.send(msg) /usr/lib64/python3.11/http/client.py:979: in send self.connect() /usr/lib64/python3.11/http/client.py:1451: in connect super().connect() /usr/lib64/python3.11/http/client.py:945: in connect self.sock = self._create_connection( /usr/lib64/python3.11/socket.py:827: in create_connection for res in getaddrinfo(host, port, 0, SOCK_STREAM): _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ host = 'dbpedia.org', port = 443, family = 0, type = proto = 0, flags = 0 def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. Translate the host/port argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. host is a domain name, a string representation of an IPv4/v6 address or None. port is a string service name such as 'http', a numeric port number or None. By passing None as the value of host and port, you can pass NULL to the underlying C API. The family, type and proto arguments can be optionally specified in order to narrow the list of addresses returned. Passing zero as a value for each of these arguments selects the full range of results. """ # We override this function since we want to translate the numeric family # and socket type values to enum constants. addrlist = [] > for res in _socket.getaddrinfo(host, port, family, type, proto, flags): E socket.gaierror: [Errno -3] Temporary failure in name resolution /usr/lib64/python3.11/socket.py:962: gaierror During handling of the above exception, another exception occurred: def test_service_node_types(): """Test if SERVICE properly returns different types of nodes: - URI; - Simple Literal; - Literal with datatype ; - Literal with language tag . 
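The docstring just quoted lists the node kinds test_service_node_types expects back from the endpoint: a URI, a simple literal, a literal with a datatype, and a literal with a language tag; test_simple_not_null (issue 1278) checks the simple-literal case specifically, i.e. that it comes back as a Literal rather than NULL. For reference, the corresponding rdflib constructors are shown below (a plain illustration, not code from the test module):

    from rdflib import URIRef, Literal
    from rdflib.namespace import XSD

    uri    = URIRef("http://example.org/resource")            # URI node
    simple = Literal("Simple Literal")                         # simple literal (no datatype, no language)
    typed  = Literal("String Literal", datatype=XSD.string)    # literal with datatype
    tagged = Literal("String Language", lang="en")             # literal with language tag

    assert simple.datatype is None and simple.language is None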
""" g = Graph() q = """ SELECT ?o WHERE { SERVICE { VALUES (?s ?p ?o) { ( ) ( "Simple Literal") ( "String Literal"^^xsd:string) ( "String Language"@en) ( "String Language"@en) } } FILTER( ?o IN (, "Simple Literal", "String Literal"^^xsd:string, "String Language"@en) ) }""" > results = helper.query_with_retry(g, q) test/test_sparql/test_service.py:208: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test/utils/helper.py:47: in query_with_retry raise e test/utils/helper.py:43: in query_with_retry result.bindings # access bindings to ensure no lazy loading rdflib/query.py:184: in bindings self._bindings += list(self._genbindings) rdflib/plugins/sparql/evaluate.py:550: in return (row.project(project.PV) for row in res) rdflib/plugins/sparql/evaluate.py:185: in evalFilter for c in evalPart(ctx, part.p): rdflib/plugins/sparql/evaluate.py:342: in evalServiceQuery response = urlopen( /usr/lib64/python3.11/urllib/request.py:216: in urlopen return opener.open(url, data, timeout) /usr/lib64/python3.11/urllib/request.py:519: in open response = self._open(req, data) /usr/lib64/python3.11/urllib/request.py:536: in _open result = self._call_chain(self.handle_open, protocol, protocol + /usr/lib64/python3.11/urllib/request.py:496: in _call_chain result = func(*args) /usr/lib64/python3.11/urllib/request.py:1391: in https_open return self.do_open(http.client.HTTPSConnection, req, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = http_class = req = http_conn_args = {'check_hostname': None, 'context': None}, host = 'dbpedia.org' h = def do_open(self, http_class, req, **http_conn_args): """Return an HTTPResponse object for the request, using http_class. http_class must implement the HTTPConnection API from http.client. """ host = req.host if not host: raise URLError('no host given') # will parse host:port h = http_class(host, timeout=req.timeout, **http_conn_args) h.set_debuglevel(self._debuglevel) headers = dict(req.unredirected_hdrs) headers.update({k: v for k, v in req.headers.items() if k not in headers}) # TODO(jhylton): Should this be redesigned to handle # persistent connections? # We want to make an HTTP/1.1 request, but the addinfourl # class isn't prepared to deal with a persistent connection. # It will try to read all remaining data from the socket, # which will block while the server waits for the next request. # So make sure the connection gets closed after the (only) # request. headers["Connection"] = "close" headers = {name.title(): val for name, val in headers.items()} if req._tunnel_host: tunnel_headers = {} proxy_auth_hdr = "Proxy-Authorization" if proxy_auth_hdr in headers: tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] # Proxy-Authorization should not be sent to origin # server. 
del headers[proxy_auth_hdr] h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: try: h.request(req.get_method(), req.selector, req.data, headers, encode_chunked=req.has_header('Transfer-encoding')) except OSError as err: # timeout error > raise URLError(err) E urllib.error.URLError: /usr/lib64/python3.11/urllib/request.py:1351: URLError ----------------------------- Captured stdout call ----------------------------- Network error during query, waiting for 0.10s and retrying Network error during query, waiting for 0.15s and retrying Network error during query, waiting for 0.23s and retrying Network error during query, waiting for 0.34s and retrying Network error during query, waiting for 0.51s and retrying Network error during query, waiting for 0.76s and retrying Network error during query, waiting for 1.14s and retrying Network error during query, waiting for 1.71s and retrying Network error during query, waiting for 2.56s and retrying =============================== warnings summary =============================== test/test_roundtrip.py: 12 warnings /builddir/build/BUILD/rdflib-6.2.0/rdflib/term.py:1578: UserWarning: Serializing weird numerical rdflib.term.Literal('+1.0z', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#double')) warnings.warn("Serializing weird numerical %r" % self) test/test_roundtrip.py: 12 warnings /builddir/build/BUILD/rdflib-6.2.0/rdflib/term.py:1578: UserWarning: Serializing weird numerical rdflib.term.Literal('xy.z', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#double')) warnings.warn("Serializing weird numerical %r" % self) test/test_roundtrip.py: 12 warnings /builddir/build/BUILD/rdflib-6.2.0/rdflib/term.py:1578: UserWarning: Serializing weird numerical rdflib.term.Literal('ab.c', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#double')) warnings.warn("Serializing weird numerical %r" % self) test/test_util.py::TestUtilTermConvert::test_util_from_n3_expectliteralandlangdtype /usr/lib/python3.11/site-packages/_pytest/python.py:194: UserWarning: Code: fr is not defined in namespace XSD result = testfunction(**testargs) test/test_util.py::TestUtilTermConvert::test_util_from_n3_not_escapes[\\I] /builddir/build/BUILD/rdflib-6.2.0/rdflib/util.py:191: DeprecationWarning: invalid escape sequence '\I' value = value.encode("raw-unicode-escape").decode("unicode-escape") test/test_literal/test_literal.py::TestNewPT::test_ill_typed_literals[yes-http://www.w3.org/2001/XMLSchema#boolean-True] /builddir/build/BUILD/rdflib-6.2.0/rdflib/term.py:1697: UserWarning: Parsing weird boolean, 'yes' does not map to True or False warnings.warn( test/test_namespace/test_definednamespace.py::test_inspect[DFNSDefaults] /usr/lib64/python3.11/inspect.py:2486: UserWarning: Code: _partialmethod is not defined in namespace DFNSDefaults partialmethod = obj._partialmethod test/test_namespace/test_definednamespace.py::test_inspect[DFNSWarnNoFail] /usr/lib64/python3.11/inspect.py:2486: UserWarning: Code: _partialmethod is not defined in namespace DFNSWarnNoFail partialmethod = obj._partialmethod test/test_namespace/test_definednamespace.py::test_inspect[DFNSDefaultsEmpty] /usr/lib64/python3.11/inspect.py:2486: UserWarning: Code: _partialmethod is not defined in namespace DFNSDefaultsEmpty partialmethod = obj._partialmethod test/test_namespace/test_namespace.py::TestNamespacePrefix::test_closed_namespace /builddir/build/BUILD/rdflib-6.2.0/test/test_namespace/test_namespace.py:230: UserWarning: DefinedNamespace does not address deprecated properties 
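The "Serializing weird numerical" and "Parsing weird boolean" UserWarnings in the summary come from rdflib.term.Literal: ill-typed lexical forms are kept rather than rejected, and the library warns when it parses or serializes them. A small demonstration, using the same lexical forms as the test output (warning wording and exact trigger points may differ slightly between rdflib versions):

    from rdflib import Graph, Literal, URIRef
    from rdflib.namespace import XSD

    s, p = URIRef("http://example.org/s"), URIRef("http://example.org/p")

    g = Graph()
    g.add((s, p, Literal("xy.z", datatype=XSD.double)))   # lexical form is not a valid double
    g.serialize(format="turtle")    # -> UserWarning: Serializing weird numerical ...

    Literal("yes", datatype=XSD.boolean)   # -> UserWarning: Parsing weird boolean ...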
warn("DefinedNamespace does not address deprecated properties") test/test_parsers/test_n3parse_of_rdf_lists.py::TestOWLCollectionTest::test_collection_rdfxml /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/serializers/rdfxml.py:280: UserWarning: Assertions on rdflib.term.BNode('N61424164ca51408a9fb5f88f285e6dce') other than RDF.first and RDF.rest are ignored ... including RDF.List self.predicate(predicate, object, depth + 1) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_serialize_and_reparse /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_serialize_and_reparse is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_multiple /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_multiple is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_result_fragments /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_result_fragments is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_result_fragments_with_base /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_result_fragments_with_base is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_subClassOf_objects /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_subClassOf_objects is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_pretty_xmlliteral /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. 
test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_pretty_xmlliteral is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_pretty_broken_xmlliteral /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_prettyxml.py::TestPrettyXmlSerializer::test_pretty_broken_xmlliteral is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_serializer.py::test_serialize_parse[nt11-TRIPLE-PURE_PATH-None] test/test_serializers/test_serializer.py::test_serialize_parse[nt11-TRIPLE-STR_PATH-None] test/test_serializers/test_serializer.py::test_serialize_parse[nt11-TRIPLE-BINARY_IO-None] test/test_serializers/test_serializer.py::test_serialize_parse[nt11-TRIPLE-PATH-None] test/test_serializers/test_serializer.py::test_serialize_parse[ntriples-TRIPLE-PURE_PATH-None] test/test_serializers/test_serializer.py::test_serialize_parse[ntriples-TRIPLE-STR_PATH-None] test/test_serializers/test_serializer.py::test_serialize_parse[ntriples-TRIPLE-BINARY_IO-None] test/test_serializers/test_serializer.py::test_serialize_parse[ntriples-TRIPLE-PATH-None] /builddir/build/BUILD/rdflib-6.2.0/rdflib/plugins/serializers/nt.py:35: UserWarning: NTSerializer always uses UTF-8 encoding. Given encoding was: None warnings.warn( test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_serialize_and_reparse /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_serialize_and_reparse is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_multiple /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_multiple is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_result_fragments /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. 
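The repeated PytestRemovedIn8Warning entries here and below all carry the same advice: the nose-style setup(self) hook is deprecated and should be renamed to setup_method(self). A generic before/after illustration (the class name and fixture contents are made up, not rdflib's):

    class TestSerializerExample:
        # Old, nose-style hook (triggers PytestRemovedIn8Warning):
        # def setup(self):
        #     self.items = []

        # pytest-native replacement, run before every test method:
        def setup_method(self):
            self.items = []

        def test_append(self):
            self.items.append("triple")
            assert self.items == ["triple"]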
test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_result_fragments is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_result_fragments_with_base /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_result_fragments_with_base is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_subClassOf_objects /usr/lib/python3.11/site-packages/_pytest/fixtures.py:895: PytestRemovedIn8Warning: Support for nose tests is deprecated and will be removed in a future release. test/test_serializers/test_serializer_xml.py::TestXMLSerializer::test_subClassOf_objects is using nose-specific method: `setup(self)` To remove this warning, rename it to `setup_method(self)` See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose fixture_result = next(generator) test/test_store/test_store_sparqlstore.py::SPARQLStoreUpdateTestCase::test_Query /builddir/build/BUILD/rdflib-6.2.0/test/test_store/test_store_sparqlstore.py:436: DeprecationWarning: setDaemon() is deprecated, set the daemon attribute instead mock_server_thread.setDaemon(True) test/test_w3c_spec/test_sparql10_w3c.py: 13 warnings test/test_w3c_spec/test_sparql11_w3c.py: 40 warnings /builddir/build/BUILD/rdflib-6.2.0/rdflib/term.py:1151: DeprecationWarning: NotImplemented should not be used in a boolean context return not self.__gt__(other) and not self.eq(other) -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html =========================== short test summary info ============================ FAILED rdflib/__init__.py::rdflib FAILED rdflib/extras/infixowl.py::rdflib.extras.infixowl FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tc034-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-c034-toRdf/c034-in.jsonld-toRdf/c034-out.nq-False-options66] FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#te126-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-e126-toRdf/e126-in.jsonld-toRdf/e126-out.nq-False-options167] FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#te127-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-e127-toRdf/e127-in.jsonld-toRdf/e127-out.nq-False-options168] FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso05-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so05-toRdf/so05-in.jsonld-toRdf/so05-out.nq-False-options253] FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso08-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so08-toRdf/so08-in.jsonld-toRdf/so08-out.nq-False-options254] FAILED 
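The setDaemon() DeprecationWarning flagged for test_store_sparqlstore.py is a straightforward API rename: threading.Thread.setDaemon(True) has been deprecated since Python 3.10 in favour of assigning the daemon attribute. A minimal example of the preferred spelling (the thread target is arbitrary):

    import threading

    t = threading.Thread(target=print, args=("mock server running",))
    t.daemon = True        # preferred spelling
    # t.setDaemon(True)    # deprecated; warns on Python 3.11
    t.start()
    t.join()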
test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso09-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so09-toRdf/so09-in.jsonld-toRdf/so09-out.nq-False-options255] FAILED test/jsonld/test_onedotone.py::test_suite[https://w3c.github.io/json-ld-api/tests/toRdf-manifest#tso11-do_test_parser-https://w3c.github.io/json-ld-api/tests/-toRdf-so11-toRdf/so11-in.jsonld-toRdf/so11-out.nq-False-options256] FAILED test/test_extras/test_infixowl/test_basic.py::test_infix_owl_example1 FAILED test/test_extras/test_infixowl/test_context.py::test_context - urllib.... FAILED test/test_graph/test_graph.py::test_guess_format_for_parse[None] - url... FAILED test/test_sparql/test_prepare.py::test_prepare_query - FileNotFoundErr... FAILED test/test_sparql/test_service.py::test_service - urllib.error.URLError... FAILED test/test_sparql/test_service.py::test_service_with_bind - urllib.erro... FAILED test/test_sparql/test_service.py::test_service_with_values - urllib.er... FAILED test/test_sparql/test_service.py::test_service_with_implicit_select - ... FAILED test/test_sparql/test_service.py::test_service_with_implicit_select_and_prefix FAILED test/test_sparql/test_service.py::test_service_with_implicit_select_and_base FAILED test/test_sparql/test_service.py::test_service_with_implicit_select_and_allcaps FAILED test/test_sparql/test_service.py::test_simple_not_null - urllib.error.... FAILED test/test_sparql/test_service.py::test_service_node_types - urllib.err... = 22 failed, 6625 passed, 73 skipped, 373 xfailed, 2 xpassed, 118 warnings in 179.84s (0:02:59) = + : + RPM_EC=0 ++ jobs -p + exit 0 Processing files: python3-rdflib-6.2.0-4.ocs23.noarch Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.2hEwYm + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + DOCDIR=/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/doc/python3-rdflib + export LC_ALL=C + LC_ALL=C + export DOCDIR + /usr/bin/mkdir -p /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/doc/python3-rdflib + cp -pr CHANGELOG.md /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/doc/python3-rdflib + cp -pr README.md /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/doc/python3-rdflib + RPM_EC=0 ++ jobs -p + exit 0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.31Eytp + umask 022 + cd /builddir/build/BUILD + cd rdflib-6.2.0 + LICENSEDIR=/builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/licenses/python3-rdflib + export LC_ALL=C + LC_ALL=C + export LICENSEDIR + /usr/bin/mkdir -p /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/licenses/python3-rdflib + cp -pr LICENSE /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch/usr/share/licenses/python3-rdflib + RPM_EC=0 ++ jobs -p + exit 0 Provides: python-rdflib = 6.2.0-4.ocs23 python3-rdflib = 6.2.0-4.ocs23 python3.11-rdflib = 6.2.0-4.ocs23 python3.11dist(rdflib) = 6.2 python3dist(rdflib) = 6.2 Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PartialHardlinkSets) <= 4.0.4-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires: /usr/bin/python3 python(abi) = 3.11 python3.11dist(isodate) python3.11dist(pyparsing) python3.11dist(setuptools) Obsoletes: python3.11-rdflib < 6.2.0-4.ocs23 Checking for unpackaged file(s): /usr/lib/rpm/check-files /builddir/build/BUILDROOT/python-rdflib-6.2.0-4.ocs23.noarch Wrote: /builddir/build/SRPMS/python-rdflib-6.2.0-4.ocs23.src.rpm Wrote: 
/builddir/build/RPMS/python3-rdflib-6.2.0-4.ocs23.noarch.rpm Child return code was: 0